Compare commits
2 Commits: py-pycm-ne ... solver-pre
Commits: b7a2045c79, 2632106f1e
.github/dependabot.yml (3 changed lines)

@@ -12,7 +12,6 @@ updates:
       interval: "daily"
   # Requirements to run style checks
   - package-ecosystem: "pip"
-    directories:
-      - "/.github/workflows/requirements/*"
+    directory: "/.github/workflows/style"
     schedule:
       interval: "daily"
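For context, the right-hand side leaves the `pip` entry looking roughly like this — a sketch, with the `version` key and surrounding layout assumed from the standard dependabot format rather than taken from this capture:

```yaml
version: 2
updates:
  # Requirements to run style checks
  - package-ecosystem: "pip"
    directory: "/.github/workflows/style"   # single directory replaces the glob list
    schedule:
      interval: "daily"
```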
.github/workflows/audit.yaml (6 changed lines)

@@ -28,8 +28,8 @@ jobs:
       run:
         shell: ${{ matrix.system.shell }}
     steps:
-      - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
-      - uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
+      - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
+      - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
         with:
           python-version: ${{inputs.python_version}}
       - name: Install Python packages

@@ -61,7 +61,7 @@ jobs:
           ./share/spack/qa/validate_last_exit.ps1
           spack -d audit externals
           ./share/spack/qa/validate_last_exit.ps1
-      - uses: codecov/codecov-action@e28ff129e5465c2c0dcc6f003fc735cb6ae0c673
+      - uses: codecov/codecov-action@125fc84a9a348dbcf27191600683ec096ec9021c
         if: ${{ inputs.with_coverage == 'true' }}
         with:
           flags: unittests,audits
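Every action bump in this compare swaps one full 40-character commit SHA for another. Pinning by SHA rather than by tag is the reproducibility convention these workflows follow; a common refinement (not used in these files) is to record the matching release tag in a trailing comment, sketched here with a placeholder SHA:

```yaml
steps:
  # The full SHA pins the exact action code; the trailing comment is
  # informational only. The SHA below is a placeholder, not a real pin.
  - uses: actions/checkout@0000000000000000000000000000000000000000  # vX.Y.Z
```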
.github/workflows/bootstrap.yml (29 changed lines)

@@ -37,7 +37,7 @@ jobs:
           make patch unzip which xz python3 python3-devel tree \
             cmake bison
       - name: Checkout
-        uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
+        uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
         with:
           fetch-depth: 0
       - name: Bootstrap clingo

@@ -53,33 +53,27 @@ jobs:
     runs-on: ${{ matrix.runner }}
     strategy:
       matrix:
-        runner: ['macos-13', 'macos-14', "ubuntu-latest", "windows-latest"]
+        runner: ['macos-13', 'macos-14', "ubuntu-latest"]
     steps:
       - name: Setup macOS
-        if: ${{ matrix.runner != 'ubuntu-latest' && matrix.runner != 'windows-latest' }}
+        if: ${{ matrix.runner != 'ubuntu-latest' }}
         run: |
           brew install cmake bison tree
       - name: Checkout
-        uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
+        uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
         with:
           fetch-depth: 0
-      - uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
+      - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
        with:
          python-version: "3.12"
      - name: Bootstrap clingo
-        env:
-          SETUP_SCRIPT_EXT: ${{ matrix.runner == 'windows-latest' && 'ps1' || 'sh' }}
-          SETUP_SCRIPT_SOURCE: ${{ matrix.runner == 'windows-latest' && './' || 'source ' }}
-          USER_SCOPE_PARENT_DIR: ${{ matrix.runner == 'windows-latest' && '$env:userprofile' || '$HOME' }}
-          VALIDATE_LAST_EXIT: ${{ matrix.runner == 'windows-latest' && './share/spack/qa/validate_last_exit.ps1' || '' }}
        run: |
-          ${{ env.SETUP_SCRIPT_SOURCE }}share/spack/setup-env.${{ env.SETUP_SCRIPT_EXT }}
+          source share/spack/setup-env.sh
           spack bootstrap disable github-actions-v0.5
           spack bootstrap disable github-actions-v0.4
           spack external find --not-buildable cmake bison
           spack -d solve zlib
-          ${{ env.VALIDATE_LAST_EXIT }}
-          tree ${{ env.USER_SCOPE_PARENT_DIR }}/.spack/bootstrap/store/
+          tree ~/.spack/bootstrap/store/

   gnupg-sources:
     runs-on: ${{ matrix.runner }}

@@ -96,7 +90,7 @@ jobs:
         if: ${{ matrix.runner == 'ubuntu-latest' }}
         run: sudo rm -rf $(command -v gpg gpg2 patchelf)
       - name: Checkout
-        uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
+        uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
         with:
           fetch-depth: 0
       - name: Bootstrap GnuPG

@@ -125,10 +119,10 @@ jobs:
         run: |
           sudo rm -rf $(which gpg) $(which gpg2) $(which patchelf)
       - name: Checkout
-        uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
+        uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
         with:
           fetch-depth: 0
-      - uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
+      - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
         with:
           python-version: |
             3.8

@@ -154,7 +148,7 @@ jobs:
             not_found=0
             old_path="$PATH"
             export PATH="$ver_dir:$PATH"
-            ./bin/spack-tmpconfig -b ./.github/workflows/bin/bootstrap-test.sh
+            ./bin/spack-tmpconfig -b ./.github/workflows/bootstrap-test.sh
             export PATH="$old_path"
           fi
         fi

@@ -168,3 +162,4 @@ jobs:
           source share/spack/setup-env.sh
           spack -d gpg list
           tree ~/.spack/bootstrap/store/
+
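The five `env:` lines dropped from the `Bootstrap clingo` step all rely on the same GitHub Actions expression idiom: `${{ condition && value-if-true || value-if-false }}` acts as a ternary, which is reliable whenever the "true" value is itself truthy (a non-empty string, as in every case above). A minimal sketch of the pattern — job and variable names here are illustrative, not taken from the workflow:

```yaml
jobs:
  demo:
    runs-on: ${{ matrix.runner }}
    strategy:
      matrix:
        runner: [ubuntu-latest, windows-latest]
    steps:
      - name: Pick a platform-specific value
        env:
          # Evaluates to 'ps1' on the Windows runner and 'sh' elsewhere.
          SCRIPT_EXT: ${{ matrix.runner == 'windows-latest' && 'ps1' || 'sh' }}
        run: echo "extension is ${{ env.SCRIPT_EXT }}"
```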
.github/workflows/build-containers.yml (21 changed lines)

@@ -40,7 +40,8 @@ jobs:
           # 1: Platforms to build for
           # 2: Base image (e.g. ubuntu:22.04)
           dockerfile: [[amazon-linux, 'linux/amd64,linux/arm64', 'amazonlinux:2'],
-                       [centos-stream9, 'linux/amd64,linux/arm64,linux/ppc64le', 'centos:stream9'],
+                       [centos7, 'linux/amd64,linux/arm64,linux/ppc64le', 'centos:7'],
+                       [centos-stream, 'linux/amd64,linux/arm64,linux/ppc64le', 'centos:stream'],
                        [leap15, 'linux/amd64,linux/arm64,linux/ppc64le', 'opensuse/leap:15'],
                        [ubuntu-focal, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:20.04'],
                        [ubuntu-jammy, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:22.04'],

@@ -55,7 +56,7 @@ jobs:
     if: github.repository == 'spack/spack'
     steps:
       - name: Checkout
-        uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
+        uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29

       - uses: docker/metadata-action@8e5442c4ef9f78752691e2d8f8d19755c6f78e81
         id: docker_meta

@@ -76,7 +77,7 @@ jobs:
         env:
           SPACK_YAML_OS: "${{ matrix.dockerfile[2] }}"
         run: |
-          .github/workflows/bin/generate_spack_yaml_containerize.sh
+          .github/workflows/generate_spack_yaml_containerize.sh
           . share/spack/setup-env.sh
           mkdir -p dockerfiles/${{ matrix.dockerfile[0] }}
           spack containerize --last-stage=bootstrap | tee dockerfiles/${{ matrix.dockerfile[0] }}/Dockerfile

@@ -87,19 +88,19 @@ jobs:
           fi

       - name: Upload Dockerfile
-        uses: actions/upload-artifact@0b2256b8c012f0828dc542b3febcab082c67f72b
+        uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808
         with:
           name: dockerfiles_${{ matrix.dockerfile[0] }}
           path: dockerfiles

       - name: Set up QEMU
-        uses: docker/setup-qemu-action@49b3bc8e6bdd4a60e6116a5414239cba5943d3cf
+        uses: docker/setup-qemu-action@68827325e0b33c7199eb31dd4e31fbe9023e06e3

       - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@aa33708b10e362ff993539393ff100fa93ed6a27
+        uses: docker/setup-buildx-action@d70bba72b1f3fd22344832f00baa16ece964efeb

       - name: Log in to GitHub Container Registry
-        uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567
+        uses: docker/login-action@e92390c5fb421da1463c202d546fed0ec5c39f20
         with:
           registry: ghcr.io
           username: ${{ github.actor }}

@@ -107,13 +108,13 @@ jobs:

       - name: Log in to DockerHub
         if: github.event_name != 'pull_request'
-        uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567
+        uses: docker/login-action@e92390c5fb421da1463c202d546fed0ec5c39f20
         with:
           username: ${{ secrets.DOCKERHUB_USERNAME }}
           password: ${{ secrets.DOCKERHUB_TOKEN }}

       - name: Build & Deploy ${{ matrix.dockerfile[0] }}
-        uses: docker/build-push-action@5176d81f87c23d6fc96624dfdbcd9f3830bbe445
+        uses: docker/build-push-action@2cdde995de11925a030ce8070c3d77a52ffcf1c0
         with:
           context: dockerfiles/${{ matrix.dockerfile[0] }}
           platforms: ${{ matrix.dockerfile[1] }}

@@ -126,7 +127,7 @@ jobs:
     needs: deploy-images
     steps:
       - name: Merge Artifacts
-        uses: actions/upload-artifact/merge@0b2256b8c012f0828dc542b3febcab082c67f72b
+        uses: actions/upload-artifact/merge@65462800fd760344b1a7b4382951275a0abb4808
         with:
           name: dockerfiles
           pattern: dockerfiles_*
.github/workflows/ci.yaml (16 changed lines)

@@ -36,7 +36,7 @@ jobs:
       core: ${{ steps.filter.outputs.core }}
       packages: ${{ steps.filter.outputs.packages }}
     steps:
-      - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
+      - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
         if: ${{ github.event_name == 'push' }}
         with:
           fetch-depth: 0

@@ -53,13 +53,6 @@ jobs:
           - 'var/spack/repos/builtin/packages/clingo/**'
           - 'var/spack/repos/builtin/packages/python/**'
           - 'var/spack/repos/builtin/packages/re2c/**'
-          - 'var/spack/repos/builtin/packages/gnupg/**'
-          - 'var/spack/repos/builtin/packages/libassuan/**'
-          - 'var/spack/repos/builtin/packages/libgcrypt/**'
-          - 'var/spack/repos/builtin/packages/libgpg-error/**'
-          - 'var/spack/repos/builtin/packages/libksba/**'
-          - 'var/spack/repos/builtin/packages/npth/**'
-          - 'var/spack/repos/builtin/packages/pinentry/**'
           - 'lib/spack/**'
           - 'share/spack/**'
           - '.github/workflows/bootstrap.yml'

@@ -84,8 +77,13 @@ jobs:
     needs: [ prechecks, changes ]
     uses: ./.github/workflows/unit_tests.yaml
     secrets: inherit
+  windows:
+    if: ${{ github.repository == 'spack/spack' && needs.changes.outputs.core == 'true' }}
+    needs: [ prechecks ]
+    uses: ./.github/workflows/windows_python.yml
+    secrets: inherit
   all:
-    needs: [ unit-tests, bootstrap ]
+    needs: [ windows, unit-tests, bootstrap ]
     runs-on: ubuntu-latest
     steps:
       - name: Success
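The added `windows` job is a reusable-workflow call: `uses:` points at another workflow file rather than an action, and `secrets: inherit` forwards the caller's secrets to it. For that call to resolve, the callee must declare a `workflow_call` trigger — exactly what the new `windows_python.yml` further down does. The minimal shape of such a callee, with illustrative job content not taken from this compare:

```yaml
name: demo-reusable
on:
  workflow_call:   # makes this workflow invocable via `uses:` from another workflow
jobs:
  hello:
    runs-on: ubuntu-latest
    steps:
      - run: echo "called from another workflow"
```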
.github/workflows/install_spack.sh (new executable file, 8 lines)

@@ -0,0 +1,8 @@
+#!/usr/bin/env sh
+. share/spack/setup-env.sh
+echo -e "config:\n build_jobs: 2" > etc/spack/config.yaml
+spack config add "packages:all:target:[x86_64]"
+spack compiler find
+spack compiler info apple-clang
+spack debug report
+spack solve zlib
.github/workflows/nightly-win-builds.yml (4 changed lines)

@@ -14,10 +14,10 @@ jobs:
   build-paraview-deps:
     runs-on: windows-latest
     steps:
-      - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
+      - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
         with:
           fetch-depth: 0
-      - uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
+      - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
         with:
           python-version: 3.9
       - name: Install Python packages
(file header not captured for this hunk; the pip install paths elsewhere in this compare point at the style-check requirements file, .github/workflows/style/requirements.txt)

@@ -1,6 +1,6 @@
 black==24.4.2
 clingo==5.7.1
-flake8==7.1.0
+flake8==7.0.0
 isort==5.13.2
 mypy==1.8.0
 types-six==1.16.21.20240513
.github/workflows/unit_tests.yaml (67 changed lines)

@@ -51,10 +51,10 @@ jobs:
             on_develop: false

     steps:
-      - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
+      - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
         with:
           fetch-depth: 0
-      - uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
+      - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
         with:
           python-version: ${{ matrix.python-version }}
       - name: Install System packages

@@ -72,7 +72,7 @@ jobs:
         run: |
           # Need this for the git tests to succeed.
           git --version
-          . .github/workflows/bin/setup_git.sh
+          . .github/workflows/setup_git.sh
       - name: Bootstrap clingo
         if: ${{ matrix.concretizer == 'clingo' }}
         env:

@@ -91,7 +91,7 @@ jobs:
           UNIT_TEST_COVERAGE: ${{ matrix.python-version == '3.11' }}
         run: |
           share/spack/qa/run-unit-tests
-      - uses: codecov/codecov-action@e28ff129e5465c2c0dcc6f003fc735cb6ae0c673
+      - uses: codecov/codecov-action@125fc84a9a348dbcf27191600683ec096ec9021c
         with:
           flags: unittests,linux,${{ matrix.concretizer }}
           token: ${{ secrets.CODECOV_TOKEN }}

@@ -100,10 +100,10 @@ jobs:
   shell:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
+      - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
         with:
           fetch-depth: 0
-      - uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
+      - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
         with:
           python-version: '3.11'
       - name: Install System packages

@@ -118,13 +118,13 @@ jobs:
         run: |
           # Need this for the git tests to succeed.
           git --version
-          . .github/workflows/bin/setup_git.sh
+          . .github/workflows/setup_git.sh
       - name: Run shell tests
         env:
           COVERAGE: true
         run: |
           share/spack/qa/run-shell-tests
-      - uses: codecov/codecov-action@e28ff129e5465c2c0dcc6f003fc735cb6ae0c673
+      - uses: codecov/codecov-action@125fc84a9a348dbcf27191600683ec096ec9021c
         with:
           flags: shelltests,linux
           token: ${{ secrets.CODECOV_TOKEN }}

@@ -141,13 +141,13 @@ jobs:
           dnf install -y \
               bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
               make patch tcl unzip which xz
-      - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
+      - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
       - name: Setup repo and non-root user
         run: |
           git --version
           git config --global --add safe.directory /__w/spack/spack
           git fetch --unshallow
-          . .github/workflows/bin/setup_git.sh
+          . .github/workflows/setup_git.sh
           useradd spack-test
           chown -R spack-test .
       - name: Run unit tests

@@ -160,10 +160,10 @@ jobs:
   clingo-cffi:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
+      - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
         with:
           fetch-depth: 0
-      - uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
+      - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
         with:
           python-version: '3.11'
       - name: Install System packages

@@ -178,14 +178,14 @@ jobs:
         run: |
           # Need this for the git tests to succeed.
           git --version
-          . .github/workflows/bin/setup_git.sh
+          . .github/workflows/setup_git.sh
       - name: Run unit tests (full suite with coverage)
         env:
           COVERAGE: true
           SPACK_TEST_SOLVER: clingo
         run: |
           share/spack/qa/run-unit-tests
-      - uses: codecov/codecov-action@e28ff129e5465c2c0dcc6f003fc735cb6ae0c673
+      - uses: codecov/codecov-action@125fc84a9a348dbcf27191600683ec096ec9021c
         with:
           flags: unittests,linux,clingo
           token: ${{ secrets.CODECOV_TOKEN }}

@@ -198,10 +198,10 @@ jobs:
         os: [macos-13, macos-14]
         python-version: ["3.11"]
     steps:
-      - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
+      - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
         with:
           fetch-depth: 0
-      - uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
+      - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
         with:
           python-version: ${{ matrix.python-version }}
       - name: Install Python packages

@@ -217,45 +217,14 @@ jobs:
           SPACK_TEST_PARALLEL: 4
         run: |
           git --version
-          . .github/workflows/bin/setup_git.sh
+          . .github/workflows/setup_git.sh
           . share/spack/setup-env.sh
           $(which spack) bootstrap disable spack-install
           $(which spack) solve zlib
           common_args=(--dist loadfile --tx '4*popen//python=./bin/spack-tmpconfig python -u ./bin/spack python' -x)
           $(which spack) unit-test --verbose --cov --cov-config=pyproject.toml --cov-report=xml:coverage.xml "${common_args[@]}"
-      - uses: codecov/codecov-action@e28ff129e5465c2c0dcc6f003fc735cb6ae0c673
+      - uses: codecov/codecov-action@125fc84a9a348dbcf27191600683ec096ec9021c
         with:
           flags: unittests,macos
           token: ${{ secrets.CODECOV_TOKEN }}
           verbose: true
-  # Run unit tests on Windows
-  windows:
-    defaults:
-      run:
-        shell:
-          powershell Invoke-Expression -Command "./share/spack/qa/windows_test_setup.ps1"; {0}
-    runs-on: windows-latest
-    steps:
-      - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
-        with:
-          fetch-depth: 0
-      - uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
-        with:
-          python-version: 3.9
-      - name: Install Python packages
-        run: |
-          python -m pip install --upgrade pip pywin32 setuptools pytest-cov clingo
-      - name: Create local develop
-        run: |
-          ./.github/workflows/bin/setup_git.ps1
-      - name: Unit Test
-        run: |
-          spack unit-test -x --verbose --cov --cov-config=pyproject.toml
-          ./share/spack/qa/validate_last_exit.ps1
-          coverage combine -a
-          coverage xml
-      - uses: codecov/codecov-action@e28ff129e5465c2c0dcc6f003fc735cb6ae0c673
-        with:
-          flags: unittests,windows
-          token: ${{ secrets.CODECOV_TOKEN }}
-          verbose: true
.github/workflows/valid-style.yml (18 changed lines)

@@ -18,15 +18,15 @@ jobs:
   validate:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
-      - uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
+      - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
+      - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
         with:
           python-version: '3.11'
           cache: 'pip'
       - name: Install Python Packages
         run: |
           pip install --upgrade pip setuptools
-          pip install -r .github/workflows/requirements/style/requirements.txt
+          pip install -r .github/workflows/style/requirements.txt
       - name: vermin (Spack's Core)
         run: vermin --backport importlib --backport argparse --violations --backport typing -t=3.6- -vvv lib/spack/spack/ lib/spack/llnl/ bin/
       - name: vermin (Repositories)

@@ -35,22 +35,22 @@ jobs:
   style:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
+      - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
         with:
           fetch-depth: 0
-      - uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
+      - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
         with:
           python-version: '3.11'
           cache: 'pip'
       - name: Install Python packages
         run: |
           pip install --upgrade pip setuptools
-          pip install -r .github/workflows/requirements/style/requirements.txt
+          pip install -r .github/workflows/style/requirements.txt
       - name: Setup git configuration
         run: |
           # Need this for the git tests to succeed.
           git --version
-          . .github/workflows/bin/setup_git.sh
+          . .github/workflows/setup_git.sh
       - name: Run style tests
         run: |
           share/spack/qa/run-style-tests

@@ -70,13 +70,13 @@ jobs:
           dnf install -y \
               bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
               make patch tcl unzip which xz
-      - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
+      - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
       - name: Setup repo and non-root user
         run: |
           git --version
           git config --global --add safe.directory /__w/spack/spack
           git fetch --unshallow
-          . .github/workflows/bin/setup_git.sh
+          . .github/workflows/setup_git.sh
           useradd spack-test
           chown -R spack-test .
       - name: Bootstrap Spack development environment
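Both jobs above pair `cache: 'pip'` on `actions/setup-python` with a pinned requirements file. By default the action derives its cache key from requirements files it can discover; when the file lives in a non-standard location, as here, the standard `cache-dependency-path` input makes the key explicit. A sketch — the `@v5` tag is illustrative (the workflows above pin by SHA), and the path mirrors the one in this diff:

```yaml
- uses: actions/setup-python@v5
  with:
    python-version: '3.11'
    cache: 'pip'
    # Invalidate the pip cache when the style requirements change:
    cache-dependency-path: .github/workflows/style/requirements.txt
```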
.github/workflows/windows_python.yml (new file, 83 lines)

@@ -0,0 +1,83 @@
+name: windows
+
+on:
+  workflow_call:
+
+concurrency:
+  group: windows-${{github.ref}}-${{github.event.pull_request.number || github.run_number}}
+  cancel-in-progress: true
+
+defaults:
+  run:
+    shell:
+      powershell Invoke-Expression -Command "./share/spack/qa/windows_test_setup.ps1"; {0}
+jobs:
+  unit-tests:
+    runs-on: windows-latest
+    steps:
+      - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
+        with:
+          fetch-depth: 0
+      - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
+        with:
+          python-version: 3.9
+      - name: Install Python packages
+        run: |
+          python -m pip install --upgrade pip pywin32 setuptools pytest-cov clingo
+      - name: Create local develop
+        run: |
+          ./.github/workflows/setup_git.ps1
+      - name: Unit Test
+        run: |
+          spack unit-test -x --verbose --cov --cov-config=pyproject.toml --ignore=lib/spack/spack/test/cmd
+          ./share/spack/qa/validate_last_exit.ps1
+          coverage combine -a
+          coverage xml
+      - uses: codecov/codecov-action@125fc84a9a348dbcf27191600683ec096ec9021c
+        with:
+          flags: unittests,windows
+          token: ${{ secrets.CODECOV_TOKEN }}
+          verbose: true
+  unit-tests-cmd:
+    runs-on: windows-latest
+    steps:
+      - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
+        with:
+          fetch-depth: 0
+      - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
+        with:
+          python-version: 3.9
+      - name: Install Python packages
+        run: |
+          python -m pip install --upgrade pip pywin32 setuptools coverage pytest-cov clingo
+      - name: Create local develop
+        run: |
+          ./.github/workflows/setup_git.ps1
+      - name: Command Unit Test
+        run: |
+          spack unit-test -x --verbose --cov --cov-config=pyproject.toml lib/spack/spack/test/cmd
+          ./share/spack/qa/validate_last_exit.ps1
+          coverage combine -a
+          coverage xml
+      - uses: codecov/codecov-action@125fc84a9a348dbcf27191600683ec096ec9021c
+        with:
+          flags: unittests,windows
+          token: ${{ secrets.CODECOV_TOKEN }}
+          verbose: true
+  build-abseil:
+    runs-on: windows-latest
+    steps:
+      - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
+        with:
+          fetch-depth: 0
+      - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
+        with:
+          python-version: 3.9
+      - name: Install Python packages
+        run: |
+          python -m pip install --upgrade pip pywin32 setuptools coverage
+      - name: Build Test
+        run: |
+          spack compiler find
+          spack -d external find cmake ninja
+          spack -d install abseil-cpp
CHANGELOG.md (321 changed lines)

@@ -1,324 +1,3 @@
-
-# v0.22.0 (2024-05-12)
-
-`v0.22.0` is a major feature release.
-
-## Features in this release
-
-1. **Compiler dependencies**
-
-   We are in the process of making compilers proper dependencies in Spack, and a number
-   of changes in `v0.22` support that effort. You may notice nodes in your dependency
-   graphs for compiler runtime libraries like `gcc-runtime` or `libgfortran`, and you
-   may notice that Spack graphs now include `libc`. We've also begun moving compiler
-   configuration from `compilers.yaml` to `packages.yaml` to make it consistent with
-   other externals. We are trying to do this with the least disruption possible, so
-   your existing `compilers.yaml` files should still work. We expect to be done with
-   this transition by the `v0.23` release in November.
-
-   * #41104: Packages compiled with `%gcc` on Linux, macOS and FreeBSD now depend on a
-     new package `gcc-runtime`, which contains a copy of the shared compiler runtime
-     libraries. This enables gcc runtime libraries to be installed and relocated when
-     using a build cache. When building minimal Spack-generated container images it is
-     no longer necessary to install libgfortran, libgomp etc. using the system package
-     manager.
-
-   * #42062: Packages compiled with `%oneapi` now depend on a new package
-     `intel-oneapi-runtime`. This is similar to `gcc-runtime`, and the runtimes can
-     provide virtuals and compilers can inject dependencies on virtuals into compiled
-     packages. This allows us to model library soname compatibility and allows
-     compilers like `%oneapi` to provide virtuals like `sycl` (which can also be
-     provided by standalone libraries). Note that until we have an agreement in place
-     with intel, Intel packages are marked `redistribute(source=False, binary=False)`
-     and must be downloaded outside of Spack.
-
-   * #43272: changes to the optimization criteria of the solver improve the hit-rate of
-     buildcaches by a fair amount. The solver more relaxed compatibility rules and will
-     not try to strictly match compilers or targets of reused specs. Users can still
-     enforce the previous strict behavior with `require:` sections in `packages.yaml`.
-     Note that to enforce correct linking, Spack will *not* reuse old `%gcc` and
-     `%oneapi` specs that do not have the runtime libraries as a dependency.
-
-   * #43539: Spack will reuse specs built with compilers that are *not* explicitly
-     configured in `compilers.yaml`. Because we can now keep runtime libraries in build
-     cache, we do not require you to also have a local configured compiler to *use* the
-     runtime libraries. This improves reuse in buildcaches and avoids conflicts with OS
-     updates that happen underneath Spack.
-
-   * #43190: binary compatibility on `linux` is now based on the `libc` version,
-     instead of on the `os` tag. Spack builds now detect the host `libc` (`glibc` or
-     `musl`) and add it as an implicit external node in the dependency graph. Binaries
-     with a `libc` with the same name and a version less than or equal to that of the
-     detected `libc` can be reused. This is only on `linux`, not `macos` or `Windows`.
-
-   * #43464: each package that can provide a compiler is now detectable using `spack
-     external find`. External packages defining compiler paths are effectively used as
-     compilers, and `spack external find -t compiler` can be used as a substitute for
-     `spack compiler find`. More details on this transition are in
-     [the docs](https://spack.readthedocs.io/en/latest/getting_started.html#manual-compiler-configuration)
-
-2. **Improved `spack find` UI for Environments**
-
-   If you're working in an enviroment, you likely care about:
-
-   * What are the roots
-   * Which ones are installed / not installed
-   * What's been added that still needs to be concretized
-
-   We've tweaked `spack find` in environments to show this information much more
-   clearly. Installation status is shown next to each root, so you can see what is
-   installed. Roots are also shown in bold in the list of installed packages. There is
-   also a new option for `spack find -r` / `--only-roots` that will only show env
-   roots, if you don't want to look at all the installed specs.
-
-   More details in #42334.
-
-3. **Improved command-line string quoting**
-
-   We are making some breaking changes to how Spack parses specs on the CLI in order to
-   respect shell quoting instead of trying to fight it. If you (sadly) had to write
-   something like this on the command line:
-
-   ```
-   spack install zlib cflags=\"-O2 -g\"
-   ```
-
-   That will now result in an error, but you can now write what you probably expected
-   to work in the first place:
-
-   ```
-   spack install zlib cflags="-O2 -g"
-   ```
-
-   Quoted can also now include special characters, so you can supply flags like:
-
-   ```
-   spack intall zlib ldflags='-Wl,-rpath=$ORIGIN/_libs'
-   ```
-
-   To reduce ambiguity in parsing, we now require that you *not* put spaces around `=`
-   and `==` when for flags or variants. This would not have broken before but will now
-   result in an error:
-
-   ```
-   spack install zlib cflags = "-O2 -g"
-   ```
-
-   More details and discussion in #30634.
-
-4. **Revert default `spack install` behavior to `--reuse`**
-
-   We changed the default concretizer behavior from `--reuse` to `--reuse-deps` in
-   #30990 (in `v0.20`), which meant that *every* `spack install` invocation would
-   attempt to build a new version of the requested package / any environment roots.
-   While this is a common ask for *upgrading* and for *developer* workflows, we don't
-   think it should be the default for a package manager.
-
-   We are going to try to stick to this policy:
-   1. Prioritize reuse and build as little as possible by default.
-   2. Only upgrade or install duplicates if they are explicitly asked for, or if there
-      is a known security issue that necessitates an upgrade.
-
-   With the install command you now have three options:
-
-   * `--reuse` (default): reuse as many existing installations as possible.
-   * `--reuse-deps` / `--fresh-roots`: upgrade (freshen) roots but reuse dependencies if possible.
-   * `--fresh`: install fresh versions of requested packages (roots) and their dependencies.
-
-   We've also introduced `--fresh-roots` as an alias for `--reuse-deps` to make it more clear
-   that it may give you fresh versions. More details in #41302 and #43988.
-
-5. **More control over reused specs**
-
-   You can now control which packages to reuse and how. There is a new
-   `concretizer:reuse` config option, which accepts the following properties:
-
-   - `roots`: `true` to reuse roots, `false` to reuse just dependencies
-   - `exclude`: list of constraints used to select which specs *not* to reuse
-   - `include`: list of constraints used to select which specs *to* reuse
-   - `from`: list of sources for reused specs (some combination of `local`,
-     `buildcache`, or `external`)
-
-   For example, to reuse only specs compiled with GCC, you could write:
-
-   ```yaml
-   concretizer:
-     reuse:
-       roots: true
-       include:
-       - "%gcc"
-   ```
-
-   Or, if `openmpi` must be used from externals, and it must be the only external used:
-
-   ```yaml
-   concretizer:
-     reuse:
-       roots: true
-       from:
-       - type: local
-         exclude: ["openmpi"]
-       - type: buildcache
-         exclude: ["openmpi"]
-       - type: external
-         include: ["openmpi"]
-   ```
-
-6. **New `redistribute()` directive**
-
-   Some packages can't be redistributed in source or binary form. We need an explicit
-   way to say that in a package.
-
-   Now there is a `redistribute()` directive so that package authors can write:
-
-   ```python
-   class MyPackage(Package):
-       redistribute(source=False, binary=False)
-   ```
-
-   Like other directives, this works with `when=`:
-
-   ```python
-   class MyPackage(Package):
-       # 12.0 and higher are proprietary
-       redistribute(source=False, binary=False, when="@12.0:")
-
-       # can't redistribute when we depend on some proprietary dependency
-       redistribute(source=False, binary=False, when="^proprietary-dependency")
-   ```
-
-   More in #20185.
-
-7. **New `conflict:` and `prefer:` syntax for package preferences**
-
-   Previously, you could express conflicts and preferences in `packages.yaml` through
-   some contortions with `require:`:
-
-   ```yaml
-   packages:
-     zlib-ng:
-       require:
-       - one_of: ["%clang", "@:"]    # conflict on %clang
-       - any_of: ["+shared", "@:"]   # strong preference for +shared
-   ```
-
-   You can now use `require:` and `prefer:` for a much more readable configuration:
-
-   ```yaml
-   packages:
-     zlib-ng:
-       conflict:
-       - "%clang"
-       prefer:
-       - "+shared"
-   ```
-
-   See [the documentation](https://spack.readthedocs.io/en/latest/packages_yaml.html#conflicts-and-strong-preferences)
-   and #41832 for more details.
-
-8. **`include_concrete` in environments**
-
-   You may want to build on the *concrete* contents of another environment without
-   changing that environment. You can now include the concrete specs from another
-   environment's `spack.lock` with `include_concrete`:
-
-   ```yaml
-   spack:
-     specs: []
-     concretizer:
-       unify: true
-     include_concrete:
-     - /path/to/environment1
-     - /path/to/environment2
-   ```
-
-   Now, when *this* environment is concretized, it will bring in the already concrete
-   specs from `environment1` and `environment2`, and build on top of them without
-   changing them. This is useful if you have phased deployments, where old deployments
-   should not be modified but you want to use as many of them as possible. More details
-   in #33768.
-
-9. **`python-venv` isolation**
-
-   Spack has unique requirements for Python because it:
-   1. installs every package in its own independent directory, and
-   2. allows users to register *external* python installations.
-
-   External installations may contain their own installed packages that can interfere
-   with Spack installations, and some distributions (Debian and Ubuntu) even change the
-   `sysconfig` in ways that alter the installation layout of installed Python packages
-   (e.g., with the addition of a `/local` prefix on Debian or Ubuntu). To isolate Spack
-   from these and other issues, we now insert a small `python-venv` package in between
-   `python` and packages that need to install Python code. This isolates Spack's build
-   environment, isolates Spack from any issues with an external python, and resolves a
-   large number of issues we've had with Python installations.
-
-   See #40773 for further details.
-
-## New commands, options, and directives
-
-* Allow packages to be pushed to build cache after install from source (#42423)
-* `spack develop`: stage build artifacts in same root as non-dev builds #41373
-* Don't delete `spack develop` build artifacts after install (#43424)
-* `spack find`: add options for local/upstream only (#42999)
-* `spack logs`: print log files for packages (either partially built or installed) (#42202)
-* `patch`: support reversing patches (#43040)
-* `develop`: Add -b/--build-directory option to set build_directory package attribute (#39606)
-* `spack list`: add `--namesapce` / `--repo` option (#41948)
-* directives: add `checked_by` field to `license()`, add some license checks
-* `spack gc`: add options for environments and build dependencies (#41731)
-* Add `--create` to `spack env activate` (#40896)
-
-## Performance improvements
-
-* environment.py: fix excessive re-reads (#43746)
-* ruamel yaml: fix quadratic complexity bug (#43745)
-* Refactor to improve `spec format` speed (#43712)
-* Do not acquire a write lock on the env post install if no views (#43505)
-* asp.py: fewer calls to `spec.copy()` (#43715)
-* spec.py: early return in `__str__`
-* avoid `jinja2` import at startup unless needed (#43237)
-
-## Other new features of note
-
-* `archspec`: update to `v0.2.4`: support for Windows, bugfixes for `neoverse-v1` and
-  `neoverse-v2` detection.
-* `spack config get`/`blame`: with no args, show entire config
-* `spack env create <env>`: dir if dir-like (#44024)
-* ASP-based solver: update os compatibility for macOS (#43862)
-* Add handling of custom ssl certs in urllib ops (#42953)
-* Add ability to rename environments (#43296)
-* Add config option and compiler support to reuse across OS's (#42693)
-* Support for prereleases (#43140)
-* Only reuse externals when configured (#41707)
-* Environments: Add support for including views (#42250)
-
-## Binary caches
-* Build cache: make signed/unsigned a mirror property (#41507)
-* tools stack
-
-## Removals, deprecations, and syntax changes
-* remove `dpcpp` compiler and package (#43418)
-* spack load: remove --only argument (#42120)
-
-## Notable Bugfixes
-* repo.py: drop deleted packages from provider cache (#43779)
-* Allow `+` in module file names (#41999)
-* `cmd/python`: use runpy to allow multiprocessing in scripts (#41789)
-* Show extension commands with spack -h (#41726)
-* Support environment variable expansion inside module projections (#42917)
-* Alert user to failed concretizations (#42655)
-* shell: fix zsh color formatting for PS1 in environments (#39497)
-* spack mirror create --all: include patches (#41579)
-
-## Spack community stats
-
-* 7,994 total packages; 525 since `v0.21.0`
-* 178 new Python packages, 5 new R packages
-* 358 people contributed to this release
-* 344 committers to packages
-* 45 committers to core
-
 # v0.21.2 (2024-03-01)

 ## Bugfixes
(file header not captured)

@@ -22,4 +22,4 @@
 #
 # This is compatible across platforms.
 #
-exec spack python "$@"
+exec /usr/bin/env spack python "$@"
(file header not captured — Windows batch launcher; indentation reconstructed)

@@ -188,27 +188,25 @@ if NOT "%_sp_args%"=="%_sp_args:--help=%" (
 goto :end_switch

 :case_load
-if NOT defined _sp_args (
-    exit /B 0
-)
-
-:: If args contain --bat, or -h/--help: just execute.
-if NOT "%_sp_args%"=="%_sp_args:--help=%" (
-    goto :default_case
-) else if NOT "%_sp_args%"=="%_sp_args:-h=%" (
-    goto :default_case
-) else if NOT "%_sp_args%"=="%_sp_args:--bat=%" (
-    goto :default_case
-) else if NOT "%_sp_args%"=="%_sp_args:--list=%" (
-    goto :default_case
+:: If args contain --sh, --csh, or -h/--help: just execute.
+if defined _sp_args (
+    if NOT "%_sp_args%"=="%_sp_args:--help=%" (
+        goto :default_case
+    ) else if NOT "%_sp_args%"=="%_sp_args:-h=%" (
+        goto :default_case
+    ) else if NOT "%_sp_args%"=="%_sp_args:--bat=%" (
+        goto :default_case
+    )
 )

 for /f "tokens=* USEBACKQ" %%I in (
-    `python "%spack%" %_sp_flags% %_sp_subcommand% --bat %_sp_args%`
-) do %%I
+    `python "%spack%" %_sp_flags% %_sp_subcommand% --bat %_sp_args%`) do %%I

 goto :end_switch

+:case_unload
+goto :case_load

 :default_case
 python "%spack%" %_sp_flags% %_sp_subcommand% %_sp_args%
 goto :end_switch
(file header not captured — reStructuredText documentation, table of supported container OS images)

@@ -203,9 +203,12 @@ The OS that are currently supported are summarized in the table below:
    * - Ubuntu 24.04
      - ``ubuntu:24.04``
      - ``spack/ubuntu-noble``
-   * - CentOS Stream9
-     - ``quay.io/centos/centos:stream9``
-     - ``spack/centos-stream9``
+   * - CentOS 7
+     - ``centos:7``
+     - ``spack/centos7``
+   * - CentOS Stream
+     - ``quay.io/centos/centos:stream``
+     - ``spack/centos-stream``
    * - openSUSE Leap
      - ``opensuse/leap``
      - ``spack/leap15``
@@ -931,84 +931,32 @@ This allows for a much-needed reduction in redundancy between packages
|
|||||||
and constraints.
|
and constraints.
|
||||||
|
|
||||||
|
|
||||||
-----------------
|
----------------
|
||||||
Environment Views
|
Filesystem Views
|
||||||
-----------------
|
----------------
|
||||||
|
|
||||||
Spack Environments can have an associated filesystem view, which is a directory
|
Spack Environments can define filesystem views, which provide a direct access point
|
||||||
with a more traditional structure ``<view>/bin``, ``<view>/lib``, ``<view>/include``
|
for software similar to the directory hierarchy that might exist under ``/usr/local``.
|
||||||
in which all files of the installed packages are linked.
|
Filesystem views are updated every time the environment is written out to the lock
|
||||||
|
file ``spack.lock``, so the concrete environment and the view are always compatible.
|
||||||
By default a view is created for each environment, thanks to the ``view: true``
|
The files of the view's installed packages are brought into the view by symbolic or
|
||||||
option in the ``spack.yaml`` manifest file:
|
hard links, referencing the original Spack installation, or by copy.
|
||||||
|
|
||||||
.. code-block:: yaml
|
|
||||||
|
|
||||||
spack:
|
|
||||||
specs: [perl, python]
|
|
||||||
view: true
|
|
||||||
|
|
||||||
The view is created in a hidden directory ``.spack-env/view`` relative to the environment.
|
|
||||||
If you've used ``spack env activate``, you may have already interacted with this view. Spack
|
|
||||||
prepends its ``<view>/bin`` dir to ``PATH`` when the environment is activated, so that
|
|
||||||
you can directly run executables from all installed packages in the environment.
|
|
||||||
|
|
||||||
Views are highly customizable: you can control where they are put, modify their structure,
|
|
||||||
include and exclude specs, change how files are linked, and you can even generate multiple
|
|
||||||
views for a single environment.
|
|
||||||
|
|
||||||
.. _configuring_environment_views:
|
.. _configuring_environment_views:
|
||||||
|
|
||||||
^^^^^^^^^^^^^^^^^^^^^^^^^^
|
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||||
Minimal view configuration
|
Configuration in ``spack.yaml``
|
||||||
^^^^^^^^^^^^^^^^^^^^^^^^^^
|
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||||
|
|
||||||
The minimal configuration
|
The Spack Environment manifest file has a top-level keyword
|
||||||
|
``view``. Each entry under that heading is a **view descriptor**, headed
|
||||||
.. code-block:: yaml
|
by a name. Any number of views may be defined under the ``view`` heading.
|
||||||
|
The view descriptor contains the root of the view, and
|
||||||
spack:
|
optionally the projections for the view, ``select`` and
|
||||||
# ...
|
``exclude`` lists for the view and link information via ``link`` and
|
||||||
view: true
|
|
||||||
|
|
||||||
lets Spack generate a single view with default settings under the
|
|
||||||
``.spack-env/view`` directory of the environment.
|
|
||||||
|
|
||||||
Another short way to configure a view is to specify just where to put it:
|
|
||||||
|
|
||||||
.. code-block:: yaml
|
|
||||||
|
|
||||||
spack:
|
|
||||||
# ...
|
|
||||||
view: /path/to/view
|
|
||||||
|
|
||||||
Views can also be disabled by setting ``view: false``.
|
|
||||||
|
|
||||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
|
||||||
Advanced view configuration
|
|
||||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
|
||||||
|
|
||||||
One or more **view descriptors** can be defined under ``view``, keyed by a name.
|
|
||||||
The example from the previous section with ``view: /path/to/view`` is equivalent
|
|
||||||
to defining a view descriptor named ``default`` with a ``root`` attribute:
|
|
||||||
|
|
||||||
.. code-block:: yaml
|
|
||||||
|
|
||||||
spack:
|
|
||||||
# ...
|
|
||||||
view:
|
|
||||||
default: # name of the view
|
|
||||||
root: /path/to/view # view descriptor attribute
|
|
||||||
|
|
||||||
The ``default`` view descriptor name is special: when you ``spack env activate`` your
|
|
||||||
environment, this view will be used to update (among other things) your ``PATH``
|
|
||||||
variable.
|
|
||||||
|
|
||||||
View descriptors must contain the root of the view, and optionally projections,
|
|
||||||
``select`` and ``exclude`` lists and link information via ``link`` and
|
|
||||||
``link_type``.
|
``link_type``.
|
||||||
|
|
||||||
As a more advanced example, in the following manifest
|
For example, in the following manifest
|
||||||
file snippet we define a view named ``mpis``, rooted at
|
file snippet we define a view named ``mpis``, rooted at
|
||||||
``/path/to/view`` in which all projections use the package name,
|
``/path/to/view`` in which all projections use the package name,
|
||||||
version, and compiler name to determine the path for a given
|
version, and compiler name to determine the path for a given
|
||||||
@@ -1053,10 +1001,59 @@ of ``hardlink`` or ``copy``.

when the environment is not activated, and linked libraries will be located
*outside* of the view thanks to rpaths.

From the command line, the ``spack env create`` command takes an
argument ``--with-view [PATH]`` that sets the path for a single, default
view. If no path is specified, the default path is used (``view:
true``). The argument ``--without-view`` can be used to create an
environment without any view configured.

The ``spack env view`` command can be used to manage the views
@@ -1122,18 +1119,11 @@ the projection under ``all`` before reaching those entries.

Activating environment views
^^^^^^^^^^^^^^^^^^^^^^^^^^^^

The ``spack env activate <env>`` command has two effects:

1. It activates the environment so that further Spack commands such
   as ``spack install`` will run in the context of the environment.
2. It activates the view so that environment variables such as
   ``PATH`` are updated to include the view.

Without further arguments, the ``default`` view of the environment is
activated. If a view with a different name has to be activated,
``spack env activate --with-view <name> <env>`` can be
used instead. You can also activate the environment without modifying
further environment variables using ``--without-view``.

The environment variables affected by the ``spack env activate``
command and the paths that are used to update them are determined by
@@ -1156,8 +1146,8 @@ relevant variable if the path exists. For this reason, it is not

recommended to use non-default projections with the default view of an
environment.

The ``spack env deactivate`` command will remove the active view of
the Spack environment from the user's environment variables.

.. _env-generate-depfile:
@@ -1316,7 +1306,7 @@ index once every package is pushed. Note how this target uses the generated

   example/push/%: example/install/%
   	@mkdir -p $(dir $@)
   	$(info About to push $(SPEC) to a buildcache)
   	$(SPACK) -e . buildcache push --only=package $(BUILDCACHE_DIR) /$(HASH)
   	@touch $@

   push: $(addprefix example/push/,$(example/SPACK_PACKAGE_IDS))
@@ -2344,27 +2344,6 @@ you set ``parallel`` to ``False`` at the package level, then each call

to ``make()`` will be sequential by default, but packagers can call
``make(parallel=True)`` to override it.

Note that the ``--jobs`` option works out of the box for all standard
build systems. If you are using a non-standard build system instead, you
can use the variable ``make_jobs`` to extract the number of jobs specified
by the ``--jobs`` option:

.. code-block:: python
   :emphasize-lines: 7, 11
   :linenos:

   class Xios(Package):
       ...
       def install(self, spec, prefix):
           ...
           options = [
               ...
               '--jobs', str(make_jobs),
           ]
           ...
           make_xios = Executable("./make_xios")
           make_xios(*options)

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Install-level build parallelism
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -5194,6 +5173,12 @@ installed executable. The check is implemented as follows:

       reframe = Executable(self.prefix.bin.reframe)
       reframe("-l")

""""""""""""""""""""""""""""""""
Checking build-time test results
""""""""""""""""""""""""""""""""
@@ -5231,42 +5216,38 @@ be left in the build stage directory as illustrated below:

Stand-alone tests
^^^^^^^^^^^^^^^^^

While build-time tests are integrated with the installation process, stand-alone
tests are expected to run days, weeks, even months after the software is
installed. The goal is to provide a mechanism for gaining confidence that
packages work as installed **and** *continue* to work as the underlying
software evolves. Packages can add and inherit stand-alone tests. The
``spack test`` command is used for stand-alone testing.

.. admonition:: Stand-alone test methods should complete within a few minutes.

   Execution speed is important since these tests are intended to quickly
   assess whether installed specs work on the system. Spack cannot spare
   resources for more extensive testing of packages included in CI stacks.

   Consequently, stand-alone tests should run relatively quickly -- as in
   on the order of at most a few minutes -- while testing at least key aspects
   of the installed software. Save more extensive testing for other tools.

Tests are defined in the package using methods with names beginning ``test_``.
This allows Spack to support multiple independent checks, or parts. Files
needed for testing, such as source, data, and expected outputs, may be saved
from the build and/or stored with the package in the repository. Regardless
of origin, these files are automatically copied to the spec's test stage
directory prior to execution of the test method(s). Spack also provides helper
functions to facilitate common processing.
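As a minimal sketch of the shape such a method takes (the package and
executable names here are placeholders, not taken from the text above):

.. code-block:: python

   class MyPackage(Package):
       ...

       def test_example(self):
           """ensure the installed example program runs"""
           # `which` resolves the installed executable; calling it raises on failure
           example = which(self.prefix.bin.example)
           example()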
.. tip::

   **The status of stand-alone tests can be used to guide follow-up testing efforts.**

   Passing stand-alone tests justify performing more thorough testing, such
   as running extensive unit or regression tests or tests that run at scale,
   when available. These tests are outside of the scope of Spack packaging.

   Failing stand-alone tests indicate problems with the installation and,
   therefore, no reason to proceed with more resource-intensive tests until
   the failures have been investigated.

.. _configure-test-stage:
@@ -5274,26 +5255,30 @@ functions to facilitate common processing.

Configuring the test stage directory
""""""""""""""""""""""""""""""""""""

Stand-alone tests utilize a test stage directory to build, run, and track
tests in the same way Spack uses a build stage directory to install software.
The default test stage root directory, ``$HOME/.spack/test``, is defined in
:ref:`config.yaml <config-yaml>`. This location is customizable by adding or
changing the ``test_stage`` path such that:

.. code-block:: yaml

   config:
     test_stage: /path/to/test/stage

Packages can use the ``self.test_suite.stage`` property to access the path.

.. admonition:: Each spec being tested has its own test stage directory.

   The ``config:test_stage`` option is the path to the root of a
   **test suite**'s stage directories.

Other package properties that provide paths to spec-specific subdirectories
and files are described in :ref:`accessing-files`.
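For orientation, a test method can retrieve both the suite-level root and its
spec's own stage directory through these properties (a sketch; the method name
is illustrative):

.. code-block:: python

   def test_stage_layout(self):
       """sketch: relate the suite stage root to this spec's stage directory"""
       suite_root = self.test_suite.stage  # root for the entire test suite
       spec_dir = self.test_suite.test_dir_for_spec(self.spec)  # this spec's subdirectory
       assert str(spec_dir).startswith(str(suite_root))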
.. _adding-standalone-tests:

@@ -5306,144 +5291,61 @@ Test recipes are defined in the package using methods with names beginning

Each method has access to the information Spack tracks on the package, such
as options, compilers, and dependencies, supporting the customization of tests
to the build. Standard Python ``assert`` statements and other error reporting
mechanisms can be used. These exceptions are automatically caught and reported
as test failures.

Each test method is an *implicit test part* named by the method. Its purpose
is the method's docstring. Providing a meaningful purpose for the test gives
context that can aid debugging. Spack outputs both the name and purpose at the
start of test execution so it's also important that the docstring/purpose be
brief.

.. tip::

   We recommend naming test methods so it is clear *what* is being tested.
   For example, if a test method is building and/or running an executable
   called ``example``, then call the method ``test_example``. This, together
   with a similarly meaningful test purpose, will aid test comprehension,
   debugging, and maintainability.

Stand-alone tests run in an environment that provides access to information
on the installed software, such as build options, dependencies, and compilers.
Build options and dependencies are accessed using the same spec checks used
by build recipes. Examples of checking :ref:`variant settings <variants>` and
:ref:`spec constraints <testing-specs>` can be found at the provided links.
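For instance, a test can gate a check on how the spec was built; the variant
and executable below are hypothetical:

.. code-block:: python

   def test_openmp_example(self):
       """run the OpenMP example only when it was built"""
       # spec checks work the same way they do in build recipes
       if not self.spec.satisfies("+openmp"):
           raise SkipTest("Test requires the +openmp variant")

       example = which(self.prefix.bin.omp_example)
       example()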
.. admonition:: Spack automatically sets up the test stage directory and environment.

   Spack automatically creates the test stage directory and copies
   relevant files *prior to* running tests. It can also ensure build
   dependencies are available **if** necessary.

   The path to the test stage is configurable (see :ref:`configure-test-stage`).

   Files that Spack knows to copy are those saved from the build (see
   :ref:`cache_extra_test_sources`) and those added to the package repository
   (see :ref:`cache_custom_files`).

   Spack will use the value of the ``test_requires_compiler`` property to
   determine whether it needs to also set up build dependencies (see
   :ref:`test-build-tests`).

The ``MyPackage`` package below provides two basic test examples:
``test_example`` and ``test_example2``. The first runs the installed
``example`` and ensures its output contains an expected string. The second
runs ``example2`` without checking output so is only concerned with confirming
the executable runs successfully. If the installed spec is not expected to have
``example2``, then the check at the top of the method will raise a special
``SkipTest`` exception, which is captured to facilitate reporting skipped test
parts to tools like CDash.

.. code-block:: python

   class MyPackage(Package):
       ...

       def test_example(self):
           """ensure installed example works"""
           expected = "Done."
           example = which(self.prefix.bin.example)

           # Capture stdout and stderr from running the Executable
           # and check that the expected output was produced.
           out = example(output=str.split, error=str.split)
           assert expected in out, f"Expected '{expected}' in the output"

       def test_example2(self):
           """run installed example2"""
           if self.spec.satisfies("@:1.0"):
               # Raise SkipTest to ensure flagging the test as skipped for
               # test reporting purposes.
               raise SkipTest("Test is only available for v1.1 on")

           example2 = which(self.prefix.bin.example2)
           example2()

Output showing the identification of each test part after running the tests
is illustrated below.

.. code-block:: console

   $ spack test run --alias mypackage mypackage@2.0
   ==> Spack test mypackage
   ...
   $ spack test results -l mypackage
   ==> Results for test suite 'mypackage':
   ...
   ==> [2024-03-10-16:03:56.625439] test: test_example: ensure installed example works
   ...
   PASSED: MyPackage::test_example
   ==> [2024-03-10-16:03:56.625439] test: test_example2: run installed example2
   ...
   PASSED: MyPackage::test_example2

.. admonition:: Do NOT implement tests that must run in the installation prefix.

   Use of the package spec's installation prefix for building and running
   tests is **strongly discouraged**. Doing so causes permission errors for
   shared spack instances *and* facilities that install the software in
   read-only file systems or directories.

   Instead, start these test methods by explicitly copying the needed files
   from the installation prefix to the test stage directory. Note the test
   stage directory is the current directory when the test is executed with
   the ``spack test run`` command.
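A sketch of that copy-first pattern (the ``share/examples`` location and
executable name are hypothetical):

.. code-block:: python

   def test_installed_example(self):
       """copy an installed example to the stage, then run it there"""
       # the test stage directory is the current directory during `spack test run`
       install_tree(self.prefix.share.examples, "examples")
       with working_dir("examples"):
           example = Executable("./run-example")
           example()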
.. admonition:: Test methods for library packages should build test executables.

   Stand-alone tests for library packages *should* build test executables
   that utilize the *installed* library. Doing so ensures the tests follow
   a build process similar to the one users of the library would follow.

   For more information on how to do this, see :ref:`test-build-tests`.

.. tip::

   If you want to see more examples from packages with stand-alone tests, run
   ``spack pkg grep "def\stest" | sed "s/\/package.py.*//g" | sort -u``
   from the command line to get a list of the packages.

.. _adding-standalone-test-parts:

"""""""""""""""""""""""""""""
Adding stand-alone test parts
"""""""""""""""""""""""""""""

Sometimes dependencies between steps of a test lend themselves to being
broken into parts. Tracking the pass/fail status of each part may aid
debugging. Spack provides a ``test_part`` context manager for use within
test methods.

Each test part is independently run, tracked, and reported. Test parts are
executed in the order they appear. If one fails, subsequent test parts are
still performed even if they would also fail. This allows tools like CDash
to track and report the status of test parts across runs. The pass/fail status
of the enclosing test is derived from the statuses of the embedded test parts.

.. admonition:: Test method and test part names **must** be unique.

   Test results reporting requires that test methods and embedded test parts
   within a package have unique names.

The signature for ``test_part`` is:
@@ -5465,68 +5367,40 @@ where each argument has the following meaning:

* ``work_dir`` is the path to the directory in which the test will run.

  The default of ``None``, or ``"."``, corresponds to the spec's test
  stage (i.e., ``self.test_suite.test_dir_for_spec(self.spec)``).

.. admonition:: Start test part names with the name of the enclosing test.

   We **highly recommend** starting the names of test parts with the name
   of the enclosing test. Doing so helps with the comprehension, readability
   and debugging of test results.

Suppose ``MyPackage`` installs multiple executables that need to run in a
specific order since the outputs from one are inputs of others. Further suppose
we want to add an integration test that runs the executables in order. We can
accomplish this goal by implementing a stand-alone test method consisting of
test parts for each executable as follows:

.. code-block:: python

   class MyPackage(Package):
       ...

       def test_series(self):
           """run setup, perform, and report"""

           with test_part(self, "test_series_setup", purpose="setup operation"):
               exe = which(self.prefix.bin.setup)
               exe()

           with test_part(self, "test_series_run", purpose="perform operation"):
               exe = which(self.prefix.bin.run)
               exe()

           with test_part(self, "test_series_report", purpose="generate report"):
               exe = which(self.prefix.bin.report)
               exe()

The result is ``test_series`` runs the following executables in order: ``setup``,
``run``, and ``report``. In this case no options are passed to any of the
executables and no outputs from running them are checked. Consequently, the
implementation could be simplified with a for-loop as follows:

.. code-block:: python

   class MyPackage(Package):
       ...

       def test_series(self):
           """execute series setup, run, and report"""
           for exe, reason in [
               ("setup", "setup operation"),
               ("run", "perform operation"),
               ("report", "generate report")
           ]:
               with test_part(self, f"test_series_{exe}", purpose=reason):
                   exe = which(self.prefix.bin.join(exe))
                   exe()

In both cases, since we're using a context manager, each test part in
``test_series`` will execute regardless of the status of the other test
parts.

Now let's look at the output from running the stand-alone tests where
the second test part, ``test_series_run``, fails.

.. code-block:: console
@@ -5536,68 +5410,50 @@ the second test part, ``test_series_run``, fails.

   $ spack test results -l mypackage
   ==> Results for test suite 'mypackage':
   ...
   ==> [2024-03-10-16:03:56.625204] test: test_series: execute series setup, run, and report
   ==> [2024-03-10-16:03:56.625439] test: test_series_setup: setup operation
   ...
   PASSED: MyPackage::test_series_setup
   ==> [2024-03-10-16:03:56.625555] test: test_series_run: perform operation
   ...
   FAILED: MyPackage::test_series_run
   ==> [2024-03-10-16:03:57.003456] test: test_series_report: generate report
   ...
   FAILED: MyPackage::test_series_report
   FAILED: MyPackage::test_series
   ...

Since test parts depended on the success of previous parts, we see that the
failure of one results in the failure of subsequent checks and the overall
result of the test method, ``test_series``, is failure.

.. tip::

   If you want to see more examples from packages using ``test_part``, run
   ``spack pkg grep "test_part(" | sed "s/\/package.py.*//g" | sort -u``
   from the command line to get a list of the packages.

.. _test-build-tests:

"""""""""""""""""""""""""""""""""""""
Building and running test executables
"""""""""""""""""""""""""""""""""""""

.. admonition:: Re-use build-time sources and (small) input data sets when possible.

   We **highly recommend** re-using build-time test sources and pared down
   input files for testing installed software. These files are easier
   to keep synchronized with software capabilities when they reside
   within the software's repository. More information on saving files from
   the installation process can be found at :ref:`cache_extra_test_sources`.

   If that is not possible, you can add test-related files to the package
   repository (see :ref:`cache_custom_files`). It will be important to
   remember to maintain them so they work across listed or supported versions
   of the package.

Packages that build libraries are good examples of cases where you'll want
to build test executables from the installed software before running them.
Doing so requires you to let Spack know it needs to load the package's
compiler configuration. This is accomplished by setting the package's
``test_requires_compiler`` property to ``True``.

.. admonition:: ``test_requires_compiler = True`` is required to build test executables.

   Setting the property to ``True`` ensures access to the compiler through
   canonical environment variables (e.g., ``CC``, ``CXX``, ``FC``, ``F77``).
   It also gives access to build dependencies like ``cmake`` through their
   ``spec objects`` (e.g., ``self.spec["cmake"].prefix.bin.cmake`` for the
   path or ``self.spec["cmake"].command`` for the ``Executable`` instance).

   Be sure to add the property at the top of the package class under other
   properties like the ``homepage``.
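A minimal sketch of that placement (the package metadata shown is illustrative):

.. code-block:: python

   class MyLibrary(Package):
       """hypothetical library used to illustrate the property placement"""

       homepage = "https://example.org/mylibrary"
       url = "https://example.org/mylibrary-1.0.tar.gz"

       # required so stand-alone tests can compile against the installed library
       test_requires_compiler = True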
The example below, which ignores how ``cxx-example.cpp`` is acquired,
illustrates the basic process of compiling a test executable using the
installed library before running it.

.. code-block:: python

@@ -5621,22 +5477,28 @@ installed library before running it.

       cxx_example = which(exe)
       cxx_example()
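Relatedly, a package whose test sources use CMake could drive the configure
step through the dependency's spec object mentioned above (a sketch; it assumes
``cmake`` is among the package's build dependencies and that an ``examples``
directory was cached for testing):

.. code-block:: python

   def test_cmake_example(self):
       """sketch: configure and build a cached example with the build-time cmake"""
       cmake = self.spec["cmake"].command  # Executable for the cmake dependency
       with working_dir(self.test_suite.current_test_cache_dir.examples):
           cmake(".")
           make = which("make")
           make()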
Typically the files used to build and/or run test executables are either
cached from the installation (see :ref:`cache_extra_test_sources`) or added
to the package repository (see :ref:`cache_custom_files`). There is nothing
preventing the use of both.

.. _cache_extra_test_sources:

""""""""""""""""""""""""""""""""""""
Saving build- and install-time files
""""""""""""""""""""""""""""""""""""

You can use the ``cache_extra_test_sources`` helper routine to copy
directories and/or files from the source build stage directory to the
package's installation directory. Spack will automatically copy these
files for you when it sets up the test stage directory and before it
begins running the tests.

The signature for ``cache_extra_test_sources`` is:

@@ -5651,69 +5513,46 @@ where each argument has the following meaning:

* ``srcs`` is a string *or* a list of strings corresponding to the
  paths of subdirectories and/or files needed for stand-alone testing.

.. warning::

   Paths provided in the ``srcs`` argument **must be relative** to the
   staged source directory. They will be copied to the equivalent relative
   location under the test stage directory prior to test execution.

Contents of subdirectories and files are copied to a special test cache
subdirectory of the installation prefix. They are automatically copied to
the appropriate relative paths under the test stage directory prior to
executing stand-alone tests.

.. tip::

   *Perform test-related conversions once when copying files.*

   If one or more of the copied files needs to be modified to reference
   the installed software, it is recommended that those changes be made
   to the cached files **once** in the post-``install`` copy method
   **after** the call to ``cache_extra_test_sources``. This will reduce
   the amount of unnecessary work in the test method **and** avoid problems
   running stand-alone tests in shared instances and facility deployments.

   The ``filter_file`` function can be quite useful for such changes
   (see :ref:`file-filtering`).
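A hedged sketch of such a one-time conversion (the ``Makefile`` variable being
patched and the ``<prefix>/.spack/test`` cache location are assumptions, not
spelled out above):

.. code-block:: python

   @run_after("install")
   def copy_test_files(self):
       cache_extra_test_sources(self, "examples")

       # one-time conversion: point the cached Makefile at the installed
       # library; the path assumes the conventional <prefix>/.spack/test
       # cache location used for these files
       makefile = join_path(self.prefix, ".spack", "test", "examples", "Makefile")
       filter_file(r"^PREFIX\s*=.*", f"PREFIX = {self.prefix}", makefile)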
Below is a basic example of a test that relies on files from the installation.
This package method re-uses the contents of the ``examples`` subdirectory,
which is assumed to have all of the files implemented to allow ``make`` to
compile and link ``foo.c`` and ``bar.c`` against the package's installed
library.

.. code-block:: python

   class MyLibPackage(MakefilePackage):
       ...

       @run_after("install")
       def copy_test_files(self):
           cache_extra_test_sources(self, "examples")

       def test_example(self):
           """build and run the examples"""
           examples_dir = self.test_suite.current_test_cache_dir.examples
           with working_dir(examples_dir):
               make = which("make")
               make()

               for program in ["foo", "bar"]:
                   with test_part(
                       self,
                       f"test_example_{program}",
                       purpose=f"ensure {program} runs"
                   ):
                       exe = Executable(program)
                       exe()

In this case, ``copy_test_files`` copies the associated files from the
build stage to the package's test cache directory under the installation
prefix. Running ``spack test run`` for the package results in Spack copying
the directory and its contents to the test stage directory. The
``working_dir`` context manager ensures the commands within it are executed
from the ``examples_dir``. The test builds the software using ``make`` before
running each executable, ``foo`` and ``bar``, as independent test parts.

.. note::
@@ -5722,18 +5561,43 @@ running each executable, ``foo`` and ``bar``, as independent test parts.

   The key to copying files for stand-alone testing at build time is use
   of the ``run_after`` directive, which ensures the associated files are
   copied **after** the provided build stage (``install``) when the installation
   prefix **and** files are available.

The test method uses the path contained in the package's
``self.test_suite.current_test_cache_dir`` property for the root directory
of the copied files. In this case, that's the ``examples`` subdirectory.

.. tip::

   If you want to see more examples from packages that cache build files, run
   ``spack pkg grep cache_extra_test_sources | sed "s/\/package.py.*//g" | sort -u``
   from the command line to get a list of the packages.

.. _cache_custom_files:
@@ -5741,9 +5605,8 @@ running each executable, ``foo`` and ``bar``, as independent test parts.

Adding custom files
"""""""""""""""""""

Sometimes it is helpful or necessary to include custom files for building
and/or checking the results of tests as part of the package. Examples of the
types of files that might be useful are:

- test source files
- test input files

@@ -5751,15 +5614,17 @@ of files that might be useful are:

- expected test outputs

While obtaining such files from the software repository is preferred (see
:ref:`cache_extra_test_sources`), there are circumstances where doing so is not
feasible, such as when the software is not being actively maintained. When test
files cannot be obtained from the repository or there is a need to supplement
files that can, Spack supports the inclusion of additional files under the
``test`` subdirectory of the package in the Spack repository.

The following example assumes a ``custom-example.cpp`` is saved in ``MyLibrary``
package's ``test`` subdirectory. It also assumes the program simply needs to
be compiled and linked against the installed ``MyLibrary`` software.

.. code-block:: python
@@ -5769,29 +5634,17 @@ be compiled and linked against the installed ``MyLibrary`` software.

       test_requires_compiler = True
       ...

       def test_custom_example(self):
           """build and run custom-example"""
           src_dir = self.test_suite.current_test_data_dir
           exe = "custom-example"

           with working_dir(src_dir):
               cc = which(os.environ["CC"])
               cc(
                   f"-L{self.prefix.lib}",
                   f"-I{self.prefix.include}",
                   f"{exe}.cpp",
                   "-o", exe
               )

               custom_example = Executable(exe)
               custom_example()

In this case, ``spack test run`` for the package results in Spack copying
the contents of the ``test`` subdirectory to the test stage directory path
in ``self.test_suite.current_test_data_dir`` before calling
``test_custom_example``. Use of the ``working_dir`` context manager
ensures the commands to build and run the program are performed from
within the appropriate subdirectory of the test stage.
.. _expected_test_output_from_file:

@@ -5800,8 +5653,9 @@ Reading expected output from a file

"""""""""""""""""""""""""""""""""""

The helper function ``get_escaped_text_output`` is available for packages
to retrieve properly formatted text from a file potentially containing
special characters.

The signature for ``get_escaped_text_output`` is:

@@ -5811,13 +5665,10 @@ The signature for ``get_escaped_text_output`` is:

where ``filename`` is the path to the file containing the expected output.

The path provided to ``filename`` for one of the copied custom files
(:ref:`custom file <cache_custom_files>`) is in the path rooted at
``self.test_suite.current_test_data_dir``.

The example below shows how to reference both the custom database
(``packages.db``) and expected output (``dump.out``) files Spack copies
to the test stage:

.. code-block:: python
@@ -5839,9 +5690,8 @@ to the test stage:

       for exp in expected:
           assert re.search(exp, out), f"Expected '{exp}' in output"

If the files were instead cached from installing the software, the paths to the
two files would be found under the ``self.test_suite.current_test_cache_dir``
directory as shown below:

.. code-block:: python

   def test_example(self):
       """check example table dump"""
       test_cache_dir = self.test_suite.current_test_cache_dir
       db_filename = test_cache_dir.join("packages.db")
       ...
       expected = get_escaped_text_output(test_cache_dir.join("dump.out"))
       ...

Alternatively, if both files had been installed by the software into the
``share/tests`` subdirectory of the installation prefix, the paths to the
two files would be referenced as follows:

.. code-block:: python

   def test_example(self):
       """check example table dump"""
       db_filename = self.prefix.share.tests.join("packages.db")
       ...
       expected = get_escaped_text_output(
           self.prefix.share.tests.join("dump.out")
       )
       ...
.. _check_outputs:

@@ -5874,9 +5717,9 @@ two files would be referenced as follows:

Comparing expected to actual outputs
""""""""""""""""""""""""""""""""""""

The ``check_outputs`` helper routine is available for packages to ensure
multiple expected outputs from running an executable are contained within
the actual outputs.

The signature for ``check_outputs`` is:

@@ -5902,17 +5745,11 @@ Invoking the method is the equivalent of:

   if errors:
       raise RuntimeError("\n ".join(errors))
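Putting the helpers together, a test might read expected patterns from a
custom file and compare them against captured output (a sketch; the file and
executable names are illustrative):

.. code-block:: python

   def test_example_output(self):
       """check the example's output against expected patterns"""
       expected = get_escaped_text_output(
           self.test_suite.current_test_data_dir.join("expected.out")
       )
       example = which(self.prefix.bin.example)
       out = example(output=str.split, error=str.split)
       check_outputs(expected, out)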
.. tip::

   If you want to see more examples from packages that use this helper, run
   ``spack pkg grep check_outputs | sed "s/\/package.py.*//g" | sort -u``
   from the command line to get a list of the packages.

.. _accessing-files:

"""""""""""""""""""""""""""""""""""""""""
Finding package- and test-related files
"""""""""""""""""""""""""""""""""""""""""

You may need to access files from one or more locations when writing

@@ -5921,7 +5758,8 @@ include test source files or includes them but has no way to build the

executables using the installed headers and libraries. In these cases
you may need to reference the files relative to one or more root directories.
The table below lists relevant path properties and provides additional
examples of their use. See :ref:`expected_test_output_from_file` for
examples of accessing files saved from the software repository, package
repository, and installation.
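In code, the three root directories described above are all reachable from a
test method (a sketch; the loop merely confirms the locations exist):

.. code-block:: python

   def test_locate_files(self):
       """sketch: name the three locations tests commonly read from"""
       build_cache = self.test_suite.current_test_cache_dir  # files saved from the build
       custom_files = self.test_suite.current_test_data_dir  # files from the package's test/ dir
       install_root = self.prefix                            # the installation itself
       # assumes `import os` at the top of package.py
       for root in (build_cache, custom_files, install_root):
           assert os.path.isdir(root)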
@@ -5950,6 +5788,7 @@ repository, and installation.

     - ``self.test_suite.current_test_data_dir``
     - ``join_path(self.test_suite.current_test_data_dir, "hello.f90")``

.. _inheriting-tests:

""""""""""""""""""""""""""""

@@ -5992,7 +5831,7 @@ maintainers provide additional stand-alone tests customized to the package.

.. warning::

   Any package that implements a test method with the same name as an
   inherited method will override the inherited method. If that is not the
   goal and you are not explicitly calling and adding functionality to
   the inherited method for the test, then make sure that all test methods
   and embedded test parts have unique test names.
@@ -6157,8 +5996,6 @@ running:

This is already part of the boilerplate for packages created with
``spack create``.

.. _file-filtering:

^^^^^^^^^^^^^^^^^^^
Filtering functions
^^^^^^^^^^^^^^^^^^^
|
@@ -253,6 +253,17 @@ can easily happen if it is not updated frequently, this behavior ensures that
|
|||||||
spack has a way to know for certain about the status of any concrete spec on
|
spack has a way to know for certain about the status of any concrete spec on
|
||||||
the remote mirror, but can slow down pipeline generation significantly.
|
the remote mirror, but can slow down pipeline generation significantly.
|
||||||
|
|
||||||
|
The ``--optimize`` argument is experimental and runs the generated pipeline
|
||||||
|
document through a series of optimization passes designed to reduce the size
|
||||||
|
of the generated file.
|
||||||
|
|
||||||
|
The ``--dependencies`` is also experimental and disables what in Gitlab is
|
||||||
|
referred to as DAG scheduling, internally using the ``dependencies`` keyword
|
||||||
|
rather than ``needs`` to list dependency jobs. The drawback of using this option
|
||||||
|
is that before any job can begin, all jobs in previous stages must first
|
||||||
|
complete. The benefit is that Gitlab allows more dependencies to be listed
|
||||||
|
when using ``dependencies`` instead of ``needs``.
|
||||||
|
|
||||||
The optional ``--output-file`` argument should be an absolute path (including
|
The optional ``--output-file`` argument should be an absolute path (including
|
||||||
file name) to the generated pipeline, and if not given, the default is
|
file name) to the generated pipeline, and if not given, the default is
|
||||||
``./.gitlab-ci.yml``.
|
``./.gitlab-ci.yml``.
|
||||||
|
@@ -1,13 +1,13 @@

sphinx==7.4.7
sphinxcontrib-programoutput==0.17
sphinx_design==0.6.0
sphinx-rtd-theme==2.0.0
python-levenshtein==0.25.1
docutils==0.20.1
pygments==2.18.0
urllib3==2.2.2
pytest==8.3.1
isort==5.13.2
black==24.4.2
flake8==7.1.0
mypy==1.11.0
@@ -33,23 +33,8 @@

    pass


-esc, bell, lbracket, bslash, newline = r"\x1b", r"\x07", r"\[", r"\\", r"\n"
-# Ansi Control Sequence Introducers (CSI) are a well-defined format
-# Standard ECMA-48: Control Functions for Character-Imaging I/O Devices, section 5.4
-# https://www.ecma-international.org/wp-content/uploads/ECMA-48_5th_edition_june_1991.pdf
-csi_pre = f"{esc}{lbracket}"
-csi_param, csi_inter, csi_post = r"[0-?]", r"[ -/]", r"[@-~]"
-ansi_csi = f"{csi_pre}{csi_param}*{csi_inter}*{csi_post}"
-# General ansi escape sequences have well-defined prefixes,
-# but content and suffixes are less reliable.
-# Conservatively assume they end with either "<ESC>\" or "<BELL>",
-# with no intervening "<ESC>"/"<BELL>" keys or newlines
-esc_pre = f"{esc}[@-_]"
-esc_content = f"[^{esc}{bell}{newline}]"
-esc_post = f"(?:{esc}{bslash}|{bell})"
-ansi_esc = f"{esc_pre}{esc_content}*{esc_post}"
# Use this to strip escape sequences
-_escape = re.compile(f"{ansi_csi}|{ansi_esc}")
+_escape = re.compile(r"\x1b[^m]*m|\x1b\[?1034h|\x1b\][0-9]+;[^\x07]*\x07")

# control characters for enabling/disabling echo
#
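
Aside: the left-hand pattern above can be exercised on its own. A minimal
standalone sketch that rebuilds the same regex outside the module (the sample
strings are invented):

    import re

    esc, bell, lbracket, bslash, newline = r"\x1b", r"\x07", r"\[", r"\\", r"\n"
    csi_pre = f"{esc}{lbracket}"
    csi_param, csi_inter, csi_post = r"[0-?]", r"[ -/]", r"[@-~]"
    ansi_csi = f"{csi_pre}{csi_param}*{csi_inter}*{csi_post}"
    esc_pre = f"{esc}[@-_]"
    esc_content = f"[^{esc}{bell}{newline}]"
    esc_post = f"(?:{esc}{bslash}|{bell})"
    ansi_esc = f"{esc_pre}{esc_content}*{esc_post}"
    _escape = re.compile(f"{ansi_csi}|{ansi_esc}")

    # An SGR color sequence and an OSC window-title sequence are both stripped:
    colored = "\x1b[01;31mred\x1b[0m and \x1b]0;title\x07plain"
    print(_escape.sub("", colored))  # -> "red and plain"
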
@@ -791,7 +791,7 @@ def check_virtual_with_variants(spec, msg):
        return
    error = error_cls(
        f"{pkg_name}: {msg}",
-       [f"remove variants from '{spec}' in depends_on directive in {filename}"],
+       f"remove variants from '{spec}' in depends_on directive in {filename}",
    )
    errors.append(error)

@@ -129,10 +129,10 @@ def _bootstrap_config_scopes() -> Sequence["spack.config.ConfigScope"]:
    configuration_paths = (spack.config.CONFIGURATION_DEFAULTS_PATH, ("bootstrap", _config_path()))
    for name, path in configuration_paths:
        platform = spack.platforms.host().name
-       platform_scope = spack.config.DirectoryConfigScope(
+       platform_scope = spack.config.ConfigScope(
-           f"{name}/{platform}", os.path.join(path, platform)
+           "/".join([name, platform]), os.path.join(path, platform)
        )
-       generic_scope = spack.config.DirectoryConfigScope(name, path)
+       generic_scope = spack.config.ConfigScope(name, path)
        config_scopes.extend([generic_scope, platform_scope])
        msg = "[BOOTSTRAP CONFIG SCOPE] name={0}, path={1}"
        tty.debug(msg.format(generic_scope.name, generic_scope.path))

@@ -72,7 +72,6 @@
import spack.store
import spack.subprocess_context
import spack.user_environment
-import spack.util.executable
import spack.util.path
import spack.util.pattern
from spack import traverse

@@ -459,7 +458,10 @@ def set_wrapper_variables(pkg, env):

    # Find ccache binary and hand it to build environment
    if spack.config.get("config:ccache"):
-       env.set(SPACK_CCACHE_BINARY, spack.util.executable.which_string("ccache", required=True))
+       ccache = Executable("ccache")
+       if not ccache:
+           raise RuntimeError("No ccache binary found in PATH")
+       env.set(SPACK_CCACHE_BINARY, ccache)

    # Gather information about various types of dependencies
    link_deps = set(pkg.spec.traverse(root=False, deptype=("link")))

@@ -738,9 +740,7 @@ def get_rpaths(pkg):
    # Second module is our compiler mod name. We use that to get rpaths from
    # module show output.
    if pkg.compiler.modules and len(pkg.compiler.modules) > 1:
-       mod_rpath = path_from_modules([pkg.compiler.modules[1]])
-       if mod_rpath:
-           rpaths.append(mod_rpath)
+       rpaths.append(path_from_modules([pkg.compiler.modules[1]]))
    return list(dedupe(filter_system_paths(rpaths)))

@@ -1473,7 +1473,7 @@ def long_message(self):
        out.write(" {0}\n".format(self.log_name))

        # Also output the test log path IF it exists
-       if self.context != "test" and have_log:
+       if self.context != "test":
            test_log = join_path(os.path.dirname(self.log_name), spack_install_test_log)
            if os.path.isfile(test_log):
                out.write("\nSee test log for details:\n")

@@ -162,9 +162,7 @@ def initconfig_compiler_entries(self):
        ld_flags = " ".join(flags["ldflags"])
        ld_format_string = "CMAKE_{0}_LINKER_FLAGS"
        # CMake has separate linker arguments for types of builds.
-       # 'ldflags' should not be used with CMAKE_STATIC_LINKER_FLAGS which
-       # is used by the archiver, so don't include "STATIC" in this loop:
-       for ld_type in ["EXE", "MODULE", "SHARED"]:
+       for ld_type in ["EXE", "MODULE", "SHARED", "STATIC"]:
            ld_string = ld_format_string.format(ld_type)
            entries.append(cmake_cache_string(ld_string, ld_flags))

@@ -124,8 +124,6 @@ def cuda_flags(arch_list):
    # minimum supported versions
    conflicts("%gcc@:4", when="+cuda ^cuda@11.0:")
    conflicts("%gcc@:5", when="+cuda ^cuda@11.4:")
-   conflicts("%gcc@:7.2", when="+cuda ^cuda@12.4:")
-   conflicts("%clang@:6", when="+cuda ^cuda@12.2:")

    # maximum supported version
    # NOTE:

@@ -138,14 +136,14 @@ def cuda_flags(arch_list):
    conflicts("%gcc@11.2:", when="+cuda ^cuda@:11.5")
    conflicts("%gcc@12:", when="+cuda ^cuda@:11.8")
    conflicts("%gcc@13:", when="+cuda ^cuda@:12.3")
-   conflicts("%gcc@14:", when="+cuda ^cuda@:12.5")
+   conflicts("%gcc@14:", when="+cuda ^cuda@:12.4")
    conflicts("%clang@12:", when="+cuda ^cuda@:11.4.0")
    conflicts("%clang@13:", when="+cuda ^cuda@:11.5")
    conflicts("%clang@14:", when="+cuda ^cuda@:11.7")
    conflicts("%clang@15:", when="+cuda ^cuda@:12.0")
    conflicts("%clang@16:", when="+cuda ^cuda@:12.1")
    conflicts("%clang@17:", when="+cuda ^cuda@:12.3")
-   conflicts("%clang@18:", when="+cuda ^cuda@:12.5")
+   conflicts("%clang@18:", when="+cuda ^cuda@:12.4")

    # https://gist.github.com/ax3l/9489132#gistcomment-3860114
    conflicts("%gcc@10", when="+cuda ^cuda@:11.4.0")

@@ -213,16 +211,12 @@ def cuda_flags(arch_list):
    conflicts("%intel@19.0:", when="+cuda ^cuda@:10.0")
    conflicts("%intel@19.1:", when="+cuda ^cuda@:10.1")
    conflicts("%intel@19.2:", when="+cuda ^cuda@:11.1.0")
-   conflicts("%intel@2021:", when="+cuda ^cuda@:11.4.0")

    # XL is mostly relevant for ppc64le Linux
    conflicts("%xl@:12,14:", when="+cuda ^cuda@:9.1")
    conflicts("%xl@:12,14:15,17:", when="+cuda ^cuda@9.2")
    conflicts("%xl@:12,17:", when="+cuda ^cuda@:11.1.0")

-   # PowerPC.
-   conflicts("target=ppc64le", when="+cuda ^cuda@12.5:")
-
    # Darwin.
    # TODO: add missing conflicts for %apple-clang cuda@:10
-   conflicts("platform=darwin", when="+cuda ^cuda@11.0.2:")
+   conflicts("platform=darwin", when="+cuda ^cuda@11.0.2: ")

@@ -72,7 +72,7 @@ def build_directory(self):
    def build_args(self):
        """Arguments for ``go build``."""
        # Pass ldflags -s = --strip-all and -w = --no-warnings by default
-       return ["-modcacherw", "-ldflags", "-s -w", "-o", f"{self.pkg.name}"]
+       return ["-ldflags", "-s -w", "-o", f"{self.pkg.name}"]

    @property
    def check_args(self):

@@ -34,8 +34,6 @@ def _misc_cache():
    return spack.util.file_cache.FileCache(path)


-FileCacheType = Union[spack.util.file_cache.FileCache, llnl.util.lang.Singleton]
-
#: Spack's cache for small data
MISC_CACHE: Union[spack.util.file_cache.FileCache, llnl.util.lang.Singleton] = (
    llnl.util.lang.Singleton(_misc_cache)

@@ -22,8 +22,6 @@
from urllib.parse import urlencode
from urllib.request import HTTPHandler, Request, build_opener

-import ruamel.yaml
-
import llnl.util.filesystem as fs
import llnl.util.tty as tty
from llnl.util.lang import memoized

@@ -553,9 +551,10 @@ def generate_gitlab_ci_yaml(
    env,
    print_summary,
    output_file,
-   *,
    prune_dag=False,
    check_index_only=False,
+   run_optimizer=False,
+   use_dependencies=False,
    artifacts_root=None,
    remote_mirror_override=None,
):

@@ -576,6 +575,12 @@ def generate_gitlab_ci_yaml(
            this mode results in faster yaml generation time). Otherwise, also
            check each spec directly by url (useful if there is no index or it
            might be out of date).
+       run_optimizer (bool): If True, post-process the generated yaml to try
+           to reduce the size (attempts to collect repeated configuration
+           and replace it with definitions).
+       use_dependencies (bool): If true, use "dependencies" rather than "needs"
+           ("needs" allows DAG scheduling). Useful if gitlab instance cannot
+           be configured to handle more than a few "needs" per job.
        artifacts_root (str): Path where artifacts like logs, environment
            files (spack.yaml, spack.lock), etc should be written. GitLab
            requires this to be within the project directory.

@@ -809,8 +814,7 @@ def ensure_expected_target_path(path):
    cli_scopes = [
        os.path.relpath(s.path, concrete_env_dir)
        for s in cfg.scopes().values()
-       if not s.writable
-       and isinstance(s, (cfg.DirectoryConfigScope))
+       if isinstance(s, cfg.ImmutableConfigScope)
        and s.path not in env_includes
        and os.path.exists(s.path)
    ]

@@ -1267,6 +1271,17 @@ def main_script_replacements(cmd):
        with open(copy_specs_file, "w") as fd:
            fd.write(json.dumps(buildcache_copies))

+       # TODO(opadron): remove this or refactor
+       if run_optimizer:
+           import spack.ci_optimization as ci_opt
+
+           output_object = ci_opt.optimizer(output_object)
+
+       # TODO(opadron): remove this or refactor
+       if use_dependencies:
+           import spack.ci_needs_workaround as cinw
+
+           output_object = cinw.needs_to_dependencies(output_object)
    else:
        # No jobs were generated
        noop_job = spack_ci_ir["jobs"]["noop"]["attributes"]

@@ -1295,11 +1310,8 @@ def main_script_replacements(cmd):
    if not rebuild_everything:
        sys.exit(1)

-   # Minimize yaml output size through use of anchors
-   syaml.anchorify(sorted_output)
-
-   with open(output_file, "w") as f:
-       ruamel.yaml.YAML().dump(sorted_output, f)
+   with open(output_file, "w") as outf:
+       outf.write(syaml.dump(sorted_output, default_flow_style=True))


def _url_encode_string(input_string):

34 lib/spack/spack/ci_needs_workaround.py (new file)
@@ -0,0 +1,34 @@
+# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+import collections.abc
+
+get_job_name = lambda needs_entry: (
+    needs_entry.get("job")
+    if (isinstance(needs_entry, collections.abc.Mapping) and needs_entry.get("artifacts", True))
+    else needs_entry if isinstance(needs_entry, str) else None
+)
+
+
+def convert_job(job_entry):
+    if not isinstance(job_entry, collections.abc.Mapping):
+        return job_entry
+
+    needs = job_entry.get("needs")
+    if needs is None:
+        return job_entry
+
+    new_job = {}
+    new_job.update(job_entry)
+    del new_job["needs"]
+
+    new_job["dependencies"] = list(
+        filter((lambda x: x is not None), (get_job_name(needs_entry) for needs_entry in needs))
+    )
+
+    return new_job
+
+
+def needs_to_dependencies(yaml):
+    return dict((k, convert_job(v)) for k, v in yaml.items())
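
Aside: the conversion above is a pure dict-to-dict transform; a small,
hypothetical round trip (job names invented for illustration):

    pipeline = {
        "stages": ["generate", "build"],  # non-mapping values pass through convert_job unchanged
        "build-zlib": {
            "script": ["spack ci rebuild"],
            "needs": ["generate-pipeline", {"job": "rebuild-index", "artifacts": False}],
        },
    }

    converted = needs_to_dependencies(pipeline)
    # "needs" is replaced by "dependencies"; entries whose artifacts are
    # disabled yield None from get_job_name and are filtered out:
    # converted["build-zlib"] == {"script": ["spack ci rebuild"],
    #                             "dependencies": ["generate-pipeline"]}
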
363 lib/spack/spack/ci_optimization.py (new file)
@@ -0,0 +1,363 @@
+# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+import collections
+import collections.abc
+import copy
+import hashlib
+
+import spack.util.spack_yaml as syaml
+
+
+def sort_yaml_obj(obj):
+    if isinstance(obj, collections.abc.Mapping):
+        return syaml.syaml_dict(
+            (k, sort_yaml_obj(v)) for k, v in sorted(obj.items(), key=(lambda item: str(item[0])))
+        )
+
+    if isinstance(obj, collections.abc.Sequence) and not isinstance(obj, str):
+        return syaml.syaml_list(sort_yaml_obj(x) for x in obj)
+
+    return obj
+
+
+def matches(obj, proto):
+    """Returns True if the test object "obj" matches the prototype object
+    "proto".
+
+    If obj and proto are mappings, obj matches proto if (key in obj) and
+    (obj[key] matches proto[key]) for every key in proto.
+
+    If obj and proto are sequences, obj matches proto if they are of the same
+    length and (a matches b) for every (a,b) in zip(obj, proto).
+
+    Otherwise, obj matches proto if obj == proto.
+
+    Precondition: proto must not have any reference cycles
+    """
+    if isinstance(obj, collections.abc.Mapping):
+        if not isinstance(proto, collections.abc.Mapping):
+            return False
+
+        return all((key in obj and matches(obj[key], val)) for key, val in proto.items())
+
+    if isinstance(obj, collections.abc.Sequence) and not isinstance(obj, str):
+        if not (isinstance(proto, collections.abc.Sequence) and not isinstance(proto, str)):
+            return False
+
+        if len(obj) != len(proto):
+            return False
+
+        return all(matches(obj[index], val) for index, val in enumerate(proto))
+
+    return obj == proto
+
+
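Aside: a couple of concrete calls make the matching rules above easier to
follow (all values invented):

    # Mappings: every key in the prototype must be present and match recursively.
    assert matches({"tags": ["spack"], "stage": "build"}, {"tags": ["spack"]})

    # Sequences: lengths must be equal, so a longer list does not match.
    assert not matches({"tags": ["spack", "x86_64"]}, {"tags": ["spack"]})

    # Anything else falls back to plain equality.
    assert matches("generate", "generate")
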
+def subkeys(obj, proto):
+    """Returns the test mapping "obj" after factoring out the items it has in
+    common with the prototype mapping "proto".
+
+    Consider a recursive merge operation, merge(a, b) on mappings a and b, that
+    returns a mapping, m, whose keys are the union of the keys of a and b, and
+    for every such key, "k", its corresponding value is:
+
+      - merge(a[key], b[key])  if a[key] and b[key] are mappings, or
+      - b[key]  if (key in b) and not matches(a[key], b[key]), or
+      - a[key]  otherwise
+
+    If obj and proto are mappings, the returned object is the smallest object,
+    "a", such that merge(a, proto) matches obj.
+
+    Otherwise, obj is returned.
+    """
+    if not (
+        isinstance(obj, collections.abc.Mapping) and isinstance(proto, collections.abc.Mapping)
+    ):
+        return obj
+
+    new_obj = {}
+    for key, value in obj.items():
+        if key not in proto:
+            new_obj[key] = value
+            continue
+
+        if matches(value, proto[key]) and matches(proto[key], value):
+            continue
+
+        if isinstance(value, collections.abc.Mapping):
+            new_obj[key] = subkeys(value, proto[key])
+            continue
+
+        new_obj[key] = value
+
+    return new_obj
+
+
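Aside: in other words, subkeys removes whatever the prototype already
provides; a small sketch (all values invented):

    proto = {"tags": ["spack"], "variables": {"SPACK_COMPILER_ACTION": "NONE"}}
    job = {
        "tags": ["spack"],
        "variables": {"SPACK_COMPILER_ACTION": "NONE", "SPACK_ROOT_SPEC": "zlib"},
        "script": ["spack ci rebuild"],
    }

    # "tags" is fully covered by proto and disappears; "variables" keeps only
    # the part proto does not supply; "script" is untouched.
    assert subkeys(job, proto) == {
        "variables": {"SPACK_ROOT_SPEC": "zlib"},
        "script": ["spack ci rebuild"],
    }
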
+def add_extends(yaml, key):
+    """Modifies the given object "yaml" so that it includes an "extends" key
+    whose value features "key".
+
+    If "extends" is not in yaml, then yaml is modified such that
+    yaml["extends"] == key.
+
+    If yaml["extends"] is a str, then yaml is modified such that
+    yaml["extends"] == [yaml["extends"], key]
+
+    If yaml["extends"] is a list that does not include key, then key is
+    appended to the list.
+
+    Otherwise, yaml is left unchanged.
+    """
+
+    has_key = "extends" in yaml
+    extends = yaml.get("extends")
+
+    if has_key and not isinstance(extends, (str, collections.abc.Sequence)):
+        return
+
+    if extends is None:
+        yaml["extends"] = key
+        return
+
+    if isinstance(extends, str):
+        if extends != key:
+            yaml["extends"] = [extends, key]
+        return
+
+    if key not in extends:
+        extends.append(key)
+
+
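Aside: a quick walk through the three cases (key names invented):

    job = {"script": ["spack ci rebuild"]}

    add_extends(job, ".c0")  # no "extends" yet: set it to the key itself
    assert job["extends"] == ".c0"

    add_extends(job, ".c1")  # a str: promote to a two-element list
    assert job["extends"] == [".c0", ".c1"]

    add_extends(job, ".c1")  # already listed: left unchanged
    assert job["extends"] == [".c0", ".c1"]
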
+def common_subobject(yaml, sub):
+    """Factor prototype object "sub" out of the values of mapping "yaml".
+
+    Consider a modified copy of yaml, "new", where for each key, "key" in yaml:
+
+      - If yaml[key] matches sub, then new[key] = subkeys(yaml[key], sub).
+      - Otherwise, new[key] = yaml[key].
+
+    If the above match criterion is not satisfied for any such key, then
+    (yaml, None) is returned; the yaml object is returned unchanged.
+
+    Otherwise, each matching value in new is modified as in
+    add_extends(new[key], common_key), and then new[common_key] is set to sub.
+    The common_key value is chosen such that it does not match any preexisting
+    key in new. In this case, (new, common_key) is returned.
+    """
+    match_list = set(k for k, v in yaml.items() if matches(v, sub))
+
+    if not match_list:
+        return yaml, None
+
+    common_prefix = ".c"
+    common_index = 0
+
+    while True:
+        common_key = "".join((common_prefix, str(common_index)))
+        if common_key not in yaml:
+            break
+        common_index += 1
+
+    new_yaml = {}
+
+    for key, val in yaml.items():
+        new_yaml[key] = copy.deepcopy(val)
+
+        if not matches(val, sub):
+            continue
+
+        new_yaml[key] = subkeys(new_yaml[key], sub)
+        add_extends(new_yaml[key], common_key)
+
+    new_yaml[common_key] = sub
+
+    return new_yaml, common_key
+
+
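Aside: putting matches/subkeys/add_extends together, a minimal factorization
(jobs invented for illustration):

    pipeline = {
        "job-a": {"tags": ["spack"], "script": ["make a"]},
        "job-b": {"tags": ["spack"], "script": ["make b"]},
    }

    new_pipeline, key = common_subobject(pipeline, {"tags": ["spack"]})
    assert key == ".c0"
    assert new_pipeline == {
        "job-a": {"script": ["make a"], "extends": ".c0"},
        "job-b": {"script": ["make b"], "extends": ".c0"},
        ".c0": {"tags": ["spack"]},
    }
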
+def print_delta(name, old, new, applied=None):
+    delta = new - old
+    reldelta = (1000 * delta) // old
+    reldelta = (reldelta // 10, reldelta % 10)
+
+    if applied is None:
+        applied = new <= old
+
+    print(
+        "\n".join(
+            (
+                "{0} {1}:",
+                "  before: {2: 10d}",
+                "  after : {3: 10d}",
+                "  delta : {4:+10d} ({5:=+3d}.{6}%)",
+            )
+        ).format(name, ("+" if applied else "x"), old, new, delta, reldelta[0], reldelta[1])
+    )
+
+
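Aside: the relative delta above is computed in integer tenths of a percent;
worked through with invented sizes:

    old, new = 1000, 850
    delta = new - old                 # -150
    reldelta = (1000 * delta) // old  # -150, i.e. tenths of a percent
    whole, tenth = reldelta // 10, reldelta % 10
    print(whole, tenth)               # -15 0 -> rendered as "-15.0%"
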
+def try_optimization_pass(name, yaml, optimization_pass, *args, **kwargs):
+    """Try applying an optimization pass and return information about the
+    result
+
+    "name" is a string describing the nature of the pass. If it is a non-empty
+    string, summary statistics are also printed to stdout.
+
+    "yaml" is the object to apply the pass to.
+
+    "optimization_pass" is the function implementing the pass to be applied.
+
+    "args" and "kwargs" are the additional arguments to pass to the
+    optimization pass. The pass is applied as
+
+    >>> (new_yaml, *other_results) = optimization_pass(yaml, *args, **kwargs)
+
+    The pass's results are greedily rejected if it does not modify the original
+    yaml document, or if it produces a yaml document that serializes to a
+    larger string.
+
+    Returns (new_yaml, yaml, applied, other_results) if applied, or
+    (yaml, new_yaml, applied, other_results) otherwise.
+    """
+    result = optimization_pass(yaml, *args, **kwargs)
+    new_yaml, other_results = result[0], result[1:]
+
+    if new_yaml is yaml:
+        # pass was not applied
+        return (yaml, new_yaml, False, other_results)
+
+    pre_size = len(syaml.dump_config(sort_yaml_obj(yaml), default_flow_style=True))
+    post_size = len(syaml.dump_config(sort_yaml_obj(new_yaml), default_flow_style=True))
+
+    # pass makes the size worse: not applying
+    applied = post_size <= pre_size
+    if applied:
+        yaml, new_yaml = new_yaml, yaml
+
+    if name:
+        print_delta(name, pre_size, post_size, applied)
+
+    return (yaml, new_yaml, applied, other_results)
+
+
+def build_histogram(iterator, key):
+    """Builds a histogram of values given an iterable of mappings and a key.
+
+    For each mapping "m" with key "key" in iterator, the value m[key] is
+    considered.
+
+    Returns a list of tuples (hash, count, proportion, value), where
+
+      - "hash" is a sha1sum hash of the value.
+      - "count" is the number of occurrences of values that hash to "hash".
+      - "proportion" is the proportion of all values considered above that
+        hash to "hash".
+      - "value" is one of the values considered above that hash to "hash".
+        Which value is chosen when multiple values hash to the same "hash" is
+        undefined.
+
+    The list is sorted in descending order by count, yielding the most
+    frequently occurring hashes first.
+    """
+    buckets = collections.defaultdict(int)
+    values = {}
+
+    num_objects = 0
+    for obj in iterator:
+        num_objects += 1
+
+        try:
+            val = obj[key]
+        except (KeyError, TypeError):
+            continue
+
+        value_hash = hashlib.sha1()
+        value_hash.update(syaml.dump_config(sort_yaml_obj(val)).encode())
+        value_hash = value_hash.hexdigest()
+
+        buckets[value_hash] += 1
+        values[value_hash] = val
+
+    return [
+        (h, buckets[h], float(buckets[h]) / num_objects, values[h])
+        for h in sorted(buckets.keys(), key=lambda k: -buckets[k])
+    ]
+
+
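Aside: for instance, with two of four jobs sharing a tag list (all values
invented):

    jobs = [
        {"tags": ["spack"]},
        {"tags": ["spack"]},
        {"tags": ["x86_64"]},
        {"script": ["make"]},  # no "tags": still counted in the denominator
    ]

    top_hash, count, proportion, value = build_histogram(jobs, "tags")[0]
    print(count, proportion, value)  # 2 0.5 ['spack']
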
+def optimizer(yaml):
+    original_size = len(syaml.dump_config(sort_yaml_obj(yaml), default_flow_style=True))
+
+    # try factoring out commonly repeated portions
+    common_job = {
+        "variables": {"SPACK_COMPILER_ACTION": "NONE"},
+        "after_script": ['rm -rf "./spack"'],
+        "artifacts": {"paths": ["jobs_scratch_dir", "cdash_report"], "when": "always"},
+    }
+
+    # look for a list of tags that appear frequently
+    _, count, proportion, tags = next(iter(build_histogram(yaml.values(), "tags")), (None,) * 4)
+
+    # If a list of tags is found, and there is more than one job that uses it,
+    # *and* the jobs that do use it represent at least 70% of all jobs, then
+    # add the list to the prototype object.
+    if tags and count > 1 and proportion >= 0.70:
+        common_job["tags"] = tags
+
+    # apply common object factorization
+    yaml, other, applied, rest = try_optimization_pass(
+        "general common object factorization", yaml, common_subobject, common_job
+    )
+
+    # look for a common script, and try factoring that out
+    _, count, proportion, script = next(
+        iter(build_histogram(yaml.values(), "script")), (None,) * 4
+    )
+
+    if script and count > 1 and proportion >= 0.70:
+        yaml, other, applied, rest = try_optimization_pass(
+            "script factorization", yaml, common_subobject, {"script": script}
+        )
+
+    # look for a common before_script, and try factoring that out
+    _, count, proportion, script = next(
+        iter(build_histogram(yaml.values(), "before_script")), (None,) * 4
+    )
+
+    if script and count > 1 and proportion >= 0.70:
+        yaml, other, applied, rest = try_optimization_pass(
+            "before_script factorization", yaml, common_subobject, {"before_script": script}
+        )
+
+    # Look specifically for the SPACK_ROOT_SPEC environment variables.
+    # Try to factor them out.
+    h = build_histogram(
+        (getattr(val, "get", lambda *args: {})("variables") for val in yaml.values()),
+        "SPACK_ROOT_SPEC",
+    )
+
+    # In this case, we try to factor out *all* instances of the SPACK_ROOT_SPEC
+    # environment variable; not just the one that appears with the greatest
+    # frequency. We only require that more than 1 job uses a given instance's
+    # value, because we expect the value to be very large, and so expect even
+    # few-to-one factorizations to yield large space savings.
+    counter = 0
+    for _, count, proportion, spec in h:
+        if count <= 1:
+            continue
+
+        counter += 1
+
+        yaml, other, applied, rest = try_optimization_pass(
+            "SPACK_ROOT_SPEC factorization ({count})".format(count=counter),
+            yaml,
+            common_subobject,
+            {"variables": {"SPACK_ROOT_SPEC": spec}},
+        )
+
+    new_size = len(syaml.dump_config(sort_yaml_obj(yaml), default_flow_style=True))
+
+    print("\n")
+    print_delta("overall summary", original_size, new_size)
+    print("\n")
+    return yaml
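
Aside: end to end, the optimizer is a document-to-document transform; a
hypothetical invocation (package names invented, and assuming
spack.util.spack_yaml is importable so the module can serialize the document):

    pipeline = {
        f"build-{name}": {
            "script": ["spack ci rebuild"],
            "variables": {"SPACK_COMPILER_ACTION": "NONE", "SPACK_ROOT_SPEC": name},
            "after_script": ['rm -rf "./spack"'],
            "artifacts": {"paths": ["jobs_scratch_dir", "cdash_report"], "when": "always"},
        }
        for name in ("zlib", "bzip2", "xz")
    }

    optimized = optimizer(pipeline)
    # The repeated variables/after_script/artifacts (and common script) blocks
    # are factored into shared ".cN" prototypes that each job "extends", so
    # the serialized YAML shrinks; per-pass statistics come from print_delta.
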
@@ -336,7 +336,6 @@ def display_specs(specs, args=None, **kwargs):
        groups (bool): display specs grouped by arch/compiler (default True)
        decorator (typing.Callable): function to call to decorate specs
        all_headers (bool): show headers even when arch/compiler aren't defined
-       status_fn (typing.Callable): if provided, prepend install-status info
        output (typing.IO): A file object to write to. Default is ``sys.stdout``

    """

@@ -360,7 +359,6 @@ def get_arg(name, default=None):
    groups = get_arg("groups", True)
    all_headers = get_arg("all_headers", False)
    output = get_arg("output", sys.stdout)
-   status_fn = get_arg("status_fn", None)

    decorator = get_arg("decorator", None)
    if decorator is None:

@@ -388,13 +386,6 @@ def get_arg(name, default=None):
    def fmt(s, depth=0):
        """Formatter function for all output specs"""
        string = ""

-       if status_fn:
-           # This was copied from spec.tree's colorization logic
-           # then shortened because it seems like status_fn should
-           # always return an InstallStatus
-           string += colorize(status_fn(s).value)
-
        if hashes:
            string += gray_hash(s, hlen) + " "
        string += depth * " "

@@ -453,7 +444,7 @@ def format_list(specs):
def filter_loaded_specs(specs):
    """Filter a list of specs returning only those that are
    currently loaded."""
-   hashes = os.environ.get(uenv.spack_loaded_hashes_var, "").split(os.pathsep)
+   hashes = os.environ.get(uenv.spack_loaded_hashes_var, "").split(":")
    return [x for x in specs if x.dag_hash() in hashes]
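
Aside: the left-hand version splits on ``os.pathsep`` rather than a literal
colon so the lookup also works where the separator is ``;`` (notably
Windows); a quick sketch with an invented value:

    import os

    loaded = "abc123;def456"  # hypothetical loaded-hashes variable on Windows
    print(loaded.split(":"))         # ['abc123;def456'] -- no hash would ever match
    print(loaded.split(os.pathsep))  # ['abc123', 'def456'] on Windows, where os.pathsep == ';'
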
@@ -165,7 +165,7 @@ def _reset(args):
    if not ok_to_continue:
        raise RuntimeError("Aborting")

-   for scope in spack.config.CONFIG.writable_scopes:
+   for scope in spack.config.CONFIG.file_scopes:
        # The default scope should stay untouched
        if scope.name == "defaults":
            continue

@@ -70,6 +70,12 @@ def setup_parser(subparser: argparse.ArgumentParser):

    push = subparsers.add_parser("push", aliases=["create"], help=push_fn.__doc__)
    push.add_argument("-f", "--force", action="store_true", help="overwrite tarball if it exists")
+   push.add_argument(
+       "--allow-root",
+       "-a",
+       action="store_true",
+       help="allow install root string in binary files after RPATH substitution",
+   )
    push_sign = push.add_mutually_exclusive_group(required=False)
    push_sign.add_argument(
        "--unsigned",

@@ -184,6 +190,10 @@ def setup_parser(subparser: argparse.ArgumentParser):
    keys.add_argument("-f", "--force", action="store_true", help="force new download of keys")
    keys.set_defaults(func=keys_fn)

+   preview = subparsers.add_parser("preview", help=preview_fn.__doc__)
+   arguments.add_common_arguments(preview, ["installed_specs"])
+   preview.set_defaults(func=preview_fn)
+
    # Check if binaries need to be rebuilt on remote mirror
    check = subparsers.add_parser("check", help=check_fn.__doc__)
    check.add_argument(

@@ -394,6 +404,11 @@ def push_fn(args):
    else:
        roots = spack.cmd.require_active_env(cmd_name="buildcache push").concrete_roots()

+   if args.allow_root:
+       tty.warn(
+           "The flag `--allow-root` is the default in Spack 0.21, will be removed in Spack 0.22"
+       )
+
    mirror: spack.mirror.Mirror = args.mirror

    # Check if this is an OCI image.

@@ -945,6 +960,14 @@ def keys_fn(args):
    bindist.get_keys(args.install, args.trust, args.force)


+def preview_fn(args):
+    """analyze an installed spec and report whether executables and libraries are relocatable"""
+    tty.warn(
+        "`spack buildcache preview` is deprecated since `spack buildcache push --allow-root` is "
+        "now the default. This command will be removed in Spack 0.22"
+    )
+
+
def check_fn(args: argparse.Namespace):
    """check specs against remote binary mirror(s) to see if any need to be rebuilt

@@ -6,7 +6,6 @@
import json
import os
import shutil
-import warnings
from urllib.parse import urlparse, urlunparse

import llnl.util.filesystem as fs

@@ -74,7 +73,7 @@ def setup_parser(subparser):
    "--optimize",
    action="store_true",
    default=False,
-   help="(DEPRECATED) optimize the gitlab yaml file for size\n\n"
+   help="(experimental) optimize the gitlab yaml file for size\n\n"
    "run the generated document through a series of optimization passes "
    "designed to reduce the size of the generated file",
)

@@ -82,7 +81,7 @@ def setup_parser(subparser):
    "--dependencies",
    action="store_true",
    default=False,
-   help="(DEPRECATED) disable DAG scheduling (use 'plain' dependencies)",
+   help="(experimental) disable DAG scheduling (use 'plain' dependencies)",
)
generate.add_argument(
    "--buildcache-destination",

@@ -201,18 +200,6 @@ def ci_generate(args):
    before invoking this command. the value must be the CDash authorization token needed to create
    a build group and register all generated jobs under it
    """
-   if args.optimize:
-       warnings.warn(
-           "The --optimize option has been deprecated, and currently has no effect. "
-           "It will be removed in Spack v0.24."
-       )
-
-   if args.dependencies:
-       warnings.warn(
-           "The --dependencies option has been deprecated, and currently has no effect. "
-           "It will be removed in Spack v0.24."
-       )
-
    env = spack.cmd.require_active_env(cmd_name="ci generate")

    if args.copy_to:

@@ -225,6 +212,8 @@ def ci_generate(args):

    output_file = args.output_file
    copy_yaml_to = args.copy_to
+   run_optimizer = args.optimize
+   use_dependencies = args.dependencies
    prune_dag = args.prune_dag
    index_only = args.index_only
    artifacts_root = args.artifacts_root

@@ -245,6 +234,8 @@ def ci_generate(args):
        output_file,
        prune_dag=prune_dag,
        check_index_only=index_only,
+       run_optimizer=run_optimizer,
+       use_dependencies=use_dependencies,
        artifacts_root=artifacts_root,
        remote_mirror_override=buildcache_destination,
    )

@@ -3,9 +3,6 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

-import llnl.util.tty as tty
-from llnl.string import plural
-
import spack.cmd
import spack.cmd.common.arguments
import spack.environment as ev

@@ -46,9 +43,5 @@ def concretize(parser, args):
    with env.write_transaction():
        concretized_specs = env.concretize(force=args.force, tests=tests)
        if not args.quiet:
-           if concretized_specs:
-               tty.msg(f"Concretized {plural(len(concretized_specs), 'spec')}:")
-               ev.display_specs([concrete for _, concrete in concretized_specs])
-           else:
-               tty.msg("No new specs to concretize.")
+           ev.display_specs(concretized_specs)
        env.write()

@@ -156,7 +156,7 @@ def print_flattened_configuration(*, blame: bool) -> None:
    """
    env = ev.active_environment()
    if env is not None:
-       pristine = env.manifest.yaml_content
+       pristine = env.manifest.pristine_yaml_content
        flattened = pristine.copy()
        flattened[spack.schema.env.TOP_LEVEL_KEY] = pristine[spack.schema.env.TOP_LEVEL_KEY].copy()
    else:

@@ -264,9 +264,7 @@ def config_remove(args):
def _can_update_config_file(scope: spack.config.ConfigScope, cfg_file):
    if isinstance(scope, spack.config.SingleFileScope):
        return fs.can_access(cfg_file)
-   elif isinstance(scope, spack.config.DirectoryConfigScope):
-       return fs.can_write_to_dir(scope.path) and fs.can_access(cfg_file)
-   return False
+   return fs.can_write_to_dir(scope.path) and fs.can_access(cfg_file)


def _config_change_requires_scope(path, spec, scope, match_spec=None):

@@ -364,11 +362,14 @@ def config_change(args):
def config_update(args):
    # Read the configuration files
    spack.config.CONFIG.get_config(args.section, scope=args.scope)
-   updates: List[spack.config.ConfigScope] = [
-       x
-       for x in spack.config.CONFIG.format_updates[args.section]
-       if not isinstance(x, spack.config.InternalConfigScope) and x.writable
-   ]
+   updates: List[spack.config.ConfigScope] = list(
+       filter(
+           lambda s: not isinstance(
+               s, (spack.config.InternalConfigScope, spack.config.ImmutableConfigScope)
+           ),
+           spack.config.CONFIG.format_updates[args.section],
+       )
+   )

    cannot_overwrite, skip_system_scope = [], False
    for scope in updates:

@@ -446,7 +447,7 @@ def _can_revert_update(scope_dir, cfg_file, bkp_file):


def config_revert(args):
-   scopes = [args.scope] if args.scope else [x.name for x in spack.config.CONFIG.writable_scopes]
+   scopes = [args.scope] if args.scope else [x.name for x in spack.config.CONFIG.file_scopes]

    # Search for backup files in the configuration scopes
    Entry = collections.namedtuple("Entry", ["scope", "cfg", "bkp"])

@@ -2,6 +2,7 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import os
import re
import sys

@@ -933,7 +934,7 @@ def get_repository(args, name):
    # Figure out where the new package should live
    repo_path = args.repo
    if repo_path is not None:
-       repo = spack.repo.from_path(repo_path)
+       repo = spack.repo.Repo(repo_path)
        if spec.namespace and spec.namespace != repo.namespace:
            tty.die(
                "Can't create package with namespace {0} in repo with "

@@ -941,7 +942,9 @@ def get_repository(args, name):
            )
    else:
        if spec.namespace:
-           repo = spack.repo.PATH.get_repo(spec.namespace)
+           repo = spack.repo.PATH.get_repo(spec.namespace, None)
+           if not repo:
+               tty.die("Unknown namespace: '{0}'".format(spec.namespace))
        else:
            repo = spack.repo.PATH.first_repo()

@@ -47,6 +47,16 @@ def inverted_dependencies():
    dependents of, e.g., `mpi`, but virtuals are not included as
    actual dependents.
    """
+   dag = {}
+   for pkg_cls in spack.repo.PATH.all_package_classes():
+       dag.setdefault(pkg_cls.name, set())
+       for dep in pkg_cls.dependencies_by_name():
+           deps = [dep]
+
+           # expand virtuals if necessary
+           if spack.repo.PATH.is_virtual(dep):
+               deps += [s.name for s in spack.repo.PATH.providers_for(dep)]
+
    dag = collections.defaultdict(set)
    for pkg_cls in spack.repo.PATH.all_package_classes():
        for _, deps_by_name in pkg_cls.dependencies.items():

@@ -9,8 +9,6 @@

import spack.cmd
import spack.config
-import spack.fetch_strategy
-import spack.repo
import spack.spec
import spack.util.path
import spack.version

|
|||||||
# We construct a package class ourselves, rather than asking for
|
# We construct a package class ourselves, rather than asking for
|
||||||
# Spec.package, since Spec only allows this when it is concrete
|
# Spec.package, since Spec only allows this when it is concrete
|
||||||
package = pkg_cls(spec)
|
package = pkg_cls(spec)
|
||||||
source_stage = package.stage[0]
|
if isinstance(package.stage[0].fetcher, spack.fetch_strategy.GitFetchStrategy):
|
||||||
if isinstance(source_stage.fetcher, spack.fetch_strategy.GitFetchStrategy):
|
package.stage[0].fetcher.get_full_repo = True
|
||||||
source_stage.fetcher.get_full_repo = True
|
|
||||||
# If we retrieved this version before and cached it, we may have
|
# If we retrieved this version before and cached it, we may have
|
||||||
# done so without cloning the full git repo; likewise, any
|
# done so without cloning the full git repo; likewise, any
|
||||||
# mirror might store an instance with truncated history.
|
# mirror might store an instance with truncated history.
|
||||||
source_stage.disable_mirrors()
|
package.stage[0].disable_mirrors()
|
||||||
|
|
||||||
source_stage.fetcher.set_package(package)
|
|
||||||
package.stage.steal_source(abspath)
|
package.stage.steal_source(abspath)
|
||||||
|
|
||||||
|
|
||||||
|
@@ -3,7 +3,6 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

-import errno
import glob
import os

@@ -12,13 +11,43 @@
import spack.cmd
import spack.paths
import spack.repo
-import spack.util.editor
+from spack.spec import Spec
+from spack.util.editor import editor

description = "open package files in $EDITOR"
section = "packaging"
level = "short"


+def edit_package(name, repo_path, namespace):
+    """Opens the requested package file in your favorite $EDITOR.
+
+    Args:
+        name (str): The name of the package
+        repo_path (str): The path to the repository containing this package
+        namespace (str): A valid namespace registered with Spack
+    """
+    # Find the location of the package
+    if repo_path:
+        repo = spack.repo.Repo(repo_path)
+    elif namespace:
+        repo = spack.repo.PATH.get_repo(namespace)
+    else:
+        repo = spack.repo.PATH
+    path = repo.filename_for_package_name(name)
+
+    spec = Spec(name)
+    if os.path.exists(path):
+        if not os.path.isfile(path):
+            tty.die("Something is wrong. '{0}' is not a file!".format(path))
+        if not os.access(path, os.R_OK):
+            tty.die("Insufficient permissions on '%s'!" % path)
+    else:
+        raise spack.repo.UnknownPackageError(spec.name)
+
+    editor(path)
+
+
def setup_parser(subparser):
    excl_args = subparser.add_mutually_exclusive_group()

@@ -69,67 +98,41 @@ def setup_parser(subparser):
    excl_args.add_argument("-r", "--repo", default=None, help="path to repo to edit package in")
    excl_args.add_argument("-N", "--namespace", default=None, help="namespace of package to edit")

-   subparser.add_argument("package", nargs="*", default=None, help="package name")
+   subparser.add_argument("package", nargs="?", default=None, help="package name")


-def locate_package(name: str, repo: spack.repo.Repo) -> str:
-    path = repo.filename_for_package_name(name)
-
-    try:
-        with open(path, "r"):
-            return path
-    except OSError as e:
-        if e.errno == errno.ENOENT:
-            raise spack.repo.UnknownPackageError(name) from e
-        tty.die(f"Cannot edit package: {e}")
-
-
-def locate_file(name: str, path: str) -> str:
-    # convert command names to python module name
-    if path == spack.paths.command_path:
-        name = spack.cmd.python_name(name)
-
-    file_path = os.path.join(path, name)
-
-    # Try to open direct match.
-    try:
-        with open(file_path, "r"):
-            return file_path
-    except OSError as e:
-        if e.errno != errno.ENOENT:
-            tty.die(f"Cannot edit file: {e}")
-        pass
-
-    # Otherwise try to find a file that starts with the name
-    candidates = glob.glob(file_path + "*")
-    exclude_list = [".pyc", "~"]  # exclude binaries and backups
-    files = [f for f in candidates if not any(f.endswith(ext) for ext in exclude_list)]
-    if len(files) > 1:
-        tty.die(
-            f"Multiple files start with `{name}`:\n"
-            + "\n".join(f"  {os.path.basename(f)}" for f in files)
-        )
-    elif not files:
-        tty.die(f"No file for '{name}' was found in {path}")
-    return files[0]
-
-
def edit(parser, args):
-   names = args.package
+   name = args.package
+
+   # By default, edit package files
+   path = spack.paths.packages_path

    # If `--command`, `--test`, or `--module` is chosen, edit those instead
    if args.path:
-       paths = [locate_file(name, args.path) for name in names] if names else [args.path]
-       spack.util.editor.editor(*paths)
-   elif names:
-       if args.repo:
-           repo = spack.repo.from_path(args.repo)
-       elif args.namespace:
-           repo = spack.repo.PATH.get_repo(args.namespace)
-       else:
-           repo = spack.repo.PATH
-       paths = [locate_package(name, repo) for name in names]
-       spack.util.editor.editor(*paths)
+       path = args.path
+       if name:
+           # convert command names to python module name
+           if path == spack.paths.command_path:
+               name = spack.cmd.python_name(name)
+
+           path = os.path.join(path, name)
+           if not os.path.exists(path):
+               files = glob.glob(path + "*")
+               exclude_list = [".pyc", "~"]  # exclude binaries and backups
+               files = list(filter(lambda x: all(s not in x for s in exclude_list), files))
+               if len(files) > 1:
+                   m = "Multiple files exist with the name {0}.".format(name)
+                   m += " Please specify a suffix. Files are:\n\n"
+                   for f in files:
+                       m += "  " + os.path.basename(f) + "\n"
+                   tty.die(m)
+               if not files:
+                   tty.die("No file for '{0}' was found in {1}".format(name, path))
+               path = files[0]  # already confirmed only one entry in files
+
+       editor(path)
+   elif name:
+       edit_package(name, args.repo, args.namespace)
    else:
        # By default open the directory where packages live
-       spack.util.editor.editor(spack.paths.packages_path)
+       editor(path)

@@ -7,7 +7,7 @@
import os
import re
import sys
-from typing import List, Optional, Set
+from typing import List, Optional

import llnl.util.tty as tty
import llnl.util.tty.colify as colify

@@ -19,7 +19,6 @@
import spack.detection
import spack.error
import spack.repo
-import spack.spec
import spack.util.environment
from spack.cmd.common import arguments

@@ -139,26 +138,14 @@ def external_find(args):
|
|||||||
candidate_packages, path_hints=args.path, max_workers=args.jobs
|
candidate_packages, path_hints=args.path, max_workers=args.jobs
|
||||||
)
|
)
|
||||||
|
|
||||||
new_specs = spack.detection.update_configuration(
|
new_entries = spack.detection.update_configuration(
|
||||||
detected_packages, scope=args.scope, buildable=not args.not_buildable
|
detected_packages, scope=args.scope, buildable=not args.not_buildable
|
||||||
)
|
)
|
||||||
|
if new_entries:
|
||||||
# If the user runs `spack external find --not-buildable mpich` we also mark `mpi` non-buildable
|
|
||||||
# to avoid that the concretizer picks a different mpi provider.
|
|
||||||
if new_specs and args.not_buildable:
|
|
||||||
virtuals: Set[str] = {
|
|
||||||
virtual.name
|
|
||||||
for new_spec in new_specs
|
|
||||||
for virtual_specs in spack.repo.PATH.get_pkg_class(new_spec.name).provided.values()
|
|
||||||
for virtual in virtual_specs
|
|
||||||
}
|
|
||||||
new_virtuals = spack.detection.set_virtuals_nonbuildable(virtuals, scope=args.scope)
|
|
||||||
new_specs.extend(spack.spec.Spec(name) for name in new_virtuals)
|
|
||||||
|
|
||||||
if new_specs:
|
|
||||||
path = spack.config.CONFIG.get_config_filename(args.scope, "packages")
|
path = spack.config.CONFIG.get_config_filename(args.scope, "packages")
|
||||||
tty.msg(f"The following specs have been detected on this system and added to {path}")
|
msg = "The following specs have been detected on this system and added to {0}"
|
||||||
spack.cmd.display_specs(new_specs)
|
tty.msg(msg.format(path))
|
||||||
|
spack.cmd.display_specs(new_entries)
|
||||||
else:
|
else:
|
||||||
tty.msg("No new external packages detected")
|
tty.msg("No new external packages detected")
|
||||||
|
|
||||||
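One concrete reading of the `-` block above: `spack external find --not-buildable mpich` detects `mpich`, collects the virtuals it provides (here `{"mpi"}`), and, unless the user has already set the property, merges roughly the following into the `packages` configuration. This is a sketch; the real merge goes through `spack.config.merge_yaml` inside the `set_virtuals_nonbuildable` helper whose removal appears in a later hunk.

    # Assumed example: mpich was just detected and provides the "mpi" virtual.
    virtuals = {"mpi"}
    new_config = {v: {"buildable": False} for v in virtuals}
    # Equivalent YAML written to the packages section:
    #   packages:
    #     mpi:
    #       buildable: false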
@@ -46,10 +46,6 @@ def setup_parser(subparser):
         help="output specs as machine-readable json records",
     )

-    subparser.add_argument(
-        "-I", "--install-status", action="store_true", help="show install status of packages"
-    )
-
     subparser.add_argument(
         "-d", "--deps", action="store_true", help="output dependencies along with found specs"
     )
@@ -297,24 +293,25 @@ def root_decorator(spec, string):
         )
         print()

+    if args.show_concretized:
+        tty.msg("Concretized roots")
+        cmd.display_specs(env.specs_by_hash.values(), args, decorator=decorator)
+        print()
+
+    # Display a header for the installed packages section IF there are installed
+    # packages. If there aren't any, we'll just end up printing "0 installed packages"
+    # later.
+    if results and not args.only_roots:
+        tty.msg("Installed packages")


 def find(parser, args):
-    env = ev.active_environment()
+    q_args = query_arguments(args)
+    results = args.specs(**q_args)
+
+    env = ev.active_environment()
     if not env and args.only_roots:
         tty.die("-r / --only-roots requires an active environment")
-    if not env and args.show_concretized:
-        tty.die("-c / --show-concretized requires an active environment")
-
-    if env:
-        if args.constraint:
-            init_specs = spack.cmd.parse_specs(args.constraint)
-            results = env.all_matching_specs(*init_specs)
-        else:
-            results = env.all_specs()
-    else:
-        q_args = query_arguments(args)
-        results = args.specs(**q_args)

     decorator = make_env_decorator(env) if env else lambda s, f: f

@@ -335,11 +332,6 @@ def find(parser, args):
     if args.loaded:
         results = spack.cmd.filter_loaded_specs(results)

-    if args.install_status or args.show_concretized:
-        status_fn = spack.spec.Spec.install_status
-    else:
-        status_fn = None
-
     # Display the result
     if args.json:
         cmd.display_specs_as_json(results, deps=args.deps)
@@ -348,34 +340,12 @@ def find(parser, args):
         if env:
             display_env(env, args, decorator, results)

+        count_suffix = " (not shown)"
         if not args.only_roots:
-            display_results = results
-            if not args.show_concretized:
-                display_results = list(x for x in results if x.installed)
-            cmd.display_specs(
-                display_results, args, decorator=decorator, all_headers=True, status_fn=status_fn
-            )
+            cmd.display_specs(results, args, decorator=decorator, all_headers=True)
+            count_suffix = ""

         # print number of installed packages last (as the list may be long)
         if sys.stdout.isatty() and args.groups:
-            installed_suffix = ""
-            concretized_suffix = " to be installed"
-
-            if args.only_roots:
-                installed_suffix += " (not shown)"
-                concretized_suffix += " (not shown)"
-            else:
-                if env and not args.show_concretized:
-                    concretized_suffix += " (show with `spack find -c`)"
-
             pkg_type = "loaded" if args.loaded else "installed"
-            spack.cmd.print_how_many_pkgs(
-                list(x for x in results if x.installed), pkg_type, suffix=installed_suffix
-            )
-
-            if env:
-                spack.cmd.print_how_many_pkgs(
-                    list(x for x in results if not x.installed),
-                    "concretized",
-                    suffix=concretized_suffix,
-                )
@@ -56,6 +56,7 @@ def roots_from_environments(args, active_env):

     # -e says "also preserve things needed by this particular env"
     for env_name_or_dir in args.except_environment:
+        print("HMM", env_name_or_dir)
         if ev.exists(env_name_or_dir):
             env = ev.read(env_name_or_dir)
         elif ev.is_env_dir(env_name_or_dir):
@@ -10,7 +10,6 @@
 from typing import List

 import llnl.util.filesystem as fs
-from llnl.string import plural
 from llnl.util import lang, tty

 import spack.build_environment
@@ -376,9 +375,7 @@ def _maybe_add_and_concretize(args, env, specs):
         # `spack concretize`
         tests = compute_tests_install_kwargs(env.user_specs, args.test)
         concretized_specs = env.concretize(tests=tests)
-        if concretized_specs:
-            tty.msg(f"Concretized {plural(len(concretized_specs), 'spec')}")
-            ev.display_specs([concrete for _, concrete in concretized_specs])
+        ev.display_specs(concretized_specs)

         # save view regeneration for later, so that we only do it
         # once, as it can be slow.
@@ -169,9 +169,7 @@ def pkg_hash(args):

 def get_grep(required=False):
     """Get a grep command to use with ``spack pkg grep``."""
-    grep = exe.which(os.environ.get("SPACK_GREP") or "grep", required=required)
-    grep.ignore_quotes = True  # allow `spack pkg grep '"quoted string"'` without warning
-    return grep
+    return exe.which(os.environ.get("SPACK_GREP") or "grep", required=required)


 def pkg_grep(args, unknown_args):
@@ -91,7 +91,7 @@ def repo_add(args):
         tty.die("Not a Spack repository: %s" % path)

     # Make sure it's actually a spack repository by constructing it.
-    repo = spack.repo.from_path(canon_path)
+    repo = spack.repo.Repo(canon_path)

     # If that succeeds, finally add it to the configuration.
     repos = spack.config.get("repos", scope=args.scope)
@@ -124,7 +124,7 @@ def repo_remove(args):
     # If it is a namespace, remove corresponding repo
     for path in repos:
         try:
-            repo = spack.repo.from_path(path)
+            repo = spack.repo.Repo(path)
             if repo.namespace == namespace_or_path:
                 repos.remove(path)
                 spack.config.set("repos", repos, args.scope)
@@ -142,7 +142,7 @@ def repo_list(args):
     repos = []
     for r in roots:
         try:
-            repos.append(spack.repo.from_path(r))
+            repos.append(spack.repo.Repo(r))
         except spack.repo.RepoError:
             continue

@@ -114,16 +114,15 @@ def _process_result(result, show, required_format, kwargs):

     # dump the solutions as concretized specs
     if "solutions" in show:
-        if required_format:
-            for spec in result.specs:
-                # With -y, just print YAML to output.
-                if required_format == "yaml":
-                    # use write because to_yaml already has a newline.
-                    sys.stdout.write(spec.to_yaml(hash=ht.dag_hash))
-                elif required_format == "json":
-                    sys.stdout.write(spec.to_json(hash=ht.dag_hash))
-        else:
-            sys.stdout.write(spack.spec.tree(result.specs, color=sys.stdout.isatty(), **kwargs))
+        for spec in result.specs:
+            # With -y, just print YAML to output.
+            if required_format == "yaml":
+                # use write because to_yaml already has a newline.
+                sys.stdout.write(spec.to_yaml(hash=ht.dag_hash))
+            elif required_format == "json":
+                sys.stdout.write(spec.to_json(hash=ht.dag_hash))
+            else:
+                sys.stdout.write(spec.tree(color=sys.stdout.isatty(), **kwargs))
         print()

     if result.unsolved_specs and "solutions" in show:
@@ -105,19 +105,11 @@ def spec(parser, args):
     if env:
         env.concretize()
         specs = env.concretized_specs()
-
-        # environments are printed together in a combined tree() invocation,
-        # except when using --yaml or --json, which we print spec by spec below.
-        if not args.format:
-            tree_kwargs["key"] = spack.traverse.by_dag_hash
-            tree_kwargs["hashes"] = args.long or args.very_long
-            print(spack.spec.tree([concrete for _, concrete in specs], **tree_kwargs))
-            return
     else:
         tty.die("spack spec requires at least one spec or an active environment")

     for input, output in specs:
-        # With --yaml or --json, just print the raw specs to output
+        # With -y, just print YAML to output.
         if args.format:
             if args.format == "yaml":
                 # use write because to_yaml already has a newline.
@@ -71,7 +71,7 @@ def unload(parser, args):
             "Cannot specify specs on command line when unloading all specs with '--all'"
         )

-    hashes = os.environ.get(uenv.spack_loaded_hashes_var, "").split(os.pathsep)
+    hashes = os.environ.get(uenv.spack_loaded_hashes_var, "").split(":")
     if args.specs:
         specs = [
             spack.cmd.disambiguate_spec_from_hashes(spec, hashes)
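The only substantive difference in this hunk is the `-` side's use of `os.pathsep` instead of a literal `":"`, which is a portability fix:

    import os

    # os.pathsep is ":" on POSIX but ";" on Windows.  Splitting with
    # os.pathsep mirrors however the loaded-hashes variable was joined on
    # that platform; the literal variable name below is illustrative.
    hashes = os.environ.get("SPACK_LOADED_HASHES", "").split(os.pathsep)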
@@ -38,10 +38,10 @@

 import spack.cmd
 import spack.environment as ev
-import spack.filesystem_view as fsv
 import spack.schema.projections
 import spack.store
 from spack.config import validate
+from spack.filesystem_view import YamlFilesystemView, view_func_parser
 from spack.util import spack_yaml as s_yaml

 description = "project packages to a compact naming scheme on the filesystem"
@@ -193,13 +193,17 @@ def view(parser, args):
     ordered_projections = {}

     # What method are we using for this view
-    link_type = args.action if args.action in actions_link else "symlink"
-    view = fsv.YamlFilesystemView(
+    if args.action in actions_link:
+        link_fn = view_func_parser(args.action)
+    else:
+        link_fn = view_func_parser("symlink")
+
+    view = YamlFilesystemView(
         path,
         spack.store.STORE.layout,
         projections=ordered_projections,
         ignore_conflicts=getattr(args, "ignore_conflicts", False),
-        link_type=link_type,
+        link=link_fn,
         verbose=args.verbose,
     )

@@ -18,6 +18,7 @@
 import llnl.util.tty as tty
 from llnl.util.filesystem import path_contains_subdirectory, paths_containing_libs

+import spack.compilers
 import spack.error
 import spack.schema.environment
 import spack.spec
@@ -260,7 +260,7 @@ def _init_compiler_config(
 def compiler_config_files():
     config_files = list()
     config = spack.config.CONFIG
-    for scope in config.writable_scopes:
+    for scope in config.file_scopes:
         name = scope.name
         compiler_config = config.get("compilers", scope=name)
         if compiler_config:
@@ -488,7 +488,7 @@ def supported_compilers_for_host_platform() -> List[str]:
     return supported_compilers_for_platform(host_plat)


-def supported_compilers_for_platform(platform: "spack.platforms.Platform") -> List[str]:
+def supported_compilers_for_platform(platform: spack.platforms.Platform) -> List[str]:
     """Return a set of compiler class objects supported by Spack
     that are also supported by the provided platform

@@ -4,7 +4,7 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

 import os
-from os.path import dirname, join
+from os.path import dirname

 from llnl.util import tty

@@ -135,12 +135,8 @@ def setup_custom_environment(self, pkg, env):
         # It is located in the same directory as the driver. Error message:
         # clang++: error: unable to execute command:
         # Executable "sycl-post-link" doesn't exist!
-        # also ensures that shared objects and libraries required by the compiler,
-        # e.g. libonnx, can be found succesfully
-        # due to a fix, this is no longer required for OneAPI versions >= 2024.2
-        if self.cxx and pkg.spec.satisfies("%oneapi@:2024.1"):
+        if self.cxx:
             env.prepend_path("PATH", dirname(self.cxx))
-            env.prepend_path("LD_LIBRARY_PATH", join(dirname(dirname(self.cxx)), "lib"))

         # 2024 release bumped the libsycl version because of an ABI
         # change, 2024 compilers are required. You will see this
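The `-` side narrows the workaround to compilers that still need it (`%oneapi@:2024.1`) and also prepends the compiler's own `lib` directory to `LD_LIBRARY_PATH`. A sketch of the path arithmetic, with a hypothetical driver location:

    from os.path import dirname, join

    cxx = "/opt/intel/oneapi/compiler/2024.1/bin/icpx"  # hypothetical path
    bin_dir = dirname(cxx)                    # .../2024.1/bin  -> PATH
    lib_dir = join(dirname(bin_dir), "lib")   # .../2024.1/lib  -> LD_LIBRARY_PATH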
@@ -35,10 +35,11 @@
 import os
 import re
 import sys
-from typing import Any, Callable, Dict, Generator, List, Optional, Tuple, Union
+from typing import Any, Callable, Dict, Generator, List, Optional, Tuple, Type, Union

 from llnl.util import filesystem, lang, tty

+import spack.compilers
 import spack.paths
 import spack.platforms
 import spack.schema
@@ -116,39 +117,21 @@


 class ConfigScope:
-    def __init__(self, name: str) -> None:
-        self.name = name
-        self.writable = False
-        self.sections = syaml.syaml_dict()
-
-    def get_section_filename(self, section: str) -> str:
-        raise NotImplementedError
-
-    def get_section(self, section: str) -> Optional[YamlConfigDict]:
-        raise NotImplementedError
-
-    def _write_section(self, section: str) -> None:
-        raise NotImplementedError
+    """This class represents a configuration scope.
+
+    A scope is one directory containing named configuration files.
+    Each file is a config "section" (e.g., mirrors, compilers, etc.).
+    """
+
+    def __init__(self, name, path) -> None:
+        self.name = name  # scope name.
+        self.path = path  # path to directory containing configs.
+        self.sections = syaml.syaml_dict()  # sections read from config files.

     @property
     def is_platform_dependent(self) -> bool:
-        return False
-
-    def clear(self) -> None:
-        """Empty cached config information."""
-        self.sections = syaml.syaml_dict()
-
-    def __repr__(self) -> str:
-        return f"<ConfigScope: {self.name}>"
-
-
-class DirectoryConfigScope(ConfigScope):
-    """Config scope backed by a directory containing one file per section."""
-
-    def __init__(self, name: str, path: str, *, writable: bool = True) -> None:
-        super().__init__(name)
-        self.path = path
-        self.writable = writable
+        """Returns true if the scope name is platform specific"""
+        return os.sep in self.name

     def get_section_filename(self, section: str) -> str:
         """Returns the filename associated with a given section"""
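The two sides model configuration scopes differently: on the `-` side `ConfigScope` is an abstract base carrying a `writable` flag, with the directory-backed behavior moved into a `DirectoryConfigScope` subclass, while on the `+` side `ConfigScope` itself is the directory-backed scope and read-only scopes are a subclass (`ImmutableConfigScope`, reintroduced in a later hunk). A condensed sketch of the `-` side's shape, for orientation only:

    # Condensed from the hunk above; not the complete API.
    class ConfigScope:
        def __init__(self, name: str) -> None:
            self.name = name
            self.writable = False  # subclasses opt in

        def get_section(self, section):
            raise NotImplementedError

    class DirectoryConfigScope(ConfigScope):
        """One YAML file per config section, stored under `path`."""
        def __init__(self, name: str, path: str, *, writable: bool = True) -> None:
            super().__init__(name)
            self.path = path
            self.writable = writable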
@@ -165,15 +148,14 @@ def get_section(self, section: str) -> Optional[YamlConfigDict]:
|
|||||||
return self.sections[section]
|
return self.sections[section]
|
||||||
|
|
||||||
def _write_section(self, section: str) -> None:
|
def _write_section(self, section: str) -> None:
|
||||||
if not self.writable:
|
|
||||||
raise ConfigError(f"Cannot write to immutable scope {self}")
|
|
||||||
|
|
||||||
filename = self.get_section_filename(section)
|
filename = self.get_section_filename(section)
|
||||||
data = self.get_section(section)
|
data = self.get_section(section)
|
||||||
if data is None:
|
if data is None:
|
||||||
return
|
return
|
||||||
|
|
||||||
validate(data, SECTION_SCHEMAS[section])
|
# We copy data here to avoid adding defaults at write time
|
||||||
|
validate_data = copy.deepcopy(data)
|
||||||
|
validate(validate_data, SECTION_SCHEMAS[section])
|
||||||
|
|
||||||
try:
|
try:
|
||||||
filesystem.mkdirp(self.path)
|
filesystem.mkdirp(self.path)
|
||||||
@@ -182,23 +164,19 @@ def _write_section(self, section: str) -> None:
|
|||||||
except (syaml.SpackYAMLError, OSError) as e:
|
except (syaml.SpackYAMLError, OSError) as e:
|
||||||
raise ConfigFileError(f"cannot write to '{filename}'") from e
|
raise ConfigFileError(f"cannot write to '{filename}'") from e
|
||||||
|
|
||||||
@property
|
def clear(self) -> None:
|
||||||
def is_platform_dependent(self) -> bool:
|
"""Empty cached config information."""
|
||||||
"""Returns true if the scope name is platform specific"""
|
self.sections = syaml.syaml_dict()
|
||||||
return "/" in self.name
|
|
||||||
|
def __repr__(self) -> str:
|
||||||
|
return f"<ConfigScope: {self.name}: {self.path}>"
|
||||||
|
|
||||||
|
|
||||||
class SingleFileScope(ConfigScope):
|
class SingleFileScope(ConfigScope):
|
||||||
"""This class represents a configuration scope in a single YAML file."""
|
"""This class represents a configuration scope in a single YAML file."""
|
||||||
|
|
||||||
def __init__(
|
def __init__(
|
||||||
self,
|
self, name: str, path: str, schema: YamlConfigDict, yaml_path: Optional[List[str]] = None
|
||||||
name: str,
|
|
||||||
path: str,
|
|
||||||
schema: YamlConfigDict,
|
|
||||||
*,
|
|
||||||
yaml_path: Optional[List[str]] = None,
|
|
||||||
writable: bool = True,
|
|
||||||
) -> None:
|
) -> None:
|
||||||
"""Similar to ``ConfigScope`` but can be embedded in another schema.
|
"""Similar to ``ConfigScope`` but can be embedded in another schema.
|
||||||
|
|
||||||
@@ -217,13 +195,15 @@ def __init__(
|
|||||||
config:
|
config:
|
||||||
install_tree: $spack/opt/spack
|
install_tree: $spack/opt/spack
|
||||||
"""
|
"""
|
||||||
super().__init__(name)
|
super().__init__(name, path)
|
||||||
self._raw_data: Optional[YamlConfigDict] = None
|
self._raw_data: Optional[YamlConfigDict] = None
|
||||||
self.schema = schema
|
self.schema = schema
|
||||||
self.path = path
|
|
||||||
self.writable = writable
|
|
||||||
self.yaml_path = yaml_path or []
|
self.yaml_path = yaml_path or []
|
||||||
|
|
||||||
|
@property
|
||||||
|
def is_platform_dependent(self) -> bool:
|
||||||
|
return False
|
||||||
|
|
||||||
def get_section_filename(self, section) -> str:
|
def get_section_filename(self, section) -> str:
|
||||||
return self.path
|
return self.path
|
||||||
|
|
||||||
@@ -277,8 +257,6 @@ def get_section(self, section: str) -> Optional[YamlConfigDict]:
|
|||||||
return self.sections.get(section, None)
|
return self.sections.get(section, None)
|
||||||
|
|
||||||
def _write_section(self, section: str) -> None:
|
def _write_section(self, section: str) -> None:
|
||||||
if not self.writable:
|
|
||||||
raise ConfigError(f"Cannot write to immutable scope {self}")
|
|
||||||
data_to_write: Optional[YamlConfigDict] = self._raw_data
|
data_to_write: Optional[YamlConfigDict] = self._raw_data
|
||||||
|
|
||||||
# If there is no existing data, this section SingleFileScope has never
|
# If there is no existing data, this section SingleFileScope has never
|
||||||
@@ -323,6 +301,19 @@ def __repr__(self) -> str:
|
|||||||
return f"<SingleFileScope: {self.name}: {self.path}>"
|
return f"<SingleFileScope: {self.name}: {self.path}>"
|
||||||
|
|
||||||
|
|
||||||
|
class ImmutableConfigScope(ConfigScope):
|
||||||
|
"""A configuration scope that cannot be written to.
|
||||||
|
|
||||||
|
This is used for ConfigScopes passed on the command line.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def _write_section(self, section) -> None:
|
||||||
|
raise ConfigError(f"Cannot write to immutable scope {self}")
|
||||||
|
|
||||||
|
def __repr__(self) -> str:
|
||||||
|
return f"<ImmutableConfigScope: {self.name}: {self.path}>"
|
||||||
|
|
||||||
|
|
||||||
class InternalConfigScope(ConfigScope):
|
class InternalConfigScope(ConfigScope):
|
||||||
"""An internal configuration scope that is not persisted to a file.
|
"""An internal configuration scope that is not persisted to a file.
|
||||||
|
|
||||||
@@ -332,7 +323,7 @@ class InternalConfigScope(ConfigScope):
|
|||||||
"""
|
"""
|
||||||
|
|
||||||
def __init__(self, name: str, data: Optional[YamlConfigDict] = None) -> None:
|
def __init__(self, name: str, data: Optional[YamlConfigDict] = None) -> None:
|
||||||
super().__init__(name)
|
super().__init__(name, None)
|
||||||
self.sections = syaml.syaml_dict()
|
self.sections = syaml.syaml_dict()
|
||||||
|
|
||||||
if data is not None:
|
if data is not None:
|
||||||
@@ -342,6 +333,9 @@ def __init__(self, name: str, data: Optional[YamlConfigDict] = None) -> None:
|
|||||||
validate({section: dsec}, SECTION_SCHEMAS[section])
|
validate({section: dsec}, SECTION_SCHEMAS[section])
|
||||||
self.sections[section] = _mark_internal(syaml.syaml_dict({section: dsec}), name)
|
self.sections[section] = _mark_internal(syaml.syaml_dict({section: dsec}), name)
|
||||||
|
|
||||||
|
def get_section_filename(self, section: str) -> str:
|
||||||
|
raise NotImplementedError("Cannot get filename for InternalConfigScope.")
|
||||||
|
|
||||||
def get_section(self, section: str) -> Optional[YamlConfigDict]:
|
def get_section(self, section: str) -> Optional[YamlConfigDict]:
|
||||||
"""Just reads from an internal dictionary."""
|
"""Just reads from an internal dictionary."""
|
||||||
if section not in self.sections:
|
if section not in self.sections:
|
||||||
@@ -446,21 +440,27 @@ def remove_scope(self, scope_name: str) -> Optional[ConfigScope]:
|
|||||||
return scope
|
return scope
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def writable_scopes(self) -> Generator[ConfigScope, None, None]:
|
def file_scopes(self) -> List[ConfigScope]:
|
||||||
"""Generator of writable scopes with an associated file."""
|
"""List of writable scopes with an associated file."""
|
||||||
return (s for s in self.scopes.values() if s.writable)
|
return [
|
||||||
|
s
|
||||||
|
for s in self.scopes.values()
|
||||||
|
if (type(s) is ConfigScope or type(s) is SingleFileScope)
|
||||||
|
]
|
||||||
|
|
||||||
def highest_precedence_scope(self) -> ConfigScope:
|
def highest_precedence_scope(self) -> ConfigScope:
|
||||||
"""Writable scope with highest precedence."""
|
"""Non-internal scope with highest precedence."""
|
||||||
return next(s for s in reversed(self.scopes.values()) if s.writable) # type: ignore
|
return next(reversed(self.file_scopes))
|
||||||
|
|
||||||
def highest_precedence_non_platform_scope(self) -> ConfigScope:
|
def highest_precedence_non_platform_scope(self) -> ConfigScope:
|
||||||
"""Writable non-platform scope with highest precedence"""
|
"""Non-internal non-platform scope with highest precedence
|
||||||
return next(
|
|
||||||
s
|
Platform-specific scopes are of the form scope/platform"""
|
||||||
for s in reversed(self.scopes.values()) # type: ignore
|
generator = reversed(self.file_scopes)
|
||||||
if s.writable and not s.is_platform_dependent
|
highest = next(generator)
|
||||||
)
|
while highest and highest.is_platform_dependent:
|
||||||
|
highest = next(generator)
|
||||||
|
return highest
|
||||||
|
|
||||||
def matching_scopes(self, reg_expr) -> List[ConfigScope]:
|
def matching_scopes(self, reg_expr) -> List[ConfigScope]:
|
||||||
"""
|
"""
|
||||||
@@ -755,14 +755,13 @@ def override(
|
|||||||
|
|
||||||
|
|
||||||
def _add_platform_scope(
|
def _add_platform_scope(
|
||||||
cfg: Union[Configuration, lang.Singleton], name: str, path: str, writable: bool = True
|
cfg: Union[Configuration, lang.Singleton], scope_type: Type[ConfigScope], name: str, path: str
|
||||||
) -> None:
|
) -> None:
|
||||||
"""Add a platform-specific subdirectory for the current platform."""
|
"""Add a platform-specific subdirectory for the current platform."""
|
||||||
platform = spack.platforms.host().name
|
platform = spack.platforms.host().name
|
||||||
scope = DirectoryConfigScope(
|
plat_name = os.path.join(name, platform)
|
||||||
f"{name}/{platform}", os.path.join(path, platform), writable=writable
|
plat_path = os.path.join(path, platform)
|
||||||
)
|
cfg.push_scope(scope_type(plat_name, plat_path))
|
||||||
cfg.push_scope(scope)
|
|
||||||
|
|
||||||
|
|
||||||
def config_paths_from_entry_points() -> List[Tuple[str, str]]:
|
def config_paths_from_entry_points() -> List[Tuple[str, str]]:
|
||||||
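Despite the different plumbing, both sides push the same platform scope. With hypothetical inputs:

    import os

    name, path = "defaults", "/etc/spack/defaults"   # illustrative values
    platform = "linux"                               # spack.platforms.host().name
    plat_name = os.path.join(name, platform)         # "defaults/linux"
    plat_path = os.path.join(path, platform)         # "/etc/spack/defaults/linux"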
@@ -793,27 +792,22 @@ def config_paths_from_entry_points() -> List[Tuple[str, str]]:
|
|||||||
def _add_command_line_scopes(
|
def _add_command_line_scopes(
|
||||||
cfg: Union[Configuration, lang.Singleton], command_line_scopes: List[str]
|
cfg: Union[Configuration, lang.Singleton], command_line_scopes: List[str]
|
||||||
) -> None:
|
) -> None:
|
||||||
"""Add additional scopes from the --config-scope argument, either envs or dirs."""
|
"""Add additional scopes from the --config-scope argument.
|
||||||
import spack.environment.environment as env # circular import
|
|
||||||
|
|
||||||
|
Command line scopes are named after their position in the arg list.
|
||||||
|
"""
|
||||||
for i, path in enumerate(command_line_scopes):
|
for i, path in enumerate(command_line_scopes):
|
||||||
name = f"cmd_scope_{i}"
|
# We ensure that these scopes exist and are readable, as they are
|
||||||
|
# provided on the command line by the user.
|
||||||
|
if not os.path.isdir(path):
|
||||||
|
raise ConfigError(f"config scope is not a directory: '{path}'")
|
||||||
|
elif not os.access(path, os.R_OK):
|
||||||
|
raise ConfigError(f"config scope is not readable: '{path}'")
|
||||||
|
|
||||||
if env.exists(path): # managed environment
|
# name based on order on the command line
|
||||||
manifest = env.EnvironmentManifestFile(env.root(path))
|
name = f"cmd_scope_{i:d}"
|
||||||
elif env.is_env_dir(path): # anonymous environment
|
cfg.push_scope(ImmutableConfigScope(name, path))
|
||||||
manifest = env.EnvironmentManifestFile(path)
|
_add_platform_scope(cfg, ImmutableConfigScope, name, path)
|
||||||
elif os.path.isdir(path): # directory with config files
|
|
||||||
cfg.push_scope(DirectoryConfigScope(name, path, writable=False))
|
|
||||||
_add_platform_scope(cfg, name, path, writable=False)
|
|
||||||
continue
|
|
||||||
else:
|
|
||||||
raise ConfigError(f"Invalid configuration scope: {path}")
|
|
||||||
|
|
||||||
for scope in manifest.env_config_scopes:
|
|
||||||
scope.name = f"{name}:{scope.name}"
|
|
||||||
scope.writable = False
|
|
||||||
cfg.push_scope(scope)
|
|
||||||
|
|
||||||
|
|
||||||
def create() -> Configuration:
|
def create() -> Configuration:
|
||||||
@@ -857,10 +851,10 @@ def create() -> Configuration:
|
|||||||
|
|
||||||
# add each scope and its platform-specific directory
|
# add each scope and its platform-specific directory
|
||||||
for name, path in configuration_paths:
|
for name, path in configuration_paths:
|
||||||
cfg.push_scope(DirectoryConfigScope(name, path))
|
cfg.push_scope(ConfigScope(name, path))
|
||||||
|
|
||||||
# Each scope can have per-platfom overrides in subdirectories
|
# Each scope can have per-platfom overrides in subdirectories
|
||||||
_add_platform_scope(cfg, name, path)
|
_add_platform_scope(cfg, ConfigScope, name, path)
|
||||||
|
|
||||||
# add command-line scopes
|
# add command-line scopes
|
||||||
_add_command_line_scopes(cfg, COMMAND_LINE_SCOPES)
|
_add_command_line_scopes(cfg, COMMAND_LINE_SCOPES)
|
||||||
@@ -975,7 +969,7 @@ def set(path: str, value: Any, scope: Optional[str] = None) -> None:
|
|||||||
def add_default_platform_scope(platform: str) -> None:
|
def add_default_platform_scope(platform: str) -> None:
|
||||||
plat_name = os.path.join("defaults", platform)
|
plat_name = os.path.join("defaults", platform)
|
||||||
plat_path = os.path.join(CONFIGURATION_DEFAULTS_PATH[1], platform)
|
plat_path = os.path.join(CONFIGURATION_DEFAULTS_PATH[1], platform)
|
||||||
CONFIG.push_scope(DirectoryConfigScope(plat_name, plat_path))
|
CONFIG.push_scope(ConfigScope(plat_name, plat_path))
|
||||||
|
|
||||||
|
|
||||||
def scopes() -> Dict[str, ConfigScope]:
|
def scopes() -> Dict[str, ConfigScope]:
|
||||||
@@ -984,10 +978,19 @@ def scopes() -> Dict[str, ConfigScope]:
|
|||||||
|
|
||||||
|
|
||||||
def writable_scopes() -> List[ConfigScope]:
|
def writable_scopes() -> List[ConfigScope]:
|
||||||
"""Return list of writable scopes. Higher-priority scopes come first in the list."""
|
"""
|
||||||
scopes = [x for x in CONFIG.scopes.values() if x.writable]
|
Return list of writable scopes. Higher-priority scopes come first in the
|
||||||
scopes.reverse()
|
list.
|
||||||
return scopes
|
"""
|
||||||
|
return list(
|
||||||
|
reversed(
|
||||||
|
list(
|
||||||
|
x
|
||||||
|
for x in CONFIG.scopes.values()
|
||||||
|
if not isinstance(x, (InternalConfigScope, ImmutableConfigScope))
|
||||||
|
)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
def writable_scope_names() -> List[str]:
|
def writable_scope_names() -> List[str]:
|
||||||
@@ -1077,8 +1080,11 @@ def validate(
|
|||||||
"""
|
"""
|
||||||
import jsonschema
|
import jsonschema
|
||||||
|
|
||||||
|
# Validate a copy to avoid adding defaults
|
||||||
|
# This allows us to round-trip data without adding to it.
|
||||||
|
test_data = syaml.deepcopy(data)
|
||||||
try:
|
try:
|
||||||
spack.schema.Validator(schema).validate(data)
|
spack.schema.Validator(schema).validate(test_data)
|
||||||
except jsonschema.ValidationError as e:
|
except jsonschema.ValidationError as e:
|
||||||
if hasattr(e.instance, "lc"):
|
if hasattr(e.instance, "lc"):
|
||||||
line_number = e.instance.lc.line + 1
|
line_number = e.instance.lc.line + 1
|
||||||
@@ -1087,7 +1093,7 @@ def validate(
|
|||||||
raise ConfigFormatError(e, data, filename, line_number) from e
|
raise ConfigFormatError(e, data, filename, line_number) from e
|
||||||
# return the validated data so that we can access the raw data
|
# return the validated data so that we can access the raw data
|
||||||
# mostly relevant for environments
|
# mostly relevant for environments
|
||||||
return data
|
return test_data
|
||||||
|
|
||||||
|
|
||||||
def read_config_file(
|
def read_config_file(
|
||||||
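Both variants of `validate()` guard against the same side effect: a jsonschema validator extended to apply schema defaults mutates the instance it is given. Validating a deep copy keeps the caller's parsed YAML round-trippable. The idea, reduced to a sketch outside Spack:

    import copy

    def validate_copy(data, validator):
        # Hypothetical helper: validator.validate() may inject schema
        # defaults into its argument, so validate a copy and return that
        # copy instead.
        test_data = copy.deepcopy(data)
        validator.validate(test_data)
        return test_data  # the caller's `data` is left untouched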
@@ -1593,7 +1599,7 @@ def _config_from(scopes_or_paths: List[Union[ConfigScope, str]]) -> Configuratio
|
|||||||
path = os.path.normpath(scope_or_path)
|
path = os.path.normpath(scope_or_path)
|
||||||
assert os.path.isdir(path), f'"{path}" must be a directory'
|
assert os.path.isdir(path), f'"{path}" must be a directory'
|
||||||
name = os.path.basename(path)
|
name = os.path.basename(path)
|
||||||
scopes.append(DirectoryConfigScope(name, path))
|
scopes.append(ConfigScope(name, path))
|
||||||
|
|
||||||
configuration = Configuration(*scopes)
|
configuration = Configuration(*scopes)
|
||||||
return configuration
|
return configuration
|
||||||
@@ -78,17 +78,24 @@
             "image": "quay.io/almalinuxorg/almalinux:8"
         }
     },
-    "centos:stream9": {
+    "centos:stream": {
         "bootstrap": {
-            "template": "container/centos_stream9.dockerfile",
-            "image": "quay.io/centos/centos:stream9"
+            "template": "container/centos_stream.dockerfile",
+            "image": "quay.io/centos/centos:stream"
         },
         "os_package_manager": "dnf_epel",
-        "build": "spack/centos-stream9",
+        "build": "spack/centos-stream",
         "final": {
-            "image": "quay.io/centos/centos:stream9"
+            "image": "quay.io/centos/centos:stream"
         }
     },
+    "centos:7": {
+        "bootstrap": {
+            "template": "container/centos_7.dockerfile"
+        },
+        "os_package_manager": "yum",
+        "build": "spack/centos7"
+    },
     "opensuse/leap:15": {
         "bootstrap": {
             "template": "container/leap-15.dockerfile"
@@ -2,12 +2,7 @@
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
-from .common import (
-    DetectedPackage,
-    executable_prefix,
-    set_virtuals_nonbuildable,
-    update_configuration,
-)
+from .common import DetectedPackage, executable_prefix, update_configuration
 from .path import by_path, executables_in_path
 from .test import detection_tests

@@ -17,6 +12,5 @@
     "executables_in_path",
     "executable_prefix",
     "update_configuration",
-    "set_virtuals_nonbuildable",
     "detection_tests",
 ]
@@ -252,27 +252,6 @@ def update_configuration(
     return all_new_specs


-def set_virtuals_nonbuildable(virtuals: Set[str], scope: Optional[str] = None) -> List[str]:
-    """Update packages:virtual:buildable:False for the provided virtual packages, if the property
-    is not set by the user. Returns the list of virtual packages that have been updated."""
-    packages = spack.config.get("packages")
-    new_config = {}
-    for virtual in virtuals:
-        # If the user has set the buildable prop do not override it
-        if virtual in packages and "buildable" in packages[virtual]:
-            continue
-        new_config[virtual] = {"buildable": False}
-
-    # Update the provided scope
-    spack.config.set(
-        "packages",
-        spack.config.merge_yaml(spack.config.get("packages", scope=scope), new_config),
-        scope=scope,
-    )
-
-    return list(new_config.keys())
-
-
 def _windows_drive() -> str:
     """Return Windows drive string extracted from the PROGRAMFILES environment variable,
     which is guaranteed to be defined for all logins.
@@ -12,7 +12,7 @@
 import re
 import sys
 import warnings
-from typing import Dict, List, Optional, Set, Tuple, Type
+from typing import Dict, List, Optional, Set, Tuple

 import llnl.util.filesystem
 import llnl.util.lang
@@ -200,7 +200,7 @@ class Finder:
     def default_path_hints(self) -> List[str]:
         return []

-    def search_patterns(self, *, pkg: Type["spack.package_base.PackageBase"]) -> List[str]:
+    def search_patterns(self, *, pkg: "spack.package_base.PackageBase") -> List[str]:
         """Returns the list of patterns used to match candidate files.

         Args:
@@ -226,7 +226,7 @@ def prefix_from_path(self, *, path: str) -> str:
         raise NotImplementedError("must be implemented by derived classes")

     def detect_specs(
-        self, *, pkg: Type["spack.package_base.PackageBase"], paths: List[str]
+        self, *, pkg: "spack.package_base.PackageBase", paths: List[str]
     ) -> List[DetectedPackage]:
         """Given a list of files matching the search patterns, returns a list of detected specs.

@@ -327,7 +327,7 @@ class ExecutablesFinder(Finder):
     def default_path_hints(self) -> List[str]:
         return spack.util.environment.get_path("PATH")

-    def search_patterns(self, *, pkg: Type["spack.package_base.PackageBase"]) -> List[str]:
+    def search_patterns(self, *, pkg: "spack.package_base.PackageBase") -> List[str]:
         result = []
         if hasattr(pkg, "executables") and hasattr(pkg, "platform_executables"):
             result = pkg.platform_executables()
@@ -356,7 +356,7 @@ class LibrariesFinder(Finder):
     DYLD_LIBRARY_PATH, DYLD_FALLBACK_LIBRARY_PATH, and standard system library paths
     """

-    def search_patterns(self, *, pkg: Type["spack.package_base.PackageBase"]) -> List[str]:
+    def search_patterns(self, *, pkg: "spack.package_base.PackageBase") -> List[str]:
         result = []
         if hasattr(pkg, "libraries"):
             result = pkg.libraries
@@ -90,14 +90,14 @@ class OpenMpi(Package):
 _patch_order_index = 0


-SpecType = str
+SpecType = Union["spack.spec.Spec", str]
 DepType = Union[Tuple[str, ...], str]
 WhenType = Optional[Union["spack.spec.Spec", str, bool]]
 Patcher = Callable[[Union["spack.package_base.PackageBase", Dependency]], None]
 PatchesType = Optional[Union[Patcher, str, List[Union[Patcher, str]]]]


-SUPPORTED_LANGUAGES = ("fortran", "cxx", "c")
+SUPPORTED_LANGUAGES = ("fortran", "cxx")


 def _make_when_spec(value: WhenType) -> Optional["spack.spec.Spec"]:
@@ -475,7 +475,7 @@ def _execute_version(pkg, ver, **kwargs):

 def _depends_on(
     pkg: "spack.package_base.PackageBase",
-    spec: "spack.spec.Spec",
+    spec: SpecType,
     *,
     when: WhenType = None,
     type: DepType = dt.DEFAULT_TYPES,
@@ -485,10 +485,11 @@ def _depends_on(
     if not when_spec:
         return

-    if not spec.name:
-        raise DependencyError(f"Invalid dependency specification in package '{pkg.name}':", spec)
-    if pkg.name == spec.name:
-        raise CircularReferenceError(f"Package '{pkg.name}' cannot depend on itself.")
+    dep_spec = spack.spec.Spec(spec)
+    if not dep_spec.name:
+        raise DependencyError("Invalid dependency specification in package '%s':" % pkg.name, spec)
+    if pkg.name == dep_spec.name:
+        raise CircularReferenceError("Package '%s' cannot depend on itself." % pkg.name)

     depflag = dt.canonicalize(type)

@@ -504,7 +505,7 @@ def _depends_on(
     # ensure `Spec.virtual` is a valid thing to call in a directive.
     # For now, we comment out the following check to allow for virtual packages
     # with package files.
-    # if patches and spec.virtual:
+    # if patches and dep_spec.virtual:
     #     raise DependencyPatchError("Cannot patch a virtual dependency.")

     # ensure patches is a list
@@ -519,13 +520,13 @@ def _depends_on(

     # this is where we actually add the dependency to this package
     deps_by_name = pkg.dependencies.setdefault(when_spec, {})
-    dependency = deps_by_name.get(spec.name)
+    dependency = deps_by_name.get(dep_spec.name)

     if not dependency:
-        dependency = Dependency(pkg, spec, depflag=depflag)
-        deps_by_name[spec.name] = dependency
+        dependency = Dependency(pkg, dep_spec, depflag=depflag)
+        deps_by_name[dep_spec.name] = dependency
     else:
-        dependency.spec.constrain(spec, deps=False)
+        dependency.spec.constrain(dep_spec, deps=False)
         dependency.depflag |= depflag

     # apply patches to the dependency
@@ -590,13 +591,12 @@ def depends_on(
     @see The section "Dependency specs" in the Spack Packaging Guide.

     """
-    dep_spec = spack.spec.Spec(spec)
-    if dep_spec.name in SUPPORTED_LANGUAGES:
+    if spack.spec.Spec(spec).name in SUPPORTED_LANGUAGES:
         assert type == "build", "languages must be of 'build' type"
         return _language(lang_spec_str=spec, when=when)

     def _execute_depends_on(pkg: "spack.package_base.PackageBase"):
-        _depends_on(pkg, dep_spec, when=when, type=type, patches=patches)
+        _depends_on(pkg, spec, when=when, type=type, patches=patches)

     return _execute_depends_on

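For context, this is the directive as package authors call it; both sides accept a plain string spec, and the `-` side's annotation also admits a `Spec` object. A minimal hypothetical package file:

    # Hypothetical package illustrating the depends_on directive:
    from spack.package import *

    class Foo(Package):
        homepage = "https://example.com/foo"
        url = "https://example.com/foo-1.0.tar.gz"

        variant("compress", default=False, description="enable compression")

        depends_on("cmake@3.18:", type="build")
        depends_on("mpi")                             # a virtual dependency
        depends_on("zlib@1.2.11:", when="+compress")  # conditional dependency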
@@ -666,24 +666,25 @@ def extends(spec, when=None, type=("build", "run"), patches=None):
|
|||||||
|
|
||||||
keyword arguments can be passed to extends() so that extension
|
keyword arguments can be passed to extends() so that extension
|
||||||
packages can pass parameters to the extendee's extension
|
packages can pass parameters to the extendee's extension
|
||||||
mechanism."""
|
mechanism.
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
def _execute_extends(pkg):
|
def _execute_extends(pkg):
|
||||||
when_spec = _make_when_spec(when)
|
when_spec = _make_when_spec(when)
|
||||||
if not when_spec:
|
if not when_spec:
|
||||||
return
|
return
|
||||||
|
|
||||||
dep_spec = spack.spec.Spec(spec)
|
_depends_on(pkg, spec, when=when, type=type, patches=patches)
|
||||||
|
spec_obj = spack.spec.Spec(spec)
|
||||||
_depends_on(pkg, dep_spec, when=when, type=type, patches=patches)
|
|
||||||
|
|
||||||
# When extending python, also add a dependency on python-venv. This is done so that
|
# When extending python, also add a dependency on python-venv. This is done so that
|
||||||
# Spack environment views are Python virtual environments.
|
# Spack environment views are Python virtual environments.
|
||||||
if dep_spec.name == "python" and not pkg.name == "python-venv":
|
if spec_obj.name == "python" and not pkg.name == "python-venv":
|
||||||
_depends_on(pkg, spack.spec.Spec("python-venv"), when=when, type=("build", "run"))
|
_depends_on(pkg, "python-venv", when=when, type=("build", "run"))
|
||||||
|
|
||||||
# TODO: the values of the extendees dictionary are not used. Remove in next refactor.
|
# TODO: the values of the extendees dictionary are not used. Remove in next refactor.
|
||||||
pkg.extendees[dep_spec.name] = (dep_spec, None)
|
pkg.extendees[spec_obj.name] = (spec_obj, None)
|
||||||
|
|
||||||
return _execute_extends
|
return _execute_extends
|
||||||
|
|
||||||
@@ -5,7 +5,7 @@
 import collections
 import collections.abc
 import contextlib
-import errno
+import copy
 import os
 import pathlib
 import re
@@ -24,15 +24,12 @@
 from llnl.util.link_tree import ConflictingSpecsError
 from llnl.util.symlink import readlink, symlink

-import spack.caches
-import spack.cmd
 import spack.compilers
 import spack.concretize
 import spack.config
 import spack.deptypes as dt
 import spack.error
 import spack.fetch_strategy
-import spack.filesystem_view as fsv
 import spack.hash_types as ht
 import spack.hooks
 import spack.main
@@ -55,6 +52,7 @@
 import spack.util.url
 import spack.version
 from spack import traverse
+from spack.filesystem_view import SimpleFilesystemView, inverse_view_func_parser, view_func_parser
 from spack.installer import PackageInstaller
 from spack.schema.env import TOP_LEVEL_KEY
 from spack.spec import Spec
@@ -269,7 +267,9 @@ def root(name):
|
|||||||
|
|
||||||
def exists(name):
|
def exists(name):
|
||||||
"""Whether an environment with this name exists or not."""
|
"""Whether an environment with this name exists or not."""
|
||||||
return valid_env_name(name) and os.path.isdir(_root(name))
|
if not valid_env_name(name):
|
||||||
|
return False
|
||||||
|
return os.path.isdir(root(name))
|
||||||
|
|
||||||
|
|
||||||
def active(name):
|
def active(name):
|
||||||
@@ -528,8 +528,8 @@ def _read_yaml(str_or_file):
|
|||||||
)
|
)
|
||||||
|
|
||||||
filename = getattr(str_or_file, "name", None)
|
filename = getattr(str_or_file, "name", None)
|
||||||
spack.config.validate(data, spack.schema.env.schema, filename)
|
default_data = spack.config.validate(data, spack.schema.env.schema, filename)
|
||||||
return data
|
return data, default_data
|
||||||
|
|
||||||
|
|
||||||
def _write_yaml(data, str_or_file):
|
def _write_yaml(data, str_or_file):
|
||||||
@@ -606,7 +606,7 @@ def __init__(
|
|||||||
self.projections = projections
|
self.projections = projections
|
||||||
self.select = select
|
self.select = select
|
||||||
self.exclude = exclude
|
self.exclude = exclude
|
||||||
self.link_type = fsv.canonicalize_link_type(link_type)
|
self.link_type = view_func_parser(link_type)
|
||||||
self.link = link
|
self.link = link
|
||||||
|
|
||||||
def select_fn(self, spec):
|
def select_fn(self, spec):
|
||||||
@@ -640,7 +640,7 @@ def to_dict(self):
|
|||||||
if self.exclude:
|
if self.exclude:
|
||||||
ret["exclude"] = self.exclude
|
ret["exclude"] = self.exclude
|
||||||
if self.link_type:
|
if self.link_type:
|
||||||
ret["link_type"] = self.link_type
|
ret["link_type"] = inverse_view_func_parser(self.link_type)
|
||||||
if self.link != default_view_link:
|
if self.link != default_view_link:
|
||||||
ret["link"] = self.link
|
ret["link"] = self.link
|
||||||
return ret
|
return ret
|
||||||
@@ -690,7 +690,7 @@ def get_projection_for_spec(self, spec):
|
|||||||
to exist on the filesystem."""
|
to exist on the filesystem."""
|
||||||
return self._view(self.root).get_projection_for_spec(spec)
|
return self._view(self.root).get_projection_for_spec(spec)
|
||||||
|
|
||||||
def view(self, new: Optional[str] = None) -> fsv.SimpleFilesystemView:
|
def view(self, new: Optional[str] = None) -> SimpleFilesystemView:
|
||||||
"""
|
"""
|
||||||
Returns a view object for the *underlying* view directory. This means that the
|
Returns a view object for the *underlying* view directory. This means that the
|
||||||
self.root symlink is followed, and that the view has to exist on the filesystem
|
self.root symlink is followed, and that the view has to exist on the filesystem
|
||||||
@@ -710,14 +710,14 @@ def view(self, new: Optional[str] = None) -> fsv.SimpleFilesystemView:
|
|||||||
)
|
)
|
||||||
return self._view(path)
|
return self._view(path)
|
||||||
|
|
||||||
def _view(self, root: str) -> fsv.SimpleFilesystemView:
|
def _view(self, root: str) -> SimpleFilesystemView:
|
||||||
"""Returns a view object for a given root dir."""
|
"""Returns a view object for a given root dir."""
|
||||||
return fsv.SimpleFilesystemView(
|
return SimpleFilesystemView(
|
||||||
root,
|
root,
|
||||||
spack.store.STORE.layout,
|
spack.store.STORE.layout,
|
||||||
ignore_conflicts=True,
|
ignore_conflicts=True,
|
||||||
projections=self.projections,
|
projections=self.projections,
|
||||||
link_type=self.link_type,
|
link=self.link_type,
|
||||||
)
|
)
|
||||||
|
|
||||||
def __contains__(self, spec):
|
def __contains__(self, spec):
|
||||||
@@ -789,23 +789,6 @@ def regenerate(self, concrete_roots: List[Spec]) -> None:
|
|||||||
root_dirname = os.path.dirname(self.root)
|
root_dirname = os.path.dirname(self.root)
|
||||||
tmp_symlink_name = os.path.join(root_dirname, "._view_link")
|
tmp_symlink_name = os.path.join(root_dirname, "._view_link")
|
||||||
|
|
||||||
# Remove self.root if is it an empty dir, since we need a symlink there. Note that rmdir
|
|
||||||
# fails if self.root is a symlink.
|
|
||||||
try:
|
|
||||||
os.rmdir(self.root)
|
|
||||||
except (FileNotFoundError, NotADirectoryError):
|
|
||||||
pass
|
|
||||||
except OSError as e:
|
|
||||||
if e.errno == errno.ENOTEMPTY:
|
|
||||||
msg = "it is a non-empty directory"
|
|
||||||
elif e.errno == errno.EACCES:
|
|
||||||
msg = "of insufficient permissions"
|
|
||||||
else:
|
|
||||||
raise
|
|
||||||
raise SpackEnvironmentViewError(
|
|
||||||
f"The environment view in {self.root} cannot not be created because {msg}."
|
|
||||||
) from e
|
|
||||||
|
|
||||||
# Create a new view
|
# Create a new view
|
||||||
try:
|
try:
|
||||||
fs.mkdirp(new_root)
|
fs.mkdirp(new_root)
|
@@ -937,7 +920,7 @@ def __init__(self, manifest_dir: Union[str, pathlib.Path]) -> None:
     def _load_manifest_file(self):
         """Instantiate and load the manifest file contents into memory."""
         with lk.ReadTransaction(self.txlock):
-            self.manifest = EnvironmentManifestFile(self.path, self.name)
+            self.manifest = EnvironmentManifestFile(self.path)
             with self.manifest.use_config():
                 self._read()
 
@@ -974,25 +957,18 @@ def write_transaction(self):
         """Get a write lock context manager for use in a `with` block."""
         return lk.WriteTransaction(self.txlock, acquire=self._re_read)
 
-    def _process_definition(self, entry):
+    def _process_definition(self, item):
         """Process a single spec definition item."""
-        when_string = entry.get("when")
-        if when_string is not None:
-            when = _eval_conditional(when_string)
-            assert len([x for x in entry if x != "when"]) == 1
-        else:
-            when = True
-            assert len(entry) == 1
-
+        entry = copy.deepcopy(item)
+        when = _eval_conditional(entry.pop("when", "True"))
+        assert len(entry) == 1
         if when:
-            for name, spec_list in entry.items():
-                if name == "when":
-                    continue
-                user_specs = SpecList(name, spec_list, self.spec_lists.copy())
-                if name in self.spec_lists:
-                    self.spec_lists[name].extend(user_specs)
-                else:
-                    self.spec_lists[name] = user_specs
+            name, spec_list = next(iter(entry.items()))
+            user_specs = SpecList(name, spec_list, self.spec_lists.copy())
+            if name in self.spec_lists:
+                self.spec_lists[name].extend(user_specs)
+            else:
+                self.spec_lists[name] = user_specs
 
     def _process_view(self, env_view: Optional[Union[bool, str, Dict]]):
         """Process view option(s), which can be boolean, string, or None.
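Note: the right-hand `_process_definition` deep-copies the manifest entry so that popping the `when` key never mutates the caller's data. A minimal standalone sketch of that pattern, assuming plain lists stand in for Spack's `SpecList` and with `_eval_conditional` stubbed (both are assumptions of the sketch, not Spack API):

import copy

def _eval_conditional(expr):
    # Stub: the real helper evaluates against a curated context
    # (platform, os, target, env vars, ...).
    return eval(expr, {"__builtins__": {}}, {})

def process_definition(item, spec_lists):
    entry = copy.deepcopy(item)          # never mutate the caller's dict
    when = _eval_conditional(entry.pop("when", "True"))
    assert len(entry) == 1               # exactly one named list remains
    if when:
        name, specs = next(iter(entry.items()))
        spec_lists.setdefault(name, []).extend(specs)

process_definition({"when": "1 == 1", "gcc_specs": ["gcc@12"]}, {})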
@@ -2497,21 +2473,27 @@ def _equiv_dict(first, second):
     return same_values and same_keys_with_same_overrides
 
 
-def display_specs(specs):
-    """Displays a list of specs traversed breadth-first, covering nodes, with install status.
+def display_specs(concretized_specs):
+    """Displays the list of specs returned by `Environment.concretize()`.
 
     Args:
-        specs (list): list of specs
+        concretized_specs (list): list of specs returned by
+            `Environment.concretize()`
     """
-    tree_string = spack.spec.tree(
-        specs,
-        format=spack.spec.DISPLAY_FORMAT,
-        hashes=True,
-        hashlen=7,
-        status_fn=spack.spec.Spec.install_status,
-        key=traverse.by_dag_hash,
-    )
-    print(tree_string)
+
+    def _tree_to_display(spec):
+        return spec.tree(
+            recurse_dependencies=True,
+            format=spack.spec.DISPLAY_FORMAT,
+            status_fn=spack.spec.Spec.install_status,
+            hashlen=7,
+            hashes=True,
+        )
+
+    for user_spec, concrete_spec in concretized_specs:
+        tty.msg("Concretized {0}".format(user_spec))
+        sys.stdout.write(_tree_to_display(concrete_spec))
+        print("")
 
 
 def _concretize_from_constraints(spec_constraints, tests=False):
@@ -2565,7 +2547,7 @@ def _concretize_task(packed_arguments) -> Tuple[int, Spec, float]:
 
 def make_repo_path(root):
     """Make a RepoPath from the repo subdirectories in an environment."""
-    path = spack.repo.RepoPath(cache=spack.caches.MISC_CACHE)
+    path = spack.repo.RepoPath()
 
     if os.path.isdir(root):
         for repo_root in os.listdir(root):
@@ -2574,7 +2556,7 @@ def make_repo_path(root):
             if not os.path.isdir(repo_root):
                 continue
 
-            repo = spack.repo.from_path(repo_root)
+            repo = spack.repo.Repo(repo_root)
             path.put_last(repo)
 
     return path
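Note: both sides of `make_repo_path` build a composite repository from the subdirectories of an environment's repos dir; only the constructor spelling changes. A hedged sketch of the scan loop, where `repo_factory` and `path` are placeholders for `spack.repo.Repo`/`from_path` and `RepoPath`, and the `os.path.join` is implied by context elided from the hunk:

import os

def make_repo_path(root, repo_factory, path):
    if os.path.isdir(root):
        for entry in os.listdir(root):
            repo_root = os.path.join(root, entry)
            if not os.path.isdir(repo_root):
                continue                     # skip stray files
            path.put_last(repo_factory(repo_root))
    return path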
@@ -2775,11 +2757,10 @@ def from_lockfile(manifest_dir: Union[pathlib.Path, str]) -> "EnvironmentManifes
         manifest.flush()
         return manifest
 
-    def __init__(self, manifest_dir: Union[pathlib.Path, str], name: Optional[str] = None) -> None:
+    def __init__(self, manifest_dir: Union[pathlib.Path, str]) -> None:
         self.manifest_dir = pathlib.Path(manifest_dir)
-        self.name = name or str(manifest_dir)
         self.manifest_file = self.manifest_dir / manifest_name
-        self.scope_name = f"env:{self.name}"
+        self.scope_name = f"env:{environment_name(self.manifest_dir)}"
         self.config_stage_dir = os.path.join(env_subdir_path(manifest_dir), "config")
 
         #: Configuration scopes associated with this environment. Note that these are not
@@ -2791,8 +2772,12 @@ def __init__(self, manifest_dir: Union[pathlib.Path, str], name: Optional[str] =
             raise SpackEnvironmentError(msg)
 
         with self.manifest_file.open() as f:
-            self.yaml_content = _read_yaml(f)
+            raw, with_defaults_added = _read_yaml(f)
 
+        #: Pristine YAML content, without defaults being added
+        self.pristine_yaml_content = raw
+        #: YAML content with defaults added by Spack, if they're missing
+        self.yaml_content = with_defaults_added
         self.changed = False
 
     def _all_matches(self, user_spec: str) -> List[str]:
@@ -2806,7 +2791,7 @@ def _all_matches(self, user_spec: str) -> List[str]:
             ValueError: if no equivalent match is found
         """
         result = []
-        for yaml_spec_str in self.configuration["specs"]:
+        for yaml_spec_str in self.pristine_configuration["specs"]:
             if Spec(yaml_spec_str) == Spec(user_spec):
                 result.append(yaml_spec_str)
 
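Note: `_read_yaml` on the right returns a pair, so the manifest keeps both the pristine YAML as the user wrote it (written back verbatim by `flush`) and a defaults-expanded copy that the rest of the code reads. A minimal sketch of that split, assuming PyYAML and a single illustrative default:

import copy
import yaml  # assumption: PyYAML is available for the sketch

def read_manifest(text):
    raw = yaml.safe_load(text)
    with_defaults = copy.deepcopy(raw)
    # assumption: one injected default suffices to show the idea
    with_defaults.setdefault("spack", {}).setdefault("specs", [])
    return raw, with_defaults

raw, expanded = read_manifest("spack:\n  view: true\n")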
@@ -2821,6 +2806,7 @@ def add_user_spec(self, user_spec: str) -> None:
         Args:
             user_spec: user spec to be appended
         """
+        self.pristine_configuration.setdefault("specs", []).append(user_spec)
         self.configuration.setdefault("specs", []).append(user_spec)
         self.changed = True
 
@@ -2835,6 +2821,7 @@ def remove_user_spec(self, user_spec: str) -> None:
         """
         try:
             for key in self._all_matches(user_spec):
+                self.pristine_configuration["specs"].remove(key)
                 self.configuration["specs"].remove(key)
         except ValueError as e:
             msg = f"cannot remove {user_spec} from {self}, no such spec exists"
@@ -2852,6 +2839,7 @@ def override_user_spec(self, user_spec: str, idx: int) -> None:
             SpackEnvironmentError: when the user spec cannot be overridden
         """
         try:
+            self.pristine_configuration["specs"][idx] = user_spec
             self.configuration["specs"][idx] = user_spec
         except ValueError as e:
             msg = f"cannot override {user_spec} from {self}"
@@ -2864,10 +2852,10 @@ def set_include_concrete(self, include_concrete: List[str]) -> None:
         Args:
             include_concrete: list of already existing concrete environments to include
         """
-        self.configuration[included_concrete_name] = []
+        self.pristine_configuration[included_concrete_name] = []
 
         for env_path in include_concrete:
-            self.configuration[included_concrete_name].append(env_path)
+            self.pristine_configuration[included_concrete_name].append(env_path)
 
         self.changed = True
 
@@ -2881,13 +2869,14 @@ def add_definition(self, user_spec: str, list_name: str) -> None:
         Raises:
             SpackEnvironmentError: is no valid definition exists already
         """
-        defs = self.configuration.get("definitions", [])
+        defs = self.pristine_configuration.get("definitions", [])
         msg = f"cannot add {user_spec} to the '{list_name}' definition, no valid list exists"
 
         for idx, item in self._iterate_on_definitions(defs, list_name=list_name, err_msg=msg):
             item[list_name].append(user_spec)
             break
 
+        self.configuration["definitions"][idx][list_name].append(user_spec)
         self.changed = True
 
     def remove_definition(self, user_spec: str, list_name: str) -> None:
@@ -2901,7 +2890,7 @@ def remove_definition(self, user_spec: str, list_name: str) -> None:
             SpackEnvironmentError: if the user spec cannot be removed from the list,
                 or the list does not exist
         """
-        defs = self.configuration.get("definitions", [])
+        defs = self.pristine_configuration.get("definitions", [])
         msg = (
             f"cannot remove {user_spec} from the '{list_name}' definition, "
             f"no valid list exists"
@@ -2914,6 +2903,7 @@ def remove_definition(self, user_spec: str, list_name: str) -> None:
             except ValueError:
                 pass
 
+        self.configuration["definitions"][idx][list_name].remove(user_spec)
         self.changed = True
 
     def override_definition(self, user_spec: str, *, override: str, list_name: str) -> None:
@@ -2928,7 +2918,7 @@ def override_definition(self, user_spec: str, *, override: str, list_name: str)
         Raises:
             SpackEnvironmentError: if the user spec cannot be overridden
         """
-        defs = self.configuration.get("definitions", [])
+        defs = self.pristine_configuration.get("definitions", [])
         msg = f"cannot override {user_spec} with {override} in the '{list_name}' definition"
 
         for idx, item in self._iterate_on_definitions(defs, list_name=list_name, err_msg=msg):
@@ -2939,6 +2929,7 @@ def override_definition(self, user_spec: str, *, override: str, list_name: str)
             except ValueError:
                 pass
 
+        self.configuration["definitions"][idx][list_name][sub_index] = override
         self.changed = True
 
     def _iterate_on_definitions(self, definitions, *, list_name, err_msg):
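Note: each mutator above performs the same edit twice, once on `pristine_configuration` and once on `configuration`, so the file written back to disk and the defaults-expanded view cannot drift apart. A tiny illustration of that dual-write discipline (the helper itself is illustrative, not Spack API):

def append_spec(pristine, effective, user_spec):
    # Mirror every change in both copies; flush() writes the pristine one.
    for cfg in (pristine, effective):
        cfg.setdefault("specs", []).append(user_spec)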
@@ -2970,6 +2961,7 @@ def set_default_view(self, view: Union[bool, str, pathlib.Path, Dict[str, str]])
         True the default view is used for the environment, if False there's no view.
         """
         if isinstance(view, dict):
+            self.pristine_configuration["view"][default_view_name].update(view)
             self.configuration["view"][default_view_name].update(view)
             self.changed = True
             return
@@ -2977,13 +2969,15 @@ def set_default_view(self, view: Union[bool, str, pathlib.Path, Dict[str, str]])
         if not isinstance(view, bool):
             view = str(view)
 
+        self.pristine_configuration["view"] = view
         self.configuration["view"] = view
         self.changed = True
 
     def remove_default_view(self) -> None:
         """Removes the default view from the manifest file"""
-        view_data = self.configuration.get("view")
+        view_data = self.pristine_configuration.get("view")
         if isinstance(view_data, collections.abc.Mapping):
+            self.pristine_configuration["view"].pop(default_view_name)
             self.configuration["view"].pop(default_view_name)
             self.changed = True
             return
@@ -2996,12 +2990,17 @@ def flush(self) -> None:
             return
 
         with fs.write_tmp_and_move(os.path.realpath(self.manifest_file)) as f:
-            _write_yaml(self.yaml_content, f)
+            _write_yaml(self.pristine_yaml_content, f)
         self.changed = False
 
     @property
-    def configuration(self):
+    def pristine_configuration(self):
         """Return the dictionaries in the pristine YAML, without the top level attribute"""
+        return self.pristine_yaml_content[TOP_LEVEL_KEY]
+
+    @property
+    def configuration(self):
+        """Return the dictionaries in the YAML, without the top level attribute"""
         return self.yaml_content[TOP_LEVEL_KEY]
 
     def __len__(self):
@@ -3033,11 +3032,12 @@ def included_config_scopes(self) -> List[spack.config.ConfigScope]:
             SpackEnvironmentError: if the manifest includes a remote file but
                 no configuration stage directory has been identified
         """
-        scopes: List[spack.config.ConfigScope] = []
+        scopes = []
 
         # load config scopes added via 'include:', in reverse so that
         # highest-precedence scopes are last.
         includes = self[TOP_LEVEL_KEY].get("include", [])
+        env_name = environment_name(self.manifest_dir)
         missing = []
         for i, config_path in enumerate(reversed(includes)):
             # allow paths to contain spack config/environment variables, etc.
@@ -3100,22 +3100,24 @@ def included_config_scopes(self) -> List[spack.config.ConfigScope]:
 
             if os.path.isdir(config_path):
                 # directories are treated as regular ConfigScopes
-                config_name = f"env:{self.name}:{os.path.basename(config_path)}"
-                tty.debug(f"Creating DirectoryConfigScope {config_name} for '{config_path}'")
-                scopes.append(spack.config.DirectoryConfigScope(config_name, config_path))
+                config_name = "env:%s:%s" % (env_name, os.path.basename(config_path))
+                tty.debug("Creating ConfigScope {0} for '{1}'".format(config_name, config_path))
+                scope = spack.config.ConfigScope(config_name, config_path)
             elif os.path.exists(config_path):
                 # files are assumed to be SingleFileScopes
-                config_name = f"env:{self.name}:{config_path}"
-                tty.debug(f"Creating SingleFileScope {config_name} for '{config_path}'")
-                scopes.append(
-                    spack.config.SingleFileScope(
-                        config_name, config_path, spack.schema.merged.schema
-                    )
+                config_name = "env:%s:%s" % (env_name, config_path)
+                tty.debug(
+                    "Creating SingleFileScope {0} for '{1}'".format(config_name, config_path)
                 )
+                scope = spack.config.SingleFileScope(
+                    config_name, config_path, spack.schema.merged.schema
+                )
             else:
                 missing.append(config_path)
                 continue
 
+            scopes.append(scope)
+
         if missing:
             msg = "Detected {0} missing include path(s):".format(len(missing))
             msg += "\n   {0}".format("\n   ".join(missing))
@@ -3132,10 +3134,7 @@ def env_config_scopes(self) -> List[spack.config.ConfigScope]:
         scopes: List[spack.config.ConfigScope] = [
             *self.included_config_scopes,
             spack.config.SingleFileScope(
-                self.scope_name,
-                str(self.manifest_file),
-                spack.schema.env.schema,
-                yaml_path=[TOP_LEVEL_KEY],
+                self.scope_name, str(self.manifest_file), spack.schema.env.schema, [TOP_LEVEL_KEY]
             ),
         ]
         ensure_no_disallowed_env_config_mods(scopes)
@@ -10,9 +10,8 @@
 import shutil
 import stat
 import sys
-from typing import Callable, Dict, Optional
+from typing import Optional
 
-from llnl.string import comma_or
 from llnl.util import tty
 from llnl.util.filesystem import (
     mkdirp,
@@ -50,20 +49,19 @@
 _projections_path = ".spack/projections.yaml"
 
 
-LinkCallbackType = Callable[[str, str, "FilesystemView", Optional["spack.spec.Spec"]], None]
-
-
-def view_symlink(src: str, dst: str, *args, **kwargs) -> None:
+def view_symlink(src, dst, **kwargs):
+    # keyword arguments are irrelevant
+    # here to fit required call signature
     symlink(src, dst)
 
 
-def view_hardlink(src: str, dst: str, *args, **kwargs) -> None:
+def view_hardlink(src, dst, **kwargs):
+    # keyword arguments are irrelevant
+    # here to fit required call signature
     os.link(src, dst)
 
 
-def view_copy(
-    src: str, dst: str, view: "FilesystemView", spec: Optional["spack.spec.Spec"] = None
-) -> None:
+def view_copy(src: str, dst: str, view, spec: Optional[spack.spec.Spec] = None):
     """
     Copy a file from src to dst.
 
@@ -106,40 +104,27 @@ def view_copy(
         tty.debug(f"Can't change the permissions for {dst}")
 
 
-#: supported string values for `link_type` in an env, mapped to canonical values
-_LINK_TYPES = {
-    "hardlink": "hardlink",
-    "hard": "hardlink",
-    "copy": "copy",
-    "relocate": "copy",
-    "add": "symlink",
-    "symlink": "symlink",
-    "soft": "symlink",
-}
-
-_VALID_LINK_TYPES = sorted(set(_LINK_TYPES.values()))
-
-
-def canonicalize_link_type(link_type: str) -> str:
-    """Return canonical"""
-    canonical = _LINK_TYPES.get(link_type)
-    if not canonical:
-        raise ValueError(
-            f"Invalid link type: '{link_type}. Must be one of {comma_or(_VALID_LINK_TYPES)}'"
-        )
-    return canonical
-
-
-def function_for_link_type(link_type: str) -> LinkCallbackType:
-    link_type = canonicalize_link_type(link_type)
-    if link_type == "hardlink":
+def view_func_parser(parsed_name):
+    # What method are we using for this view
+    if parsed_name in ("hardlink", "hard"):
         return view_hardlink
-    elif link_type == "symlink":
-        return view_symlink
-    elif link_type == "copy":
+    elif parsed_name in ("copy", "relocate"):
         return view_copy
+    elif parsed_name in ("add", "symlink", "soft"):
+        return view_symlink
+    else:
+        raise ValueError(f"invalid link type for view: '{parsed_name}'")
 
-    assert False, "invalid link type"  # need mypy Literal values
+
+def inverse_view_func_parser(view_type):
+    # get string based on view type
+    if view_type is view_hardlink:
+        link_name = "hardlink"
+    elif view_type is view_copy:
+        link_name = "copy"
+    else:
+        link_name = "symlink"
+    return link_name
 
 
 class FilesystemView:
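Note: the left side replaces the `view_func_parser`/`inverse_view_func_parser` pair with a declarative `_LINK_TYPES` table plus a lookup, which keeps the aliases ("hard", "soft", "relocate", ...) in one place. A self-contained sketch of the table-driven lookup, with the three callbacks stubbed out (the stubs are assumptions; the real callbacks are defined earlier in this file):

from typing import Callable, Dict

def view_symlink(src, dst, **kwargs): ...
def view_hardlink(src, dst, **kwargs): ...
def view_copy(src, dst, **kwargs): ...

_LINK_TYPES = {
    "hardlink": "hardlink", "hard": "hardlink",
    "copy": "copy", "relocate": "copy",
    "add": "symlink", "symlink": "symlink", "soft": "symlink",
}

_FUNCTIONS: Dict[str, Callable] = {
    "hardlink": view_hardlink, "symlink": view_symlink, "copy": view_copy
}

def function_for_link_type(link_type: str) -> Callable:
    try:
        return _FUNCTIONS[_LINK_TYPES[link_type]]
    except KeyError:
        raise ValueError(f"invalid link type for view: '{link_type}'") from None

assert function_for_link_type("soft") is view_symlink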
@@ -155,16 +140,7 @@ class FilesystemView:
     directory structure.
     """
 
-    def __init__(
-        self,
-        root: str,
-        layout: "spack.directory_layout.DirectoryLayout",
-        *,
-        projections: Optional[Dict] = None,
-        ignore_conflicts: bool = False,
-        verbose: bool = False,
-        link_type: str = "symlink",
-    ):
+    def __init__(self, root, layout, **kwargs):
         """
         Initialize a filesystem view under the given `root` directory with
         corresponding directory `layout`.
@@ -173,14 +149,15 @@ def __init__(
         """
         self._root = root
         self.layout = layout
-        self.projections = {} if projections is None else projections
 
-        self.ignore_conflicts = ignore_conflicts
-        self.verbose = verbose
+        self.projections = kwargs.get("projections", {})
+
+        self.ignore_conflicts = kwargs.get("ignore_conflicts", False)
+        self.verbose = kwargs.get("verbose", False)
 
         # Setup link function to include view
-        self.link_type = link_type
-        self.link = ft.partial(function_for_link_type(link_type), view=self)
+        link_func = kwargs.get("link", view_symlink)
+        self.link = ft.partial(link_func, view=self)
 
     def add_specs(self, *specs, **kwargs):
         """
@@ -278,24 +255,8 @@ class YamlFilesystemView(FilesystemView):
     Filesystem view to work with a yaml based directory layout.
     """
 
-    def __init__(
-        self,
-        root: str,
-        layout: "spack.directory_layout.DirectoryLayout",
-        *,
-        projections: Optional[Dict] = None,
-        ignore_conflicts: bool = False,
-        verbose: bool = False,
-        link_type: str = "symlink",
-    ):
-        super().__init__(
-            root,
-            layout,
-            projections=projections,
-            ignore_conflicts=ignore_conflicts,
-            verbose=verbose,
-            link_type=link_type,
-        )
+    def __init__(self, root, layout, **kwargs):
+        super().__init__(root, layout, **kwargs)
 
         # Super class gets projections from the kwargs
         # YAML specific to get projections from YAML file
@@ -677,6 +638,9 @@ class SimpleFilesystemView(FilesystemView):
     """A simple and partial implementation of FilesystemView focused on performance and immutable
     views, where specs cannot be removed after they were added."""
 
+    def __init__(self, root, layout, **kwargs):
+        super().__init__(root, layout, **kwargs)
+
     def _sanity_check_view_projection(self, specs):
         """A very common issue is that we end up with two specs of the same package, that project
         to the same prefix. We want to catch that as early as possible and give a sensible error to
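Note: in both versions the view binds itself into the link callback once with `functools.partial`, so every call site can simply say `self.link(src, dst, spec=spec)`. A runnable sketch of that binding (the names here are illustrative):

import functools as ft

def view_copy(src, dst, view=None, spec=None):
    # stand-in for the real copy-and-relocate callback
    print(f"copy {src} -> {dst} into {view.name}")

class View:
    def __init__(self, name, link_func):
        self.name = name
        # Bind the view once; callers never pass it explicitly.
        self.link = ft.partial(link_func, view=self)

View("default", view_copy).link("/store/a", "/view/a")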
@@ -582,7 +582,7 @@ def dump_packages(spec: "spack.spec.Spec", path: str) -> None:
 
             # Create a source repo and get the pkg directory out of it.
             try:
-                source_repo = spack.repo.from_path(source_repo_root)
+                source_repo = spack.repo.Repo(source_repo_root)
                 source_pkg_dir = source_repo.dirname_for_package_name(node.name)
             except spack.repo.RepoError as err:
                 tty.debug(f"Failed to create source repo for {node.name}: {str(err)}")
@@ -593,7 +593,7 @@ def dump_packages(spec: "spack.spec.Spec", path: str) -> None:
         dest_repo_root = os.path.join(path, node.namespace)
         if not os.path.exists(dest_repo_root):
             spack.repo.create_repo(dest_repo_root)
-        repo = spack.repo.from_path(dest_repo_root)
+        repo = spack.repo.Repo(dest_repo_root)
 
         # Get the location of the package in the dest repo.
         dest_pkg_dir = repo.dirname_for_package_name(node.name)
@@ -1542,6 +1542,17 @@ def _add_tasks(self, request: BuildRequest, all_deps):
             tty.warn(f"Installation request refused: {str(err)}")
             return
 
+        # Skip out early if the spec is not being installed locally (i.e., if
+        # external or upstream).
+        #
+        # External and upstream packages need to get flagged as installed to
+        # ensure proper status tracking for environment build.
+        explicit = request.pkg.spec.dag_hash() in request.install_args.get("explicit", [])
+        not_local = _handle_external_and_upstream(request.pkg, explicit)
+        if not_local:
+            self._flag_installed(request.pkg)
+            return
+
         install_compilers = spack.config.get("config:install_missing_compilers", False)
 
         install_deps = request.install_args.get("install_deps")
@@ -2018,10 +2029,11 @@ def install(self) -> None:
             # Skip the installation if the spec is not being installed locally
             # (i.e., if external or upstream) BUT flag it as installed since
             # some package likely depends on it.
-            if _handle_external_and_upstream(pkg, task.explicit):
-                term_status.clear()
-                self._flag_installed(pkg, task.dependents)
-                continue
+            if not task.explicit:
+                if _handle_external_and_upstream(pkg, False):
+                    term_status.clear()
+                    self._flag_installed(pkg, task.dependents)
+                    continue
 
             # Flag a failed spec. Do not need an (install) prefix lock since
             # assume using a separate (failed) prefix lock file.
@@ -444,9 +444,8 @@ def make_argument_parser(**kwargs):
         "--config-scope",
         dest="config_scopes",
         action="append",
-        metavar="DIR|ENV",
-        help="add directory or environment as read-only configuration scope, without activating "
-        "the environment.",
+        metavar="DIR",
+        help="add a custom configuration scope",
     )
     parser.add_argument(
         "-d",
@@ -143,7 +143,6 @@ def __init__(self):
             "12": "monterey",
             "13": "ventura",
             "14": "sonoma",
-            "15": "sequoia",
         }
 
         version = macos_version()
@@ -35,7 +35,6 @@
 
 import spack.compilers
 import spack.config
-import spack.dependency
 import spack.deptypes as dt
 import spack.directives
 import spack.directory_layout
@@ -200,10 +199,10 @@ def __init__(cls, name, bases, attr_dict):
         # assumed to be detectable
         if hasattr(cls, "executables") or hasattr(cls, "libraries"):
             # Append a tag to each detectable package, so that finding them is faster
-            if not hasattr(cls, "tags"):
+            if hasattr(cls, "tags"):
+                getattr(cls, "tags").append(DetectablePackageMeta.TAG)
+            else:
                 setattr(cls, "tags", [DetectablePackageMeta.TAG])
-            elif DetectablePackageMeta.TAG not in cls.tags:
-                cls.tags.append(DetectablePackageMeta.TAG)
 
         @classmethod
         def platform_executables(cls):
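Note: the left-hand metaclass adds a third branch so a package that already defines `tags` without the detectable tag still gets it appended (the right side would append a duplicate on inherited lists). A condensed, runnable sketch of the three-way branch (class names are illustrative):

class DetectableMeta(type):
    TAG = "detectable"

    def __init__(cls, name, bases, attr_dict):
        super().__init__(name, bases, attr_dict)
        if hasattr(cls, "executables") or hasattr(cls, "libraries"):
            if not hasattr(cls, "tags"):
                cls.tags = [DetectableMeta.TAG]
            elif DetectableMeta.TAG not in cls.tags:
                cls.tags.append(DetectableMeta.TAG)

class Pkg(metaclass=DetectableMeta):
    executables = ["pkg"]
    tags = ["build-tools"]

assert Pkg.tags == ["build-tools", "detectable"]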
@@ -622,6 +621,10 @@ class PackageBase(WindowsRPath, PackageViewMixin, RedistributionMixin, metaclass
     #: By default do not run tests within package's install()
     run_tests = False
 
+    #: Keep -Werror flags, matches config:flags:keep_werror to override config
+    # NOTE: should be type Optional[Literal['all', 'specific', 'none']] in 3.8+
+    keep_werror: Optional[str] = None
+
     #: Most packages are NOT extendable. Set to True if you want extensions.
     extendable = False
 
@@ -749,6 +752,11 @@ def __init__(self, spec):
         self._fetch_time = 0.0
 
         self.win_rpath = fsys.WindowsSimulatedRPath(self)
+
+        if self.is_extension:
+            pkg_cls = spack.repo.PATH.get_pkg_class(self.extendee_spec.name)
+            pkg_cls(self.extendee_spec)._check_extendable()
+
         super().__init__()
 
     @classmethod
@@ -922,32 +930,6 @@ def global_license_file(self):
             self.global_license_dir, self.name, os.path.basename(self.license_files[0])
         )
 
-    # NOTE: return type should be Optional[Literal['all', 'specific', 'none']] in
-    # Python 3.8+, but we still support 3.6.
-    @property
-    def keep_werror(self) -> Optional[str]:
-        """Keep ``-Werror`` flags, matches ``config:flags:keep_werror`` to override config.
-
-        Valid return values are:
-        * ``"all"``: keep all ``-Werror`` flags.
-        * ``"specific"``: keep only ``-Werror=specific-warning`` flags.
-        * ``"none"``: filter out all ``-Werror*`` flags.
-        * ``None``: respect the user's configuration (``"none"`` by default).
-        """
-        if self.spec.satisfies("%nvhpc@:23.3") or self.spec.satisfies("%pgi"):
-            # Filtering works by replacing -Werror with -Wno-error, but older nvhpc and
-            # PGI do not understand -Wno-error, so we disable filtering.
-            return "all"
-
-        elif self.spec.satisfies("%nvhpc@23.4:"):
-            # newer nvhpc supports -Wno-error but can't disable specific warnings with
-            # -Wno-error=warning. Skip -Werror=warning, but still filter -Werror.
-            return "specific"
-
-        else:
-            # use -Werror disablement by default for other compilers
-            return None
-
     @property
     def version(self):
         if not self.spec.versions.concrete:
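Note: the removed `keep_werror` property is essentially a compiler-capability table: compilers that do not understand `-Wno-error` keep every `-Werror` flag, newer nvhpc keeps only the `-Werror=<warning>` form, and everything else defers to configuration. A standalone sketch with spec matching reduced to a plain string check (the compiler labels are assumptions of the sketch):

from typing import Optional

def keep_werror(compiler: str) -> Optional[str]:
    if compiler in ("pgi", "nvhpc<=23.3"):
        return "all"        # -Wno-error unsupported: never filter
    if compiler == "nvhpc>=23.4":
        return "specific"   # keep -Werror=<warning>, filter bare -Werror
    return None             # defer to config:flags:keep_werror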
@@ -1137,9 +1119,10 @@ def _make_stage(self):
             if not link_format:
                 link_format = "build-{arch}-{hash:7}"
             stage_link = self.spec.format_path(link_format)
-            source_stage = DevelopStage(compute_stage_name(self.spec), dev_path, stage_link)
-        else:
-            source_stage = self._make_root_stage(self.fetcher)
+            return DevelopStage(compute_stage_name(self.spec), dev_path, stage_link)
+
+        # To fetch the current version
+        source_stage = self._make_root_stage(self.fetcher)
 
         # all_stages is source + resources + patches
         all_stages = StageComposite()
@@ -1468,8 +1451,10 @@ def do_fetch(self, mirror_only=False):
             return
 
         checksum = spack.config.get("config:checksum")
+        fetch = self.stage.needs_fetching
         if (
             checksum
+            and fetch
             and (self.version not in self.versions)
             and (not isinstance(self.version, GitVersion))
         ):
@@ -1576,11 +1561,13 @@ def do_patch(self):
                 tty.debug("Patching failed last time. Restaging.")
                 self.stage.restage()
             else:
-                # develop specs may have patch failures but should never be restaged
-                tty.warn(
-                    f"A patch failure was detected in {self.name}."
-                    " Build errors may occur due to this."
+                # develop specs/ DIYStages may have patch failures but
+                # should never be restaged
+                msg = (
+                    "A patch failure was detected in %s." % self.name
+                    + " Build errors may occur due to this."
                 )
+                tty.warn(msg)
                 return
 
         # If this file exists, then we already applied all the patches.
@@ -2384,6 +2371,10 @@ def do_deprecate(self, deprecator, link_fn):
         PackageBase.uninstall_by_spec(spec, force=True, deprecator=deprecator)
         link_fn(deprecator.prefix, spec.prefix)
 
+    def _check_extendable(self):
+        if not self.extendable:
+            raise ValueError("Package %s is not extendable!" % self.name)
+
     def view(self):
         """Create a view with the prefix of this package as the root.
         Extensions added to this view will modify the installation prefix of
@@ -9,7 +9,7 @@
 import os.path
 import pathlib
 import sys
-from typing import Any, Dict, Optional, Tuple, Type, Union
+from typing import Any, Dict, Optional, Tuple, Type
 
 import llnl.util.filesystem
 from llnl.url import allowed_archive
@@ -65,9 +65,6 @@ def apply_patch(
     patch(*args)
 
 
-PatchPackageType = Union["spack.package_base.PackageBase", Type["spack.package_base.PackageBase"]]
-
-
 class Patch:
     """Base class for patches.
 
@@ -80,7 +77,7 @@ class Patch:
 
     def __init__(
        self,
-        pkg: PatchPackageType,
+        pkg: "spack.package_base.PackageBase",
        path_or_url: str,
        level: int,
        working_dir: str,
@@ -162,7 +159,7 @@ class FilePatch(Patch):
 
     def __init__(
        self,
-        pkg: PatchPackageType,
+        pkg: "spack.package_base.PackageBase",
        relative_path: str,
        level: int,
        working_dir: str,
@@ -186,7 +183,7 @@ def __init__(
         abs_path: Optional[str] = None
         # At different times we call FilePatch on instances and classes
         pkg_cls = pkg if inspect.isclass(pkg) else pkg.__class__
-        for cls in inspect.getmro(pkg_cls):  # type: ignore
+        for cls in inspect.getmro(pkg_cls):
             if not hasattr(cls, "module"):
                 # We've gone too far up the MRO
                 break
@@ -245,7 +242,7 @@ class UrlPatch(Patch):
 
     def __init__(
        self,
-        pkg: PatchPackageType,
+        pkg: "spack.package_base.PackageBase",
        url: str,
        level: int = 1,
        *,
@@ -364,9 +361,8 @@ def from_dict(
     """
     repository = repository or spack.repo.PATH
     owner = dictionary.get("owner")
-    if owner is None:
-        raise ValueError(f"Invalid patch dictionary: {dictionary}")
-    assert isinstance(owner, str)
+    if "owner" not in dictionary:
+        raise ValueError("Invalid patch dictionary: %s" % dictionary)
     pkg_cls = repository.get_pkg_class(owner)
 
     if "url" in dictionary:
@@ -25,8 +25,7 @@
 import traceback
 import types
 import uuid
-import warnings
-from typing import Any, Dict, Generator, List, Optional, Set, Tuple, Type, Union
+from typing import Any, Dict, List, Set, Tuple, Union
 
 import llnl.path
 import llnl.util.filesystem as fs
@@ -127,35 +126,11 @@ def exec_module(self, module):
 
 
 class ReposFinder:
-    """MetaPathFinder class that loads a Python module corresponding to a Spack package.
+    """MetaPathFinder class that loads a Python module corresponding to a Spack package
 
-    Returns a loader based on the inspection of the current repository list.
+    Return a loader based on the inspection of the current global repository list.
     """
 
-    def __init__(self):
-        self._repo_init = _path
-        self._repo = None
-
-    @property
-    def current_repository(self):
-        if self._repo is None:
-            self._repo = self._repo_init()
-        return self._repo
-
-    @current_repository.setter
-    def current_repository(self, value):
-        self._repo = value
-
-    @contextlib.contextmanager
-    def switch_repo(self, substitute: "RepoType"):
-        """Switch the current repository list for the duration of the context manager."""
-        old = self.current_repository
-        try:
-            self.current_repository = substitute
-            yield
-        finally:
-            self.current_repository = old
-
     def find_spec(self, fullname, python_path, target=None):
         # "target" is not None only when calling importlib.reload()
         if target is not None:
@@ -174,14 +149,9 @@ def compute_loader(self, fullname):
         # namespaces are added to repo, and package modules are leaves.
         namespace, dot, module_name = fullname.rpartition(".")
 
-        # If it's a module in some repo, or if it is the repo's namespace, let the repo handle it.
-        is_repo_path = isinstance(self.current_repository, RepoPath)
-        if is_repo_path:
-            repos = self.current_repository.repos
-        else:
-            repos = [self.current_repository]
-
-        for repo in repos:
+        # If it's a module in some repo, or if it is the repo's
+        # namespace, let the repo handle it.
+        for repo in PATH.repos:
             # We are using the namespace of the repo and the repo contains the package
             if namespace == repo.full_namespace:
                 # With 2 nested conditionals we can call "repo.real_name" only once
@@ -195,7 +165,7 @@ def compute_loader(self, fullname):
 
         # No repo provides the namespace, but it is a valid prefix of
         # something in the RepoPath.
-        if is_repo_path and self.current_repository.by_namespace.is_prefix(fullname):
+        if PATH.by_namespace.is_prefix(fullname):
             return SpackNamespaceLoader()
 
         return None
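Note: the removed `ReposFinder` machinery is a lazily initialized attribute plus a swap-and-restore context manager; the right side reads the global `PATH` directly instead. A generic, runnable sketch of the removed pattern (it mirrors the deleted code, with the repo type abstracted away):

import contextlib

class Finder:
    def __init__(self, repo_init):
        self._repo_init = repo_init   # zero-arg factory, called lazily
        self._repo = None

    @property
    def current_repository(self):
        if self._repo is None:
            self._repo = self._repo_init()
        return self._repo

    @current_repository.setter
    def current_repository(self, value):
        self._repo = value

    @contextlib.contextmanager
    def switch_repo(self, substitute):
        # Swap the repository list for the duration of the block.
        old = self.current_repository
        try:
            self.current_repository = substitute
            yield
        finally:
            self.current_repository = old

f = Finder(lambda: ["builtin"])
with f.switch_repo(["mock"]):
    assert f.current_repository == ["mock"]
assert f.current_repository == ["builtin"]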
@@ -590,7 +560,7 @@ def __init__(
        self,
        package_checker: FastPackageChecker,
        namespace: str,
-        cache: "spack.caches.FileCacheType",
+        cache: spack.util.file_cache.FileCache,
     ):
        self.checker = package_checker
        self.packages_path = self.checker.packages_path
@@ -675,39 +645,33 @@ class RepoPath:
     repository.
 
     Args:
-        repos: list Repo objects or paths to put in this RepoPath
-        cache: file cache associated with this repository
-        overrides: dict mapping package name to class attribute overrides for that package
+        repos (list): list Repo objects or paths to put in this RepoPath
     """
 
-    def __init__(
-        self,
-        *repos: Union[str, "Repo"],
-        cache: "spack.caches.FileCacheType",
-        overrides: Optional[Dict[str, Any]] = None,
-    ) -> None:
-        self.repos: List[Repo] = []
+    def __init__(self, *repos, **kwargs):
+        cache = kwargs.get("cache", spack.caches.MISC_CACHE)
+        self.repos = []
         self.by_namespace = nm.NamespaceTrie()
-        self._provider_index: Optional[spack.provider_index.ProviderIndex] = None
-        self._patch_index: Optional[spack.patch.PatchCache] = None
-        self._tag_index: Optional[spack.tag.TagIndex] = None
+        self._provider_index = None
+        self._patch_index = None
+        self._tag_index = None
 
         # Add each repo to this path.
         for repo in repos:
            try:
                if isinstance(repo, str):
-                    repo = Repo(repo, cache=cache, overrides=overrides)
-                repo.finder(self)
+                    repo = Repo(repo, cache=cache)
                self.put_last(repo)
            except RepoError as e:
                tty.warn(
-                    f"Failed to initialize repository: '{repo}'.",
+                    "Failed to initialize repository: '%s'." % repo,
                    e.message,
                    "To remove the bad repository, run this command:",
-                    f"    spack repo rm {repo}",
+                    "    spack repo rm %s" % repo,
                )
 
-    def put_first(self, repo: "Repo") -> None:
+    def put_first(self, repo):
         """Add repo first in the search path."""
         if isinstance(repo, RepoPath):
             for r in reversed(repo.repos):
@@ -735,34 +699,50 @@ def remove(self, repo):
         if repo in self.repos:
             self.repos.remove(repo)
 
-    def get_repo(self, namespace: str) -> "Repo":
-        """Get a repository by namespace."""
+    def get_repo(self, namespace, default=NOT_PROVIDED):
+        """Get a repository by namespace.
+
+        Arguments:
+
+            namespace:
+
+                Look up this namespace in the RepoPath, and return it if found.
+
+        Optional Arguments:
+
+            default:
+
+                If default is provided, return it when the namespace
+                isn't found. If not, raise an UnknownNamespaceError.
+        """
         full_namespace = python_package_for_repo(namespace)
         if full_namespace not in self.by_namespace:
-            raise UnknownNamespaceError(namespace)
+            if default == NOT_PROVIDED:
+                raise UnknownNamespaceError(namespace)
+            return default
         return self.by_namespace[full_namespace]
 
-    def first_repo(self) -> Optional["Repo"]:
+    def first_repo(self):
         """Get the first repo in precedence order."""
         return self.repos[0] if self.repos else None
 
     @llnl.util.lang.memoized
-    def _all_package_names_set(self, include_virtuals) -> Set[str]:
+    def _all_package_names_set(self, include_virtuals):
         return {name for repo in self.repos for name in repo.all_package_names(include_virtuals)}
 
     @llnl.util.lang.memoized
-    def _all_package_names(self, include_virtuals: bool) -> List[str]:
+    def _all_package_names(self, include_virtuals):
         """Return all unique package names in all repositories."""
         return sorted(self._all_package_names_set(include_virtuals), key=lambda n: n.lower())
 
-    def all_package_names(self, include_virtuals: bool = False) -> List[str]:
+    def all_package_names(self, include_virtuals=False):
         return self._all_package_names(include_virtuals)
 
-    def package_path(self, name: str) -> str:
+    def package_path(self, name):
         """Get path to package.py file for this repo."""
         return self.repo_for_pkg(name).package_path(name)
 
-    def all_package_paths(self) -> Generator[str, None, None]:
+    def all_package_paths(self):
         for name in self.all_package_names():
             yield self.package_path(name)
 
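Note: the right-hand `get_repo` guards its optional `default` with a module-level `NOT_PROVIDED` sentinel, so callers can legitimately pass `default=None` and still be distinguished from "no default given". A sketch of the sentinel pattern, with a plain dict standing in for the namespace trie (the diff compares with `==`; `is` is the usual spelling):

NOT_PROVIDED = object()   # unique sentinel object

def get_repo(repos, namespace, default=NOT_PROVIDED):
    if namespace not in repos:
        if default is NOT_PROVIDED:
            raise KeyError(namespace)
        return default
    return repos[namespace]

assert get_repo({}, "builtin", default=None) is None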
@@ -778,52 +758,53 @@ def packages_with_tags(self, *tags: str, full: bool = False) -> Set[str]:
             for pkg in repo.packages_with_tags(*tags)
         }
 
-    def all_package_classes(self) -> Generator[Type["spack.package_base.PackageBase"], None, None]:
+    def all_package_classes(self):
         for name in self.all_package_names():
             yield self.get_pkg_class(name)
 
     @property
-    def provider_index(self) -> spack.provider_index.ProviderIndex:
+    def provider_index(self):
         """Merged ProviderIndex from all Repos in the RepoPath."""
         if self._provider_index is None:
             self._provider_index = spack.provider_index.ProviderIndex(repository=self)
             for repo in reversed(self.repos):
                 self._provider_index.merge(repo.provider_index)
 
         return self._provider_index
 
     @property
-    def tag_index(self) -> spack.tag.TagIndex:
+    def tag_index(self):
         """Merged TagIndex from all Repos in the RepoPath."""
         if self._tag_index is None:
             self._tag_index = spack.tag.TagIndex(repository=self)
             for repo in reversed(self.repos):
                 self._tag_index.merge(repo.tag_index)
 
         return self._tag_index
 
     @property
-    def patch_index(self) -> spack.patch.PatchCache:
+    def patch_index(self):
         """Merged PatchIndex from all Repos in the RepoPath."""
         if self._patch_index is None:
             self._patch_index = spack.patch.PatchCache(repository=self)
             for repo in reversed(self.repos):
                 self._patch_index.update(repo.patch_index)
 
         return self._patch_index
 
     @autospec
-    def providers_for(self, virtual_spec: "spack.spec.Spec") -> List["spack.spec.Spec"]:
+    def providers_for(self, vpkg_spec):
         providers = [
             spec
-            for spec in self.provider_index.providers_for(virtual_spec)
+            for spec in self.provider_index.providers_for(vpkg_spec)
             if spec.name in self._all_package_names_set(include_virtuals=False)
         ]
         if not providers:
-            raise UnknownPackageError(virtual_spec.fullname)
+            raise UnknownPackageError(vpkg_spec.fullname)
         return providers
 
     @autospec
-    def extensions_for(
-        self, extendee_spec: "spack.spec.Spec"
-    ) -> List["spack.package_base.PackageBase"]:
+    def extensions_for(self, extendee_spec):
         return [
             pkg_cls(spack.spec.Spec(pkg_cls.name))
             for pkg_cls in self.all_package_classes()
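Note: `_all_package_names_set` computes the union of package names across every repo and is memoized (via `llnl.util.lang.memoized`) because it backs hot paths like `providers_for`. A sketch with `functools.lru_cache` standing in for that memoizer (an assumption; note the cache also keeps the instance alive, which the sketch ignores):

import functools

class RepoPath:
    def __init__(self, *repos):
        self.repos = list(repos)

    @functools.lru_cache(maxsize=None)
    def _all_package_names_set(self, include_virtuals):
        # Union of names across all repos, computed once per argument.
        return {
            name
            for repo in self.repos
            for name in repo.all_package_names(include_virtuals)
        }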
@@ -834,7 +815,7 @@ def last_mtime(self):
         """Time a package file in this repo was last updated."""
         return max(repo.last_mtime() for repo in self.repos)
 
-    def repo_for_pkg(self, spec: Union[str, "spack.spec.Spec"]) -> "Repo":
+    def repo_for_pkg(self, spec):
         """Given a spec, get the repository for its package."""
         # We don't @_autospec this function b/c it's called very frequently
         # and we want to avoid parsing str's into Specs unnecessarily.
@@ -859,20 +840,17 @@ def repo_for_pkg(self, spec: Union[str, "spack.spec.Spec"]) -> "Repo":
                 return repo
 
         # If the package isn't in any repo, return the one with
         # highest precedence. This is for commands like `spack edit`
         # that can operate on packages that don't exist yet.
-        selected = self.first_repo()
-        if selected is None:
-            raise UnknownPackageError(name)
-        return selected
+        return self.first_repo()
 
-    def get(self, spec: "spack.spec.Spec") -> "spack.package_base.PackageBase":
+    def get(self, spec):
         """Returns the package associated with the supplied spec."""
         msg = "RepoPath.get can only be called on concrete specs"
         assert isinstance(spec, spack.spec.Spec) and spec.concrete, msg
         return self.repo_for_pkg(spec).get(spec)
 
-    def get_pkg_class(self, pkg_name: str) -> Type["spack.package_base.PackageBase"]:
+    def get_pkg_class(self, pkg_name):
         """Find a class for the spec's package and return the class object."""
         return self.repo_for_pkg(pkg_name).get_pkg_class(pkg_name)
 
||||||
@@ -885,26 +863,26 @@ def dump_provenance(self, spec, path):
|
|||||||
"""
|
"""
|
||||||
return self.repo_for_pkg(spec).dump_provenance(spec, path)
|
return self.repo_for_pkg(spec).dump_provenance(spec, path)
|
||||||
|
|
||||||
def dirname_for_package_name(self, pkg_name: str) -> str:
|
def dirname_for_package_name(self, pkg_name):
|
||||||
return self.repo_for_pkg(pkg_name).dirname_for_package_name(pkg_name)
|
return self.repo_for_pkg(pkg_name).dirname_for_package_name(pkg_name)
|
||||||
|
|
||||||
def filename_for_package_name(self, pkg_name: str) -> str:
|
def filename_for_package_name(self, pkg_name):
|
||||||
return self.repo_for_pkg(pkg_name).filename_for_package_name(pkg_name)
|
return self.repo_for_pkg(pkg_name).filename_for_package_name(pkg_name)
|
||||||
|
|
||||||
def exists(self, pkg_name: str) -> bool:
|
def exists(self, pkg_name):
|
||||||
"""Whether package with the give name exists in the path's repos.
|
"""Whether package with the give name exists in the path's repos.
|
||||||
|
|
||||||
Note that virtual packages do not "exist".
|
Note that virtual packages do not "exist".
|
||||||
"""
|
"""
|
||||||
return any(repo.exists(pkg_name) for repo in self.repos)
|
return any(repo.exists(pkg_name) for repo in self.repos)
|
||||||
|
|
||||||
def _have_name(self, pkg_name: str) -> bool:
|
def _have_name(self, pkg_name):
|
||||||
have_name = pkg_name is not None
|
have_name = pkg_name is not None
|
||||||
if have_name and not isinstance(pkg_name, str):
|
if have_name and not isinstance(pkg_name, str):
|
||||||
raise ValueError(f"is_virtual(): expected package name, got {type(pkg_name)}")
|
raise ValueError("is_virtual(): expected package name, got %s" % type(pkg_name))
|
||||||
return have_name
|
return have_name
|
||||||
|
|
||||||
def is_virtual(self, pkg_name: str) -> bool:
|
def is_virtual(self, pkg_name):
|
||||||
"""Return True if the package with this name is virtual, False otherwise.
|
"""Return True if the package with this name is virtual, False otherwise.
|
||||||
|
|
||||||
This function use the provider index. If calling from a code block that
|
This function use the provider index. If calling from a code block that
|
||||||
@@ -916,7 +894,7 @@ def is_virtual(self, pkg_name: str) -> bool:
        have_name = self._have_name(pkg_name)
        return have_name and pkg_name in self.provider_index

-    def is_virtual_safe(self, pkg_name: str) -> bool:
+    def is_virtual_safe(self, pkg_name):
        """Return True if the package with this name is virtual, False otherwise.

        This function doesn't use the provider index.
@@ -937,28 +915,18 @@ class Repo:
    Each package repository must have a top-level configuration file
    called `repo.yaml`.

-    Currently, `repo.yaml` must define:
+    Currently, `repo.yaml` this must define:

    `namespace`:
        A Python namespace where the repository's packages should live.

-    `subdirectory`:
-        An optional subdirectory name where packages are placed
    """

-    def __init__(
-        self,
-        root: str,
-        *,
-        cache: "spack.caches.FileCacheType",
-        overrides: Optional[Dict[str, Any]] = None,
-    ) -> None:
+    def __init__(self, root, cache=None):
        """Instantiate a package repository from a filesystem path.

        Args:
            root: the root directory of the repository
-            cache: file cache associated with this repository
-            overrides: dict mapping package name to class attribute overrides for that package
        """
        # Root directory, containing _repo.yaml and package dirs
        # Allow roots to be spack-relative by starting with '$spack'
@@ -971,20 +939,20 @@ def check(condition, msg):

        # Validate repository layout.
        self.config_file = os.path.join(self.root, repo_config_name)
-        check(os.path.isfile(self.config_file), f"No {repo_config_name} found in '{root}'")
+        check(os.path.isfile(self.config_file), "No %s found in '%s'" % (repo_config_name, root))

        # Read configuration and validate namespace
        config = self._read_config()
        check(
            "namespace" in config,
-            f"{os.path.join(root, repo_config_name)} must define a namespace.",
+            "%s must define a namespace." % os.path.join(root, repo_config_name),
        )

        self.namespace = config["namespace"]
        check(
            re.match(r"[a-zA-Z][a-zA-Z0-9_.]+", self.namespace),
-            f"Invalid namespace '{self.namespace}' in repo '{self.root}'. "
-            "Namespaces must be valid python identifiers separated by '.'",
+            ("Invalid namespace '%s' in repo '%s'. " % (self.namespace, self.root))
+            + "Namespaces must be valid python identifiers separated by '.'",
        )

        # Set up 'full_namespace' to include the super-namespace
@@ -996,26 +964,23 @@ def check(condition, msg):
        packages_dir = config.get("subdirectory", packages_dir_name)
        self.packages_path = os.path.join(self.root, packages_dir)
        check(
-            os.path.isdir(self.packages_path), f"No directory '{packages_dir}' found in '{root}'"
+            os.path.isdir(self.packages_path),
+            "No directory '%s' found in '%s'" % (packages_dir, root),
        )

-        # Class attribute overrides by package name
-        self.overrides = overrides or {}
-
-        # Optional reference to a RepoPath to influence module import from spack.pkg
-        self._finder: Optional[RepoPath] = None
+        # These are internal cache variables.
+        self._modules = {}
+        self._classes = {}
+        self._instances = {}

        # Map that goes from package name to corresponding file stat
-        self._fast_package_checker: Optional[FastPackageChecker] = None
+        self._fast_package_checker = None

        # Indexes for this repository, computed lazily
-        self._repo_index: Optional[RepoIndex] = None
-        self._cache = cache
+        self._repo_index = None
+        self._cache = cache or spack.caches.MISC_CACHE

-    def finder(self, value: RepoPath) -> None:
-        self._finder = value
-
-    def real_name(self, import_name: str) -> Optional[str]:
+    def real_name(self, import_name):
        """Allow users to import Spack packages using Python identifiers.

        A python identifier might map to many different Spack package
@@ -1034,21 +999,18 @@ def real_name(self, import_name: str) -> Optional[str]:
            return import_name

        options = nm.possible_spack_module_names(import_name)
-        try:
-            options.remove(import_name)
-        except ValueError:
-            pass
+        options.remove(import_name)
        for name in options:
            if name in self:
                return name
        return None

-    def is_prefix(self, fullname: str) -> bool:
+    def is_prefix(self, fullname):
        """True if fullname is a prefix of this Repo's namespace."""
        parts = fullname.split(".")
        return self._names[: len(parts)] == parts

-    def _read_config(self) -> Dict[str, str]:
+    def _read_config(self):
        """Check for a YAML config file in this db's root directory."""
        try:
            with open(self.config_file) as reponame_file:
@@ -1059,14 +1021,14 @@ def _read_config(self) -> Dict[str, str]:
                or "repo" not in yaml_data
                or not isinstance(yaml_data["repo"], dict)
            ):
-                tty.die(f"Invalid {repo_config_name} in repository {self.root}")
+                tty.die("Invalid %s in repository %s" % (repo_config_name, self.root))

            return yaml_data["repo"]

        except IOError:
-            tty.die(f"Error reading {self.config_file} when opening {self.root}")
+            tty.die("Error reading %s when opening %s" % (self.config_file, self.root))

-    def get(self, spec: "spack.spec.Spec") -> "spack.package_base.PackageBase":
+    def get(self, spec):
        """Returns the package associated with the supplied spec."""
        msg = "Repo.get can only be called on concrete specs"
        assert isinstance(spec, spack.spec.Spec) and spec.concrete, msg
@@ -1087,13 +1049,16 @@ def get(self, spec: "spack.spec.Spec") -> "spack.package_base.PackageBase":
            # pass these through as their error messages will be fine.
            raise
        except Exception as e:
+            tty.debug(e)
+
            # Make sure other errors in constructors hit the error
            # handler by wrapping them
-            tty.debug(e)
-            raise FailedConstructorError(spec.fullname, *sys.exc_info()) from e
+            if spack.config.get("config:debug"):
+                sys.excepthook(*sys.exc_info())
+            raise FailedConstructorError(spec.fullname, *sys.exc_info())

    @autospec
-    def dump_provenance(self, spec: "spack.spec.Spec", path: str) -> None:
+    def dump_provenance(self, spec, path):
        """Dump provenance information for a spec to a particular path.

        This dumps the package file and any associated patch files.
@@ -1101,7 +1066,7 @@ def dump_provenance(self, spec: "spack.spec.Spec", path: str) -> None:
        """
        if spec.namespace and spec.namespace != self.namespace:
            raise UnknownPackageError(
-                f"Repository {self.namespace} does not contain package {spec.fullname}."
+                "Repository %s does not contain package %s." % (self.namespace, spec.fullname)
            )

        package_path = self.filename_for_package_name(spec.name)
@@ -1118,13 +1083,17 @@ def dump_provenance(self, spec: "spack.spec.Spec", path: str) -> None:
                if os.path.exists(patch.path):
                    fs.install(patch.path, path)
                else:
-                    warnings.warn(f"Patch file did not exist: {patch.path}")
+                    tty.warn("Patch file did not exist: %s" % patch.path)

        # Install the package.py file itself.
        fs.install(self.filename_for_package_name(spec.name), path)

+    def purge(self):
+        """Clear entire package instance cache."""
+        self._instances.clear()
+
    @property
-    def index(self) -> RepoIndex:
+    def index(self):
        """Construct the index for this repo lazily."""
        if self._repo_index is None:
            self._repo_index = RepoIndex(self._pkg_checker, self.namespace, cache=self._cache)
@@ -1134,40 +1103,42 @@ def index(self) -> RepoIndex:
        return self._repo_index

    @property
-    def provider_index(self) -> spack.provider_index.ProviderIndex:
+    def provider_index(self):
        """A provider index with names *specific* to this repo."""
        return self.index["providers"]

    @property
-    def tag_index(self) -> spack.tag.TagIndex:
+    def tag_index(self):
        """Index of tags and which packages they're defined on."""
        return self.index["tags"]

    @property
-    def patch_index(self) -> spack.patch.PatchCache:
+    def patch_index(self):
        """Index of patches and packages they're defined on."""
        return self.index["patches"]

    @autospec
-    def providers_for(self, vpkg_spec: "spack.spec.Spec") -> List["spack.spec.Spec"]:
+    def providers_for(self, vpkg_spec):
        providers = self.provider_index.providers_for(vpkg_spec)
        if not providers:
            raise UnknownPackageError(vpkg_spec.fullname)
        return providers

    @autospec
-    def extensions_for(
-        self, extendee_spec: "spack.spec.Spec"
-    ) -> List["spack.package_base.PackageBase"]:
-        result = [pkg_cls(spack.spec.Spec(pkg_cls.name)) for pkg_cls in self.all_package_classes()]
-        return [x for x in result if x.extends(extendee_spec)]
+    def extensions_for(self, extendee_spec):
+        return [
+            pkg_cls(spack.spec.Spec(pkg_cls.name))
+            for pkg_cls in self.all_package_classes()
+            if pkg_cls(spack.spec.Spec(pkg_cls.name)).extends(extendee_spec)
+        ]

-    def dirname_for_package_name(self, pkg_name: str) -> str:
-        """Given a package name, get the directory containing its package.py file."""
+    def dirname_for_package_name(self, pkg_name):
+        """Get the directory name for a particular package. This is the
+        directory that contains its package.py file."""
        _, unqualified_name = self.partition_package_name(pkg_name)
        return os.path.join(self.packages_path, unqualified_name)

-    def filename_for_package_name(self, pkg_name: str) -> str:
+    def filename_for_package_name(self, pkg_name):
        """Get the filename for the module we should load for a particular
        package. Packages for a Repo live in
        ``$root/<package_name>/package.py``
@@ -1180,23 +1151,23 @@ def filename_for_package_name(self, pkg_name: str) -> str:
        return os.path.join(pkg_dir, package_file_name)

    @property
-    def _pkg_checker(self) -> FastPackageChecker:
+    def _pkg_checker(self):
        if self._fast_package_checker is None:
            self._fast_package_checker = FastPackageChecker(self.packages_path)
        return self._fast_package_checker

-    def all_package_names(self, include_virtuals: bool = False) -> List[str]:
+    def all_package_names(self, include_virtuals=False):
        """Returns a sorted list of all package names in the Repo."""
        names = sorted(self._pkg_checker.keys())
        if include_virtuals:
            return names
        return [x for x in names if not self.is_virtual(x)]

-    def package_path(self, name: str) -> str:
+    def package_path(self, name):
        """Get path to package.py file for this repo."""
        return os.path.join(self.packages_path, name, package_file_name)

-    def all_package_paths(self) -> Generator[str, None, None]:
+    def all_package_paths(self):
        for name in self.all_package_names():
            yield self.package_path(name)

@@ -1205,7 +1176,7 @@ def packages_with_tags(self, *tags: str) -> Set[str]:
        v.intersection_update(*(self.tag_index[tag.lower()] for tag in tags))
        return v

-    def all_package_classes(self) -> Generator[Type["spack.package_base.PackageBase"], None, None]:
+    def all_package_classes(self):
        """Iterator over all package *classes* in the repository.

        Use this with care, because loading packages is slow.
@@ -1213,7 +1184,7 @@ def all_package_classes(self) -> Generator[Type["spack.package_base.PackageBase"
        for name in self.all_package_names():
            yield self.get_pkg_class(name)

-    def exists(self, pkg_name: str) -> bool:
+    def exists(self, pkg_name):
        """Whether a package with the supplied name exists."""
        if pkg_name is None:
            return False
@@ -1230,22 +1201,28 @@ def last_mtime(self):
        """Time a package file in this repo was last updated."""
        return self._pkg_checker.last_mtime()

-    def is_virtual(self, pkg_name: str) -> bool:
+    def is_virtual(self, pkg_name):
        """Return True if the package with this name is virtual, False otherwise.

        This function uses the provider index. If calling from a code block that
        is used to construct the provider index use the ``is_virtual_safe`` function.
+
+        Args:
+            pkg_name (str): name of the package we want to check
        """
        return pkg_name in self.provider_index

-    def is_virtual_safe(self, pkg_name: str) -> bool:
+    def is_virtual_safe(self, pkg_name):
        """Return True if the package with this name is virtual, False otherwise.

        This function doesn't use the provider index.
+
+        Args:
+            pkg_name (str): name of the package we want to check
        """
        return not self.exists(pkg_name) or self.get_pkg_class(pkg_name).virtual

-    def get_pkg_class(self, pkg_name: str) -> Type["spack.package_base.PackageBase"]:
+    def get_pkg_class(self, pkg_name):
        """Get the class for the package out of its module.

        First loads (or fetches from cache) a module for the
@@ -1257,8 +1234,7 @@ def get_pkg_class(self, pkg_name: str) -> Type["spack.package_base.PackageBase"]
        fullname = f"{self.full_namespace}.{pkg_name}"

        try:
-            with REPOS_FINDER.switch_repo(self._finder or self):
-                module = importlib.import_module(fullname)
+            module = importlib.import_module(fullname)
        except ImportError:
            raise UnknownPackageError(fullname)
        except Exception as e:
@@ -1269,21 +1245,26 @@ def get_pkg_class(self, pkg_name: str) -> Type["spack.package_base.PackageBase"]
        if not inspect.isclass(cls):
            tty.die(f"{pkg_name}.{class_name} is not a class")

-        # Clear any prior changes to class attributes in case the class was loaded from the
-        # same repo, but with different overrides
+        new_cfg_settings = (
+            spack.config.get("packages").get(pkg_name, {}).get("package_attributes", {})
+        )
+
        overridden_attrs = getattr(cls, "overridden_attrs", {})
        attrs_exclusively_from_config = getattr(cls, "attrs_exclusively_from_config", [])
+        # Clear any prior changes to class attributes in case the config has
+        # since changed
        for key, val in overridden_attrs.items():
            setattr(cls, key, val)
        for key in attrs_exclusively_from_config:
            delattr(cls, key)

-        # Keep track of every class attribute that is overridden: if different overrides
-        # dictionaries are used on the same physical repo, we make sure to restore the original
-        # config values
+        # Keep track of every class attribute that is overridden by the config:
+        # if the config changes between calls to this method, we make sure to
+        # restore the original config values (in case the new config no longer
+        # sets attributes that it used to)
        new_overridden_attrs = {}
        new_attrs_exclusively_from_config = set()
-        for key, val in self.overrides.get(pkg_name, {}).items():
+        for key, val in new_cfg_settings.items():
            if hasattr(cls, key):
                new_overridden_attrs[key] = getattr(cls, key)
            else:
@@ -1310,13 +1291,13 @@ def partition_package_name(self, pkg_name: str) -> Tuple[str, str]:

        return namespace, pkg_name

-    def __str__(self) -> str:
-        return f"Repo '{self.namespace}' at {self.root}"
+    def __str__(self):
+        return "[Repo '%s' at '%s']" % (self.namespace, self.root)

-    def __repr__(self) -> str:
+    def __repr__(self):
        return self.__str__()

-    def __contains__(self, pkg_name: str) -> bool:
+    def __contains__(self, pkg_name):
        return self.exists(pkg_name)

@@ -1392,17 +1373,12 @@ def create_repo(root, namespace=None, subdir=packages_dir_name):
    return full_path, namespace


-def from_path(path: str) -> "Repo":
-    """Returns a repository from the path passed as input. Injects the global misc cache."""
-    return Repo(path, cache=spack.caches.MISC_CACHE)
-
-
def create_or_construct(path, namespace=None):
    """Create a repository, or just return a Repo if it already exists."""
    if not os.path.exists(path):
        fs.mkdirp(path)
        create_repo(path, namespace)
-    return from_path(path)
+    return Repo(path)


def _path(configuration=None):
@@ -1411,9 +1387,7 @@ def _path(configuration=None):
    return create(configuration=configuration)


-def create(
-    configuration: Union["spack.config.Configuration", llnl.util.lang.Singleton]
-) -> RepoPath:
+def create(configuration):
    """Create a RepoPath from a configuration object.

    Args:
@@ -1422,17 +1396,7 @@ def create(
    repo_dirs = configuration.get("repos")
    if not repo_dirs:
        raise NoRepoConfiguredError("Spack configuration contains no package repositories.")
-
-    overrides = {}
-    for pkg_name, data in configuration.get("packages").items():
-        if pkg_name == "all":
-            continue
-        value = data.get("package_attributes", {})
-        if not value:
-            continue
-        overrides[pkg_name] = value
-
-    return RepoPath(*repo_dirs, cache=spack.caches.MISC_CACHE, overrides=overrides)
+    return RepoPath(*repo_dirs)


#: Singleton repo path instance
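For context on what the hunk above removes: on the base side, create() also gathers per-package `package_attributes` entries from the `packages` configuration section and hands them to RepoPath as class-attribute overrides. A minimal sketch of just that collection step, using a hypothetical in-memory dict in place of configuration.get("packages") (the package names and URL are made up):

# Hypothetical stand-in for configuration.get("packages").
packages_config = {
    "all": {"compiler": ["gcc"]},  # the "all" entry is always skipped
    "mpich": {"package_attributes": {"url": "https://example.org/mpich.tar.gz"}},
    "zlib": {},  # no package_attributes, so it is skipped too
}

overrides = {}
for pkg_name, data in packages_config.items():
    if pkg_name == "all":
        continue
    value = data.get("package_attributes", {})
    if not value:
        continue
    overrides[pkg_name] = value

# overrides == {"mpich": {"url": "https://example.org/mpich.tar.gz"}}

On the head side this pre-collection disappears because get_pkg_class() re-reads the same configuration on every call instead.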
@@ -1449,20 +1413,20 @@ def all_package_names(include_virtuals=False):


@contextlib.contextmanager
-def use_repositories(
-    *paths_and_repos: Union[str, Repo], override: bool = True
-) -> Generator[RepoPath, None, None]:
+def use_repositories(*paths_and_repos, **kwargs):
    """Use the repositories passed as arguments within the context manager.

    Args:
        *paths_and_repos: paths to the repositories to be used, or
            already constructed Repo objects
-        override: if True use only the repositories passed as input,
+        override (bool): if True use only the repositories passed as input,
            if False add them to the top of the list of current repositories.
    Returns:
        Corresponding RepoPath object
    """
    global PATH
+    # TODO (Python 2.7): remove this kwargs on deprecation of Python 2.7 support
+    override = kwargs.get("override", True)
    paths = [getattr(x, "root", x) for x in paths_and_repos]
    scope_name = "use-repo-{}".format(uuid.uuid4())
    repos_key = "repos:" if override else "repos"
@@ -1471,8 +1435,7 @@ def use_repositories(
    )
    PATH, saved = create(configuration=spack.config.CONFIG), PATH
    try:
-        with REPOS_FINDER.switch_repo(PATH):  # type: ignore
-            yield PATH
+        yield PATH
    finally:
        spack.config.CONFIG.remove_scope(scope_name=scope_name)
        PATH = saved
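Both sides of this hunk keep the same calling convention, so client code is unaffected; only the keyword handling and the finder bookkeeping differ. A hedged usage sketch (the repository path is hypothetical):

import spack.repo

# Make only the given repository visible inside the block; override=True
# (the default) replaces the configured repositories rather than prepending.
with spack.repo.use_repositories("/path/to/extra/repo", override=True) as repo_path:
    names = repo_path.all_package_names()

On exit the temporary configuration scope is removed and the previous global PATH is restored, so the swap is safe even if the body raises.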
@@ -1572,9 +1535,10 @@ class UnknownNamespaceError(UnknownEntityError):
    """Raised when we encounter an unknown namespace"""

    def __init__(self, namespace, name=None):
-        msg, long_msg = f"Unknown namespace: {namespace}", None
+        msg, long_msg = "Unknown namespace: {}".format(namespace), None
        if name == "yaml":
-            long_msg = f"Did you mean to specify a filename with './{namespace}.{name}'?"
+            long_msg = "Did you mean to specify a filename with './{}.{}'?"
+            long_msg = long_msg.format(namespace, name)
        super().__init__(msg, long_msg)


@@ -44,6 +44,7 @@
                },
            ]
        },
+        "prefer_older": {"type": "boolean"},
        "enable_node_namespace": {"type": "boolean"},
        "targets": {
            "type": "object",
@@ -23,7 +23,6 @@

import llnl.util.lang
import llnl.util.tty as tty
-from llnl.util.lang import elide_list

import spack
import spack.binary_distribution
@@ -117,8 +116,6 @@ class Provenance(enum.IntEnum):
    PACKAGE_PY = enum.auto()
    # An installed spec
    INSTALLED = enum.auto()
-    # lower provenance for installed git refs so concretizer prefers StandardVersion installs
-    INSTALLED_GIT_VERSION = enum.auto()
    # A runtime injected from another package (e.g. a compiler)
    RUNTIME = enum.auto()

@@ -622,9 +619,8 @@ def _external_config_with_implicit_externals(configuration):


class ErrorHandler:
-    def __init__(self, model, input_specs: List[spack.spec.Spec]):
+    def __init__(self, model):
        self.model = model
-        self.input_specs = input_specs
        self.full_model = None

    def multiple_values_error(self, attribute, pkg):
@@ -711,13 +707,12 @@ def handle_error(self, msg, *args):
        return msg

    def message(self, errors) -> str:
-        input_specs = ", ".join(elide_list([f"`{s}`" for s in self.input_specs], 5))
-        header = f"failed to concretize {input_specs} for the following reasons:"
-        messages = (
-            f" {idx+1:2}. {self.handle_error(msg, *args)}"
+        messages = [
+            f" {idx+1: 2}. {self.handle_error(msg, *args)}"
            for idx, (_, msg, args) in enumerate(errors)
-        )
-        return "\n".join((header, *messages))
+        ]
+        header = "concretization failed for the following reasons:\n"
+        return "\n".join([header] + messages)

    def raise_if_errors(self):
        initial_error_args = extract_args(self.model, "error")
@@ -753,7 +748,7 @@ def on_model(model):
                f"unexpected error during concretization [{str(e)}]. "
                f"Please report a bug at https://github.com/spack/spack/issues"
            )
-            raise spack.error.SpackError(msg) from e
+            raise spack.error.SpackError(msg)
        raise UnsatisfiableSpecError(msg)


@@ -849,6 +844,8 @@ def solve(self, setup, specs, reuse=None, output=None, control=None, allow_depre
        parent_dir = os.path.dirname(__file__)
        self.control.load(os.path.join(parent_dir, "concretize.lp"))
        self.control.load(os.path.join(parent_dir, "heuristic.lp"))
+        if spack.config.CONFIG.get("concretizer:duplicates:strategy", "none") != "none":
+            self.control.load(os.path.join(parent_dir, "heuristic_separate.lp"))
        self.control.load(os.path.join(parent_dir, "display.lp"))
        if not setup.concretize_everything:
            self.control.load(os.path.join(parent_dir, "when_possible.lp"))
@@ -897,7 +894,7 @@ def on_model(model):
        min_cost, best_model = min(models)

        # first check for errors
-        error_handler = ErrorHandler(best_model, specs)
+        error_handler = ErrorHandler(best_model)
        error_handler.raise_if_errors()

        # build specs from spec attributes in the model
@@ -1028,7 +1025,7 @@ def __iter__(self):
class SpackSolverSetup:
    """Class to set up and run a Spack concretization solve."""

-    def __init__(self, tests: bool = False):
+    def __init__(self, tests: bool = False, prefer_older=False):
        # these are all initialized in setup()
        self.gen: "ProblemInstanceBuilder" = ProblemInstanceBuilder()
        self.possible_virtuals: Set[str] = set()
@@ -1061,6 +1058,8 @@ def __init__(self, tests: bool = False):
        # whether to add installed/binary hashes to the solve
        self.tests = tests

+        self.prefer_older = prefer_older
+
        # If False allows for input specs that are not solved
        self.concretize_everything = True

@@ -1094,7 +1093,9 @@ def key_fn(version):
            list(sorted(group, reverse=True, key=lambda x: vn.ver(x.version)))
        )

-        for weight, declared_version in enumerate(most_to_least_preferred):
+        for weight, declared_version in enumerate(
+            reversed(most_to_least_preferred) if self.prefer_older else most_to_least_preferred
+        ):
            self.gen.fact(
                fn.pkg_fact(
                    pkg.name,
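The whole behavioral effect of prefer_older is the reversed enumeration above: the enumeration index becomes the version's solver weight, and lower weights are preferred, so walking the preference list back-to-front makes the oldest declared version the cheapest. A toy illustration of the weighting (the version strings are made up):

most_to_least_preferred = ["2.1", "2.0", "1.9"]  # hypothetical preference order

def version_weights(versions, prefer_older=False):
    # Position in the enumeration becomes the solver weight; lower wins.
    ordered = reversed(versions) if prefer_older else versions
    return {v: w for w, v in enumerate(ordered)}

print(version_weights(most_to_least_preferred))                     # {'2.1': 0, '2.0': 1, '1.9': 2}
print(version_weights(most_to_least_preferred, prefer_older=True))  # {'1.9': 0, '2.0': 1, '2.1': 2}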
@@ -1883,8 +1884,11 @@ def _spec_clauses(
            )

            clauses.append(f.variant_value(spec.name, vname, value))

            if variant.propagate:
-                clauses.append(f.propagate(spec.name, fn.variant_value(vname, value)))
+                clauses.append(
+                    f.variant_propagation_candidate(spec.name, vname, value, spec.name)
+                )

            # Tell the concretizer that this is a possible value for the
            # variant, to account for things like int/str values where we
@@ -1917,12 +1921,9 @@ def _spec_clauses(
        for flag_type, flags in spec.compiler_flags.items():
            for flag in flags:
                clauses.append(f.node_flag(spec.name, flag_type, flag))
+                clauses.append(f.node_flag_source(spec.name, flag_type, spec.name))
                if not spec.concrete and flag.propagate is True:
-                    clauses.append(
-                        f.propagate(
-                            spec.name, fn.node_flag(flag_type, flag), fn.edge_types("link", "run")
-                        )
-                    )
+                    clauses.append(f.node_flag_propagate(spec.name, flag_type))

        # dependencies
        if spec.concrete:
@@ -2075,7 +2076,7 @@ def define_ad_hoc_versions_from_specs(
            # best possible, so they're guaranteed to be used preferentially.
            version = s.versions.concrete

-            if version is None or (any((v == version) for v in self.possible_versions[s.name])):
+            if version is None or any(v == version for v in self.possible_versions[s.name]):
                continue

            if require_checksum and not _is_checksummed_git_version(version):
@@ -2389,16 +2390,9 @@ def concrete_specs(self):
            # - Add OS to possible OS's
            for dep in spec.traverse():
                self.possible_versions[dep.name].add(dep.version)
-                if isinstance(dep.version, vn.GitVersion):
-                    self.declared_versions[dep.name].append(
-                        DeclaredVersion(
-                            version=dep.version, idx=0, origin=Provenance.INSTALLED_GIT_VERSION
-                        )
-                    )
-                else:
-                    self.declared_versions[dep.name].append(
-                        DeclaredVersion(version=dep.version, idx=0, origin=Provenance.INSTALLED)
-                    )
+                self.declared_versions[dep.name].append(
+                    DeclaredVersion(version=dep.version, idx=0, origin=Provenance.INSTALLED)
+                )
                self.possible_oses.add(dep.os)

    def define_concrete_input_specs(self, specs, possible):
@@ -2747,7 +2741,9 @@ class _Head:
    node_compiler = fn.attr("node_compiler_set")
    node_compiler_version = fn.attr("node_compiler_version_set")
    node_flag = fn.attr("node_flag_set")
-    propagate = fn.attr("propagate")
+    node_flag_source = fn.attr("node_flag_source")
+    node_flag_propagate = fn.attr("node_flag_propagate")
+    variant_propagation_candidate = fn.attr("variant_propagation_candidate")


class _Body:
@@ -2762,7 +2758,9 @@ class _Body:
    node_compiler = fn.attr("node_compiler")
    node_compiler_version = fn.attr("node_compiler_version")
    node_flag = fn.attr("node_flag")
-    propagate = fn.attr("propagate")
+    node_flag_source = fn.attr("node_flag_source")
+    node_flag_propagate = fn.attr("node_flag_propagate")
+    variant_propagation_candidate = fn.attr("variant_propagation_candidate")


class ProblemInstanceBuilder:
@@ -3236,39 +3234,6 @@ def requires(self, impose: str, *, when: str):
        self.runtime_conditions.add((imposed_spec, when_spec))
        self.reset()

-    def propagate(self, constraint_str: str, *, when: str):
-        msg = "the 'propagate' method can be called only with pkg('*')"
-        assert self.current_package == "*", msg
-
-        when_spec = spack.spec.Spec(when)
-        assert when_spec.name is None, "only anonymous when specs are accepted"
-
-        placeholder = "XXX"
-        node_variable = "node(ID, Package)"
-        when_spec.name = placeholder
-
-        body_clauses = self._setup.spec_clauses(when_spec, body=True)
-        body_str = (
-            f" {f',{os.linesep} '.join(str(x) for x in body_clauses)},\n"
-            f" not external({node_variable}),\n"
-            f" not runtime(Package)"
-        ).replace(f'"{placeholder}"', f"{node_variable}")
-
-        constraint_spec = spack.spec.Spec(constraint_str)
-        assert constraint_spec.name is None, "only anonymous constraint specs are accepted"
-
-        constraint_spec.name = placeholder
-        constraint_clauses = self._setup.spec_clauses(constraint_spec, body=False)
-        for clause in constraint_clauses:
-            if clause.args[0] == "node_compiler_version_satisfies":
-                self._setup.compiler_version_constraints.add(constraint_spec.compiler)
-                args = f'"{constraint_spec.compiler.name}", "{constraint_spec.compiler.versions}"'
-                head_str = f"propagate({node_variable}, node_compiler_version_satisfies({args}))"
-                rule = f"{head_str} :-\n{body_str}.\n\n"
-                self.rules.append(rule)
-
-        self.reset()
-
    def consume_facts(self):
        """Consume the facts collected by this object, and emits rules and
        facts for the runtimes.
@@ -3348,8 +3313,6 @@ def hash(self, node, h):
    def node(self, node):
        if node not in self._specs:
            self._specs[node] = spack.spec.Spec(node.pkg)
-            for flag_type in spack.spec.FlagMap.valid_compiler_flags():
-                self._specs[node].compiler_flags[flag_type] = []

    def _arch(self, node):
        arch = self._specs[node].architecture
@@ -3402,6 +3365,9 @@ def node_flag(self, node, flag_type, flag):
    def node_flag_source(self, node, flag_type, source):
        self._flag_sources[(node, flag_type)].add(source)

+    def no_flags(self, node, flag_type):
+        self._specs[node].compiler_flags[flag_type] = []
+
    def external_spec_selected(self, node, idx):
        """This means that the external spec and index idx has been selected for this package."""
        packages_yaml = _external_config_with_implicit_externals(spack.config.CONFIG)
@@ -3494,7 +3460,7 @@ def reorder_flags(self):
            ordered_compiler_flags = list(llnl.util.lang.dedupe(from_compiler + from_sources))
            compiler_flags = spec.compiler_flags.get(flag_type, [])

-            msg = f"{set(compiler_flags)} does not equal {set(ordered_compiler_flags)}"
+            msg = "%s does not equal %s" % (set(compiler_flags), set(ordered_compiler_flags))
            assert set(compiler_flags) == set(ordered_compiler_flags), msg

            spec.compiler_flags.update({flag_type: ordered_compiler_flags})
@@ -3564,8 +3530,9 @@ def build_specs(self, function_tuples):
            # do not bother calling actions on it except for node_flag_source,
            # since node_flag_source is tracking information not in the spec itself
            spec = self._specs.get(args[0])
-            if spec and spec.concrete and name != "node_flag_source":
-                continue
+            if spec and spec.concrete:
+                if name != "node_flag_source":
+                    continue

            action(*args)

@@ -3837,6 +3804,8 @@ def __init__(self):
        self.driver = PyclingoDriver()
        self.selector = ReusableSpecsSelector(configuration=spack.config.CONFIG)

+        self.prefer_older = spack.config.get("concretizer:prefer_older", False)
+
    @staticmethod
    def _check_input_and_extract_concrete_specs(specs):
        reusable = []
@@ -3875,7 +3844,7 @@ def solve(
        specs = [s.lookup_hash() for s in specs]
        reusable_specs = self._check_input_and_extract_concrete_specs(specs)
        reusable_specs.extend(self.selector.reusable_specs(specs))
-        setup = SpackSolverSetup(tests=tests)
+        setup = SpackSolverSetup(tests=tests, prefer_older=self.prefer_older)
        output = OutputConfiguration(timers=timers, stats=stats, out=out, setup_only=setup_only)
        result, _, _ = self.driver.solve(
            setup, specs, reuse=reusable_specs, output=output, allow_deprecated=allow_deprecated
@@ -3904,7 +3873,7 @@ def solve_in_rounds(
        specs = [s.lookup_hash() for s in specs]
        reusable_specs = self._check_input_and_extract_concrete_specs(specs)
        reusable_specs.extend(self.selector.reusable_specs(specs))
-        setup = SpackSolverSetup(tests=tests)
+        setup = SpackSolverSetup(tests=tests, prefer_older=self.prefer_older)

        # Tell clingo that we don't have to solve all the inputs at once
        setup.concretize_everything = False
@@ -29,6 +29,7 @@
:- attr("variant_value", PackageNode, _, _), not attr("node", PackageNode).
:- attr("node_flag_compiler_default", PackageNode), not attr("node", PackageNode).
:- attr("node_flag", PackageNode, _, _), not attr("node", PackageNode).
+:- attr("no_flags", PackageNode, _), not attr("node", PackageNode).
:- attr("external_spec_selected", PackageNode, _), not attr("node", PackageNode).
:- attr("depends_on", ParentNode, _, _), not attr("node", ParentNode).
:- attr("depends_on", _, ChildNode, _), not attr("node", ChildNode).
@@ -255,7 +256,6 @@ possible_version_weight(node(ID, Package), Weight)
:- attr("version", node(ID, Package), Version),
   version_weight(node(ID, Package), Weight),
   not pkg_fact(Package, version_declared(Version, Weight, "installed")),
-   not pkg_fact(Package, version_declared(Version, Weight, "installed_git_version")),
   not build(node(ID, Package)),
   internal_error("Build version weight used for reused package").

@@ -611,18 +611,25 @@ do_not_impose(EffectID, node(X, Package))
% Virtual dependency weights
%-----------------------------------------------------------------------------

-% A provider has different possible weights depending on its preference. This rule ensures that
+% A provider may have different possible weights depending on whether it's an external
+% or not, or on preferences expressed in packages.yaml etc. This rule ensures that
% we select the weight, among the possible ones, that minimizes the overall objective function.
1 { provider_weight(DependencyNode, VirtualNode, Weight) :
    possible_provider_weight(DependencyNode, VirtualNode, Weight, _) } 1
 :- provider(DependencyNode, VirtualNode), internal_error("Package provider weights must be unique").

-% Any configured provider has a weight based on index in the preference list
+% A provider that is an external can use a weight of 0
+possible_provider_weight(DependencyNode, VirtualNode, 0, "external")
+  :- provider(DependencyNode, VirtualNode),
+     external(DependencyNode).
+
+% A provider mentioned in the default configuration can use a weight
+% according to its priority in the list of providers
possible_provider_weight(node(ProviderID, Provider), node(VirtualID, Virtual), Weight, "default")
  :- provider(node(ProviderID, Provider), node(VirtualID, Virtual)),
     default_provider_preference(Virtual, Provider, Weight).

-% Any non-configured provider has a default weight of 100
+% Any provider can use 100 as a weight, which is very high and discourages its use
possible_provider_weight(node(ProviderID, Provider), VirtualNode, 100, "fallback")
  :- provider(node(ProviderID, Provider), VirtualNode).

@@ -804,6 +811,37 @@ node_has_variant(node(ID, Package), Variant) :-
   pkg_fact(Package, variant(Variant)),
   attr("node", node(ID, Package)).

+% Variant propagation is forwarded to dependencies
+attr("variant_propagation_candidate", PackageNode, Variant, Value, Source) :-
+  attr("node", PackageNode),
+  depends_on(ParentNode, PackageNode),
+  attr("variant_value", node(_, Source), Variant, Value),
+  attr("variant_propagation_candidate", ParentNode, Variant, _, Source).
+
+% If the node is a candidate, and it has the variant and value,
+% then those variant and value should be propagated
+attr("variant_propagate", node(ID, Package), Variant, Value, Source) :-
+  attr("variant_propagation_candidate", node(ID, Package), Variant, Value, Source),
+  node_has_variant(node(ID, Package), Variant),
+  pkg_fact(Package, variant_possible_value(Variant, Value)),
+  not attr("variant_set", node(ID, Package), Variant).
+
+% Propagate the value, if there is the corresponding attribute
+attr("variant_value", PackageNode, Variant, Value) :- attr("variant_propagate", PackageNode, Variant, Value, _).
+
+% If a variant is propagated, we cannot have extraneous values (this is for multi valued variants)
+variant_is_propagated(PackageNode, Variant) :- attr("variant_propagate", PackageNode, Variant, _, _).
+:- variant_is_propagated(PackageNode, Variant),
+   attr("variant_value", PackageNode, Variant, Value),
+   not attr("variant_propagate", PackageNode, Variant, Value, _).
+
+% Cannot receive different values from different sources on the same variant
+error(100, "{0} and {1} cannot both propagate variant '{2}' to package {3} with values '{4}' and '{5}'", Source1, Source2, Variant, Package, Value1, Value2) :-
+  attr("variant_propagate", node(X, Package), Variant, Value1, Source1),
+  attr("variant_propagate", node(X, Package), Variant, Value2, Source2),
+  node_has_variant(node(X, Package), Variant),
+  Value1 < Value2, Source1 < Source2.
+
 % a variant cannot be set if it is not a variant on the package
 error(100, "Cannot set variant '{0}' for package '{1}' because the variant condition cannot be satisfied for the given spec", Variant, Package)
   :- attr("variant_set", node(X, Package), Variant),
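The added rules encode a downward flow: a propagation candidate travels along `depends_on` edges from its source, and the value is applied only where the variant exists and was not set explicitly. A hedged plain-Python sketch of that semantics, on a toy graph with illustrative names:

```python
# Toy sketch, not solver code: propagate a variant value from a source node
# down the dependency DAG, applying it only where the variant is defined
# and not explicitly pinned by the user.
from typing import Dict, List, Set

deps: Dict[str, List[str]] = {"root": ["a"], "a": ["b"], "b": []}
has_variant: Set[str] = {"root", "b"}   # packages that define the variant
explicitly_set: Set[str] = set()        # nodes where the user pinned it

def propagate(source: str, value: str) -> Dict[str, str]:
    applied = {}
    stack = [source]
    while stack:
        node = stack.pop()
        if node != source and node in has_variant and node not in explicitly_set:
            applied[node] = value
        stack.extend(deps[node])        # candidates keep flowing through 'a'
    return applied

print(propagate("root", "+shared"))     # {'b': '+shared'}: 'a' lacks the variant
```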
@@ -881,7 +919,7 @@ variant_not_default(node(ID, Package), Variant, Value)
   % variants set explicitly on the CLI don't count as non-default
   not attr("variant_set", node(ID, Package), Variant, Value),
   % variant values forced by propagation don't count as non-default
-  not propagate(node(ID, Package), variant_value(Variant, Value)),
+  not attr("variant_propagate", node(ID, Package), Variant, Value, _),
   % variants set on externals that we could use don't count as non-default
   % this makes spack prefer to use an external over rebuilding with the
   % default configuration
@@ -894,7 +932,7 @@ variant_default_not_used(node(ID, Package), Variant, Value)
   :- variant_default_value(Package, Variant, Value),
      node_has_variant(node(ID, Package), Variant),
      not attr("variant_value", node(ID, Package), Variant, Value),
-     not propagate(node(ID, Package), variant_value(Variant, _)),
+     not attr("variant_propagate", node(ID, Package), Variant, _, _),
      attr("node", node(ID, Package)).

 % The variant is set in an external spec
@@ -951,101 +989,6 @@ pkg_fact(Package, variant_single_value("dev_path"))
 #defined variant_default_value/3.
 #defined variant_default_value_from_packages_yaml/3.

-%-----------------------------------------------------------------------------
-% Propagation semantics
-%-----------------------------------------------------------------------------
-
-% Propagation roots have a corresponding attr("propagate", ...)
-propagate(RootNode, PropagatedAttribute) :- attr("propagate", RootNode, PropagatedAttribute).
-propagate(RootNode, PropagatedAttribute, EdgeTypes) :- attr("propagate", RootNode, PropagatedAttribute, EdgeTypes).
-
-% Propagate an attribute along edges to child nodes
-propagate(ChildNode, PropagatedAttribute) :-
-  propagate(ParentNode, PropagatedAttribute),
-  depends_on(ParentNode, ChildNode).
-
-propagate(ChildNode, PropagatedAttribute, edge_types(DepType1, DepType2)) :-
-  propagate(ParentNode, PropagatedAttribute, edge_types(DepType1, DepType2)),
-  depends_on(ParentNode, ChildNode),
-  1 { attr("depends_on", ParentNode, ChildNode, DepType1); attr("depends_on", ParentNode, ChildNode, DepType2) }.
-
-%-----------------------------------------------------------------------------
-% Activation of propagated values
-%-----------------------------------------------------------------------------
-
-%----
-% Variants
-%----
-
-% If a variant is propagated, and can be accepted, set its value
-attr("variant_value", node(ID, Package), Variant, Value) :-
-  propagate(node(ID, Package), variant_value(Variant, Value)),
-  node_has_variant(node(ID, Package), Variant),
-  pkg_fact(Package, variant_possible_value(Variant, Value)),
-  not attr("variant_set", node(ID, Package), Variant).
-
-% If a variant is propagated, we cannot have extraneous values
-variant_is_propagated(PackageNode, Variant) :-
-  attr("variant_value", PackageNode, Variant, Value),
-  propagate(PackageNode, variant_value(Variant, Value)),
-  not attr("variant_set", PackageNode, Variant).
-
-:- variant_is_propagated(PackageNode, Variant),
-   attr("variant_value", PackageNode, Variant, Value),
-   not propagate(PackageNode, variant_value(Variant, Value)).
-
-%----
-% Flags
-%----
-
-% A propagated flag implies:
-% 1. The same flag type is not set on this node
-% 2. This node has the same compiler as the propagation source
-
-propagated_flag(node(PackageID, Package), node_flag(FlagType, Flag), SourceNode) :-
-  propagate(node(PackageID, Package), node_flag(FlagType, Flag), _),
-  not attr("node_flag_set", node(PackageID, Package), FlagType, _),
-  % Same compiler as propagation source
-  node_compiler(node(PackageID, Package), CompilerID),
-  node_compiler(SourceNode, CompilerID),
-  attr("propagate", SourceNode, node_flag(FlagType, Flag), _),
-  node(PackageID, Package) != SourceNode,
-  not runtime(Package).
-
-attr("node_flag", PackageNode, FlagType, Flag) :- propagated_flag(PackageNode, node_flag(FlagType, Flag), _).
-attr("node_flag_source", PackageNode, FlagType, SourceNode) :- propagated_flag(PackageNode, node_flag(FlagType, _), SourceNode).
-
-% Cannot propagate the same flag from two distinct sources
-error(100, "{0} and {1} cannot both propagate compiler flags '{2}' to {3}", Source1, Source2, Package, FlagType) :-
-  propagated_flag(node(ID, Package), node_flag(FlagType, _), node(_, Source1)),
-  propagated_flag(node(ID, Package), node_flag(FlagType, _), node(_, Source2)),
-  Source1 < Source2.
-
-%----
-% Compiler constraints
-%----
-
-attr("node_compiler_version_satisfies", node(ID, Package), Compiler, Version) :-
-  propagate(node(ID, Package), node_compiler_version_satisfies(Compiler, Version)),
-  node_compiler(node(ID, Package), CompilerID),
-  compiler_name(CompilerID, Compiler),
-  not runtime(Package),
-  not external(Package).
-
-%-----------------------------------------------------------------------------
-% Runtimes
-%-----------------------------------------------------------------------------
-
-% Check whether the DAG has any built package
-has_built_packages() :- build(X), not external(X).
-
-% If we build packages, the runtime nodes must use an available compiler
-1 { node_compiler(PackageNode, CompilerID) : build(PackageNode), not external(PackageNode) } :-
-  has_built_packages(),
-  runtime(RuntimePackage),
-  node_compiler(node(_, RuntimePackage), CompilerID).
-
 %-----------------------------------------------------------------------------
 % Platform semantics
 %-----------------------------------------------------------------------------
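The removed `propagate/3` rule filtered propagation by edge type: an attribute kept flowing only across `depends_on` edges whose type matched one of the two requested types. A rough sketch of what that computes, under the assumption of a small edge-labelled DAG with made-up names:

```python
# Toy sketch of edge-typed propagation: 'build'-only edges block the flow.
edges = [("root", "a", "link"), ("root", "b", "build"), ("a", "c", "run")]

def propagate(start, attribute, allowed=("link", "run")):
    reached, frontier = {start: attribute}, [start]
    while frontier:
        parent = frontier.pop()
        for src, dst, deptype in edges:
            if src == parent and deptype in allowed and dst not in reached:
                reached[dst] = attribute
                frontier.append(dst)
    return reached

print(propagate("root", "node_flag(cflags, -O2)"))
# reaches root, a, c; 'b' is behind a build-only edge and is skipped
```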
@@ -1147,18 +1090,10 @@ attr("node_target", PackageNode, Target)
   :- attr("node", PackageNode), attr("node_target_set", PackageNode, Target).

 % each node has the weight of its assigned target
-target_weight(Target, 0)
-  :- attr("node", PackageNode),
-     attr("node_target", PackageNode, Target),
-     attr("node_target_set", PackageNode, Target).
-
-node_target_weight(PackageNode, MinWeight)
-  :- attr("node", PackageNode),
-     attr("node_target", PackageNode, Target),
-     target(Target),
-     MinWeight = #min { Weight : target_weight(Target, Weight) }.
-
-:- attr("node_target", PackageNode, Target), not node_target_weight(PackageNode, _).
+node_target_weight(node(ID, Package), Weight)
+  :- attr("node", node(ID, Package)),
+     attr("node_target", node(ID, Package), Target),
+     target_weight(Target, Weight).

 % compatibility rules for targets among nodes
 node_target_match(ParentNode, DependencyNode)
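The two encodings differ in how a node's target weight is obtained: the removed rules take a `#min` aggregate over all weights declared for the target, while the surviving rule reads a single declared weight directly. A small sketch of why the two agree when each target declares exactly one weight (toy data, illustrative names):

```python
# Sketch: #min over a one-element weight set equals the direct lookup.
target_weights = {"x86_64_v3": [1], "x86_64": [2]}

def node_target_weight_old(target):
    return min(target_weights[target])   # mirrors the #min aggregate

def node_target_weight_new(target):
    (weight,) = target_weights[target]   # assumes one weight per target
    return weight

assert node_target_weight_old("x86_64") == node_target_weight_new("x86_64") == 2
```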
@@ -1220,12 +1155,12 @@ error(10, "No valid compiler for {0} satisfies '%{1}'", Package, Compiler)

 % If the compiler of a node must satisfy a constraint, then its version
 % must be chosen among the ones that satisfy said constraint
-error(100, "Package {0} cannot satisfy '%{1}@{2}'", Package, Compiler, Constraint)
+error(100, "No valid version for '{0}' compiler '{1}' satisfies '@{2}'", Package, Compiler, Constraint)
   :- attr("node", node(X, Package)),
      attr("node_compiler_version_satisfies", node(X, Package), Compiler, Constraint),
      not compiler_version_satisfies(Compiler, Constraint, _).

-error(100, "Package {0} cannot satisfy '%{1}@{2}'", Package, Compiler, Constraint)
+error(100, "No valid version for '{0}' compiler '{1}' satisfies '@{2}'", Package, Compiler, Constraint)
   :- attr("node", node(X, Package)),
      attr("node_compiler_version_satisfies", node(X, Package), Compiler, Constraint),
      not compiler_version_satisfies(Compiler, Constraint, ID),
@@ -1306,9 +1241,45 @@ error(100, "Compiler {1}@{2} requested for {0} cannot be found. Set install_miss
 % Compiler flags
 %-----------------------------------------------------------------------------

+% propagate flags when compiler match
+can_inherit_flags(PackageNode, DependencyNode, FlagType)
+  :- same_compiler(PackageNode, DependencyNode),
+     not attr("node_flag_set", DependencyNode, FlagType, _),
+     flag_type(FlagType).
+
+same_compiler(PackageNode, DependencyNode)
+  :- depends_on(PackageNode, DependencyNode),
+     node_compiler(PackageNode, CompilerID),
+     node_compiler(DependencyNode, CompilerID),
+     compiler_id(CompilerID).
+
+node_flag_inherited(DependencyNode, FlagType, Flag)
+  :- attr("node_flag_set", PackageNode, FlagType, Flag),
+     can_inherit_flags(PackageNode, DependencyNode, FlagType),
+     attr("node_flag_propagate", PackageNode, FlagType).
+
+% Ensure propagation
+:- node_flag_inherited(PackageNode, FlagType, Flag),
+   can_inherit_flags(PackageNode, DependencyNode, FlagType),
+   attr("node_flag_propagate", PackageNode, FlagType).
+
+error(100, "{0} and {1} cannot both propagate compiler flags '{2}' to {3}", Source1, Source2, Package, FlagType) :-
+  depends_on(Source1, Package),
+  depends_on(Source2, Package),
+  attr("node_flag_propagate", Source1, FlagType),
+  attr("node_flag_propagate", Source2, FlagType),
+  can_inherit_flags(Source1, Package, FlagType),
+  can_inherit_flags(Source2, Package, FlagType),
+  Source1 < Source2.
+
 % remember where flags came from
-attr("node_flag_source", PackageNode, FlagType, PackageNode) :- attr("node_flag_set", PackageNode, FlagType, _).
-attr("node_flag_source", PackageNode, FlagType, PackageNode) :- attr("node_flag", PackageNode, FlagType, _), attr("hash", PackageNode, _).
+attr("node_flag_source", PackageNode, FlagType, PackageNode)
+  :- attr("node_flag_set", PackageNode, FlagType, _).
+
+attr("node_flag_source", DependencyNode, FlagType, Q)
+  :- attr("node_flag_source", PackageNode, FlagType, Q),
+     node_flag_inherited(DependencyNode, FlagType, _),
+     attr("node_flag_propagate", PackageNode, FlagType).

 % compiler flags from compilers.yaml are put on nodes if compiler matches
 attr("node_flag", PackageNode, FlagType, Flag)
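The added `can_inherit_flags`/`node_flag_inherited` rules gate flag inheritance on two conditions: both ends of the edge use the same compiler, and the dependency does not set that flag type itself. A plain-Python sketch of the same check, with an illustrative node table rather than Spack's data model:

```python
# Toy sketch of node_flag_inherited: a dependency inherits a propagated flag
# type only on a compiler match, and only if it does not set the type itself.
nodes = {
    "root": {"compiler": "gcc@12", "set_flags": {"cflags": "-O3"}, "propagate": {"cflags"}},
    "dep":  {"compiler": "gcc@12", "set_flags": {}, "propagate": set()},
}
depends_on = [("root", "dep")]

def inherited_flags(child):
    out = {}
    for parent, c in depends_on:
        if c != child:
            continue
        p, d = nodes[parent], nodes[child]
        for flag_type, flag in p["set_flags"].items():
            if (flag_type in p["propagate"]
                    and p["compiler"] == d["compiler"]
                    and flag_type not in d["set_flags"]):
                out[flag_type] = flag
    return out

print(inherited_flags("dep"))   # {'cflags': '-O3'}
```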
@@ -1328,8 +1299,15 @@ attr("node_flag_compiler_default", PackageNode)
   compiler_name(CompilerID, CompilerName),
   compiler_version(CompilerID, Version).

-% Flag set to something
+% if a flag is set to something or inherited, it's included
 attr("node_flag", PackageNode, FlagType, Flag) :- attr("node_flag_set", PackageNode, FlagType, Flag).
+attr("node_flag", PackageNode, FlagType, Flag) :- node_flag_inherited(PackageNode, FlagType, Flag).
+
+% if no node flags are set for a type, there are no flags.
+attr("no_flags", PackageNode, FlagType)
+  :- not attr("node_flag", PackageNode, FlagType, _),
+     attr("node", PackageNode),
+     flag_type(FlagType).

 #defined compiler_flag/3.
@@ -1518,7 +1496,7 @@ opt_criterion(45, "preferred providers (non-roots)").
 }.

 % Try to minimize the number of compiler mismatches in the DAG.
-opt_criterion(40, "compiler mismatches that are not required").
+opt_criterion(40, "compiler mismatches that are not from CLI").
 #minimize{ 0@240: #true }.
 #minimize{ 0@40: #true }.
 #minimize{
@@ -1528,7 +1506,7 @@ opt_criterion(40, "compiler mismatches that are not required").
   not runtime(Dependency)
 }.

-opt_criterion(39, "compiler mismatches that are required").
+opt_criterion(39, "compiler mismatches that are not from CLI").
 #minimize{ 0@239: #true }.
 #minimize{ 0@39: #true }.
 #minimize{
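The `#minimize{ 0@240: #true }` lines register each criterion at its priority level even when the cost set is empty. For readers unfamiliar with leveled optimization in clingo, a minimal sketch of how a higher level dominates a lower one, using invented atoms:

```python
# Requires: pip install clingo. Level 2 costs are compared before level 1,
# which is how the solver ranks criteria such as the mismatch counts above.
import clingo

PROGRAM = """
{ pick(a); pick(b) } = 1.
cost_high(a, 1). cost_high(b, 0).
cost_low(a, 0).  cost_low(b, 5).
#minimize { C@2, X : pick(X), cost_high(X, C) }.   % higher level wins first
#minimize { C@1, X : pick(X), cost_low(X, C)  }.
"""

ctl = clingo.Control()
ctl.add("base", [], PROGRAM)
ctl.ground([("base", [])])
ctl.solve(on_model=lambda m: print(m.symbols(shown=True), m.cost))
# ends at pick(b): its level-2 cost 0 beats a's 1, despite a larger level-1 cost
```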
@@ -4,35 +4,21 @@
 % SPDX-License-Identifier: (Apache-2.0 OR MIT)

 %=============================================================================
-% Heuristic to speed-up solves
+% Heuristic to speed-up solves (node with ID 0)
 %=============================================================================

-% No duplicates by default (most of them will be true)
-#heuristic attr("node", node(PackageID, Package)). [100, init]
-#heuristic attr("node", node(PackageID, Package)). [ 2, factor]
-#heuristic attr("virtual_node", node(VirtualID, Virtual)). [100, init]
-#heuristic attr("node", node(1..X-1, Package)) : max_dupes(Package, X), not virtual(Package), X > 1. [-1, sign]
-#heuristic attr("virtual_node", node(1..X-1, Package)) : max_dupes(Package, X), virtual(Package) , X > 1. [-1, sign]
-
-% Pick preferred version
-#heuristic attr("version", node(PackageID, Package), Version) : pkg_fact(Package, version_declared(Version, Weight)), attr("node", node(PackageID, Package)). [40, init]
-#heuristic version_weight(node(PackageID, Package), 0) : pkg_fact(Package, version_declared(Version, 0 )), attr("node", node(PackageID, Package)). [ 1, sign]
-#heuristic attr("version", node(PackageID, Package), Version) : pkg_fact(Package, version_declared(Version, 0 )), attr("node", node(PackageID, Package)). [ 1, sign]
-#heuristic attr("version", node(PackageID, Package), Version) : pkg_fact(Package, version_declared(Version, Weight)), attr("node", node(PackageID, Package)), Weight > 0. [-1, sign]
-
-% Use default variants
-#heuristic attr("variant_value", node(PackageID, Package), Variant, Value) : variant_default_value(Package, Variant, Value), attr("node", node(PackageID, Package)). [40, true]
-#heuristic attr("variant_value", node(PackageID, Package), Variant, Value) : not variant_default_value(Package, Variant, Value), attr("node", node(PackageID, Package)). [40, false]
-
-% Use default operating system and platform
-#heuristic attr("node_os", node(PackageID, Package), OS) : os(OS, 0), attr("root", node(PackageID, Package)). [40, true]
-#heuristic attr("node_platform", node(PackageID, Package), Platform) : allowed_platform(Platform), attr("root", node(PackageID, Package)). [40, true]
-
-% Use default targets
-#heuristic attr("node_target", node(PackageID, Package), Target) : target_weight(Target, Weight), attr("node", node(PackageID, Package)). [30, init]
-#heuristic attr("node_target", node(PackageID, Package), Target) : target_weight(Target, Weight), attr("node", node(PackageID, Package)). [ 2, factor]
-#heuristic attr("node_target", node(PackageID, Package), Target) : target_weight(Target, 0), attr("node", node(PackageID, Package)). [ 1, sign]
-#heuristic attr("node_target", node(PackageID, Package), Target) : target_weight(Target, Weight), attr("node", node(PackageID, Package)), Weight > 0. [-1, sign]
-
-% Use the default compilers
-#heuristic node_compiler(node(PackageID, Package), ID) : compiler_weight(ID, 0), compiler_id(ID), attr("node", node(PackageID, Package)). [30, init]
+%-----------------
+% Domain heuristic
+%-----------------
+
+% Root node
+#heuristic attr("version", node(0, Package), Version) : pkg_fact(Package, version_declared(Version, 0)), attr("root", node(0, Package)). [35, true]
+#heuristic version_weight(node(0, Package), 0) : pkg_fact(Package, version_declared(Version, 0)), attr("root", node(0, Package)). [35, true]
+#heuristic attr("variant_value", node(0, Package), Variant, Value) : variant_default_value(Package, Variant, Value), attr("root", node(0, Package)). [35, true]
+#heuristic attr("node_target", node(0, Package), Target) : target_weight(Target, 0), attr("root", node(0, Package)). [35, true]
+#heuristic node_target_weight(node(0, Package), 0) : attr("root", node(0, Package)). [35, true]
+#heuristic node_compiler(node(0, Package), CompilerID) : compiler_weight(ID, 0), compiler_id(ID), attr("root", node(0, Package)). [35, true]
+
+% Providers
+#heuristic attr("node", node(0, Package)) : default_provider_preference(Virtual, Package, 0), possible_in_link_run(Package). [30, true]
24 lib/spack/spack/solver/heuristic_separate.lp Normal file
@@ -0,0 +1,24 @@
+% Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
+% Spack Project Developers. See the top-level COPYRIGHT file for details.
+%
+% SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+%=============================================================================
+% Heuristic to speed-up solves (node with ID > 0)
+%=============================================================================
+
+% node(ID, _)
+#heuristic attr("version", node(ID, Package), Version) : pkg_fact(Package, version_declared(Version, 0)), attr("node", node(ID, Package)), ID > 0. [25-5*ID, true]
+#heuristic version_weight(node(ID, Package), 0) : pkg_fact(Package, version_declared(Version, 0)), attr("node", node(ID, Package)), ID > 0. [25-5*ID, true]
+#heuristic attr("variant_value", node(ID, Package), Variant, Value) : variant_default_value(Package, Variant, Value), attr("node", node(ID, Package)), ID > 0. [25-5*ID, true]
+#heuristic attr("node_target", node(ID, Package), Target) : pkg_fact(Package, target_weight(Target, 0)), attr("node", node(ID, Package)), ID > 0. [25-5*ID, true]
+#heuristic node_target_weight(node(ID, Package), 0) : attr("node", node(ID, Package)), ID > 0. [25-5*ID, true]
+#heuristic node_compiler(node(ID, Package), CompilerID) : compiler_weight(CompilerID, 0), compiler_id(CompilerID), attr("node", node(ID, Package)), ID > 0. [25-5*ID, true]
+
+% node(ID, _), split build dependencies
+#heuristic attr("version", node(ID, Package), Version) : pkg_fact(Package, version_declared(Version, 0)), attr("node", node(ID, Package)), multiple_unification_sets(Package), ID > 0. [25, true]
+#heuristic version_weight(node(ID, Package), 0) : pkg_fact(Package, version_declared(Version, 0)), attr("node", node(ID, Package)), multiple_unification_sets(Package), ID > 0. [25, true]
+#heuristic attr("variant_value", node(ID, Package), Variant, Value) : variant_default_value(Package, Variant, Value), attr("node", node(ID, Package)), multiple_unification_sets(Package), ID > 0. [25, true]
+#heuristic attr("node_target", node(ID, Package), Target) : pkg_fact(Package, target_weight(Target, 0)), attr("node", node(ID, Package)), multiple_unification_sets(Package), ID > 0. [25, true]
+#heuristic node_target_weight(node(ID, Package), 0) : attr("node", node(ID, Package)), multiple_unification_sets(Package), ID > 0. [25, true]
+#heuristic node_compiler(node(ID, Package), CompilerID) : compiler_weight(CompilerID, 0), compiler_id(CompilerID), attr("node", node(ID, Package)), multiple_unification_sets(Package), ID > 0. [25, true]
@@ -18,6 +18,9 @@ error(100, "Cannot reuse {0} since we cannot determine libc compatibility", Reus
   ReusedPackage != LibcPackage,
   not attr("compatible_libc", node(R, ReusedPackage), LibcPackage, LibcVersion).

+% Check whether the DAG has any built package
+has_built_packages() :- build(X), not external(X).
+
 % A libc is needed in the DAG
 :- has_built_packages(), not provider(_, node(0, "libc")).
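`has_built_packages()` is true as soon as any node in the DAG is built from source rather than reused or external. The same test as a Python one-liner over toy data:

```python
# Sketch of has_built_packages() on made-up specs: true when at least one
# node is built (not reused) and not external.
specs = [
    {"name": "zlib", "build": True, "external": False},
    {"name": "cmake", "build": False, "external": True},
]
has_built_packages = any(s["build"] and not s["external"] for s in specs)
print(has_built_packages)  # True, because zlib is built from source
```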
@@ -12,7 +12,6 @@
 %=============================================================================

 % macOS
-os_compatible("sequoia", "sonoma").
 os_compatible("sonoma", "ventura").
 os_compatible("ventura", "monterey").
 os_compatible("monterey", "bigsur").
@@ -1287,104 +1287,6 @@ def copy(self, *args, **kwargs):
         return self.wrapped_obj.copy(*args, **kwargs)


-def tree(
-    specs: List["spack.spec.Spec"],
-    *,
-    color: Optional[bool] = None,
-    depth: bool = False,
-    hashes: bool = False,
-    hashlen: Optional[int] = None,
-    cover: str = "nodes",
-    indent: int = 0,
-    format: str = DEFAULT_FORMAT,
-    deptypes: Union[Tuple[str, ...], str] = "all",
-    show_types: bool = False,
-    depth_first: bool = False,
-    recurse_dependencies: bool = True,
-    status_fn: Optional[Callable[["Spec"], InstallStatus]] = None,
-    prefix: Optional[Callable[["Spec"], str]] = None,
-    key=id,
-) -> str:
-    """Prints out specs and their dependencies, tree-formatted with indentation.
-
-    Status function may either output a boolean or an InstallStatus
-
-    Args:
-        color: if True, always colorize the tree. If False, don't colorize the tree. If None,
-            use the default from llnl.tty.color
-        depth: print the depth from the root
-        hashes: if True, print the hash of each node
-        hashlen: length of the hash to be printed
-        cover: either "nodes" or "edges"
-        indent: extra indentation for the tree being printed
-        format: format to be used to print each node
-        deptypes: dependency types to be represented in the tree
-        show_types: if True, show the (merged) dependency type of a node
-        depth_first: if True, traverse the DAG depth first when representing it as a tree
-        recurse_dependencies: if True, recurse on dependencies
-        status_fn: optional callable that takes a node as an argument and return its
-            installation status
-        prefix: optional callable that takes a node as an argument and return its
-            installation prefix
-    """
-    out = ""
-
-    if color is None:
-        color = clr.get_color_when()
-
-    # reduce deptypes over all in-edges when covering nodes
-    if show_types and cover == "nodes":
-        deptype_lookup: Dict[str, dt.DepFlag] = collections.defaultdict(dt.DepFlag)
-        for edge in traverse.traverse_edges(specs, cover="edges", deptype=deptypes, root=False):
-            deptype_lookup[edge.spec.dag_hash()] |= edge.depflag
-
-    for d, dep_spec in traverse.traverse_tree(
-        sorted(specs), cover=cover, deptype=deptypes, depth_first=depth_first, key=key
-    ):
-        node = dep_spec.spec
-
-        if prefix is not None:
-            out += prefix(node)
-        out += " " * indent
-
-        if depth:
-            out += "%-4d" % d
-
-        if status_fn:
-            status = status_fn(node)
-            if status in list(InstallStatus):
-                out += clr.colorize(status.value, color=color)
-            elif status:
-                out += clr.colorize("@g{[+]} ", color=color)
-            else:
-                out += clr.colorize("@r{[-]} ", color=color)
-
-        if hashes:
-            out += clr.colorize("@K{%s} ", color=color) % node.dag_hash(hashlen)
-
-        if show_types:
-            if cover == "nodes":
-                depflag = deptype_lookup[dep_spec.spec.dag_hash()]
-            else:
-                # when covering edges or paths, we show dependency
-                # types only for the edge through which we visited
-                depflag = dep_spec.depflag
-
-            type_chars = dt.flag_to_chars(depflag)
-            out += "[%s] " % type_chars
-
-        out += " " * d
-        if d > 0:
-            out += "^"
-        out += node.format(format, color=color) + "\n"
-
-        # Check if we wanted just the first line
-        if not recurse_dependencies:
-            break
-
-    return out
-
-
 @lang.lazy_lexicographic_ordering(set_hash=False)
 class Spec:
     #: Cache for spec's prefix, computed lazily in the corresponding property
@@ -4260,21 +4162,29 @@ def __getitem__(self, name: str):
         csv = query_parameters.pop().strip()
         query_parameters = re.split(r"\s*,\s*", csv)

+        # In some cases a package appears multiple times in the same DAG for *distinct*
+        # specs. For example, a build-type dependency may itself depend on a package
+        # the current spec depends on, but their specs may differ. Therefore we iterate
+        # in an order here that prioritizes the build, test and runtime dependencies;
+        # only when we don't find the package do we consider the full DAG.
         order = lambda: itertools.chain(
-            self.traverse_edges(deptype=dt.LINK, order="breadth", cover="edges"),
-            self.edges_to_dependencies(depflag=dt.BUILD | dt.RUN | dt.TEST),
-            self.traverse_edges(deptype=dt.ALL, order="breadth", cover="edges"),
+            self.traverse(deptype="link"),
+            self.dependencies(deptype=dt.BUILD | dt.RUN | dt.TEST),
+            self.traverse(),  # fall back to a full search
         )

-        # Consider runtime dependencies and direct build/test deps before transitive dependencies,
-        # and prefer matches closest to the root.
         try:
             child: Spec = next(
-                e.spec
-                for e in itertools.chain(
-                    (e for e in order() if e.spec.name == name or name in e.virtuals),
-                    # for historical reasons
-                    (e for e in order() if e.spec.concrete and e.spec.package.provides(name)),
+                itertools.chain(
+                    # Regular specs
+                    (x for x in order() if x.name == name),
+                    (
+                        x
+                        for x in order()
+                        if (not x.virtual)
+                        and any(name in edge.virtuals for edge in x.edges_from_dependents())
+                    ),
+                    (x for x in order() if (not x.virtual) and x.package.provides(name)),
                 )
             )
         except StopIteration:
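Both sides of this hunk rely on the same idiom: lazily chain prioritized generators and take the first hit with `next()`, so the expensive full-DAG traversal only runs when the cheaper passes miss. A self-contained sketch of that pattern, with a stand-in `order()`:

```python
# Sketch of the prioritized-lookup idiom in __getitem__ (stand-in data).
import itertools

def order():
    yield from ["zlib", "cmake", "openssl"]   # stand-in for DAG traversals

name = "openssl"
match = next(
    itertools.chain(
        (x for x in order() if x == name),    # direct name match, tried first
        (x for x in order() if name in x),    # weaker fallback, rarely reached
    )
)
print(match)  # 'openssl' from the first generator; the fallback never runs
```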
@@ -4650,7 +4560,7 @@ def colored_str(self):
         spec_str = " ^".join(root_str + sorted_dependencies)
         return spec_str.strip()

-    def install_status(self) -> InstallStatus:
+    def install_status(self):
         """Helper for tree to print DB install status."""
         if not self.concrete:
             return InstallStatus.absent
@@ -4694,14 +4604,13 @@ def tree(
         recurse_dependencies: bool = True,
         status_fn: Optional[Callable[["Spec"], InstallStatus]] = None,
         prefix: Optional[Callable[["Spec"], str]] = None,
-        key=id,
     ) -> str:
-        """Prints out this spec and its dependencies, tree-formatted with indentation.
+        """Prints out this spec and its dependencies, tree-formatted
+        with indentation.

-        See multi-spec ``spack.spec.tree()`` function for details.
+        Status function may either output a boolean or an InstallStatus

         Args:
-            specs: List of specs to format.
             color: if True, always colorize the tree. If False, don't colorize the tree. If None,
                 use the default from llnl.tty.color
             depth: print the depth from the root
@@ -4719,23 +4628,60 @@ def tree(
             prefix: optional callable that takes a node as an argument and return its
                 installation prefix
         """
-        return tree(
-            [self],
-            color=color,
-            depth=depth,
-            hashes=hashes,
-            hashlen=hashlen,
-            cover=cover,
-            indent=indent,
-            format=format,
-            deptypes=deptypes,
-            show_types=show_types,
-            depth_first=depth_first,
-            recurse_dependencies=recurse_dependencies,
-            status_fn=status_fn,
-            prefix=prefix,
-            key=key,
-        )
+        out = ""
+
+        if color is None:
+            color = clr.get_color_when()
+
+        for d, dep_spec in traverse.traverse_tree(
+            [self], cover=cover, deptype=deptypes, depth_first=depth_first
+        ):
+            node = dep_spec.spec
+
+            if prefix is not None:
+                out += prefix(node)
+            out += " " * indent
+
+            if depth:
+                out += "%-4d" % d
+
+            if status_fn:
+                status = status_fn(node)
+                if status in list(InstallStatus):
+                    out += clr.colorize(status.value, color=color)
+                elif status:
+                    out += clr.colorize("@g{[+]} ", color=color)
+                else:
+                    out += clr.colorize("@r{[-]} ", color=color)
+
+            if hashes:
+                out += clr.colorize("@K{%s} ", color=color) % node.dag_hash(hashlen)
+
+            if show_types:
+                if cover == "nodes":
+                    # when only covering nodes, we merge dependency types
+                    # from all dependents before showing them.
+                    depflag = 0
+                    for ds in node.edges_from_dependents():
+                        depflag |= ds.depflag
+                else:
+                    # when covering edges or paths, we show dependency
+                    # types only for the edge through which we visited
+                    depflag = dep_spec.depflag
+
+                type_chars = dt.flag_to_chars(depflag)
+                out += "[%s] " % type_chars
+
+            out += " " * d
+            if d > 0:
+                out += "^"
+            out += node.format(format, color=color) + "\n"
+
+            # Check if we wanted just the first line
+            if not recurse_dependencies:
+                break
+
+        return out

     def __repr__(self):
         return str(self)
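The method body restored on the `+` side renders the DAG depth-first, indenting each node by its depth and prefixing non-roots with `^`. A toy sketch of that rendering loop, written recursively for brevity:

```python
# Toy sketch of the tree rendering: depth-based indentation plus a caret
# marker on every non-root node, as in `spack spec` output.
tree = {"root": ["a", "b"], "a": ["c"], "b": [], "c": []}

def render(node, depth=0):
    caret = "^" if depth > 0 else ""
    out = " " * depth + caret + node + "\n"
    for child in tree[node]:
        out += render(child, depth + 1)
    return out

print(render("root"), end="")
# root
#  ^a
#   ^c
#  ^b
```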
@@ -212,7 +212,10 @@ def _expand_matrix_constraints(matrix_config):
     results = []
     for combo in itertools.product(*expanded_rows):
         # Construct a combined spec to test against excludes
-        flat_combo = [Spec(constraint) for constraints in combo for constraint in constraints]
+        flat_combo = [constraint for constraint_list in combo for constraint in constraint_list]
+
+        # Resolve abstract hashes so we can exclude by their concrete properties
+        flat_combo = [Spec(x).lookup_hash() for x in flat_combo]

         test_spec = flat_combo[0].copy()
         for constraint in flat_combo[1:]:
@@ -228,9 +231,7 @@ def _expand_matrix_constraints(matrix_config):
             spack.variant.substitute_abstract_variants(test_spec)
         except spack.variant.UnknownVariantError:
             pass
-
-        # Resolve abstract hashes for exclusion criteria
-        if any(test_spec.lookup_hash().satisfies(x) for x in excludes):
+        if any(test_spec.satisfies(x) for x in excludes):
             continue

         if sigil:
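`_expand_matrix_constraints` takes the cartesian product of the matrix rows and flattens each combination into one constraint list, exactly the double comprehension shown in the hunk. A minimal sketch with made-up spec strings:

```python
# Sketch of the matrix expansion: each row contributes one constraint list
# per combination, which is then flattened before exclusion checks.
import itertools

expanded_rows = [[["pkg@1.0"], ["pkg@2.0"]], [["+shared"], ["~shared"]]]
for combo in itertools.product(*expanded_rows):
    flat_combo = [c for constraint_list in combo for c in constraint_list]
    print(flat_combo)
# ['pkg@1.0', '+shared'], ['pkg@1.0', '~shared'],
# ['pkg@2.0', '+shared'], ['pkg@2.0', '~shared']
```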
@@ -346,6 +346,8 @@ class Stage(LockableStagingDir):
     similar, and are intended to persist for only one run of spack.
     """

+    #: Most staging is managed by Spack. DIYStage is one exception.
+    needs_fetching = True
     requires_patch_success = True

     def __init__(
@@ -770,6 +772,8 @@ def __init__(self):
                 "cache_mirror",
                 "steal_source",
                 "disable_mirrors",
+                "needs_fetching",
+                "requires_patch_success",
             ]
         )
@@ -808,10 +812,6 @@ def path(self):
     def archive_file(self):
         return self[0].archive_file

-    @property
-    def requires_patch_success(self):
-        return self[0].requires_patch_success
-
     @property
     def keep(self):
         return self[0].keep
@@ -822,7 +822,64 @@ def keep(self, value):
             item.keep = value


+class DIYStage:
+    """
+    Simple class that allows any directory to be a spack stage. Consequently,
+    it does not expect or require that the source path adhere to the standard
+    directory naming convention.
+    """
+
+    needs_fetching = False
+    requires_patch_success = False
+
+    def __init__(self, path):
+        if path is None:
+            raise ValueError("Cannot construct DIYStage without a path.")
+        elif not os.path.isdir(path):
+            raise StagePathError("The stage path directory does not exist:", path)
+
+        self.archive_file = None
+        self.path = path
+        self.source_path = path
+        self.created = True
+
+    # DIY stages do nothing as context managers.
+    def __enter__(self):
+        pass
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        pass
+
+    def fetch(self, *args, **kwargs):
+        tty.debug("No need to fetch for DIY.")
+
+    def check(self):
+        tty.debug("No checksum needed for DIY.")
+
+    def expand_archive(self):
+        tty.debug("Using source directory: {0}".format(self.source_path))
+
+    @property
+    def expanded(self):
+        """Returns True since the source_path must exist."""
+        return True
+
+    def restage(self):
+        raise RestageError("Cannot restage a DIY stage.")
+
+    def create(self):
+        self.created = True
+
+    def destroy(self):
+        # No need to destroy DIY stage.
+        pass
+
+    def cache_local(self):
+        tty.debug("Sources for DIY stages are not cached")
+
+
 class DevelopStage(LockableStagingDir):
+    needs_fetching = False
     requires_patch_success = False

     def __init__(self, name, dev_path, reference_link):
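`DIYStage` exists so an arbitrary directory can satisfy the stage protocol; callers branch on class attributes such as `needs_fetching` instead of isinstance checks, and the no-op `__enter__`/`__exit__` let any stage be used in a `with` block. A hedged sketch of the same duck-typed pattern (illustrative class; it returns `self` from `__enter__` for convenience, which the original does not):

```python
# Sketch of the duck-typed stage protocol: uniform `with` handling plus
# attribute-driven behavior, with no inheritance required.
class NoOpStage:
    needs_fetching = False
    requires_patch_success = False

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        return None   # do not swallow exceptions

    def fetch(self):
        raise AssertionError("never called: needs_fetching is False")

def prepare(stage):
    with stage:
        if stage.needs_fetching:
            stage.fetch()
        print("stage ready, no fetch or expansion performed")

prepare(NoOpStage())
```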
@@ -371,6 +371,7 @@ def use_store(
     data.update(extra_data)

     # Swap the store with the one just constructed and return it
+    ensure_singleton_created()
     spack.config.CONFIG.push_scope(
         spack.config.InternalConfigScope(name=scope_name, data={"config": {"install_tree": data}})
     )
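The added `ensure_singleton_created()` call is an ordering fix: a lazily created singleton must be materialized before the configuration scope is swapped, or it would be built from the override instead of the original configuration. A toy sketch of the hazard, with invented names:

```python
# Sketch: materialize the lazy singleton *before* mutating config, so it
# snapshots the original settings rather than the temporary override.
class LazyStore:
    _instance = None

    @classmethod
    def ensure_created(cls, config):
        if cls._instance is None:
            cls._instance = dict(config)   # snapshot current config
        return cls._instance

config = {"install_tree": "/opt/spack"}
LazyStore.ensure_created(config)           # created from the original config
config["install_tree"] = "/tmp/override"   # later override cannot leak in
print(LazyStore._instance["install_tree"]) # /opt/spack
```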
@@ -79,11 +79,9 @@ def restore(self):
         self.test_state.restore()
         spack.main.spack_working_dir = self.spack_working_dir
         env = pickle.load(self.serialized_env) if _SERIALIZE else self.env
+        pkg = pickle.load(self.serialized_pkg) if _SERIALIZE else self.pkg
         if env:
             spack.environment.activate(env)
-        # Order of operation is important, since the package might be retrieved
-        # from a repo defined within the environment configuration
-        pkg = pickle.load(self.serialized_pkg) if _SERIALIZE else self.pkg
         return pkg
@@ -213,9 +213,7 @@ def test_satisfy_strict_constraint_when_not_concrete(architecture_tuple, constra
     str(archspec.cpu.host().family) != "x86_64", reason="tests are for x86_64 uarch ranges"
 )
 def test_concretize_target_ranges(root_target_range, dep_target_range, result, monkeypatch):
-    spec = Spec(
-        f"pkg-a %gcc@10 foobar=bar target={root_target_range} ^pkg-b target={dep_target_range}"
-    )
+    spec = Spec(f"a %gcc@10 foobar=bar target={root_target_range} ^b target={dep_target_range}")
     with spack.concretize.disable_compiler_existence_check():
         spec.concretize()
-    assert spec.target == spec["pkg-b"].target == result
+    assert spec.target == spec["b"].target == result
@@ -105,18 +105,18 @@ def config_directory(tmpdir_factory):


 @pytest.fixture(scope="function")
-def default_config(tmpdir, config_directory, monkeypatch, install_mockery):
-    # This fixture depends on install_mockery to ensure
+def default_config(tmpdir, config_directory, monkeypatch, install_mockery_mutable_config):
+    # This fixture depends on install_mockery_mutable_config to ensure
     # there is a clear order of initialization. The substitution of the
     # config scopes here is done on top of the substitution that comes with
-    # install_mockery
+    # install_mockery_mutable_config
     mutable_dir = tmpdir.mkdir("mutable_config").join("tmp")
     config_directory.copy(mutable_dir)

     cfg = spack.config.Configuration(
         *[
-            spack.config.DirectoryConfigScope(name, str(mutable_dir))
-            for name in [f"site/{platform.system().lower()}", "site", "user"]
+            spack.config.ConfigScope(name, str(mutable_dir))
+            for name in ["site/%s" % platform.system().lower(), "site", "user"]
         ]
     )
@@ -398,7 +398,9 @@ def fake_dag_hash(spec, length=None):
     return "tal4c7h4z0gqmixb1eqa92mjoybxn5l6"[:length]


-@pytest.mark.usefixtures("install_mockery", "mock_packages", "mock_fetch", "test_mirror")
+@pytest.mark.usefixtures(
+    "install_mockery_mutable_config", "mock_packages", "mock_fetch", "test_mirror"
+)
 def test_spec_needs_rebuild(monkeypatch, tmpdir):
     """Make sure needs_rebuild properly compares remote hash
     against locally computed one, avoiding unnecessary rebuilds"""
@@ -427,7 +429,7 @@ def test_spec_needs_rebuild(monkeypatch, tmpdir):
     assert rebuild


-@pytest.mark.usefixtures("install_mockery", "mock_packages", "mock_fetch")
+@pytest.mark.usefixtures("install_mockery_mutable_config", "mock_packages", "mock_fetch")
 def test_generate_index_missing(monkeypatch, tmpdir, mutable_config):
     """Ensure spack buildcache index only reports available packages"""

@@ -585,7 +587,9 @@ def test_update_sbang(tmpdir, test_mirror):
     str(archspec.cpu.host().family) != "x86_64",
     reason="test data uses gcc 4.5.0 which does not support aarch64",
 )
-def test_install_legacy_buildcache_layout(mutable_config, compiler_factory, install_mockery):
+def test_install_legacy_buildcache_layout(
+    mutable_config, compiler_factory, install_mockery_mutable_config
+):
     """Legacy buildcache layout involved a nested archive structure
     where the .spack file contained a repeated spec.json and another
     compressed archive file containing the install tree. This test
@@ -228,25 +228,3 @@ def test_source_is_disabled(mutable_config):
     spack.config.add("bootstrap:trusted:{0}:{1}".format(conf["name"], False))
     with pytest.raises(ValueError):
         spack.bootstrap.core.source_is_enabled_or_raise(conf)
-
-
-@pytest.mark.regression("45247")
-def test_use_store_does_not_try_writing_outside_root(tmp_path, monkeypatch, mutable_config):
-    """Tests that when we use the 'use_store' context manager, there is no attempt at creating
-    a Store outside the given root.
-    """
-    initial_store = mutable_config.get("config:install_tree:root")
-    user_store = tmp_path / "store"
-
-    fn = spack.store.Store.__init__
-
-    def _checked_init(self, root, *args, **kwargs):
-        fn(self, root, *args, **kwargs)
-        assert self.root == str(user_store)
-
-    monkeypatch.setattr(spack.store.Store, "__init__", _checked_init)
-
-    spack.store.reinitialize()
-    with spack.store.use_store(user_store):
-        assert spack.config.CONFIG.get("config:install_tree:root") == str(user_store)
-    assert spack.config.CONFIG.get("config:install_tree:root") == initial_store
@@ -177,7 +177,7 @@ def _set_wrong_cc(x):


 def test_setup_dependent_package_inherited_modules(
-    working_env, mock_packages, install_mockery, mock_fetch
+    config, working_env, mock_packages, install_mockery, mock_fetch
 ):
     # This will raise on regression
     s = spack.spec.Spec("cmake-client-inheritor").concretized()
@@ -457,14 +457,14 @@ def test_parallel_false_is_not_propagating(default_mock_concretization):
     # a foobar=bar (parallel = False)
     # |
     # b (parallel =True)
-    s = default_mock_concretization("pkg-a foobar=bar")
+    s = default_mock_concretization("a foobar=bar")

     spack.build_environment.set_package_py_globals(s.package, context=Context.BUILD)
-    assert s["pkg-a"].package.module.make_jobs == 1
+    assert s["a"].package.module.make_jobs == 1

-    spack.build_environment.set_package_py_globals(s["pkg-b"].package, context=Context.BUILD)
-    assert s["pkg-b"].package.module.make_jobs == spack.build_environment.determine_number_of_jobs(
-        parallel=s["pkg-b"].package.parallel
+    spack.build_environment.set_package_py_globals(s["b"].package, context=Context.BUILD)
+    assert s["b"].package.module.make_jobs == spack.build_environment.determine_number_of_jobs(
+        parallel=s["b"].package.parallel
     )
@@ -94,10 +94,10 @@ def test_negative_ninja_check(self, input_dir, test_dir, concretize_and_setup):


 @pytest.mark.not_on_windows("autotools not available on windows")
-@pytest.mark.usefixtures("mock_packages")
+@pytest.mark.usefixtures("config", "mock_packages")
 class TestAutotoolsPackage:
     def test_with_or_without(self, default_mock_concretization):
-        s = default_mock_concretization("pkg-a")
+        s = default_mock_concretization("a")
         options = s.package.with_or_without("foo")

         # Ensure that values that are not representing a feature
@@ -129,7 +129,7 @@ def activate(value):
         assert "--without-lorem-ipsum" in options

     def test_none_is_allowed(self, default_mock_concretization):
-        s = default_mock_concretization("pkg-a foo=none")
+        s = default_mock_concretization("a foo=none")
         options = s.package.with_or_without("foo")

         # Ensure that values that are not representing a feature
@@ -139,9 +139,11 @@ def test_none_is_allowed(self, default_mock_concretization):
         assert "--without-baz" in options
         assert "--no-fee" in options

-    def test_libtool_archive_files_are_deleted_by_default(self, mutable_database):
+    def test_libtool_archive_files_are_deleted_by_default(
+        self, default_mock_concretization, mutable_database
+    ):
         # Install a package that creates a mock libtool archive
-        s = Spec("libtool-deletion").concretized()
+        s = default_mock_concretization("libtool-deletion")
         s.package.do_install(explicit=True)

         # Assert the libtool archive is not there and we have
@@ -152,23 +154,25 @@ def test_libtool_archive_files_are_deleted_by_default(self, mutable_database):
         assert libtool_deletion_log

     def test_libtool_archive_files_might_be_installed_on_demand(
-        self, mutable_database, monkeypatch
+        self, mutable_database, monkeypatch, default_mock_concretization
     ):
         # Install a package that creates a mock libtool archive,
         # patch its package to preserve the installation
-        s = Spec("libtool-deletion").concretized()
+        s = default_mock_concretization("libtool-deletion")
         monkeypatch.setattr(type(s.package.builder), "install_libtool_archives", True)
         s.package.do_install(explicit=True)

         # Assert libtool archives are installed
         assert os.path.exists(s.package.builder.libtool_archive_file)

-    def test_autotools_gnuconfig_replacement(self, mutable_database):
+    def test_autotools_gnuconfig_replacement(self, default_mock_concretization, mutable_database):
         """
         Tests whether only broken config.sub and config.guess are replaced with
         files from working alternatives from the gnuconfig package.
         """
-        s = Spec("autotools-config-replacement +patch_config_files +gnuconfig").concretized()
+        s = default_mock_concretization(
+            "autotools-config-replacement +patch_config_files +gnuconfig"
+        )
         s.package.do_install()

         with open(os.path.join(s.prefix.broken, "config.sub")) as f:
@@ -183,11 +187,15 @@ def test_autotools_gnuconfig_replacement(self, mutable_database):
         with open(os.path.join(s.prefix.working, "config.guess")) as f:
             assert "gnuconfig version of config.guess" not in f.read()

-    def test_autotools_gnuconfig_replacement_disabled(self, mutable_database):
+    def test_autotools_gnuconfig_replacement_disabled(
+        self, default_mock_concretization, mutable_database
+    ):
         """
         Tests whether disabling patch_config_files
         """
-        s = Spec("autotools-config-replacement ~patch_config_files +gnuconfig").concretized()
+        s = default_mock_concretization(
+            "autotools-config-replacement ~patch_config_files +gnuconfig"
+        )
         s.package.do_install()

         with open(os.path.join(s.prefix.broken, "config.sub")) as f:
@@ -828,14 +828,14 @@ def test_keep_and_replace(wrapper_environment):
|
|||||||
),
|
),
|
||||||
(
|
(
|
||||||
"config:flags:keep_werror:specific",
|
"config:flags:keep_werror:specific",
|
||||||
["-Werror", "-Werror=specific", "-Werror-specific2", "-bah"],
|
["-Werror", "-Werror=specific", "-bah"],
|
||||||
["-Wno-error", "-Werror=specific", "-Werror-specific2", "-bah"],
|
["-Werror=specific", "-bah"],
|
||||||
["-Werror"],
|
["-Werror"],
|
||||||
),
|
),
|
||||||
(
|
(
|
||||||
"config:flags:keep_werror:none",
|
"config:flags:keep_werror:none",
|
||||||
["-Werror", "-Werror=specific", "-bah"],
|
["-Werror", "-Werror=specific", "-bah"],
|
||||||
["-Wno-error", "-Wno-error=specific", "-bah"],
|
["-bah", "-Wno-error", "-Wno-error=specific"],
|
||||||
["-Werror", "-Werror=specific"],
|
["-Werror", "-Werror=specific"],
|
||||||
),
|
),
|
||||||
# check non-standard -Werror opts like -Werror-implicit-function-declaration
|
# check non-standard -Werror opts like -Werror-implicit-function-declaration
|
||||||
@@ -848,13 +848,13 @@ def test_keep_and_replace(wrapper_environment):
|
|||||||
(
|
(
|
||||||
"config:flags:keep_werror:specific",
|
"config:flags:keep_werror:specific",
|
||||||
["-Werror", "-Werror-implicit-function-declaration", "-bah"],
|
["-Werror", "-Werror-implicit-function-declaration", "-bah"],
|
||||||
["-Wno-error", "-Werror-implicit-function-declaration", "-bah"],
|
["-Werror-implicit-function-declaration", "-bah", "-Wno-error"],
|
||||||
["-Werror"],
|
["-Werror"],
|
||||||
),
|
),
|
||||||
(
|
(
|
||||||
"config:flags:keep_werror:none",
|
"config:flags:keep_werror:none",
|
||||||
["-Werror", "-Werror-implicit-function-declaration", "-bah"],
|
["-Werror", "-Werror-implicit-function-declaration", "-bah"],
|
||||||
["-Wno-error", "-bah", "-Wno-error=implicit-function-declaration"],
|
["-bah", "-Wno-error=implicit-function-declaration"],
|
||||||
["-Werror", "-Werror-implicit-function-declaration"],
|
["-Werror", "-Werror-implicit-function-declaration"],
|
||||||
),
|
),
|
||||||
],
|
],
|
||||||
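A note on the parametrization above: each 4-tuple reads as (config override, flags passed to the wrapper, flags that must appear in the output, flags that must not appear) -- an interpretation inferred from the values, not stated in the hunk. Under that reading, the -Werror handling being exercised behaves roughly like this hypothetical sketch (not Spack's actual compiler wrapper, which is a shell script):

def rewrite_werror(flags, keep_werror):
    # Sketch: demote -Werror according to config:flags:keep_werror.
    #   "all"      -> leave everything alone
    #   "specific" -> keep -Werror=<diag>, demote bare -Werror
    #   "none"     -> demote bare and specific forms alike
    out = []
    for flag in flags:
        if flag == "-Werror" and keep_werror in ("specific", "none"):
            out.append("-Wno-error")
        elif flag.startswith("-Werror=") and keep_werror == "none":
            out.append(flag.replace("-Werror=", "-Wno-error=", 1))
        elif flag.startswith("-Werror-") and keep_werror == "none":
            # non-standard spelling, e.g. -Werror-implicit-function-declaration
            out.append("-Wno-error=" + flag[len("-Werror-"):])
        else:
            out.append(flag)
    return out

# keep_werror == "none":
# rewrite_werror(["-Werror", "-Werror=specific", "-bah"], "none")
#   -> ["-Wno-error", "-Wno-error=specific", "-bah"]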
@@ -2,6 +2,7 @@
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
+import itertools
 import os
 import subprocess

@@ -10,12 +11,15 @@
 import llnl.util.filesystem as fs

 import spack.ci as ci
+import spack.ci_needs_workaround as cinw
+import spack.ci_optimization as ci_opt
 import spack.config
 import spack.environment as ev
 import spack.error
 import spack.paths as spack_paths
 import spack.util.git
 import spack.util.gpg
+import spack.util.spack_yaml as syaml


 @pytest.fixture
@@ -199,7 +203,165 @@ def __call__(self, *args, **kwargs):
     assert "Unable to merge {0}".format(c1) in err


-def test_get_spec_filter_list(mutable_mock_env_path, mutable_mock_repo):
+@pytest.mark.parametrize("obj, proto", [({}, [])])
+def test_ci_opt_argument_checking(obj, proto):
+    """Check that matches() and subkeys() return False when `proto` is not a dict."""
+    assert not ci_opt.matches(obj, proto)
+    assert not ci_opt.subkeys(obj, proto)
+
+
+@pytest.mark.parametrize("yaml", [{"extends": 1}])
+def test_ci_opt_add_extends_non_sequence(yaml):
+    """Check that add_extends() exits if 'extends' is not a sequence."""
+    yaml_copy = yaml.copy()
+    ci_opt.add_extends(yaml, None)
+    assert yaml == yaml_copy
+
+
+def test_ci_workarounds():
+    fake_root_spec = "x" * 544
+    fake_spack_ref = "x" * 40
+
+    common_variables = {"SPACK_IS_PR_PIPELINE": "False"}
+
+    common_before_script = [
+        'git clone "https://github.com/spack/spack"',
+        " && ".join(("pushd ./spack", 'git checkout "{ref}"'.format(ref=fake_spack_ref), "popd")),
+        '. "./spack/share/spack/setup-env.sh"',
+    ]
+
+    def make_build_job(name, deps, stage, use_artifact_buildcache, optimize, use_dependencies):
+        variables = common_variables.copy()
+        variables["SPACK_JOB_SPEC_PKG_NAME"] = name
+
+        result = {
+            "stage": stage,
+            "tags": ["tag-0", "tag-1"],
+            "artifacts": {
+                "paths": ["jobs_scratch_dir", "cdash_report", name + ".spec.json", name],
+                "when": "always",
+            },
+            "retry": {"max": 2, "when": ["always"]},
+            "after_script": ['rm -rf "./spack"'],
+            "script": ["spack ci rebuild"],
+            "image": {"name": "spack/centos7", "entrypoint": [""]},
+        }
+
+        if optimize:
+            result["extends"] = [".c0", ".c1"]
+        else:
+            variables["SPACK_ROOT_SPEC"] = fake_root_spec
+            result["before_script"] = common_before_script
+
+        result["variables"] = variables
+
+        if use_dependencies:
+            result["dependencies"] = list(deps) if use_artifact_buildcache else []
+        else:
+            result["needs"] = [{"job": dep, "artifacts": use_artifact_buildcache} for dep in deps]
+
+        return {name: result}
+
+    def make_rebuild_index_job(use_artifact_buildcache, optimize, use_dependencies):
+        result = {
+            "stage": "stage-rebuild-index",
+            "script": "spack buildcache update-index s3://mirror",
+            "tags": ["tag-0", "tag-1"],
+            "image": {"name": "spack/centos7", "entrypoint": [""]},
+            "after_script": ['rm -rf "./spack"'],
+        }
+
+        if optimize:
+            result["extends"] = ".c0"
+        else:
+            result["before_script"] = common_before_script
+
+        return {"rebuild-index": result}
+
+    def make_factored_jobs(optimize):
+        return (
+            {
+                ".c0": {"before_script": common_before_script},
+                ".c1": {"variables": {"SPACK_ROOT_SPEC": fake_root_spec}},
+            }
+            if optimize
+            else {}
+        )
+
+    def make_stage_list(num_build_stages):
+        return {
+            "stages": (
+                ["-".join(("stage", str(i))) for i in range(num_build_stages)]
+                + ["stage-rebuild-index"]
+            )
+        }
+
+    def make_yaml_obj(use_artifact_buildcache, optimize, use_dependencies):
+        result = {}
+
+        result.update(
+            make_build_job(
+                "pkg-a", [], "stage-0", use_artifact_buildcache, optimize, use_dependencies
+            )
+        )
+
+        result.update(
+            make_build_job(
+                "pkg-b", ["pkg-a"], "stage-1", use_artifact_buildcache, optimize, use_dependencies
+            )
+        )
+
+        result.update(
+            make_build_job(
+                "pkg-c",
+                ["pkg-a", "pkg-b"],
+                "stage-2",
+                use_artifact_buildcache,
+                optimize,
+                use_dependencies,
+            )
+        )
+
+        result.update(make_rebuild_index_job(use_artifact_buildcache, optimize, use_dependencies))
+
+        result.update(make_factored_jobs(optimize))
+
+        result.update(make_stage_list(3))
+
+        return result
+
+    # test every combination of:
+    #   use artifact buildcache: true or false
+    #   run optimization pass: true or false
+    #   convert needs to dependencies: true or false
+    for use_ab in (False, True):
+        original = make_yaml_obj(
+            use_artifact_buildcache=use_ab, optimize=False, use_dependencies=False
+        )

+        for opt, deps in itertools.product(*(((False, True),) * 2)):
+            # neither optimizing nor converting needs->dependencies
+            if not (opt or deps):
+                # therefore, nothing to test
+                continue
+
+            predicted = make_yaml_obj(
+                use_artifact_buildcache=use_ab, optimize=opt, use_dependencies=deps
+            )
+
+            actual = original.copy()
+            if opt:
+                actual = ci_opt.optimizer(actual)
+            if deps:
+                actual = cinw.needs_to_dependencies(actual)
+
+            predicted = syaml.dump_config(ci_opt.sort_yaml_obj(predicted), default_flow_style=True)
+            actual = syaml.dump_config(ci_opt.sort_yaml_obj(actual), default_flow_style=True)
+
+            assert predicted == actual
+
+
+def test_get_spec_filter_list(mutable_mock_env_path, config, mutable_mock_repo):
     """Test that given an active environment and list of touched pkgs,
     we get the right list of possibly-changed env specs"""
     e1 = ev.create("test")
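The expected job fixtures above also pin down what `cinw.needs_to_dependencies` must do: each job's GitLab `needs` list (entries of the form {"job": ..., "artifacts": ...}) collapses into a plain `dependencies` list, and only artifact-backed entries survive (with `use_artifact_buildcache` false, `dependencies` comes out empty). A minimal sketch consistent with those fixtures; Spack's real `spack.ci_needs_workaround` may differ in detail:

def needs_to_dependencies(pipeline):
    # Sketch: rewrite each job's `needs` into a GitLab `dependencies` list.
    for job in pipeline.values():
        if isinstance(job, dict) and "needs" in job:
            needs = job.pop("needs")
            # Only needs that carry artifacts become dependencies.
            job["dependencies"] = [n["job"] for n in needs if n.get("artifacts")]
    return pipeline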
@@ -253,7 +415,7 @@ def test_get_spec_filter_list(mutable_mock_env_path, mutable_mock_repo):


 @pytest.mark.regression("29947")
-def test_affected_specs_on_first_concretization(mutable_mock_env_path, mock_packages):
+def test_affected_specs_on_first_concretization(mutable_mock_env_path, mock_packages, config):
     e = ev.create("first_concretization")
     e.add("mpileaks~shared")
     e.add("mpileaks+shared")
@@ -322,12 +484,12 @@ def test_ci_run_standalone_tests_missing_requirements(

 @pytest.mark.not_on_windows("Reliance on bash script not supported on Windows")
 def test_ci_run_standalone_tests_not_installed_junit(
-    tmp_path, repro_dir, working_env, mock_test_stage, capfd, mock_packages
+    tmp_path, repro_dir, working_env, default_mock_concretization, mock_test_stage, capfd
 ):
     log_file = tmp_path / "junit.xml"
     args = {
         "log_file": str(log_file),
-        "job_spec": spack.spec.Spec("printing-package").concretized(),
+        "job_spec": default_mock_concretization("printing-package"),
         "repro_dir": str(repro_dir),
         "fail_fast": True,
     }
@@ -340,13 +502,13 @@ def test_ci_run_standalone_tests_not_installed_junit(

 @pytest.mark.not_on_windows("Reliance on bash script not supported on Windows")
 def test_ci_run_standalone_tests_not_installed_cdash(
-    tmp_path, repro_dir, working_env, mock_test_stage, capfd, mock_packages
+    tmp_path, repro_dir, working_env, default_mock_concretization, mock_test_stage, capfd
 ):
     """Test run_standalone_tests with cdash and related options."""
     log_file = tmp_path / "junit.xml"
     args = {
         "log_file": str(log_file),
-        "job_spec": spack.spec.Spec("printing-package").concretized(),
+        "job_spec": default_mock_concretization("printing-package"),
         "repro_dir": str(repro_dir),
     }

@@ -48,6 +48,11 @@ def mock_get_specs_multiarch(database, monkeypatch):
     monkeypatch.setattr(spack.binary_distribution, "update_cache_and_get_specs", lambda: specs)


+def test_buildcache_preview_just_runs():
+    # TODO: remove in Spack 0.21
+    buildcache("preview", "mpileaks")
+
+
 @pytest.mark.db
 @pytest.mark.regression("13757")
 def test_buildcache_list_duplicates(mock_get_specs, capsys):
@@ -184,7 +189,12 @@ def test_buildcache_autopush(tmp_path, install_mockery, mock_fetch):


 def test_buildcache_sync(
-    mutable_mock_env_path, install_mockery, mock_packages, mock_fetch, mock_stage, tmpdir
+    mutable_mock_env_path,
+    install_mockery_mutable_config,
+    mock_packages,
+    mock_fetch,
+    mock_stage,
+    tmpdir,
 ):
     """
     Make sure buildcache sync works in an environment-aware manner, ignoring
@@ -313,7 +323,7 @@ def manifest_insert(manifest, spec, dest_url):

 def test_buildcache_create_install(
     mutable_mock_env_path,
-    install_mockery,
+    install_mockery_mutable_config,
     mock_packages,
     mock_fetch,
     mock_stage,
@@ -83,6 +83,7 @@ def test_checksum_args(arguments, expected):
     assert check == expected


+@pytest.mark.not_on_windows("Not supported on Windows (yet)")
 @pytest.mark.parametrize(
     "arguments,expected",
     [
@@ -106,24 +106,24 @@ def test_specs_staging(config, tmpdir):

     """
     builder = repo.MockRepositoryBuilder(tmpdir)
-    builder.add_package("pkg-g")
-    builder.add_package("pkg-f")
-    builder.add_package("pkg-e")
-    builder.add_package("pkg-d", dependencies=[("pkg-f", None, None), ("pkg-g", None, None)])
-    builder.add_package("pkg-c")
-    builder.add_package("pkg-b", dependencies=[("pkg-d", None, None), ("pkg-e", None, None)])
-    builder.add_package("pkg-a", dependencies=[("pkg-b", None, None), ("pkg-c", None, None)])
+    builder.add_package("g")
+    builder.add_package("f")
+    builder.add_package("e")
+    builder.add_package("d", dependencies=[("f", None, None), ("g", None, None)])
+    builder.add_package("c")
+    builder.add_package("b", dependencies=[("d", None, None), ("e", None, None)])
+    builder.add_package("a", dependencies=[("b", None, None), ("c", None, None)])

     with repo.use_repositories(builder.root):
-        spec_a = Spec("pkg-a").concretized()
+        spec_a = Spec("a").concretized()

         spec_a_label = ci._spec_ci_label(spec_a)
-        spec_b_label = ci._spec_ci_label(spec_a["pkg-b"])
-        spec_c_label = ci._spec_ci_label(spec_a["pkg-c"])
-        spec_d_label = ci._spec_ci_label(spec_a["pkg-d"])
-        spec_e_label = ci._spec_ci_label(spec_a["pkg-e"])
-        spec_f_label = ci._spec_ci_label(spec_a["pkg-f"])
-        spec_g_label = ci._spec_ci_label(spec_a["pkg-g"])
+        spec_b_label = ci._spec_ci_label(spec_a["b"])
+        spec_c_label = ci._spec_ci_label(spec_a["c"])
+        spec_d_label = ci._spec_ci_label(spec_a["d"])
+        spec_e_label = ci._spec_ci_label(spec_a["e"])
+        spec_f_label = ci._spec_ci_label(spec_a["f"])
+        spec_g_label = ci._spec_ci_label(spec_a["g"])

         spec_labels, dependencies, stages = ci.stage_spec_jobs([spec_a])

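For orientation, `ci.stage_spec_jobs` has to split the mock DAG built above (a depends on b and c; b on d and e; d on f and g) into stages where every spec's dependencies sit in an earlier stage. A generic leveling sketch over plain name-to-deps dicts (illustrative only, not Spack's implementation):

def stage_jobs(deps):
    # Group nodes into stages; a node is staged once all its deps are placed.
    stages, placed = [], set()
    while len(placed) < len(deps):
        stage = {n for n, ds in deps.items() if n not in placed and set(ds) <= placed}
        stages.append(sorted(stage))
        placed |= stage
    return stages

dag = {"a": ["b", "c"], "b": ["d", "e"], "c": [], "d": ["f", "g"], "e": [], "f": [], "g": []}
print(stage_jobs(dag))  # [['c', 'e', 'f', 'g'], ['d'], ['b'], ['a']]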
@@ -748,7 +748,7 @@ def test_ci_rebuild_mock_success(
     tmpdir,
     working_env,
     mutable_mock_env_path,
-    install_mockery,
+    install_mockery_mutable_config,
     mock_gnupghome,
     mock_stage,
     mock_fetch,
@@ -782,7 +782,7 @@ def test_ci_rebuild_mock_failure_to_push(
     tmpdir,
     working_env,
     mutable_mock_env_path,
-    install_mockery,
+    install_mockery_mutable_config,
     mock_gnupghome,
     mock_stage,
     mock_fetch,
@@ -820,7 +820,7 @@ def test_ci_rebuild(
     tmpdir,
     working_env,
     mutable_mock_env_path,
-    install_mockery,
+    install_mockery_mutable_config,
     mock_packages,
     monkeypatch,
     mock_gnupghome,
@@ -1019,7 +1019,7 @@ def fake_dl_method(spec, *args, **kwargs):
 def test_ci_generate_mirror_override(
     tmpdir,
     mutable_mock_env_path,
-    install_mockery,
+    install_mockery_mutable_config,
     mock_packages,
     mock_fetch,
     mock_stage,
@@ -1104,7 +1104,7 @@ def test_ci_generate_mirror_override(
 def test_push_to_build_cache(
     tmpdir,
     mutable_mock_env_path,
-    install_mockery,
+    install_mockery_mutable_config,
     mock_packages,
     mock_fetch,
     mock_stage,
@@ -1290,7 +1290,7 @@ def test_ci_generate_override_runner_attrs(
 spack:
   specs:
     - flatten-deps
-    - pkg-a
+    - a
   mirrors:
     some-mirror: https://my.fake.mirror
   ci:
@@ -1307,12 +1307,12 @@ def test_ci_generate_override_runner_attrs(
       - match:
           - dependency-install
       - match:
-          - pkg-a
+          - a
         build-job:
           tags:
             - specific-a-2
       - match:
-          - pkg-a
+          - a
         build-job-remove:
           tags:
             - toplevel2
@@ -1372,8 +1372,8 @@ def test_ci_generate_override_runner_attrs(
     assert global_vars["SPACK_CHECKOUT_VERSION"] == git_version or "v0.20.0.test0"

     for ci_key in yaml_contents.keys():
-        if ci_key.startswith("pkg-a"):
-            # Make sure pkg-a's attributes override variables, and all the
+        if ci_key.startswith("a"):
+            # Make sure a's attributes override variables, and all the
             # scripts. Also, make sure the 'toplevel' tag doesn't
             # appear twice, but that a's specific extra tag does appear
             the_elt = yaml_contents[ci_key]
@@ -1432,6 +1432,55 @@ def test_ci_generate_override_runner_attrs(
     assert the_elt["after_script"][0] == "post step one"


+def test_ci_generate_with_workarounds(
+    tmpdir, mutable_mock_env_path, install_mockery, mock_packages, monkeypatch, ci_base_environment
+):
+    """Make sure the post-processing cli workarounds do what they should"""
+    filename = str(tmpdir.join("spack.yaml"))
+    with open(filename, "w") as f:
+        f.write(
+            """\
+spack:
+  specs:
+    - callpath%gcc@=9.5
+  mirrors:
+    some-mirror: https://my.fake.mirror
+  ci:
+    pipeline-gen:
+    - submapping:
+      - match: ['%gcc@9.5']
+        build-job:
+          tags:
+            - donotcare
+          image: donotcare
+    enable-artifacts-buildcache: true
+"""
+        )
+
+    with tmpdir.as_cwd():
+        env_cmd("create", "test", "./spack.yaml")
+        outputfile = str(tmpdir.join(".gitlab-ci.yml"))
+
+        with ev.read("test"):
+            ci_cmd("generate", "--output-file", outputfile, "--dependencies")
+
+        with open(outputfile) as f:
+            contents = f.read()
+            yaml_contents = syaml.load(contents)
+
+            found_one = False
+            non_rebuild_keys = ["workflow", "stages", "variables", "rebuild-index"]
+
+            for ci_key in yaml_contents.keys():
+                if ci_key not in non_rebuild_keys:
+                    found_one = True
+                    job_obj = yaml_contents[ci_key]
+                    assert "needs" not in job_obj
+                    assert "dependencies" in job_obj
+
+            assert found_one is True
+
+
 @pytest.mark.disable_clean_stage_check
 def test_ci_rebuild_index(
     tmpdir,
@@ -1781,7 +1830,7 @@ def test_ci_generate_read_broken_specs_url(
     tmpdir, mutable_mock_env_path, install_mockery, mock_packages, monkeypatch, ci_base_environment
 ):
     """Verify that `broken-specs-url` works as intended"""
-    spec_a = Spec("pkg-a")
+    spec_a = Spec("a")
     spec_a.concretize()
     a_dag_hash = spec_a.dag_hash()

@@ -1807,7 +1856,7 @@ def test_ci_generate_read_broken_specs_url(
 spack:
   specs:
     - flatten-deps
-    - pkg-a
+    - a
   mirrors:
     some-mirror: https://my.fake.mirror
   ci:
@@ -1815,9 +1864,9 @@ def test_ci_generate_read_broken_specs_url(
     pipeline-gen:
     - submapping:
       - match:
-          - pkg-a
+          - a
           - flatten-deps
-          - pkg-b
+          - b
           - dependency-install
         build-job:
           tags:
@@ -72,6 +72,7 @@ def test_parse_spec_flags_with_spaces(specs, cflags, propagation, negated_varian
     assert "~{0}".format(v) in s


+@pytest.mark.usefixtures("config")
 def test_match_spec_env(mock_packages, mutable_mock_env_path):
     """
     Concretize a spec with non-default options in an environment. Make
@@ -80,42 +81,44 @@ def test_match_spec_env(mock_packages, mutable_mock_env_path):
     """
     # Initial sanity check: we are planning on choosing a non-default
     # value, so make sure that is in fact not the default.
-    check_defaults = spack.cmd.parse_specs(["pkg-a"], concretize=True)[0]
+    check_defaults = spack.cmd.parse_specs(["a"], concretize=True)[0]
     assert not check_defaults.satisfies("foobar=baz")

     e = ev.create("test")
-    e.add("pkg-a foobar=baz")
+    e.add("a foobar=baz")
     e.concretize()
     with e:
-        env_spec = spack.cmd.matching_spec_from_env(spack.cmd.parse_specs(["pkg-a"])[0])
+        env_spec = spack.cmd.matching_spec_from_env(spack.cmd.parse_specs(["a"])[0])
         assert env_spec.satisfies("foobar=baz")
         assert env_spec.concrete


+@pytest.mark.usefixtures("config")
 def test_multiple_env_match_raises_error(mock_packages, mutable_mock_env_path):
     e = ev.create("test")
-    e.add("pkg-a foobar=baz")
-    e.add("pkg-a foobar=fee")
+    e.add("a foobar=baz")
+    e.add("a foobar=fee")
     e.concretize()
     with e:
         with pytest.raises(ev.SpackEnvironmentError) as exc_info:
-            spack.cmd.matching_spec_from_env(spack.cmd.parse_specs(["pkg-a"])[0])
+            spack.cmd.matching_spec_from_env(spack.cmd.parse_specs(["a"])[0])

         assert "matches multiple specs" in exc_info.value.message


+@pytest.mark.usefixtures("config")
 def test_root_and_dep_match_returns_root(mock_packages, mutable_mock_env_path):
     e = ev.create("test")
-    e.add("pkg-b@0.9")
-    e.add("pkg-a foobar=bar")  # Depends on b, should choose b@1.0
+    e.add("b@0.9")
+    e.add("a foobar=bar")  # Depends on b, should choose b@1.0
     e.concretize()
     with e:
         # This query matches the root b and b as a dependency of a. In that
         # case the root instance should be preferred.
-        env_spec1 = spack.cmd.matching_spec_from_env(spack.cmd.parse_specs(["pkg-b"])[0])
+        env_spec1 = spack.cmd.matching_spec_from_env(spack.cmd.parse_specs(["b"])[0])
         assert env_spec1.satisfies("@0.9")

-        env_spec2 = spack.cmd.matching_spec_from_env(spack.cmd.parse_specs(["pkg-b@1.0"])[0])
+        env_spec2 = spack.cmd.matching_spec_from_env(spack.cmd.parse_specs(["b@1.0"])[0])
         assert env_spec2

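The behavior pinned by these three tests: an ambiguous query against an environment raises, and when a query matches both a root and a dependency, `matching_spec_from_env` prefers the root. A condensed usage sketch with the same API and mock packages as above ("demo" is just an illustrative environment name):

import spack.cmd
import spack.environment as ev

e = ev.create("demo")
e.add("b@0.9")         # explicit root pinned at 0.9
e.add("a foobar=bar")  # depends on b, which concretizes to b@1.0
e.concretize()
with e:
    # "b" matches the root b@0.9 and the dependency b@1.0; the root wins.
    spec = spack.cmd.matching_spec_from_env(spack.cmd.parse_specs(["b"])[0])
    assert spec.satisfies("@0.9")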
@@ -10,7 +10,7 @@
 from spack import spack_version
 from spack.main import SpackCommand

-pytestmark = pytest.mark.usefixtures("mutable_config", "mutable_mock_repo")
+pytestmark = pytest.mark.usefixtures("config", "mutable_mock_repo")

 env = SpackCommand("env")
 add = SpackCommand("add")
@@ -51,8 +51,8 @@ def test_concretize_root_test_dependencies_are_concretized(unify, mutable_mock_e

     with ev.read("test") as e:
         e.unify = unify
-        add("pkg-a")
-        add("pkg-b")
+        add("a")
+        add("b")
         concretize("--test", "root")
         assert e.matching_spec("test-dependency")

@@ -640,4 +640,4 @@ def update_config(data):
     config("update", "-y", "config")

     with ev.Environment(str(tmpdir)) as e:
-        assert not e.manifest.yaml_content["spack"]["config"]["ccache"]
+        assert not e.manifest.pristine_yaml_content["spack"]["config"]["ccache"]
@@ -12,29 +12,29 @@


 @pytest.fixture(scope="function")
-def test_env(mutable_mock_env_path, mock_packages):
+def test_env(mutable_mock_env_path, config, mock_packages):
     ev.create("test")
     with ev.read("test") as e:
-        e.add("pkg-a@2.0 foobar=bar ^pkg-b@1.0")
-        e.add("pkg-a@1.0 foobar=bar ^pkg-b@0.9")
+        e.add("a@2.0 foobar=bar ^b@1.0")
+        e.add("a@1.0 foobar=bar ^b@0.9")
         e.concretize()
         e.write()


 def test_deconcretize_dep(test_env):
     with ev.read("test") as e:
-        deconcretize("-y", "pkg-b@1.0")
+        deconcretize("-y", "b@1.0")
         specs = [s for s, _ in e.concretized_specs()]

     assert len(specs) == 1
-    assert specs[0].satisfies("pkg-a@1.0")
+    assert specs[0].satisfies("a@1.0")


 def test_deconcretize_all_dep(test_env):
     with ev.read("test") as e:
         with pytest.raises(SpackCommandError):
-            deconcretize("-y", "pkg-b")
-        deconcretize("-y", "--all", "pkg-b")
+            deconcretize("-y", "b")
+        deconcretize("-y", "--all", "b")
         specs = [s for s, _ in e.concretized_specs()]

     assert len(specs) == 0
@@ -42,27 +42,27 @@ def test_deconcretize_all_dep(test_env):

 def test_deconcretize_root(test_env):
     with ev.read("test") as e:
-        output = deconcretize("-y", "--root", "pkg-b@1.0")
+        output = deconcretize("-y", "--root", "b@1.0")
         assert "No matching specs to deconcretize" in output
         assert len(e.concretized_order) == 2

-        deconcretize("-y", "--root", "pkg-a@2.0")
+        deconcretize("-y", "--root", "a@2.0")
         specs = [s for s, _ in e.concretized_specs()]

     assert len(specs) == 1
-    assert specs[0].satisfies("pkg-a@1.0")
+    assert specs[0].satisfies("a@1.0")


 def test_deconcretize_all_root(test_env):
     with ev.read("test") as e:
         with pytest.raises(SpackCommandError):
-            deconcretize("-y", "--root", "pkg-a")
+            deconcretize("-y", "--root", "a")

-        output = deconcretize("-y", "--root", "--all", "pkg-b")
+        output = deconcretize("-y", "--root", "--all", "b")
         assert "No matching specs to deconcretize" in output
         assert len(e.concretized_order) == 2

-        deconcretize("-y", "--root", "--all", "pkg-a")
+        deconcretize("-y", "--root", "--all", "a")
         specs = [s for s, _ in e.concretized_specs()]

     assert len(specs) == 0
@@ -14,6 +14,8 @@
 deprecate = SpackCommand("deprecate")
 find = SpackCommand("find")

+pytestmark = pytest.mark.not_on_windows("does not run on windows")
+

 def test_deprecate(mock_packages, mock_archive, mock_fetch, install_mockery):
     install("libelf@0.8.13")
Some files were not shown because too many files have changed in this diff.