Compare commits

15 commits: hs/fix/che ... v0.22.0
The author and date columns of the commit table carried only avatar placeholders in this capture; the commit SHA1s are:

- 5fe93fee1e
- 8207f11333
- 5bb5d2696f
- 55f37dffe5
- 252a5bd71b
- f55224f161
- 189ae4b06e
- 5e9c702fa7
- 965bb4d3c0
- 354f98c94a
- 5dce480154
- f634d48b7c
- 4daee565ae
- 8e4dbdc2d7
- 4f6adc03cd

.github/dependabot.yml (vendored, 11 changes)
```diff
@@ -5,10 +5,13 @@ updates:
     directory: "/"
     schedule:
       interval: "daily"
-  # Requirements to run style checks and build documentation
+  # Requirements to build documentation
   - package-ecosystem: "pip"
-    directories:
-    - "/.github/workflows/requirements/style/*"
-    - "/lib/spack/docs"
+    directory: "/lib/spack/docs"
     schedule:
       interval: "daily"
+  # Requirements to run style checks
+  - package-ecosystem: "pip"
+    directory: "/.github/workflows/style"
+    schedule:
+      interval: "daily"
```

.github/workflows/audit.yaml (vendored, 10 changes)
```diff
@@ -28,8 +28,8 @@ jobs:
     run:
       shell: ${{ matrix.system.shell }}
     steps:
-    - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
-    - uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
+    - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
+    - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
       with:
         python-version: ${{inputs.python_version}}
     - name: Install Python packages
@@ -44,7 +44,6 @@ jobs:
       run: |
         . share/spack/setup-env.sh
         coverage run $(which spack) audit packages
-        coverage run $(which spack) audit configs
         coverage run $(which spack) -d audit externals
         coverage combine
         coverage xml
@@ -53,7 +52,6 @@ jobs:
       run: |
         . share/spack/setup-env.sh
         spack -d audit packages
-        spack -d audit configs
         spack -d audit externals
     - name: Package audits (without coverage)
       if: ${{ runner.os == 'Windows' }}
@@ -61,11 +59,9 @@ jobs:
         . share/spack/setup-env.sh
         spack -d audit packages
         ./share/spack/qa/validate_last_exit.ps1
-        spack -d audit configs
-        ./share/spack/qa/validate_last_exit.ps1
         spack -d audit externals
         ./share/spack/qa/validate_last_exit.ps1
-    - uses: codecov/codecov-action@e28ff129e5465c2c0dcc6f003fc735cb6ae0c673
+    - uses: codecov/codecov-action@5ecb98a3c6b747ed38dc09f787459979aebb39be
       if: ${{ inputs.with_coverage == 'true' }}
       with:
         flags: unittests,audits
```
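The audit job boils down to a short sequence of Spack commands. As a rough local equivalent of the coverage-less variant on the v0.22.0 side (a sketch, assuming it is run from the root of a Spack checkout):

```sh
# Approximate the workflow's audit step locally
. share/spack/setup-env.sh
spack -d audit packages
spack -d audit externals
```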

.github/workflows/bootstrap.yml (vendored, 37 changes)
```diff
@@ -37,7 +37,7 @@ jobs:
           make patch unzip which xz python3 python3-devel tree \
           cmake bison
       - name: Checkout
-        uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
+        uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
         with:
           fetch-depth: 0
       - name: Bootstrap clingo
@@ -53,33 +53,27 @@ jobs:
     runs-on: ${{ matrix.runner }}
     strategy:
       matrix:
-        runner: ['macos-13', 'macos-14', "ubuntu-latest", "windows-latest"]
+        runner: ['macos-13', 'macos-14', "ubuntu-latest"]
     steps:
       - name: Setup macOS
-        if: ${{ matrix.runner != 'ubuntu-latest' && matrix.runner != 'windows-latest' }}
+        if: ${{ matrix.runner != 'ubuntu-latest' }}
         run: |
           brew install cmake bison tree
       - name: Checkout
-        uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
+        uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
         with:
           fetch-depth: 0
-      - uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
+      - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
        with:
          python-version: "3.12"
      - name: Bootstrap clingo
-        env:
-          SETUP_SCRIPT_EXT: ${{ matrix.runner == 'windows-latest' && 'ps1' || 'sh' }}
-          SETUP_SCRIPT_SOURCE: ${{ matrix.runner == 'windows-latest' && './' || 'source ' }}
-          USER_SCOPE_PARENT_DIR: ${{ matrix.runner == 'windows-latest' && '$env:userprofile' || '$HOME' }}
-          VALIDATE_LAST_EXIT: ${{ matrix.runner == 'windows-latest' && './share/spack/qa/validate_last_exit.ps1' || '' }}
        run: |
-          ${{ env.SETUP_SCRIPT_SOURCE }}share/spack/setup-env.${{ env.SETUP_SCRIPT_EXT }}
+          source share/spack/setup-env.sh
          spack bootstrap disable github-actions-v0.5
          spack bootstrap disable github-actions-v0.4
          spack external find --not-buildable cmake bison
          spack -d solve zlib
-          ${{ env.VALIDATE_LAST_EXIT }}
-          tree ${{ env.USER_SCOPE_PARENT_DIR }}/.spack/bootstrap/store/
+          tree ~/.spack/bootstrap/store/

  gnupg-sources:
    runs-on: ${{ matrix.runner }}
@@ -90,13 +84,15 @@ jobs:
      - name: Setup macOS
        if: ${{ matrix.runner != 'ubuntu-latest' }}
        run: |
-          brew install tree gawk
-          sudo rm -rf $(command -v gpg gpg2)
+          brew install tree
+          # Remove GnuPG since we want to bootstrap it
+          sudo rm -rf /usr/local/bin/gpg
      - name: Setup Ubuntu
        if: ${{ matrix.runner == 'ubuntu-latest' }}
-        run: sudo rm -rf $(command -v gpg gpg2 patchelf)
+        run: |
+          sudo rm -rf $(which gpg) $(which gpg2) $(which patchelf)
      - name: Checkout
-        uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
+        uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
        with:
          fetch-depth: 0
      - name: Bootstrap GnuPG
@@ -125,10 +121,10 @@ jobs:
        run: |
          sudo rm -rf $(which gpg) $(which gpg2) $(which patchelf)
      - name: Checkout
-        uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
+        uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
        with:
          fetch-depth: 0
-      - uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
+      - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
        with:
          python-version: |
            3.8
@@ -154,7 +150,7 @@ jobs:
            not_found=0
            old_path="$PATH"
            export PATH="$ver_dir:$PATH"
-            ./bin/spack-tmpconfig -b ./.github/workflows/bin/bootstrap-test.sh
+            ./bin/spack-tmpconfig -b ./.github/workflows/bootstrap-test.sh
            export PATH="$old_path"
          fi
        fi
@@ -168,3 +164,4 @@ jobs:
        source share/spack/setup-env.sh
        spack -d gpg list
        tree ~/.spack/bootstrap/store/
+
```
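The clingo bootstrap check in this workflow is easy to reproduce outside CI. A minimal sketch, assuming a fresh Spack checkout on Linux or macOS with cmake and bison installed (commands taken from the v0.22.0 side of the hunk):

```sh
# Force Spack to bootstrap clingo from sources rather than prebuilt binaries
source share/spack/setup-env.sh
spack bootstrap disable github-actions-v0.5
spack bootstrap disable github-actions-v0.4
spack external find --not-buildable cmake bison
spack -d solve zlib                     # triggers the bootstrap
tree ~/.spack/bootstrap/store/          # inspect what was bootstrapped
```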

.github/workflows/build-containers.yml (vendored, 21 changes)
```diff
@@ -40,7 +40,8 @@ jobs:
       # 1: Platforms to build for
       # 2: Base image (e.g. ubuntu:22.04)
       dockerfile: [[amazon-linux, 'linux/amd64,linux/arm64', 'amazonlinux:2'],
-                   [centos-stream9, 'linux/amd64,linux/arm64,linux/ppc64le', 'centos:stream9'],
+                   [centos7, 'linux/amd64,linux/arm64,linux/ppc64le', 'centos:7'],
+                   [centos-stream, 'linux/amd64,linux/arm64,linux/ppc64le', 'centos:stream'],
                    [leap15, 'linux/amd64,linux/arm64,linux/ppc64le', 'opensuse/leap:15'],
                    [ubuntu-focal, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:20.04'],
                    [ubuntu-jammy, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:22.04'],
@@ -55,7 +56,7 @@ jobs:
     if: github.repository == 'spack/spack'
     steps:
       - name: Checkout
-        uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
+        uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b

      - uses: docker/metadata-action@8e5442c4ef9f78752691e2d8f8d19755c6f78e81
        id: docker_meta
@@ -76,7 +77,7 @@ jobs:
        env:
          SPACK_YAML_OS: "${{ matrix.dockerfile[2] }}"
        run: |
-          .github/workflows/bin/generate_spack_yaml_containerize.sh
+          .github/workflows/generate_spack_yaml_containerize.sh
          . share/spack/setup-env.sh
          mkdir -p dockerfiles/${{ matrix.dockerfile[0] }}
          spack containerize --last-stage=bootstrap | tee dockerfiles/${{ matrix.dockerfile[0] }}/Dockerfile
@@ -87,19 +88,19 @@ jobs:
          fi

      - name: Upload Dockerfile
-        uses: actions/upload-artifact@834a144ee995460fba8ed112a2fc961b36a5ec5a
+        uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808
        with:
          name: dockerfiles_${{ matrix.dockerfile[0] }}
          path: dockerfiles

      - name: Set up QEMU
-        uses: docker/setup-qemu-action@49b3bc8e6bdd4a60e6116a5414239cba5943d3cf
+        uses: docker/setup-qemu-action@68827325e0b33c7199eb31dd4e31fbe9023e06e3

      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@988b5a0280414f521da01fcc63a27aeeb4b104db
+        uses: docker/setup-buildx-action@d70bba72b1f3fd22344832f00baa16ece964efeb

      - name: Log in to GitHub Container Registry
-        uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567
+        uses: docker/login-action@e92390c5fb421da1463c202d546fed0ec5c39f20
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
@@ -107,13 +108,13 @@ jobs:

      - name: Log in to DockerHub
        if: github.event_name != 'pull_request'
-        uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567
+        uses: docker/login-action@e92390c5fb421da1463c202d546fed0ec5c39f20
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Build & Deploy ${{ matrix.dockerfile[0] }}
-        uses: docker/build-push-action@5cd11c3a4ced054e52742c5fd54dca954e0edd85
+        uses: docker/build-push-action@2cdde995de11925a030ce8070c3d77a52ffcf1c0
        with:
          context: dockerfiles/${{ matrix.dockerfile[0] }}
          platforms: ${{ matrix.dockerfile[1] }}
@@ -126,7 +127,7 @@ jobs:
    needs: deploy-images
    steps:
      - name: Merge Artifacts
-        uses: actions/upload-artifact/merge@834a144ee995460fba8ed112a2fc961b36a5ec5a
+        uses: actions/upload-artifact/merge@65462800fd760344b1a7b4382951275a0abb4808
        with:
          name: dockerfiles
          pattern: dockerfiles_*
```
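The Dockerfile-generation step can be approximated locally. A sketch, assuming a Spack checkout and substituting the concrete value `ubuntu-jammy` for the workflow's `matrix.dockerfile[0]` variable:

```sh
# Generate a bootstrap-stage Dockerfile the way the workflow does
. share/spack/setup-env.sh
mkdir -p dockerfiles/ubuntu-jammy
spack containerize --last-stage=bootstrap | tee dockerfiles/ubuntu-jammy/Dockerfile
```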

.github/workflows/ci.yaml (vendored, 16 changes)
```diff
@@ -36,7 +36,7 @@ jobs:
       core: ${{ steps.filter.outputs.core }}
       packages: ${{ steps.filter.outputs.packages }}
     steps:
-    - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
+    - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
       if: ${{ github.event_name == 'push' }}
       with:
         fetch-depth: 0
@@ -53,13 +53,6 @@ jobs:
         - 'var/spack/repos/builtin/packages/clingo/**'
         - 'var/spack/repos/builtin/packages/python/**'
         - 'var/spack/repos/builtin/packages/re2c/**'
-        - 'var/spack/repos/builtin/packages/gnupg/**'
-        - 'var/spack/repos/builtin/packages/libassuan/**'
-        - 'var/spack/repos/builtin/packages/libgcrypt/**'
-        - 'var/spack/repos/builtin/packages/libgpg-error/**'
-        - 'var/spack/repos/builtin/packages/libksba/**'
-        - 'var/spack/repos/builtin/packages/npth/**'
-        - 'var/spack/repos/builtin/packages/pinentry/**'
         - 'lib/spack/**'
         - 'share/spack/**'
         - '.github/workflows/bootstrap.yml'
@@ -84,8 +77,13 @@ jobs:
     needs: [ prechecks, changes ]
     uses: ./.github/workflows/unit_tests.yaml
     secrets: inherit
+  windows:
+    if: ${{ github.repository == 'spack/spack' && needs.changes.outputs.core == 'true' }}
+    needs: [ prechecks ]
+    uses: ./.github/workflows/windows_python.yml
+    secrets: inherit
   all:
-    needs: [ unit-tests, bootstrap ]
+    needs: [ windows, unit-tests, bootstrap ]
     runs-on: ubuntu-latest
     steps:
     - name: Success
```

.github/workflows/install_spack.sh (vendored, new executable file, 8 lines)
```diff
@@ -0,0 +1,8 @@
+#!/usr/bin/env sh
+. share/spack/setup-env.sh
+echo -e "config:\n build_jobs: 2" > etc/spack/config.yaml
+spack config add "packages:all:target:[x86_64]"
+spack compiler find
+spack compiler info apple-clang
+spack debug report
+spack solve zlib
```
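A minimal usage sketch for the new helper; the `spack compiler info apple-clang` line implies it is meant for the macOS runners, so running it elsewhere would likely fail at that step:

```sh
# Run the CI helper script from the repository root (illustrative)
sh .github/workflows/install_spack.sh
```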

.github/workflows/nightly-win-builds.yml (vendored, 4 changes)
```diff
@@ -14,10 +14,10 @@ jobs:
   build-paraview-deps:
     runs-on: windows-latest
     steps:
-    - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
+    - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
       with:
         fetch-depth: 0
-    - uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
+    - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
       with:
         python-version: 3.9
     - name: Install Python packages
```

.github/workflows/requirements/style/requirements.txt (deleted, 7 lines; the file header was not captured, so this path is inferred from the dependabot.yml hunk above)

```diff
@@ -1,7 +0,0 @@
-black==24.8.0
-clingo==5.7.1
-flake8==7.1.1
-isort==5.13.2
-mypy==1.8.0
-types-six==1.16.21.20240513
-vermin==1.6.0
```

.github/workflows/style/requirements.txt (vendored, new file, 7 lines)
```diff
@@ -0,0 +1,7 @@
+black==24.4.2
+clingo==5.7.1
+flake8==7.0.0
+isort==5.13.2
+mypy==1.8.0
+types-six==1.16.21.9
+vermin==1.6.0
```

.github/workflows/unit_tests.yaml (vendored, 81 changes)
```diff
@@ -16,34 +16,45 @@ jobs:
       matrix:
         os: [ubuntu-latest]
         python-version: ['3.7', '3.8', '3.9', '3.10', '3.11', '3.12']
+        concretizer: ['clingo']
         on_develop:
         - ${{ github.ref == 'refs/heads/develop' }}
         include:
+        - python-version: '3.11'
+          os: ubuntu-latest
+          concretizer: original
+          on_develop: ${{ github.ref == 'refs/heads/develop' }}
         - python-version: '3.6'
           os: ubuntu-20.04
+          concretizer: clingo
           on_develop: ${{ github.ref == 'refs/heads/develop' }}
         exclude:
         - python-version: '3.7'
           os: ubuntu-latest
+          concretizer: 'clingo'
           on_develop: false
         - python-version: '3.8'
           os: ubuntu-latest
+          concretizer: 'clingo'
           on_develop: false
         - python-version: '3.9'
           os: ubuntu-latest
+          concretizer: 'clingo'
           on_develop: false
         - python-version: '3.10'
           os: ubuntu-latest
+          concretizer: 'clingo'
           on_develop: false
         - python-version: '3.11'
           os: ubuntu-latest
+          concretizer: 'clingo'
           on_develop: false

     steps:
-    - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
+    - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
       with:
         fetch-depth: 0
-    - uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
+    - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
       with:
         python-version: ${{ matrix.python-version }}
     - name: Install System packages
@@ -61,7 +72,7 @@ jobs:
       run: |
         # Need this for the git tests to succeed.
         git --version
-        . .github/workflows/bin/setup_git.sh
+        . .github/workflows/setup_git.sh
     - name: Bootstrap clingo
       if: ${{ matrix.concretizer == 'clingo' }}
       env:
@@ -74,12 +85,13 @@ jobs:
     - name: Run unit tests
       env:
         SPACK_PYTHON: python
         SPACK_TEST_SOLVER: ${{ matrix.concretizer }}
         SPACK_TEST_PARALLEL: 2
         COVERAGE: true
+        UNIT_TEST_COVERAGE: ${{ matrix.python-version == '3.11' }}
       run: |
         share/spack/qa/run-unit-tests
-    - uses: codecov/codecov-action@e28ff129e5465c2c0dcc6f003fc735cb6ae0c673
+    - uses: codecov/codecov-action@5ecb98a3c6b747ed38dc09f787459979aebb39be
       with:
         flags: unittests,linux,${{ matrix.concretizer }}
         token: ${{ secrets.CODECOV_TOKEN }}
@@ -88,10 +100,10 @@ jobs:
   shell:
     runs-on: ubuntu-latest
     steps:
-    - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
+    - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
       with:
         fetch-depth: 0
-    - uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
+    - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
       with:
         python-version: '3.11'
     - name: Install System packages
@@ -106,13 +118,13 @@ jobs:
       run: |
         # Need this for the git tests to succeed.
         git --version
-        . .github/workflows/bin/setup_git.sh
+        . .github/workflows/setup_git.sh
     - name: Run shell tests
       env:
         COVERAGE: true
       run: |
         share/spack/qa/run-shell-tests
-    - uses: codecov/codecov-action@e28ff129e5465c2c0dcc6f003fc735cb6ae0c673
+    - uses: codecov/codecov-action@5ecb98a3c6b747ed38dc09f787459979aebb39be
       with:
         flags: shelltests,linux
         token: ${{ secrets.CODECOV_TOKEN }}
@@ -129,13 +141,13 @@ jobs:
         dnf install -y \
             bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
             make patch tcl unzip which xz
-    - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
+    - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
     - name: Setup repo and non-root user
       run: |
         git --version
         git config --global --add safe.directory /__w/spack/spack
         git fetch --unshallow
-        . .github/workflows/bin/setup_git.sh
+        . .github/workflows/setup_git.sh
         useradd spack-test
         chown -R spack-test .
     - name: Run unit tests
@@ -148,10 +160,10 @@ jobs:
   clingo-cffi:
     runs-on: ubuntu-latest
     steps:
-    - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
+    - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
       with:
         fetch-depth: 0
-    - uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
+    - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
       with:
         python-version: '3.11'
     - name: Install System packages
@@ -166,13 +178,14 @@ jobs:
       run: |
         # Need this for the git tests to succeed.
         git --version
-        . .github/workflows/bin/setup_git.sh
+        . .github/workflows/setup_git.sh
     - name: Run unit tests (full suite with coverage)
       env:
         COVERAGE: true
+        SPACK_TEST_SOLVER: clingo
       run: |
         share/spack/qa/run-unit-tests
-    - uses: codecov/codecov-action@e28ff129e5465c2c0dcc6f003fc735cb6ae0c673
+    - uses: codecov/codecov-action@5ecb98a3c6b747ed38dc09f787459979aebb39be
       with:
         flags: unittests,linux,clingo
         token: ${{ secrets.CODECOV_TOKEN }}
@@ -185,10 +198,10 @@ jobs:
       os: [macos-13, macos-14]
       python-version: ["3.11"]
     steps:
-    - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
+    - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
       with:
         fetch-depth: 0
-    - uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
+    - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
       with:
         python-version: ${{ matrix.python-version }}
     - name: Install Python packages
@@ -200,48 +213,18 @@ jobs:
         brew install dash fish gcc gnupg2 kcov
     - name: Run unit tests
       env:
         SPACK_TEST_SOLVER: clingo
         SPACK_TEST_PARALLEL: 4
       run: |
         git --version
-        . .github/workflows/bin/setup_git.sh
+        . .github/workflows/setup_git.sh
         . share/spack/setup-env.sh
         $(which spack) bootstrap disable spack-install
         $(which spack) solve zlib
         common_args=(--dist loadfile --tx '4*popen//python=./bin/spack-tmpconfig python -u ./bin/spack python' -x)
         $(which spack) unit-test --verbose --cov --cov-config=pyproject.toml --cov-report=xml:coverage.xml "${common_args[@]}"
-    - uses: codecov/codecov-action@e28ff129e5465c2c0dcc6f003fc735cb6ae0c673
+    - uses: codecov/codecov-action@5ecb98a3c6b747ed38dc09f787459979aebb39be
       with:
         flags: unittests,macos
         token: ${{ secrets.CODECOV_TOKEN }}
         verbose: true
-  # Run unit tests on Windows
-  windows:
-    defaults:
-      run:
-        shell:
-          powershell Invoke-Expression -Command "./share/spack/qa/windows_test_setup.ps1"; {0}
-    runs-on: windows-latest
-    steps:
-    - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
-      with:
-        fetch-depth: 0
-    - uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
-      with:
-        python-version: 3.9
-    - name: Install Python packages
-      run: |
-        python -m pip install --upgrade pip pywin32 setuptools pytest-cov clingo
-    - name: Create local develop
-      run: |
-        ./.github/workflows/bin/setup_git.ps1
-    - name: Unit Test
-      run: |
-        spack unit-test -x --verbose --cov --cov-config=pyproject.toml
-        ./share/spack/qa/validate_last_exit.ps1
-        coverage combine -a
-        coverage xml
-    - uses: codecov/codecov-action@e28ff129e5465c2c0dcc6f003fc735cb6ae0c673
-      with:
-        flags: unittests,windows
-        token: ${{ secrets.CODECOV_TOKEN }}
-        verbose: true
```
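The Linux unit-test job reduces to a few environment variables plus one QA script. A local sketch, assuming a Spack checkout and the system packages the job installs (values taken from the hunk above):

```sh
# Reproduce the unit-test step of the workflow locally
export SPACK_TEST_SOLVER=clingo
export SPACK_TEST_PARALLEL=2
export COVERAGE=true
share/spack/qa/run-unit-tests
```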

.github/workflows/valid-style.yml (vendored, 18 changes)
```diff
@@ -18,15 +18,15 @@ jobs:
   validate:
     runs-on: ubuntu-latest
     steps:
-    - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
-    - uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
+    - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
+    - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
       with:
         python-version: '3.11'
         cache: 'pip'
     - name: Install Python Packages
       run: |
         pip install --upgrade pip setuptools
-        pip install -r .github/workflows/requirements/style/requirements.txt
+        pip install -r .github/workflows/style/requirements.txt
     - name: vermin (Spack's Core)
       run: vermin --backport importlib --backport argparse --violations --backport typing -t=3.6- -vvv lib/spack/spack/ lib/spack/llnl/ bin/
     - name: vermin (Repositories)
@@ -35,22 +35,22 @@ jobs:
   style:
     runs-on: ubuntu-latest
     steps:
-    - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
+    - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
       with:
         fetch-depth: 0
-    - uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
+    - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
       with:
         python-version: '3.11'
         cache: 'pip'
     - name: Install Python packages
       run: |
         pip install --upgrade pip setuptools
-        pip install -r .github/workflows/requirements/style/requirements.txt
+        pip install -r .github/workflows/style/requirements.txt
     - name: Setup git configuration
       run: |
         # Need this for the git tests to succeed.
         git --version
-        . .github/workflows/bin/setup_git.sh
+        . .github/workflows/setup_git.sh
     - name: Run style tests
       run: |
         share/spack/qa/run-style-tests
@@ -70,13 +70,13 @@ jobs:
         dnf install -y \
             bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
             make patch tcl unzip which xz
-    - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
+    - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
     - name: Setup repo and non-root user
       run: |
         git --version
         git config --global --add safe.directory /__w/spack/spack
         git fetch --unshallow
-        . .github/workflows/bin/setup_git.sh
+        . .github/workflows/setup_git.sh
         useradd spack-test
         chown -R spack-test .
     - name: Bootstrap Spack development environment
```
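A local approximation of the style job, assuming a Spack checkout; the requirements path here is the v0.22.0 side of the hunk:

```sh
# Install the pinned style tools and run the same QA script as CI
pip install -r .github/workflows/style/requirements.txt
share/spack/qa/run-style-tests
```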

.github/workflows/windows_python.yml (vendored, new file, 83 lines)
```diff
@@ -0,0 +1,83 @@
+name: windows
+
+on:
+  workflow_call:
+
+concurrency:
+  group: windows-${{github.ref}}-${{github.event.pull_request.number || github.run_number}}
+  cancel-in-progress: true
+
+defaults:
+  run:
+    shell:
+      powershell Invoke-Expression -Command "./share/spack/qa/windows_test_setup.ps1"; {0}
+jobs:
+  unit-tests:
+    runs-on: windows-latest
+    steps:
+    - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
+      with:
+        fetch-depth: 0
+    - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
+      with:
+        python-version: 3.9
+    - name: Install Python packages
+      run: |
+        python -m pip install --upgrade pip pywin32 setuptools pytest-cov clingo
+    - name: Create local develop
+      run: |
+        ./.github/workflows/setup_git.ps1
+    - name: Unit Test
+      run: |
+        spack unit-test -x --verbose --cov --cov-config=pyproject.toml --ignore=lib/spack/spack/test/cmd
+        ./share/spack/qa/validate_last_exit.ps1
+        coverage combine -a
+        coverage xml
+    - uses: codecov/codecov-action@5ecb98a3c6b747ed38dc09f787459979aebb39be
+      with:
+        flags: unittests,windows
+        token: ${{ secrets.CODECOV_TOKEN }}
+        verbose: true
+  unit-tests-cmd:
+    runs-on: windows-latest
+    steps:
+    - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
+      with:
+        fetch-depth: 0
+    - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
+      with:
+        python-version: 3.9
+    - name: Install Python packages
+      run: |
+        python -m pip install --upgrade pip pywin32 setuptools coverage pytest-cov clingo
+    - name: Create local develop
+      run: |
+        ./.github/workflows/setup_git.ps1
+    - name: Command Unit Test
+      run: |
+        spack unit-test -x --verbose --cov --cov-config=pyproject.toml lib/spack/spack/test/cmd
+        ./share/spack/qa/validate_last_exit.ps1
+        coverage combine -a
+        coverage xml
+    - uses: codecov/codecov-action@5ecb98a3c6b747ed38dc09f787459979aebb39be
+      with:
+        flags: unittests,windows
+        token: ${{ secrets.CODECOV_TOKEN }}
+        verbose: true
+  build-abseil:
+    runs-on: windows-latest
+    steps:
+    - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
+      with:
+        fetch-depth: 0
+    - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
+      with:
+        python-version: 3.9
+    - name: Install Python packages
+      run: |
+        python -m pip install --upgrade pip pywin32 setuptools coverage
+    - name: Build Test
+      run: |
+        spack compiler find
+        spack -d external find cmake ninja
+        spack -d install abseil-cpp
```

.gitignore (vendored, 1 change)
```diff
@@ -7,7 +7,6 @@
 /var/spack/environments
 /var/spack/repos/*/index.yaml
 /var/spack/repos/*/lock
-/var/spack/repos/*/packages.zip
 /opt
 # Ignore everything in /etc/spack except /etc/spack/defaults
 /etc/spack/*
```

CHANGELOG.md (102 changes)
````diff
@@ -1,4 +1,3 @@
-
 # v0.22.0 (2024-05-12)
 
 `v0.22.0` is a major feature release.
@@ -83,22 +82,21 @@
   spack install zlib cflags=\"-O2 -g\"
   ```
 
-That will now result in an error, but you can now write what you probably expected
-to work in the first place:
+That will now result in an error. The correct format (which you probably expected in
+the first place) is:
 
 ```
 spack install zlib cflags="-O2 -g"
 ```
 
-Quoted can also now include special characters, so you can supply flags like:
+Quoted can also now include special characters, enabling commands like:
 
 ```
-spack intall zlib ldflags='-Wl,-rpath=$ORIGIN/_libs'
+spack install zlib ldflags='-Wl,-rpath=$ORIGIN/_libs'
 ```
 
-To reduce ambiguity in parsing, we now require that you *not* put spaces around `=`
-and `==` when for flags or variants. This would not have broken before but will now
-result in an error:
+To reduce ambiguity in parsing, do *not* put spaces around `=` and `==` in
+flags or variants, as this will now result in an error:
 
 ```
 spack install zlib cflags = "-O2 -g"
````
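To make the changelog's quoting rules concrete (both commands appear in the text above; only the comments are added here):

```sh
# Correct: quotes group the two flags, and there are no spaces around '='
spack install zlib cflags="-O2 -g"
# Single quotes keep $ORIGIN from being expanded by the shell
spack install zlib ldflags='-Wl,-rpath=$ORIGIN/_libs'
```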
The split view duplicated several unchanged changelog lists on both sides; they are shown once below as context.

````diff
@@ -164,7 +162,7 @@
   include: ["openmpi"]
   ```
 
-6. **New `redistribute()` directive**
+6. **Add new `redistribute()` directive**
 
    Some packages can't be redistributed in source or binary form. We need an explicit
    way to say that in a package.
@@ -257,59 +255,56 @@
 ## New commands, options, and directives
 
 * Allow packages to be pushed to build cache after install from source (#42423)
 * `spack develop`: stage build artifacts in same root as non-dev builds #41373
 * Don't delete `spack develop` build artifacts after install (#43424)
 * `spack find`: add options for local/upstream only (#42999)
 * `spack logs`: print log files for packages (either partially built or installed) (#42202)
 * `patch`: support reversing patches (#43040)
 * `develop`: Add -b/--build-directory option to set build_directory package attribute (#39606)
 * `spack list`: add `--namesapce` / `--repo` option (#41948)
 * directives: add `checked_by` field to `license()`, add some license checks
 * `spack gc`: add options for environments and build dependencies (#41731)
 * Add `--create` to `spack env activate` (#40896)
 
 ## Performance improvements
 
 * environment.py: fix excessive re-reads (#43746)
 * ruamel yaml: fix quadratic complexity bug (#43745)
 * Refactor to improve `spec format` speed (#43712)
 * Do not acquire a write lock on the env post install if no views (#43505)
 * asp.py: fewer calls to `spec.copy()` (#43715)
 * spec.py: early return in `__str__`
 * avoid `jinja2` import at startup unless needed (#43237)
 
 ## Other new features of note
 
 * `archspec`: update to `v0.2.4`: support for Windows, bugfixes for `neoverse-v1` and
   `neoverse-v2` detection.
 * `spack config get`/`blame`: with no args, show entire config
 * `spack env create <env>`: dir if dir-like (#44024)
 * ASP-based solver: update os compatibility for macOS (#43862)
 * Add handling of custom ssl certs in urllib ops (#42953)
 * Add ability to rename environments (#43296)
 * Add config option and compiler support to reuse across OS's (#42693)
 * Support for prereleases (#43140)
 * Only reuse externals when configured (#41707)
 * Environments: Add support for including views (#42250)
-
-## Binary caches
-* Build cache: make signed/unsigned a mirror property (#41507)
-* tools stack
+* Make signed/unsigned a mirror configuration property (#41507)
 
 ## Removals, deprecations, and syntax changes
 * remove `dpcpp` compiler and package (#43418)
-* spack load: remove --only argument (#42120)
+* `spack load`: remove --only argument (#42120)
 
 ## Notable Bugfixes
 * repo.py: drop deleted packages from provider cache (#43779)
 * Allow `+` in module file names (#41999)
 * `cmd/python`: use runpy to allow multiprocessing in scripts (#41789)
 * Show extension commands with spack -h (#41726)
 * Support environment variable expansion inside module projections (#42917)
 * Alert user to failed concretizations (#42655)
 * shell: fix zsh color formatting for PS1 in environments (#39497)
 * spack mirror create --all: include patches (#41579)
 
 ## Spack community stats
 
@@ -319,6 +314,7 @@
 * 344 committers to packages
 * 45 committers to core
 
+
 # v0.21.2 (2024-03-01)
 
 ## Bugfixes
@@ -348,7 +344,7 @@
 - spack graph: fix coloring with environments (#41240)
 - spack info: sort variants in --variants-by-name (#41389)
 - Spec.format: error on old style format strings (#41934)
 - ASP-based solver:
   - fix infinite recursion when computing concretization errors (#41061)
   - don't error for type mismatch on preferences (#41138)
   - don't emit spurious debug output (#41218)
````

README.md (file header not captured; identified by content)

```diff
@@ -32,7 +32,7 @@
 
 Spack is a multi-platform package manager that builds and installs
 multiple versions and configurations of software. It works on Linux,
-macOS, Windows, and many supercomputers. Spack is non-destructive: installing a
+macOS, and many supercomputers. Spack is non-destructive: installing a
 new version of a package does not break existing installations, so many
 configurations of the same package can coexist.
 
```

bin/spack-python (file header not captured; identified by content)

```diff
@@ -22,4 +22,4 @@
 #
 # This is compatible across platforms.
 #
-exec spack python "$@"
+exec /usr/bin/env spack python "$@"
```

bin/spack.bat (file header not captured; identified by content)

```diff
@@ -188,27 +188,25 @@ if NOT "%_sp_args%"=="%_sp_args:--help=%" (
 goto :end_switch
 
 :case_load
-if NOT defined _sp_args (
-  exit /B 0
-)
-
-:: If args contain --bat, or -h/--help: just execute.
-if NOT "%_sp_args%"=="%_sp_args:--help=%" (
-  goto :default_case
-) else if NOT "%_sp_args%"=="%_sp_args:-h=%" (
-  goto :default_case
-) else if NOT "%_sp_args%"=="%_sp_args:--bat=%" (
-  goto :default_case
-) else if NOT "%_sp_args%"=="%_sp_args:--list=%" (
-  goto :default_case
+:: If args contain --sh, --csh, or -h/--help: just execute.
+if defined _sp_args (
+  if NOT "%_sp_args%"=="%_sp_args:--help=%" (
+    goto :default_case
+  ) else if NOT "%_sp_args%"=="%_sp_args:-h=%" (
+    goto :default_case
+  ) else if NOT "%_sp_args%"=="%_sp_args:--bat=%" (
+    goto :default_case
+  )
 )
 
 for /f "tokens=* USEBACKQ" %%I in (
-  `python "%spack%" %_sp_flags% %_sp_subcommand% --bat %_sp_args%`
-) do %%I
+  `python "%spack%" %_sp_flags% %_sp_subcommand% --bat %_sp_args%`) do %%I
 
 goto :end_switch
 
 :case_unload
 goto :case_load
 
 :default_case
 python "%spack%" %_sp_flags% %_sp_subcommand% %_sp_args%
 goto :end_switch
```

bin/spack.ps1 (file header not captured; identified by content)

```diff
@@ -144,5 +144,3 @@ switch($SpackSubCommand)
     "unload" {Invoke-SpackLoad}
     default {python "$Env:SPACK_ROOT/bin/spack" $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs}
 }
-
-exit $LASTEXITCODE
```

etc/spack/defaults/config.yaml (file header not captured; identified by content)

```diff
@@ -170,6 +170,23 @@ config:
   # If set to true, Spack will use ccache to cache C compiles.
   ccache: false
 
 
+  # The concretization algorithm to use in Spack. Options are:
+  #
+  # 'clingo': Uses a logic solver under the hood to solve DAGs with full
+  #           backtracking and optimization for user preferences. Spack will
+  #           try to bootstrap the logic solver, if not already available.
+  #
+  # 'original': Spack's original greedy, fixed-point concretizer. This
+  #             algorithm can make decisions too early and will not backtrack
+  #             sufficiently for many specs. This will soon be deprecated in
+  #             favor of clingo.
+  #
+  # See `concretizer.yaml` for more settings you can fine-tune when
+  # using clingo.
+  concretizer: clingo
+
+
   # How long to wait to lock the Spack installation database. This lock is used
   # when Spack needs to manage its own package metadata and all operations are
   # expected to complete within the default time limit. The timeout should
```
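To see which concretizer setting a Spack installation is actually using, you can dump the config section; a sketch, assuming `spack` is on `PATH`:

```sh
# Print the effective config section and look for the concretizer key
spack config get config | grep concretizer
```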

etc/spack/defaults/cray/modules.yaml (new file, 16 lines)
```diff
@@ -0,0 +1,16 @@
+# -------------------------------------------------------------------------
+# This is the default configuration for Spack's module file generation.
+#
+# Settings here are versioned with Spack and are intended to provide
+# sensible defaults out of the box. Spack maintainers should edit this
+# file to keep it current.
+#
+# Users can override these settings by editing the following files.
+#
+# Per-spack-instance settings (overrides defaults):
+#   $SPACK_ROOT/etc/spack/modules.yaml
+#
+# Per-user settings (overrides default and site settings):
+#   ~/.spack/modules.yaml
+# -------------------------------------------------------------------------
+modules: {}
```

etc/spack/defaults/cray/packages.yaml (new file, 19 lines)
```diff
@@ -0,0 +1,19 @@
+# -------------------------------------------------------------------------
+# This file controls default concretization preferences for Spack.
+#
+# Settings here are versioned with Spack and are intended to provide
+# sensible defaults out of the box. Spack maintainers should edit this
+# file to keep it current.
+#
+# Users can override these settings by editing the following files.
+#
+# Per-spack-instance settings (overrides defaults):
+#   $SPACK_ROOT/etc/spack/packages.yaml
+#
+# Per-user settings (overrides default and site settings):
+#   ~/.spack/packages.yaml
+# -------------------------------------------------------------------------
+packages:
+  all:
+    providers:
+      iconv: [glibc, musl, libiconv]
```

etc/spack/defaults/linux/packages.yaml (new file, 19 lines)
```diff
@@ -0,0 +1,19 @@
+# -------------------------------------------------------------------------
+# This file controls default concretization preferences for Spack.
+#
+# Settings here are versioned with Spack and are intended to provide
+# sensible defaults out of the box. Spack maintainers should edit this
+# file to keep it current.
+#
+# Users can override these settings by editing the following files.
+#
+# Per-spack-instance settings (overrides defaults):
+#   $SPACK_ROOT/etc/spack/packages.yaml
+#
+# Per-user settings (overrides default and site settings):
+#   ~/.spack/packages.yaml
+# -------------------------------------------------------------------------
+packages:
+  all:
+    providers:
+      iconv: [glibc, musl, libiconv]
```

etc/spack/defaults/packages.yaml (file header not captured; identified by content)

```diff
@@ -20,14 +20,11 @@ packages:
     awk: [gawk]
     armci: [armcimpi]
     blas: [openblas, amdblis]
-    c: [gcc]
-    cxx: [gcc]
     D: [ldc]
     daal: [intel-oneapi-daal]
     elf: [elfutils]
     fftw-api: [fftw, amdfftw]
     flame: [libflame, amdlibflame]
-    fortran: [gcc]
     fortran-rt: [gcc-runtime, intel-oneapi-runtime]
     fuse: [libfuse]
     gl: [glx, osmesa]
@@ -41,9 +38,10 @@ packages:
     lapack: [openblas, amdlibflame]
     libc: [glibc, musl]
     libgfortran: [ gcc-runtime ]
-    libglx: [mesa+glx]
+    libglx: [mesa+glx, mesa18+glx]
     libifcore: [ intel-oneapi-runtime ]
     libllvm: [llvm]
+    libosmesa: [mesa+osmesa, mesa18+osmesa]
     lua-lang: [lua, lua-luajit-openresty, lua-luajit]
     luajit: [lua-luajit-openresty, lua-luajit]
     mariadb-client: [mariadb-c-client, mariadb]
@@ -64,7 +62,6 @@ packages:
     tbb: [intel-tbb]
     unwind: [libunwind]
     uuid: [util-linux-uuid, libuuid]
-    wasi-sdk: [wasi-sdk-prebuilt]
     xxd: [xxd-standalone, vim]
     yacc: [bison, byacc]
     ziglang: [zig]
```

A QA/test configuration file (name not captured in this view):

```diff
@@ -1,5 +1,6 @@
 config:
   locks: false
+  concretizer: clingo
   build_stage::
     - '$spack/.staging'
   stage_name: '{name}-{version}-{hash:7}'
```

lib/spack/docs/basic_usage.rst (file header not captured; identified by content)

```diff
@@ -1433,12 +1433,22 @@ the reserved keywords ``platform``, ``os`` and ``target``:
    $ spack install libelf os=ubuntu18.04
    $ spack install libelf target=broadwell
 
+or together by using the reserved keyword ``arch``:
+
+.. code-block:: console
+
+   $ spack install libelf arch=cray-CNL10-haswell
+
 Normally users don't have to bother specifying the architecture if they
 are installing software for their current host, as in that case the
 values will be detected automatically. If you need fine-grained control
 over which packages use which targets (or over *all* packages' default
 target), see :ref:`package-preferences`.
 
+.. admonition:: Cray machines
+
+   The situation is a little bit different for Cray machines and a detailed
+   explanation on how the architecture can be set on them can be found at :ref:`cray-support`
+
 .. _support-for-microarchitectures:
```

lib/spack/docs/build_systems/autotoolspackage.rst (file header not captured; identified by content)

```diff
@@ -147,15 +147,6 @@ example, the ``bash`` shell is used to run the ``autogen.sh`` script.
       def autoreconf(self, spec, prefix):
           which("bash")("autogen.sh")
 
-If the ``package.py`` has build instructions in a separate
-:ref:`builder class <multiple_build_systems>`, the signature for a phase changes slightly:
-
-.. code-block:: python
-
-   class AutotoolsBuilder(AutotoolsBuilder):
-       def autoreconf(self, pkg, spec, prefix):
-           which("bash")("autogen.sh")
-
 """""""""""""""""""""""""""""""""""""""
 patching configure or Makefile.in files
 """""""""""""""""""""""""""""""""""""""
```

lib/spack/docs/build_systems/inteloneapipackage.rst (file header not captured; identified by content)

```diff
@@ -25,7 +25,7 @@ use Spack to build packages with the tools.
 The Spack Python class ``IntelOneapiPackage`` is a base class that is
 used by ``IntelOneapiCompilers``, ``IntelOneapiMkl``,
 ``IntelOneapiTbb`` and other classes to implement the oneAPI
-packages. Search for ``oneAPI`` at `packages.spack.io <https://packages.spack.io>`_ for the full
+packages. Search for ``oneAPI`` at `<packages.spack.io>`_ for the full
 list of available oneAPI packages, or use::
 
   spack list -d oneAPI
```

lib/spack/docs/chain.rst (file header not captured; identified by content)

```diff
@@ -11,8 +11,7 @@ Chaining Spack Installations
 
 You can point your Spack installation to another installation to use any
 packages that are installed there. To register the other Spack instance,
-you can add it as an entry to ``upstreams.yaml`` at any of the
-:ref:`configuration-scopes`:
+you can add it as an entry to ``upstreams.yaml``:
 
 .. code-block:: yaml
 
@@ -23,8 +22,7 @@ you can add it as an entry to ``upstreams.yaml`` at any of the
       install_tree: /path/to/another/spack/opt/spack
 
 ``install_tree`` must point to the ``opt/spack`` directory inside of the
-Spack base directory, or the location of the ``install_tree`` defined
-in :ref:`config.yaml <config-yaml>`.
+Spack base directory.
 
 Once the upstream Spack instance has been added, ``spack find`` will
 automatically check the upstream instance when querying installed packages,
```

lib/spack/docs/conf.py (file header not captured; identified by content)

```diff
@@ -206,7 +206,6 @@ def setup(sphinx):
     ("py:class", "six.moves.urllib.parse.ParseResult"),
     ("py:class", "TextIO"),
     ("py:class", "hashlib._Hash"),
-    ("py:class", "concurrent.futures._base.Executor"),
     # Spack classes that are private and we don't want to expose
     ("py:class", "spack.provider_index._IndexBase"),
     ("py:class", "spack.repo._PrependFileLoader"),
```

lib/spack/docs/containers.rst (file header not captured; identified by content)

```diff
@@ -203,9 +203,12 @@ The OS that are currently supported are summarized in the table below:
    * - Ubuntu 24.04
      - ``ubuntu:24.04``
      - ``spack/ubuntu-noble``
-   * - CentOS Stream9
-     - ``quay.io/centos/centos:stream9``
-     - ``spack/centos-stream9``
+   * - CentOS 7
+     - ``centos:7``
+     - ``spack/centos7``
+   * - CentOS Stream
+     - ``quay.io/centos/centos:stream``
+     - ``spack/centos-stream``
    * - openSUSE Leap
      - ``opensuse/leap``
      - ``spack/leap15``
```

lib/spack/docs/environments.rst (file header not captured; identified by content)

```diff
@@ -931,84 +931,32 @@ This allows for a much-needed reduction in redundancy between packages
 and constraints.
 
 
------------------
-Environment Views
------------------
+----------------
+Filesystem Views
+----------------
 
-Spack Environments can have an associated filesystem view, which is a directory
-with a more traditional structure ``<view>/bin``, ``<view>/lib``, ``<view>/include``
-in which all files of the installed packages are linked.
-
-By default a view is created for each environment, thanks to the ``view: true``
-option in the ``spack.yaml`` manifest file:
-
-.. code-block:: yaml
-
-   spack:
-     specs: [perl, python]
-     view: true
-
-The view is created in a hidden directory ``.spack-env/view`` relative to the environment.
-If you've used ``spack env activate``, you may have already interacted with this view. Spack
-prepends its ``<view>/bin`` dir to ``PATH`` when the environment is activated, so that
-you can directly run executables from all installed packages in the environment.
-
-Views are highly customizable: you can control where they are put, modify their structure,
-include and exclude specs, change how files are linked, and you can even generate multiple
-views for a single environment.
+Spack Environments can define filesystem views, which provide a direct access point
+for software similar to the directory hierarchy that might exist under ``/usr/local``.
+Filesystem views are updated every time the environment is written out to the lock
+file ``spack.lock``, so the concrete environment and the view are always compatible.
+The files of the view's installed packages are brought into the view by symbolic or
+hard links, referencing the original Spack installation, or by copy.
 
 .. _configuring_environment_views:
 
-^^^^^^^^^^^^^^^^^^^^^^^^^^
-Minimal view configuration
-^^^^^^^^^^^^^^^^^^^^^^^^^^
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Configuration in ``spack.yaml``
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
-The minimal configuration
-
-.. code-block:: yaml
-
-   spack:
-     # ...
-     view: true
-
-lets Spack generate a single view with default settings under the
-``.spack-env/view`` directory of the environment.
-
-Another short way to configure a view is to specify just where to put it:
-
-.. code-block:: yaml
-
-   spack:
-     # ...
-     view: /path/to/view
-
-Views can also be disabled by setting ``view: false``.
-
-^^^^^^^^^^^^^^^^^^^^^^^^^^^
-Advanced view configuration
-^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-One or more **view descriptors** can be defined under ``view``, keyed by a name.
-The example from the previous section with ``view: /path/to/view`` is equivalent
-to defining a view descriptor named ``default`` with a ``root`` attribute:
-
-.. code-block:: yaml
-
-   spack:
-     # ...
-     view:
-       default: # name of the view
-         root: /path/to/view # view descriptor attribute
-
-The ``default`` view descriptor name is special: when you ``spack env activate`` your
-environment, this view will be used to update (among other things) your ``PATH``
-variable.
-
-View descriptors must contain the root of the view, and optionally projections,
-``select`` and ``exclude`` lists and link information via ``link`` and
+The Spack Environment manifest file has a top-level keyword
+``view``. Each entry under that heading is a **view descriptor**, headed
+by a name. Any number of views may be defined under the ``view`` heading.
+The view descriptor contains the root of the view, and
+optionally the projections for the view, ``select`` and
+``exclude`` lists for the view and link information via ``link`` and
 ``link_type``.
 
-As a more advanced example, in the following manifest
+For example, in the following manifest
 file snippet we define a view named ``mpis``, rooted at
 ``/path/to/view`` in which all projections use the package name,
 version, and compiler name to determine the path for a given
@@ -1053,10 +1001,59 @@ of ``hardlink`` or ``copy``.
    when the environment is not activated, and linked libraries will be located
    *outside* of the view thanks to rpaths.
 
 
+There are two shorthands for environments with a single view. If the
+environment at ``/path/to/env`` has a single view, with a root at
+``/path/to/env/.spack-env/view``, with default selection and exclusion
+and the default projection, we can put ``view: True`` in the
+environment manifest. Similarly, if the environment has a view with a
+different root, but default selection, exclusion, and projections, the
+manifest can say ``view: /path/to/view``. These views are
+automatically named ``default``, so that
+
+.. code-block:: yaml
+
+   spack:
+     # ...
+     view: True
+
+is equivalent to
+
+.. code-block:: yaml
+
+   spack:
+     # ...
+     view:
+       default:
+         root: .spack-env/view
+
+and
+
+.. code-block:: yaml
+
+   spack:
+     # ...
+     view: /path/to/view
+
+is equivalent to
+
+.. code-block:: yaml
+
+   spack:
+     # ...
+     view:
+       default:
+         root: /path/to/view
+
+By default, Spack environments are configured with ``view: True`` in
+the manifest. Environments can be configured without views using
+``view: False``. For backwards compatibility reasons, environments
+with no ``view`` key are treated the same as ``view: True``.
+
 From the command line, the ``spack env create`` command takes an
 argument ``--with-view [PATH]`` that sets the path for a single, default
 view. If no path is specified, the default path is used (``view:
-true``). The argument ``--without-view`` can be used to create an
+True``). The argument ``--without-view`` can be used to create an
 environment without any view configured.
 
 The ``spack env view`` command can be used to change the manage views
@@ -1122,18 +1119,11 @@ the projection under ``all`` before reaching those entries.
 Activating environment views
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
-The ``spack env activate <env>`` has two effects:
-
-1. It activates the environment so that further Spack commands such
-   as ``spack install`` will run in the context of the environment.
-2. It activates the view so that environment variables such as
-   ``PATH`` are updated to include the view.
-
-Without further arguments, the ``default`` view of the environment is
-activated. If a view with a different name has to be activated,
-``spack env activate --with-view <name> <env>`` can be
-used instead. You can also activate the environment without modifying
-further environment variables using ``--without-view``.
+The ``spack env activate`` command will put the default view for the
+environment into the user's path, in addition to activating the
+environment for Spack commands. The arguments ``-v,--with-view`` and
+``-V,--without-view`` can be used to tune this behavior. The default
+behavior is to activate with the environment view if there is one.
 
 The environment variables affected by the ``spack env activate``
 command and the paths that are used to update them are determined by
@@ -1156,8 +1146,8 @@ relevant variable if the path exists. For this reason, it is not
 recommended to use non-default projections with the default view of an
 environment.
 
-The ``spack env deactivate`` command will remove the active view of
-the Spack environment from the user's environment variables.
+The ``spack env deactivate`` command will remove the default view of
+the environment from the user's path.
 
 .. _env-generate-depfile:
@@ -1316,7 +1306,7 @@ index once every package is pushed. Note how this target uses the generated
 example/push/%: example/install/%
 	@mkdir -p $(dir $@)
 	$(info About to push $(SPEC) to a buildcache)
-	$(SPACK) -e . buildcache push --only=package $(BUILDCACHE_DIR) /$(HASH)
+	$(SPACK) -e . buildcache push --allow-root --only=package $(BUILDCACHE_DIR) /$(HASH)
 	@touch $@
 
 push: $(addprefix example/push/,$(example/SPACK_PACKAGE_IDS))
```
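Putting the documented view-activation flags together, a short usage sketch; the environment name `myenv` is hypothetical:

```sh
# Activate with the default view (updates PATH etc., per the docs above)
spack env activate -v myenv
# ...or activate without touching environment variables
spack env activate -V myenv
# Remove the default view's paths again
spack env deactivate
```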
@@ -1364,6 +1364,187 @@ This will write the private key to the file `dinosaur.priv`.
|
||||
or for help on an issue or the Spack slack.
|
||||
|
||||
|
||||
.. _cray-support:
|
||||
|
||||
-------------
|
||||
Spack on Cray
|
||||
-------------
|
||||
|
||||
Spack differs slightly when used on a Cray system. The architecture spec
|
||||
can differentiate between the front-end and back-end processor and operating system.
|
||||
For example, on Edison at NERSC, the back-end target processor
|
||||
is "Ivy Bridge", so you can specify to use the back-end this way:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack install zlib target=ivybridge
|
||||
|
||||
You can also use the operating system to build against the back-end:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack install zlib os=CNL10
|
||||
|
||||
Notice that the name includes both the operating system name and the major
|
||||
version number concatenated together.
|
||||
|
||||
Alternatively, if you want to build something for the front-end,
|
||||
you can specify the front-end target processor. The processor for a login node
|
||||
on Edison is "Sandy bridge" so we specify on the command line like so:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack install zlib target=sandybridge
|
||||
|
||||
And the front-end operating system is:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack install zlib os=SuSE11
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^
Cray compiler detection
^^^^^^^^^^^^^^^^^^^^^^^

Spack can detect compilers using two methods. For the front-end, we treat
everything the same. The difference lies in back-end compiler detection.
Back-end compiler detection is made via the Tcl ``module avail`` command.
Once it detects the compiler, it writes the appropriate PrgEnv and compiler
module name to ``compilers.yaml`` and sets the paths to each compiler with Cray's
compiler wrapper names (i.e., cc, CC, ftn). During build time, Spack will load
the correct PrgEnv and compiler module and will call the appropriate wrapper.

The ``compilers.yaml`` config file will also differ. There is a
``modules`` section that is filled with the compiler's Programming Environment
and module name. On other systems, this field is empty (``[]``):

.. code-block:: yaml

   - compiler:
       modules:
       - PrgEnv-intel
       - intel/15.0.109

As mentioned earlier, the compiler paths will look different on a Cray system.
Since most compilers are invoked using cc, CC, and ftn, the paths for each
compiler are replaced with their respective Cray compiler wrapper names:

.. code-block:: yaml

   paths:
     cc: cc
     cxx: CC
     f77: ftn
     fc: ftn

This is in contrast to an explicit path to the compiler executable, and it
allows Spack to call the Cray compiler wrappers during build time.

For more on compiler configuration, check out :ref:`compiler-config`.

Spack sets the default Cray link type to dynamic, to better match
other platforms. Individual packages can enable static linking (which is the
default outside of Spack on Cray systems) using the ``-static`` flag.
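
As a purely illustrative sketch (this invocation is not from the original
docs), the flag can also be injected from the command line through the spec's
compiler flags:

.. code-block:: console

   $ spack install zlib ldflags=-static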

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Setting defaults and using Cray modules
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

If you want to use the default compilers for each PrgEnv and also be able
to load Cray external modules, you will need to set up a ``packages.yaml``.

Here's an example of an external configuration for Cray modules:

.. code-block:: yaml

   packages:
     mpich:
       externals:
       - spec: "mpich@7.3.1%gcc@5.2.0 arch=cray_xc-haswell-CNL10"
         modules:
         - cray-mpich
       - spec: "mpich@7.3.1%intel@16.0.0.109 arch=cray_xc-haswell-CNL10"
         modules:
         - cray-mpich
     all:
       providers:
         mpi: [mpich]

This tells Spack that, for any package that depends on mpi, it should load the
cray-mpich module into the environment. You can then use whatever
environment variables, libraries, etc., the module brings into the environment
via ``module load``.

.. note::

   For Cray-provided packages, it is best to use ``modules:`` instead of ``prefix:``
   in ``packages.yaml``, because the Cray Programming Environment heavily relies on
   modules (e.g., loading the ``cray-mpich`` module adds MPI libraries to the
   compiler wrapper link line).

You can set the default compiler that Spack will use for each compiler type.
If you want to use the Cray defaults, then set them under ``all:`` in ``packages.yaml``.
In the ``compiler`` field, list the compiler specs in your order of preference.
Whenever you build with that compiler type, Spack will concretize to that version.

Here is an example of a full ``packages.yaml`` used at NERSC:

.. code-block:: yaml

   packages:
     mpich:
       externals:
       - spec: "mpich@7.3.1%gcc@5.2.0 arch=cray_xc-CNL10-ivybridge"
         modules:
         - cray-mpich
       - spec: "mpich@7.3.1%intel@16.0.0.109 arch=cray_xc-SuSE11-ivybridge"
         modules:
         - cray-mpich
       buildable: False
     netcdf:
       externals:
       - spec: "netcdf@4.3.3.1%gcc@5.2.0 arch=cray_xc-CNL10-ivybridge"
         modules:
         - cray-netcdf
       - spec: "netcdf@4.3.3.1%intel@16.0.0.109 arch=cray_xc-CNL10-ivybridge"
         modules:
         - cray-netcdf
       buildable: False
     hdf5:
       externals:
       - spec: "hdf5@1.8.14%gcc@5.2.0 arch=cray_xc-CNL10-ivybridge"
         modules:
         - cray-hdf5
       - spec: "hdf5@1.8.14%intel@16.0.0.109 arch=cray_xc-CNL10-ivybridge"
         modules:
         - cray-hdf5
       buildable: False
     all:
       compiler: [gcc@5.2.0, intel@16.0.0.109]
       providers:
         mpi: [mpich]

Here we tell Spack that whenever we want to build with gcc, it should use version
5.2.0, and whenever we want to build with the Intel compilers, it should use version
16.0.0.109. We add a spec for each compiler type for each of the Cray modules. This
ensures that for each compiler on our system we can use the corresponding external
module.

For more on external packages, check out the section :ref:`sec-external-packages`.

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Using Linux containers on Cray machines
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

Spack uses environment variables particular to the Cray programming
environment to determine which systems are Cray platforms. These
environment variables may be propagated into containers that are not
using the Cray programming environment.

To ensure that Spack does not autodetect the Cray programming
environment, unset the environment variable ``MODULEPATH``. This
will cause Spack to treat a Linux container on a Cray system as a base
Linux distro.
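
For example, a minimal sketch (``spack arch`` reports the detected platform,
so after unsetting the variable it should no longer report a Cray platform):

.. code-block:: console

   $ unset MODULEPATH
   $ spack arch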

.. _windows_support:

----------------

@@ -1263,11 +1263,6 @@ Git fetching supports the following parameters to ``version``:
  option ``--depth 1`` will be used if the version of git and the specified
  transport protocol support it, and ``--single-branch`` will be used if the
  version of git supports it.
* ``git_sparse_paths``: Use ``sparse-checkout`` to only clone these relative paths.
  This feature requires ``git`` to be version ``2.25.0`` or later, but is useful for
  large repositories that have separate portions that can be built independently.
  If the paths provided are directories, then all the subdirectories and associated
  files will also be cloned.

Only one of ``tag``, ``branch``, or ``commit`` can be used at a time.
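
As a brief sketch (the package versions and refs below are hypothetical), the
three mutually exclusive forms look like:

.. code-block:: python

   # only one of tag, branch, or commit may be given per version
   version("1.2.0", tag="v1.2.0")
   version("develop", branch="develop")
   version("2.0.1", commit="a1b2c3d4e5f6a7b8c9d0e1f2a3b4c5d6e7f8a9b0")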

@@ -1366,41 +1361,6 @@ Submodules
For more information about git submodules, see the manpage of git: ``man
git-submodule``.

Sparse-Checkout
  You can supply ``git_sparse_paths`` at the package or version level to utilize git's
  sparse-checkout feature. This will only clone the paths that are specified in the
  ``git_sparse_paths`` attribute for the package, along with the files in the top-level
  directory. This feature allows you to only clone what you need from a large repository.
  Note that this is a newer feature in git and requires git ``2.25.0`` or greater.
  If ``git_sparse_paths`` is supplied and the git version is too old, then a warning
  will be issued and that package will use the standard cloning operations instead.
  ``git_sparse_paths`` should be supplied as a list of paths, a callable function for
  versions, or a more complex package attribute using the ``@property`` decorator. The
  return value should be a list for a callable implementation of ``git_sparse_paths``.

  .. code-block:: python

     def sparse_path_function(package):
         """a callable function that can be used inside a version"""
         # paths can be directories or files; all subdirectories and files are included
         paths = ["doe", "rae", "me/file.cpp"]
         if package.spec.version > Version("1.2.0"):
             paths.extend(["fae"])
         return paths

     class MyPackage(Package):
         # can also be a package attribute that will be used if not specified in versions
         git_sparse_paths = ["doe", "rae"]

         # use the package attribute
         version("1.0.0")
         version("1.1.0")
         # use the function
         version("1.1.5", git_sparse_paths=sparse_path_function)
         version("1.2.0", git_sparse_paths=sparse_path_function)
         version("1.2.5", git_sparse_paths=sparse_path_function)

.. _github-fetch:

^^^^^^
@@ -2384,27 +2344,6 @@ you set ``parallel`` to ``False`` at the package level, then each call
to ``make()`` will be sequential by default, but packagers can call
``make(parallel=True)`` to override it.

Note that the ``--jobs`` option works out of the box for all standard
build systems. If you are using a non-standard build system instead, you
can use the variable ``make_jobs`` to extract the number of jobs specified
by the ``--jobs`` option:

.. code-block:: python
   :emphasize-lines: 7, 11
   :linenos:

   class Xios(Package):
       ...
       def install(self, spec, prefix):
           ...
           options = [
               ...
               '--jobs', str(make_jobs),
           ]
           ...
           make_xios = Executable("./make_xios")
           make_xios(*options)

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Install-level build parallelism
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -5234,6 +5173,12 @@ installed executable. The check is implemented as follows:
      reframe = Executable(self.prefix.bin.reframe)
      reframe("-l")

.. warning::

   The API for adding tests is not yet considered stable and may change
   in future releases.


""""""""""""""""""""""""""""""""
Checking build-time test results
""""""""""""""""""""""""""""""""
@@ -5271,42 +5216,38 @@ be left in the build stage directory as illustrated below:
Stand-alone tests
^^^^^^^^^^^^^^^^^

While build-time tests are integrated with the installation process, stand-alone
tests are expected to run days, weeks, even months after the software is
installed. The goal is to provide a mechanism for gaining confidence that
packages work as installed **and** *continue* to work as the underlying
software evolves. Packages can add and inherit stand-alone tests. The
``spack test`` command is used for stand-alone testing.

.. admonition:: Stand-alone test methods should complete within a few minutes.

   Execution speed is important since these tests are intended to quickly
   assess whether installed specs work on the system. Spack cannot spare
   resources for more extensive testing of packages included in CI stacks.

   Consequently, stand-alone tests should run relatively quickly -- as in
   on the order of at most a few minutes -- while testing at least key aspects
   of the installed software. Save more extensive testing for other tools.

Tests are defined in the package using methods with names beginning ``test_``.
This allows Spack to support multiple independent checks, or parts. Files
needed for testing, such as source, data, and expected outputs, may be saved
from the build and/or stored with the package in the repository. Regardless
of origin, these files are automatically copied to the spec's test stage
directory prior to execution of the test method(s). Spack also provides helper
functions to facilitate common processing.

.. tip::

   **The status of stand-alone tests can be used to guide follow-up testing efforts.**

   Passing stand-alone tests justify performing more thorough testing, such
   as running extensive unit or regression tests or tests that run at scale,
   when available. These tests are outside of the scope of Spack packaging.

   Failing stand-alone tests indicate problems with the installation and,
   therefore, no reason to proceed with more resource-intensive tests until
   the failures have been investigated.

.. _configure-test-stage:

@@ -5314,26 +5255,30 @@ functions to facilitate common processing.
Configuring the test stage directory
""""""""""""""""""""""""""""""""""""

Stand-alone tests utilize a test stage directory to build, run, and track
tests in the same way Spack uses a build stage directory to install software.
The default test stage root directory, ``$HOME/.spack/test``, is defined in
:ref:`config.yaml <config-yaml>`. This location is customizable by adding or
changing the ``test_stage`` path such that:

.. code-block:: yaml

   config:
     test_stage: /path/to/test/stage

Packages can use the ``self.test_suite.stage`` property to access the path.
Other package properties that provide access to spec-specific subdirectories
and files are described in :ref:`accessing staged files <accessing-files>`.

.. admonition:: Each spec being tested has its own test stage directory.

   The ``config:test_stage`` option is the path to the root of a
   **test suite**'s stage directories, i.e., the root for **all specs** being
   tested by the ``spack test run`` command. Each spec gets its own stage
   subdirectory. Use ``self.test_suite.test_dir_for_spec(self.spec)``
   to access the spec-specific test stage directory.
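
A short hypothetical sketch of resolving these locations inside a test method
(the method name is made up; the properties are the ones named above):

.. code-block:: python

   def test_show_stage(self):
       """report where this spec's stand-alone tests are staged"""
       # root of the stage directories for the whole test suite
       suite_root = self.test_suite.stage
       # stage subdirectory specific to the spec under test
       spec_stage = self.test_suite.test_dir_for_spec(self.spec)
       print(f"suite root: {suite_root}; spec stage: {spec_stage}")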

.. _adding-standalone-tests:

@@ -5346,144 +5291,61 @@ Test recipes are defined in the package using methods with names beginning
Each method has access to the information Spack tracks on the package, such
as options, compilers, and dependencies, supporting the customization of tests
to the build. Standard Python ``assert`` statements and other error reporting
mechanisms can be used. These exceptions are automatically caught and reported
as test failures.

Each test method is an *implicit test part* named by the method. Its purpose
is the method's docstring. Providing a meaningful purpose for the test gives
context that can aid debugging. Spack outputs both the name and purpose at the
start of test execution, so it's also important that the docstring/purpose be
brief.

.. tip::

   We recommend naming test methods so it is clear *what* is being tested.
   For example, if a test method is building and/or running an executable
   called ``example``, then call the method ``test_example``. This, together
   with a similarly meaningful test purpose, will aid test comprehension,
   debugging, and maintainability.

Stand-alone tests run in an environment that provides access to information
on the installed software, such as build options, dependencies, and compilers.
Build options and dependencies are accessed using the same spec checks used
by build recipes. Examples of checking :ref:`variant settings <variants>` and
:ref:`spec constraints <testing-specs>` can be found at the provided links.
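
A tiny hypothetical sketch of such a check inside a test method (the variant
is made up):

.. code-block:: python

   def test_mpi_example(self):
       """run the MPI example only when it applies"""
       if not self.spec.satisfies("+mpi"):
           raise SkipTest("Test requires the +mpi variant")
       ...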

.. admonition:: Spack automatically sets up the test stage directory and environment.

   Spack automatically creates the test stage directory and copies
   relevant files *prior to* running tests. It can also ensure build
   dependencies are available **if** necessary.

   The path to the test stage is configurable (see :ref:`configure-test-stage`).

   Files that Spack knows to copy are those saved from the build (see
   :ref:`cache_extra_test_sources`) and those added to the package repository
   (see :ref:`cache_custom_files`).

   Spack will use the value of the ``test_requires_compiler`` property to
   determine whether it needs to also set up build dependencies (see
   :ref:`test-build-tests`).

The ``MyPackage`` package below provides two basic test examples:
``test_example`` and ``test_example2``. The first runs the installed
``example`` and ensures its output contains an expected string. The second
runs ``example2`` without checking output, so it is only concerned with
confirming that the executable runs successfully. If the installed spec is not
expected to have ``example2``, then the check at the top of the method will
raise a special ``SkipTest`` exception, which is captured to facilitate
reporting skipped test parts to tools like CDash.

.. code-block:: python

   class MyPackage(Package):
       ...

       def test_example(self):
           """ensure installed example works"""
           expected = "Done."
           example = which(self.prefix.bin.example)

           # Capture stdout and stderr from running the Executable
           # and check that the expected output was produced.
           out = example(output=str.split, error=str.split)
           assert expected in out, f"Expected '{expected}' in the output"

       def test_example2(self):
           """run installed example2"""
           if self.spec.satisfies("@:1.0"):
               # Raise SkipTest to ensure flagging the test as skipped for
               # test reporting purposes.
               raise SkipTest("Test is only available for v1.1 on")

           example2 = which(self.prefix.bin.example2)
           example2()

Output showing the identification of each test part after running the tests
is illustrated below.

.. code-block:: console

   $ spack test run --alias mypackage mypackage@2.0
   ==> Spack test mypackage
   ...
   $ spack test results -l mypackage
   ==> Results for test suite 'mypackage':
   ...
   ==> [2024-03-10-16:03:56.625439] test: test_example: ensure installed example works
   ...
   PASSED: MyPackage::test_example
   ==> [2024-03-10-16:03:56.625439] test: test_example2: run installed example2
   ...
   PASSED: MyPackage::test_example2

.. admonition:: Do NOT implement tests that must run in the installation prefix.

   Use of the package spec's installation prefix for building and running
   tests is **strongly discouraged**. Doing so causes permission errors for
   shared Spack instances *and* facilities that install the software in
   read-only file systems or directories.

   Instead, start these test methods by explicitly copying the needed files
   from the installation prefix to the test stage directory. Note the test
   stage directory is the current directory when the test is executed with
   the ``spack test run`` command.

.. admonition:: Test methods for library packages should build test executables.

   Stand-alone tests for library packages *should* build test executables
   that utilize the *installed* library. Doing so ensures the tests follow
   a build process similar to the one users of the library would follow.

   For more information on how to do this, see :ref:`test-build-tests`.

.. tip::

   If you want to see more examples from packages with stand-alone tests, run
   ``spack pkg grep "def\stest" | sed "s/\/package.py.*//g" | sort -u``
   from the command line to get a list of the packages.

.. _adding-standalone-test-parts:

"""""""""""""""""""""""""""""
Adding stand-alone test parts
"""""""""""""""""""""""""""""

Sometimes dependencies between steps of a test lend themselves to being
broken into parts. Tracking the pass/fail status of each part may aid
debugging. Spack provides a ``test_part`` context manager for use within
test methods.

Each test part is independently run, tracked, and reported. Test parts are
executed in the order they appear. If one fails, subsequent test parts are
still performed, even if they would also fail. This allows tools like CDash
to track and report the status of test parts across runs. The pass/fail status
of the enclosing test is derived from the statuses of the embedded test parts.

.. admonition:: Test method and test part names **must** be unique.

   Test results reporting requires that test methods and embedded test parts
   within a package have unique names.

The signature for ``test_part`` is:

@@ -5505,68 +5367,40 @@ where each argument has the following meaning:
* ``work_dir`` is the path to the directory in which the test will run.

  The default of ``None``, or ``"."``, corresponds to the spec's test
  stage (i.e., ``self.test_suite.test_dir_for_spec(self.spec)``).

.. admonition:: Start test part names with the name of the enclosing test.

   We **highly recommend** starting the names of test parts with the name
   of the enclosing test. Doing so helps with the comprehension, readability,
   and debugging of test results.

Suppose ``MyPackage`` installs multiple executables that need to run in a
specific order, since the outputs from one are inputs of others. Further suppose
we want to add an integration test that runs the executables in order. We can
accomplish this goal by implementing a stand-alone test method consisting of
test parts for each executable as follows:

.. code-block:: python

   class MyPackage(Package):
       ...

       def test_series(self):
           """run setup, perform, and report"""

           with test_part(self, "test_series_setup", purpose="setup operation"):
               exe = which(self.prefix.bin.setup)
               exe()

           with test_part(self, "test_series_run", purpose="perform operation"):
               exe = which(self.prefix.bin.run)
               exe()

           with test_part(self, "test_series_report", purpose="generate report"):
               exe = which(self.prefix.bin.report)
               exe()

The result is that ``test_series`` runs the following executables in order:
``setup``, ``run``, and ``report``. In this case, no options are passed to any
of the executables and no outputs from running them are checked. Consequently,
the implementation could be simplified with a for-loop as follows:

.. code-block:: python

   class MyPackage(Package):
       ...

       def test_series(self):
           """execute series setup, run, and report"""

           for exe, reason in [
               ("setup", "setup operation"),
               ("run", "perform operation"),
               ("report", "generate report")
           ]:
               with test_part(self, f"test_series_{exe}", purpose=reason):
                   exe = which(self.prefix.bin.join(exe))
                   exe()

In both cases, since we're using a context manager, each test part in
``test_series`` will execute regardless of the status of the other test
parts.

Now let's look at the output from running the stand-alone tests, where
the second test part, ``test_series_run``, fails.

.. code-block:: console

@@ -5576,68 +5410,50 @@ the second test part, ``test_series_run``, fails.
   $ spack test results -l mypackage
   ==> Results for test suite 'mypackage':
   ...
   ==> [2024-03-10-16:03:56.625204] test: test_series: execute series setup, run, and report
   ==> [2024-03-10-16:03:56.625439] test: test_series_setup: setup operation
   ...
   PASSED: MyPackage::test_series_setup
   ==> [2024-03-10-16:03:56.625555] test: test_series_run: perform operation
   ...
   FAILED: MyPackage::test_series_run
   ==> [2024-03-10-16:03:57.003456] test: test_series_report: generate report
   ...
   FAILED: MyPackage::test_series_report
   FAILED: MyPackage::test_series
   ...

Since the test parts depend on the success of previous parts, we see that the
failure of one results in the failure of subsequent checks, and the overall
result of the test method, ``test_series``, is failure.

.. tip::

   If you want to see more examples from packages using ``test_part``, run
   ``spack pkg grep "test_part(" | sed "s/\/package.py.*//g" | sort -u``
   from the command line to get a list of the packages.

.. _test-build-tests:

"""""""""""""""""""""""""""""""""""""
Building and running test executables
"""""""""""""""""""""""""""""""""""""

.. admonition:: Re-use build-time sources and (small) input data sets when possible.

   We **highly recommend** re-using build-time test sources and pared-down
   input files for testing installed software. These files are easier
   to keep synchronized with software capabilities when they reside
   within the software's repository. More information on saving files from
   the installation process can be found at :ref:`cache_extra_test_sources`.

   If that is not possible, you can add test-related files to the package
   repository (see :ref:`cache_custom_files`). It will be important to
   remember to maintain them so they work across listed or supported versions
   of the package.

Packages that build libraries are good examples of cases where you'll want
to build test executables from the installed software before running them.
Doing so requires you to let Spack know it needs to load the package's
compiler configuration. This is accomplished by setting the package's
``test_requires_compiler`` property to ``True``.

.. admonition:: ``test_requires_compiler = True`` is required to build test executables.

   Setting the property to ``True`` ensures access to the compiler through
   canonical environment variables (e.g., ``CC``, ``CXX``, ``FC``, ``F77``).
   It also gives access to build dependencies like ``cmake`` through their
   ``spec objects`` (e.g., ``self.spec["cmake"].prefix.bin.cmake`` for the
   path or ``self.spec["cmake"].command`` for the ``Executable`` instance).

   Be sure to add the property at the top of the package class, under other
   properties like the ``homepage``.

The example below, which ignores how ``cxx-example.cpp`` is acquired,
illustrates the basic process of compiling a test executable using the
installed library before running it.

.. code-block:: python

@@ -5661,22 +5477,28 @@ installed library before running it.
      cxx_example = which(exe)
      cxx_example()

Typically the files used to build and/or run test executables are either
cached from the installation (see :ref:`cache_extra_test_sources`) or added
to the package repository (see :ref:`cache_custom_files`). There is nothing
preventing the use of both.

.. _cache_extra_test_sources:

""""""""""""""""""""""""""""""""""""
Saving build- and install-time files
""""""""""""""""""""""""""""""""""""

You can use the ``cache_extra_test_sources`` helper routine to copy
directories and/or files from the source build stage directory to the
package's installation directory. Spack will automatically copy these
files for you when it sets up the test stage directory and before it
begins running the tests.

The signature for ``cache_extra_test_sources`` is:

@@ -5691,69 +5513,46 @@ where each argument has the following meaning:
* ``srcs`` is a string *or* a list of strings corresponding to the
  paths of subdirectories and/or files needed for stand-alone testing.

.. warning::

   Paths provided in the ``srcs`` argument **must be relative** to the
   staged source directory. They will be copied to the equivalent relative
   location under the test stage directory prior to test execution.

Contents of subdirectories and files are copied to a special test cache
subdirectory of the installation prefix. They are automatically copied to
the appropriate relative paths under the test stage directory prior to
executing stand-alone tests.

.. tip::

   *Perform test-related conversions once when copying files.*

   If one or more of the copied files needs to be modified to reference
   the installed software, it is recommended that those changes be made
   to the cached files **once** in the post-``install`` copy method,
   **after** the call to ``cache_extra_test_sources``. This will reduce
   the amount of unnecessary work in the test method **and** avoid problems
   running stand-alone tests in shared instances and facility deployments.

   The ``filter_file`` function can be quite useful for such changes
   (see :ref:`file-filtering`).

Below is a basic example of a test that relies on files from the installation.
This package method re-uses the contents of the ``examples`` subdirectory,
which is assumed to have all of the files needed to allow ``make`` to
compile and link ``foo.c`` and ``bar.c`` against the package's installed
library.

.. code-block:: python

   class MyLibPackage(MakefilePackage):
       ...

       @run_after("install")
       def copy_test_files(self):
           cache_extra_test_sources(self, "examples")

       def test_example(self):
           """build and run the examples"""
           examples_dir = self.test_suite.current_test_cache_dir.examples
           with working_dir(examples_dir):
               make = which("make")
               make()

               for program in ["foo", "bar"]:
                   with test_part(
                       self,
                       f"test_example_{program}",
                       purpose=f"ensure {program} runs"
                   ):
                       exe = Executable(program)
                       exe()

In this case, ``copy_test_files`` copies the associated files from the
build stage to the package's test cache directory under the installation
prefix. Running ``spack test run`` for the package results in Spack copying
the directory and its contents to the test stage directory. The
``working_dir`` context manager ensures the commands within it are executed
from the ``examples_dir``. The test builds the software using ``make`` before
running each executable, ``foo`` and ``bar``, as independent test parts.

.. note::

@@ -5762,18 +5561,43 @@ running each executable, ``foo`` and ``bar``, as independent test parts.
   The key to copying files for stand-alone testing at build time is use
   of the ``run_after`` directive, which ensures the associated files are
   copied **after** the provided build stage (``install``), when the installation
   prefix **and** files are available.

The test method uses the path contained in the package's
``self.test_suite.current_test_cache_dir`` property for the root directory
of the copied files. In this case, that's the ``examples`` subdirectory.

.. tip::

   While source and input files are generally recommended, binaries
   **may** also be cached by the build process. Only you, as the package
   writer or maintainer, know whether these files would be appropriate
   for testing the installed software weeks to months later.

   If you want to see more examples from packages that cache build files, run
   ``spack pkg grep cache_extra_test_sources | sed "s/\/package.py.*//g" | sort -u``
   from the command line to get a list of the packages.

.. _cache_custom_files:

@@ -5781,9 +5605,8 @@ running each executable, ``foo`` and ``bar``, as independent test parts.
Adding custom files
"""""""""""""""""""

Sometimes it is helpful or necessary to include custom files for building
and/or checking the results of tests as part of the package. Examples of the
types of files that might be useful are:

- test source files
- test input files
@@ -5791,15 +5614,17 @@ of files that might be useful are:
- expected test outputs

While obtaining such files from the software repository is preferred (see
:ref:`cache_extra_test_sources`), there are circumstances where doing so is not
feasible, such as when the software is not being actively maintained. When test
files cannot be obtained from the repository, or there is a need to supplement
files that can, Spack supports the inclusion of additional files under the
``test`` subdirectory of the package in the Spack repository.

The following example assumes a ``custom-example.cpp`` is saved in the
``MyLibrary`` package's ``test`` subdirectory. It also assumes the program
simply needs to be compiled and linked against the installed ``MyLibrary``
software.

.. code-block:: python

@@ -5809,29 +5634,17 @@ be compiled and linked against the installed ``MyLibrary`` software.
       test_requires_compiler = True
       ...

       def test_custom_example(self):
           """build and run custom-example"""
           src_dir = self.test_suite.current_test_data_dir
           exe = "custom-example"

           with working_dir(src_dir):
               cc = which(os.environ["CC"])
               cc(
                   f"-L{self.prefix.lib}",
                   f"-I{self.prefix.include}",
                   f"{exe}.cpp",
                   "-o", exe
               )

               custom_example = Executable(exe)
               custom_example()

In this case, ``spack test run`` for the package results in Spack copying
the contents of the ``test`` subdirectory to the test stage directory path
in ``self.test_suite.current_test_data_dir`` before calling
``test_custom_example``. Use of the ``working_dir`` context manager
ensures the commands to build and run the program are performed from
within the appropriate subdirectory of the test stage.

.. _expected_test_output_from_file:

@@ -5840,8 +5653,9 @@ Reading expected output from a file
"""""""""""""""""""""""""""""""""""

The helper function ``get_escaped_text_output`` is available for packages
to retrieve properly formatted text from a file potentially containing
special characters.

The signature for ``get_escaped_text_output`` is:

@@ -5851,13 +5665,10 @@ The signature for ``get_escaped_text_output`` is:

where ``filename`` is the path to the file containing the expected output.

The path provided to ``filename`` for one of the copied custom files
(see :ref:`custom file <cache_custom_files>`) is rooted at
``self.test_suite.current_test_data_dir``.

The example below shows how to reference both the custom database
(``packages.db``) and expected output (``dump.out``) files Spack copies
to the test stage:

.. code-block:: python

@@ -5879,9 +5690,8 @@ to the test stage:
       for exp in expected:
           assert re.search(exp, out), f"Expected '{exp}' in output"

If the files were instead cached from installing the software, the paths to the
two files would be found under the ``self.test_suite.current_test_cache_dir``
directory, as shown below:

.. code-block:: python

@@ -5889,24 +5699,17 @@ directory as shown below:
       """check example table dump"""
       test_cache_dir = self.test_suite.current_test_cache_dir
       db_filename = test_cache_dir.join("packages.db")
       ...
       expected = get_escaped_text_output(test_cache_dir.join("dump.out"))
       ...

Alternatively, if both files had been installed by the software into the
``share/tests`` subdirectory of the installation prefix, the paths to the
two files would be referenced as follows:

.. code-block:: python

   def test_example(self):
       """check example table dump"""
       db_filename = self.prefix.share.tests.join("packages.db")
       ...
       expected = get_escaped_text_output(
           self.prefix.share.tests.join("dump.out")
       )
       ...

.. _check_outputs:

@@ -5914,9 +5717,9 @@ two files would be referenced as follows:
Comparing expected to actual outputs
""""""""""""""""""""""""""""""""""""

The ``check_outputs`` helper routine is available for packages to ensure
multiple expected outputs from running an executable are contained within
the actual outputs.

The signature for ``check_outputs`` is:

@@ -5942,17 +5745,11 @@ Invoking the method is the equivalent of:
   if errors:
       raise RuntimeError("\n    ".join(errors))
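
A small hypothetical usage sketch (the executable and expected strings are
made up):

.. code-block:: python

   def test_version(self):
       """check the version banner"""
       example = which(self.prefix.bin.example)
       actual = example("--version", output=str.split, error=str.split)
       # raises an error listing any expected string missing from the output
       check_outputs([str(self.spec.version), "example"], actual)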

.. tip::

   If you want to see more examples from packages that use this helper, run
   ``spack pkg grep check_outputs | sed "s/\/package.py.*//g" | sort -u``
   from the command line to get a list of the packages.


.. _accessing-files:

"""""""""""""""""""""""""""""""""""""""""
Finding package- and test-related files
"""""""""""""""""""""""""""""""""""""""""

You may need to access files from one or more locations when writing
@@ -5961,7 +5758,8 @@ include test source files or includes them but has no way to build the
executables using the installed headers and libraries. In these cases,
you may need to reference the files relative to one or more root directories.
The table below lists relevant path properties and provides additional
examples of their use. :ref:`Reading expected output <expected_test_output_from_file>`
provides examples of accessing files saved from the software repository,
package repository, and installation.

@@ -5990,6 +5788,7 @@ repository, and installation.
     - ``self.test_suite.current_test_data_dir``
     - ``join_path(self.test_suite.current_test_data_dir, "hello.f90")``


.. _inheriting-tests:

""""""""""""""""""""""""""""
@@ -6032,7 +5831,7 @@ maintainers provide additional stand-alone tests customized to the package.
.. warning::

   Any package that implements a test method with the same name as an
   inherited method will override the inherited method. If that is not the
   goal, and you are not explicitly calling and adding functionality to
   the inherited method for the test, then make sure that all test methods
   and embedded test parts have unique test names.
@@ -6197,8 +5996,6 @@ running:
This is already part of the boilerplate for packages created with
``spack create``.

.. _file-filtering:

^^^^^^^^^^^^^^^^^^^
Filtering functions
^^^^^^^^^^^^^^^^^^^
@@ -253,6 +253,17 @@ can easily happen if it is not updated frequently, this behavior ensures that
Spack has a way to know for certain about the status of any concrete spec on
the remote mirror, but can slow down pipeline generation significantly.

The ``--optimize`` argument is experimental and runs the generated pipeline
document through a series of optimization passes designed to reduce the size
of the generated file.

The ``--dependencies`` argument is also experimental and disables what GitLab
refers to as DAG scheduling, internally using the ``dependencies`` keyword
rather than ``needs`` to list dependency jobs. The drawback of using this option
is that before any job can begin, all jobs in previous stages must first
complete. The benefit is that GitLab allows more dependencies to be listed
when using ``dependencies`` instead of ``needs``.

The optional ``--output-file`` argument should be an absolute path (including
the file name) to the generated pipeline, and if not given, the default is
``./.gitlab-ci.yml``.
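
For illustration, a hypothetical invocation combining these options (the
output path is made up, and the options are assumed to belong to the
``spack ci generate`` command this section describes):

.. code-block:: console

   $ spack ci generate --optimize --dependencies \
       --output-file /tmp/pipeline/.gitlab-ci.yml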

@@ -476,3 +476,9 @@ implemented using Python's built-in `sys.path
:py:mod:`spack.repo` module implements a custom `Python importer
<https://docs.python.org/2/library/imp.html>`_.

.. warning::

   The mechanism for extending packages is not yet extensively tested,
   and extending packages across repositories imposes inter-repo
   dependencies, which may be hard to manage. Use this feature at your
   own risk, but let us know if you have a use case for it.

@@ -1,13 +1,13 @@
sphinx==7.4.7
sphinxcontrib-programoutput==0.17
sphinx_design==0.6.1
sphinx-rtd-theme==2.0.0
python-levenshtein==0.25.1
docutils==0.20.1
pygments==2.18.0
urllib3==2.2.2
pytest==8.3.2
isort==5.13.2
black==24.8.0
flake8==7.1.1
mypy==1.11.1

96
lib/spack/env/cc
vendored
@@ -174,46 +174,6 @@ preextend() {
    unset IFS
}

execute() {
    # dump the full command if the caller supplies SPACK_TEST_COMMAND=dump-args
    if [ -n "${SPACK_TEST_COMMAND=}" ]; then
        case "$SPACK_TEST_COMMAND" in
            dump-args)
                IFS="$lsep"
                for arg in $full_command_list; do
                    echo "$arg"
                done
                unset IFS
                exit
                ;;
            dump-env-*)
                var=${SPACK_TEST_COMMAND#dump-env-}
                eval "printf '%s\n' \"\$0: \$var: \$$var\""
                ;;
            *)
                die "Unknown test command: '$SPACK_TEST_COMMAND'"
                ;;
        esac
    fi

    #
    # Write the input and output commands to debug logs if it's asked for.
    #
    if [ "$SPACK_DEBUG" = TRUE ]; then
        input_log="$SPACK_DEBUG_LOG_DIR/spack-cc-$SPACK_DEBUG_LOG_ID.in.log"
        output_log="$SPACK_DEBUG_LOG_DIR/spack-cc-$SPACK_DEBUG_LOG_ID.out.log"
        echo "[$mode] $command $input_command" >> "$input_log"
        IFS="$lsep"
        echo "[$mode] "$full_command_list >> "$output_log"
        unset IFS
    fi

    # Execute the full command, preserving spaces with IFS set
    # to the alarm bell separator.
    IFS="$lsep"; exec $full_command_list
    exit
}

# Fail with a clear message if the input contains any bell characters.
if eval "[ \"\${*#*${lsep}}\" != \"\$*\" ]"; then
    die "Compiler command line contains our separator ('${lsep}'). Cannot parse."
@@ -271,17 +231,12 @@ fi
#   ld    link
#   ccld  compile & link

# Note. SPACK_ALWAYS_XFLAGS are applied for all compiler invocations,
# including version checks (SPACK_XFLAGS variants are not applied
# for version checks).
command="${0##*/}"
comp="CC"
vcheck_flags=""
case "$command" in
    cpp)
        mode=cpp
        debug_flags="-g"
        vcheck_flags="${SPACK_ALWAYS_CPPFLAGS}"
        ;;
    cc|c89|c99|gcc|clang|armclang|icc|icx|pgcc|nvc|xlc|xlc_r|fcc|amdclang|cl.exe|craycc)
        command="$SPACK_CC"
@@ -289,7 +244,6 @@ case "$command" in
        comp="CC"
        lang_flags=C
        debug_flags="-g"
        vcheck_flags="${SPACK_ALWAYS_CFLAGS}"
        ;;
    c++|CC|g++|clang++|armclang++|icpc|icpx|pgc++|nvc++|xlc++|xlc++_r|FCC|amdclang++|crayCC)
        command="$SPACK_CXX"
@@ -297,7 +251,6 @@ case "$command" in
        comp="CXX"
        lang_flags=CXX
        debug_flags="-g"
        vcheck_flags="${SPACK_ALWAYS_CXXFLAGS}"
        ;;
    ftn|f90|fc|f95|gfortran|flang|armflang|ifort|ifx|pgfortran|nvfortran|xlf90|xlf90_r|nagfor|frt|amdflang|crayftn)
        command="$SPACK_FC"
@@ -305,7 +258,6 @@ case "$command" in
        comp="FC"
        lang_flags=F
        debug_flags="-g"
        vcheck_flags="${SPACK_ALWAYS_FFLAGS}"
        ;;
    f77|xlf|xlf_r|pgf77)
        command="$SPACK_F77"
@@ -313,7 +265,6 @@ case "$command" in
        comp="F77"
        lang_flags=F
        debug_flags="-g"
        vcheck_flags="${SPACK_ALWAYS_FFLAGS}"
        ;;
    ld|ld.gold|ld.lld)
        mode=ld
@@ -414,11 +365,7 @@ unset IFS
|
||||
export PATH="$new_dirs"
|
||||
|
||||
if [ "$mode" = vcheck ]; then
|
||||
full_command_list="$command"
|
||||
args="$@"
|
||||
extend full_command_list vcheck_flags
|
||||
extend full_command_list args
|
||||
execute
|
||||
exec "${command}" "$@"
|
||||
fi
|
||||
|
||||
# Darwin's linker has a -r argument that merges object files together.
|
||||
@@ -775,7 +722,6 @@ case "$mode" in
|
||||
cc|ccld)
|
||||
case $lang_flags in
|
||||
F)
|
||||
extend spack_flags_list SPACK_ALWAYS_FFLAGS
|
||||
extend spack_flags_list SPACK_FFLAGS
|
||||
;;
|
||||
esac
|
||||
@@ -785,7 +731,6 @@ esac
|
||||
# C preprocessor flags come before any C/CXX flags
|
||||
case "$mode" in
|
||||
cpp|as|cc|ccld)
|
||||
extend spack_flags_list SPACK_ALWAYS_CPPFLAGS
|
||||
extend spack_flags_list SPACK_CPPFLAGS
|
||||
;;
|
||||
esac
|
||||
@@ -796,11 +741,9 @@ case "$mode" in
|
||||
cc|ccld)
|
||||
case $lang_flags in
|
||||
C)
|
||||
extend spack_flags_list SPACK_ALWAYS_CFLAGS
|
||||
extend spack_flags_list SPACK_CFLAGS
|
||||
;;
|
||||
CXX)
|
||||
extend spack_flags_list SPACK_ALWAYS_CXXFLAGS
|
||||
extend spack_flags_list SPACK_CXXFLAGS
|
||||
;;
|
||||
esac
|
||||
@@ -990,4 +933,39 @@ if [ -n "$SPACK_CCACHE_BINARY" ]; then
|
||||
esac
|
||||
fi
|
||||
|
||||
execute
|
||||
# dump the full command if the caller supplies SPACK_TEST_COMMAND=dump-args
|
||||
if [ -n "${SPACK_TEST_COMMAND=}" ]; then
|
||||
case "$SPACK_TEST_COMMAND" in
|
||||
dump-args)
|
||||
IFS="$lsep"
|
||||
for arg in $full_command_list; do
|
||||
echo "$arg"
|
||||
done
|
||||
unset IFS
|
||||
exit
|
||||
;;
|
||||
dump-env-*)
|
||||
var=${SPACK_TEST_COMMAND#dump-env-}
|
||||
eval "printf '%s\n' \"\$0: \$var: \$$var\""
|
||||
;;
|
||||
*)
|
||||
die "Unknown test command: '$SPACK_TEST_COMMAND'"
|
||||
;;
|
||||
esac
|
||||
fi
|
||||
|
||||
#
|
||||
# Write the input and output commands to debug logs if it's asked for.
|
||||
#
|
||||
if [ "$SPACK_DEBUG" = TRUE ]; then
|
||||
input_log="$SPACK_DEBUG_LOG_DIR/spack-cc-$SPACK_DEBUG_LOG_ID.in.log"
|
||||
output_log="$SPACK_DEBUG_LOG_DIR/spack-cc-$SPACK_DEBUG_LOG_ID.out.log"
|
||||
echo "[$mode] $command $input_command" >> "$input_log"
|
||||
IFS="$lsep"
|
||||
echo "[$mode] "$full_command_list >> "$output_log"
|
||||
unset IFS
|
||||
fi
|
||||
|
||||
# Execute the full command, preserving spaces with IFS set
|
||||
# to the alarm bell separator.
|
||||
IFS="$lsep"; exec $full_command_list
|
||||
|
2 lib/spack/external/__init__.py vendored
@@ -18,7 +18,7 @@

* Homepage: https://pypi.python.org/pypi/archspec
* Usage: Labeling, comparison and detection of microarchitectures
* Version: 0.2.5-dev (commit 7e6740012b897ae4a950f0bba7e9726b767e921f)
* Version: 0.2.4 (commit 48b92512b9ce203ded0ebd1ac41b42593e931f7c)

astunparse
----------------
12 lib/spack/external/archspec/cpu/detect.py vendored
@@ -47,11 +47,7 @@ def decorator(factory):


def partial_uarch(
    name: str = "",
    vendor: str = "",
    features: Optional[Set[str]] = None,
    generation: int = 0,
    cpu_part: str = "",
    name: str = "", vendor: str = "", features: Optional[Set[str]] = None, generation: int = 0
) -> Microarchitecture:
    """Construct a partial microarchitecture, from information gathered during system scan."""
    return Microarchitecture(
@@ -61,7 +57,6 @@ def partial_uarch(
        features=features or set(),
        compilers={},
        generation=generation,
        cpu_part=cpu_part,
    )


@@ -95,7 +90,6 @@ def proc_cpuinfo() -> Microarchitecture:
        return partial_uarch(
            vendor=_canonicalize_aarch64_vendor(data),
            features=_feature_set(data, key="Features"),
            cpu_part=data.get("CPU part", ""),
        )

    if architecture in (PPC64LE, PPC64):
@@ -351,10 +345,6 @@ def sorting_fn(item):
    generic_candidates = [c for c in candidates if c.vendor == "generic"]
    best_generic = max(generic_candidates, key=sorting_fn)

    # Relevant for AArch64. Filter on "cpu_part" if we have any match
    if info.cpu_part != "" and any(c for c in candidates if info.cpu_part == c.cpu_part):
        candidates = [c for c in candidates if info.cpu_part == c.cpu_part]

    # Filter the candidates to be descendant of the best generic candidate.
    # This is to avoid that the lack of a niche feature that can be disabled
    # from e.g. BIOS prevents detection of a reasonably performant architecture
@@ -2,7 +2,9 @@
# Archspec Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Types and functions to manage information on CPU microarchitectures."""
"""Types and functions to manage information
on CPU microarchitectures.
"""
import functools
import platform
import re
@@ -63,24 +65,21 @@ class Microarchitecture:
            passed in as argument above.
        * versions: versions that support this micro-architecture.

        generation (int): generation of the micro-architecture, if relevant.
        cpu_part (str): cpu part of the architecture, if relevant.
        generation (int): generation of the micro-architecture, if
            relevant.
    """

    # pylint: disable=too-many-arguments,too-many-instance-attributes
    # pylint: disable=too-many-arguments
    #: Aliases for micro-architecture's features
    feature_aliases = FEATURE_ALIASES

    def __init__(self, name, parents, vendor, features, compilers, generation=0, cpu_part=""):
    def __init__(self, name, parents, vendor, features, compilers, generation=0):
        self.name = name
        self.parents = parents
        self.vendor = vendor
        self.features = features
        self.compilers = compilers
        # Only relevant for PowerPC
        self.generation = generation
        # Only relevant for AArch64
        self.cpu_part = cpu_part
        # Cache the ancestor computation
        self._ancestors = None

@@ -112,7 +111,6 @@ def __eq__(self, other):
            and self.parents == other.parents  # avoid ancestors here
            and self.compilers == other.compilers
            and self.generation == other.generation
            and self.cpu_part == other.cpu_part
        )

    @coerce_target_names
@@ -145,8 +143,7 @@ def __repr__(self):
        cls_name = self.__class__.__name__
        fmt = (
            cls_name + "({0.name!r}, {0.parents!r}, {0.vendor!r}, "
            "{0.features!r}, {0.compilers!r}, generation={0.generation!r}, "
            "cpu_part={0.cpu_part!r})"
            "{0.features!r}, {0.compilers!r}, {0.generation!r})"
        )
        return fmt.format(self)

@@ -193,7 +190,6 @@ def to_dict(self):
            "generation": self.generation,
            "parents": [str(x) for x in self.parents],
            "compilers": self.compilers,
            "cpupart": self.cpu_part,
        }

    @staticmethod
@@ -206,7 +202,6 @@ def from_dict(data) -> "Microarchitecture":
            features=set(data["features"]),
            compilers=data.get("compilers", {}),
            generation=data.get("generation", 0),
            cpu_part=data.get("cpupart", ""),
        )

    def optimization_flags(self, compiler, version):
@@ -365,11 +360,8 @@ def fill_target_from_dict(name, data, targets):
        features = set(values["features"])
        compilers = values.get("compilers", {})
        generation = values.get("generation", 0)
        cpu_part = values.get("cpupart", "")

        targets[name] = Microarchitecture(
            name, parents, vendor, features, compilers, generation=generation, cpu_part=cpu_part
        )
        targets[name] = Microarchitecture(name, parents, vendor, features, compilers, generation)

    known_targets = {}
    data = archspec.cpu.schema.TARGETS_JSON["microarchitectures"]
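A minimal sketch of what the ``cpupart`` plumbing above means for serialization, assuming a vendored archspec that carries the ``cpu_part`` field (the feature names below are made up for illustration):

```python
from archspec.cpu.microarchitecture import Microarchitecture

# Build a toy AArch64 microarchitecture carrying a CPU part number.
uarch = Microarchitecture(
    name="demo_core",
    parents=[],
    vendor="ARM",
    features={"asimd", "sve"},
    compilers={},
    generation=0,
    cpu_part="0xd4f",
)

data = uarch.to_dict()
assert data["cpupart"] == "0xd4f"  # serialized under the "cpupart" key

# from_dict() restores the field, and it participates in equality checks.
clone = Microarchitecture.from_dict(data)
assert clone == uarch
```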
@@ -2225,14 +2225,10 @@
],
"nvhpc": [
{
"versions": "21.11:23.8",
"versions": "21.11:",
"name": "zen3",
"flags": "-tp {name}",
"warnings": "zen4 is not fully supported by nvhpc versions < 23.9, falling back to zen3"
},
{
"versions": "23.9:",
"flags": "-tp {name}"
"warnings": "zen4 is not fully supported by nvhpc yet, falling back to zen3"
}
]
}
@@ -2715,8 +2711,7 @@
"flags": "-mcpu=thunderx2t99"
}
]
},
"cpupart": "0x0af"
}
},
"a64fx": {
"from": ["armv8.2a"],
@@ -2784,8 +2779,7 @@
"flags": "-march=armv8.2-a+crc+crypto+fp16+sve"
}
]
},
"cpupart": "0x001"
}
},
"cortex_a72": {
"from": ["aarch64"],
@@ -2822,8 +2816,7 @@
"flags" : "-mcpu=cortex-a72"
}
]
},
"cpupart": "0xd08"
}
},
"neoverse_n1": {
"from": ["cortex_a72", "armv8.2a"],
@@ -2909,8 +2902,7 @@
"flags": "-tp {name}"
}
]
},
"cpupart": "0xd0c"
}
},
"neoverse_v1": {
"from": ["neoverse_n1", "armv8.4a"],
@@ -2934,6 +2926,8 @@
"lrcpc",
"dcpop",
"sha3",
"sm3",
"sm4",
"asimddp",
"sha512",
"sve",
@@ -3034,8 +3028,7 @@
"flags": "-tp {name}"
}
]
},
"cpupart": "0xd40"
}
},
"neoverse_v2": {
"from": ["neoverse_n1", "armv9.0a"],
@@ -3059,10 +3052,13 @@
"lrcpc",
"dcpop",
"sha3",
"sm3",
"sm4",
"asimddp",
"sha512",
"sve",
"asimdfhm",
"dit",
"uscat",
"ilrcpc",
"flagm",
@@ -3070,12 +3066,18 @@
"sb",
"dcpodp",
"sve2",
"sveaes",
"svepmull",
"svebitperm",
"svesha3",
"svesm4",
"flagm2",
"frint",
"svei8mm",
"svebf16",
"i8mm",
"bf16"
"bf16",
"dgh"
],
"compilers" : {
"gcc": [
@@ -3100,19 +3102,15 @@
"flags" : "-march=armv8.5-a+sve -mtune=cortex-a76"
},
{
"versions": "10.0:11.3.99",
"versions": "10.0:11.99",
"flags" : "-march=armv8.5-a+sve+sve2+i8mm+bf16 -mtune=cortex-a77"
},
{
"versions": "11.4:11.99",
"flags" : "-mcpu=neoverse-v2"
},
{
"versions": "12.0:12.2.99",
"versions": "12.0:12.99",
"flags" : "-march=armv9-a+i8mm+bf16 -mtune=cortex-a710"
},
{
"versions": "12.3:",
"versions": "13.0:",
"flags" : "-mcpu=neoverse-v2"
}
],
@@ -3147,113 +3145,7 @@
"flags": "-tp {name}"
}
]
},
"cpupart": "0xd4f"
},
"neoverse_n2": {
"from": ["neoverse_n1", "armv9.0a"],
"vendor": "ARM",
"features": [
"fp",
"asimd",
"evtstrm",
"aes",
"pmull",
"sha1",
"sha2",
"crc32",
"atomics",
"fphp",
"asimdhp",
"cpuid",
"asimdrdm",
"jscvt",
"fcma",
"lrcpc",
"dcpop",
"sha3",
"asimddp",
"sha512",
"sve",
"asimdfhm",
"uscat",
"ilrcpc",
"flagm",
"ssbs",
"sb",
"dcpodp",
"sve2",
"flagm2",
"frint",
"svei8mm",
"svebf16",
"i8mm",
"bf16"
],
"compilers" : {
"gcc": [
{
"versions": "4.8:5.99",
"flags": "-march=armv8-a"
},
{
"versions": "6:6.99",
"flags" : "-march=armv8.1-a"
},
{
"versions": "7.0:7.99",
"flags" : "-march=armv8.2-a -mtune=cortex-a72"
},
{
"versions": "8.0:8.99",
"flags" : "-march=armv8.4-a+sve -mtune=cortex-a72"
},
{
"versions": "9.0:9.99",
"flags" : "-march=armv8.5-a+sve -mtune=cortex-a76"
},
{
"versions": "10.0:10.99",
"flags" : "-march=armv8.5-a+sve+sve2+i8mm+bf16 -mtune=cortex-a77"
},
{
"versions": "11.0:",
"flags" : "-mcpu=neoverse-n2"
}
],
"clang" : [
{
"versions": "9.0:10.99",
"flags" : "-march=armv8.5-a+sve"
},
{
"versions": "11.0:13.99",
"flags" : "-march=armv8.5-a+sve+sve2+i8mm+bf16"
},
{
"versions": "14.0:15.99",
"flags" : "-march=armv9-a+i8mm+bf16"
},
{
"versions": "16.0:",
"flags" : "-mcpu=neoverse-n2"
}
],
"arm" : [
{
"versions": "23.04.0:",
"flags" : "-mcpu=neoverse-n2"
}
],
"nvhpc" : [
{
"versions": "23.3:",
"name": "neoverse-n1",
"flags": "-tp {name}"
}
]
},
"cpupart": "0xd49"
}
},
"m1": {
"from": ["armv8.4a"],
@@ -3319,8 +3211,7 @@
"flags" : "-mcpu=apple-m1"
}
]
},
"cpupart": "0x022"
}
},
"m2": {
"from": ["m1", "armv8.5a"],
@@ -3398,8 +3289,7 @@
"flags" : "-mcpu=apple-m2"
}
]
},
"cpupart": "0x032"
}
},
"arm": {
"from": [],
@@ -52,9 +52,6 @@
}
}
}
},
"cpupart": {
"type": "string"
}
},
"required": [
@@ -110,4 +107,4 @@
"additionalProperties": false
}
}
}
}
@@ -98,10 +98,3 @@ def path_filter_caller(*args, **kwargs):
    if _func:
        return holder_func(_func)
    return holder_func


def sanitize_win_longpath(path: str) -> str:
    """Strip Windows extended path prefix from strings
    Returns sanitized string.
    no-op if extended path prefix is not present"""
    return path.lstrip("\\\\?\\")
@@ -187,18 +187,12 @@ def polite_filename(filename: str) -> str:
    return _polite_antipattern().sub("_", filename)


def getuid() -> Union[str, int]:
    """Returns os getuid on non Windows
    On Windows returns 0 for admin users, login string otherwise
    This is in line with behavior from get_owner_uid which
    always returns the login string on Windows
    """
def getuid():
    if sys.platform == "win32":
        import ctypes

        # If not admin, use the string name of the login as a unique ID
        if ctypes.windll.shell32.IsUserAnAdmin() == 0:
            return os.getlogin()
        return 1
        return 0
    else:
        return os.getuid()
@@ -219,15 +213,6 @@ def _win_rename(src, dst):
    os.replace(src, dst)


@system_path_filter
def msdos_escape_parens(path):
    """MS-DOS interprets parens as grouping parameters even in a quoted string"""
    if sys.platform == "win32":
        return path.replace("(", "^(").replace(")", "^)")
    else:
        return path


@system_path_filter
def rename(src, dst):
    # On Windows, os.rename will fail if the destination file already exists
@@ -568,13 +553,7 @@ def exploding_archive_handler(tarball_container, stage):


@system_path_filter(arg_slice=slice(1))
def get_owner_uid(path, err_msg=None) -> Union[str, int]:
    """Returns owner UID of path destination
    On non Windows this is the value of st_uid
    On Windows this is the login string associated with the
    owning user.

    """
def get_owner_uid(path, err_msg=None):
    if not os.path.exists(path):
        mkdirp(path, mode=stat.S_IRWXU)
@@ -766,6 +745,7 @@ def copy_tree(
    src: str,
    dest: str,
    symlinks: bool = True,
    allow_broken_symlinks: bool = sys.platform != "win32",
    ignore: Optional[Callable[[str], bool]] = None,
    _permissions: bool = False,
):
@@ -788,6 +768,8 @@ def copy_tree(
        src (str): the directory to copy
        dest (str): the destination directory
        symlinks (bool): whether or not to preserve symlinks
        allow_broken_symlinks (bool): whether or not to allow broken (dangling) symlinks,
            On Windows, setting this to True will raise an exception. Defaults to true on unix.
        ignore (typing.Callable): function indicating which files to ignore
        _permissions (bool): for internal use only

@@ -795,6 +777,8 @@ def copy_tree(
        IOError: if *src* does not match any files or directories
        ValueError: if *src* is a parent directory of *dest*
    """
    if allow_broken_symlinks and sys.platform == "win32":
        raise llnl.util.symlink.SymlinkError("Cannot allow broken symlinks on Windows!")
    if _permissions:
        tty.debug("Installing {0} to {1}".format(src, dest))
    else:
@@ -838,7 +822,7 @@ def copy_tree(
            if islink(s):
                link_target = resolve_link_target_relative_to_the_link(s)
                if symlinks:
                    target = readlink(s)
                    target = os.readlink(s)
                    if os.path.isabs(target):

                        def escaped_path(path):
@@ -867,14 +851,16 @@ def escaped_path(path):
                copy_mode(s, d)

    for target, d, s in links:
        symlink(target, d)
        symlink(target, d, allow_broken_symlinks=allow_broken_symlinks)
        if _permissions:
            set_install_permissions(d)
            copy_mode(s, d)


@system_path_filter
def install_tree(src, dest, symlinks=True, ignore=None):
def install_tree(
    src, dest, symlinks=True, ignore=None, allow_broken_symlinks=sys.platform != "win32"
):
    """Recursively install an entire directory tree rooted at *src*.

    Same as :py:func:`copy_tree` with the addition of setting proper
@@ -885,12 +871,21 @@ def install_tree(src, dest, symlinks=True, ignore=None):
        dest (str): the destination directory
        symlinks (bool): whether or not to preserve symlinks
        ignore (typing.Callable): function indicating which files to ignore
        allow_broken_symlinks (bool): whether or not to allow broken (dangling) symlinks,
            On Windows, setting this to True will raise an exception.

    Raises:
        IOError: if *src* does not match any files or directories
        ValueError: if *src* is a parent directory of *dest*
    """
    copy_tree(src, dest, symlinks=symlinks, ignore=ignore, _permissions=True)
    copy_tree(
        src,
        dest,
        symlinks=symlinks,
        allow_broken_symlinks=allow_broken_symlinks,
        ignore=ignore,
        _permissions=True,
    )


@system_path_filter
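A short usage sketch for the ``allow_broken_symlinks`` knob, written against the signature on the v0.22.0 side of this diff (directory names are hypothetical; on Windows the flag must stay ``False``):

```python
import sys

from llnl.util.filesystem import install_tree

# Copy a staged tree into its final prefix; dangling symlinks are tolerated
# on POSIX platforms but are always an error on Windows.
install_tree(
    "/tmp/stage/demo-1.0",   # hypothetical source tree
    "/opt/spack/demo-1.0",   # hypothetical destination prefix
    symlinks=True,
    allow_broken_symlinks=sys.platform != "win32",
)
```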
@@ -1624,12 +1619,6 @@ def remove_linked_tree(path):
        shutil.rmtree(os.path.realpath(path), **kwargs)
        os.unlink(path)
    else:
        if sys.platform == "win32":
            # Adding this prefix allows shutil to remove long paths on windows
            # https://learn.microsoft.com/en-us/windows/win32/fileio/maximum-file-path-limitation?tabs=registry
            long_path_pfx = "\\\\?\\"
            if not path.startswith(long_path_pfx):
                path = long_path_pfx + path
        shutil.rmtree(path, **kwargs)
@@ -2440,10 +2429,9 @@ def add_library_dependent(self, *dest):
        """
        for pth in dest:
            if os.path.isfile(pth):
                new_pth = pathlib.Path(pth).parent
                self._additional_library_dependents.add(pathlib.Path(pth).parent)
            else:
                new_pth = pathlib.Path(pth)
            self._additional_library_dependents.add(new_pth)
                self._additional_library_dependents.add(pathlib.Path(pth))

    @property
    def rpaths(self):
@@ -2521,14 +2509,8 @@ def establish_link(self):

        # for each binary install dir in self.pkg (i.e. pkg.prefix.bin, pkg.prefix.lib)
        # install a symlink to each dependent library

        # do not rpath for system libraries included in the dag
        # we should not be modifying libraries managed by the Windows system
        # as this will negatively impact linker behavior and can result in permission
        # errors if those system libs are not modifiable by Spack
        if "windows-system" not in getattr(self.pkg, "tags", []):
            for library, lib_dir in itertools.product(self.rpaths, self.library_dependents):
                self._link(library, lib_dir)
        for library, lib_dir in itertools.product(self.rpaths, self.library_dependents):
            self._link(library, lib_dir)


@system_path_filter
@@ -8,75 +8,100 @@
import subprocess
import sys
import tempfile
from typing import Union

from llnl.util import lang, tty

from ..path import sanitize_win_longpath, system_path_filter
from ..path import system_path_filter

if sys.platform == "win32":
    from win32file import CreateHardLink

is_windows = sys.platform == "win32"


def _windows_symlink(
    src: str, dst: str, target_is_directory: bool = False, *, dir_fd: Union[int, None] = None
):
    """On Windows with System Administrator privileges this will be a normal symbolic link via
    os.symlink. On Windows without privileges the link will be a junction for a directory and a
    hardlink for a file. On Windows the various link types are:

    Symbolic Link: A link to a file or directory on the same or different volume (drive letter) or
    even to a remote file or directory (using UNC in its path). Need System Administrator
    privileges to make these.
def symlink(source_path: str, link_path: str, allow_broken_symlinks: bool = not is_windows):
    """
    Create a link.

    Hard Link: A link to a file on the same volume (drive letter) only. Every file (file's data)
    has at least 1 hard link (file's name). But when this method creates a new hard link there will
    be 2. Deleting all hard links effectively deletes the file. Don't need System Administrator
    privileges.
    On non-Windows and Windows with System Administrator
    privileges this will be a normal symbolic link via
    os.symlink.

    Junction: A link to a directory on the same or different volume (drive letter) but not to a
    remote directory. Don't need System Administrator privileges."""
    source_path = os.path.normpath(src)
    On Windows without privileges the link will be a
    junction for a directory and a hardlink for a file.
    On Windows the various link types are:

    Symbolic Link: A link to a file or directory on the
    same or different volume (drive letter) or even to
    a remote file or directory (using UNC in its path).
    Need System Administrator privileges to make these.

    Hard Link: A link to a file on the same volume (drive
    letter) only. Every file (file's data) has at least 1
    hard link (file's name). But when this method creates
    a new hard link there will be 2. Deleting all hard
    links effectively deletes the file. Don't need System
    Administrator privileges.

    Junction: A link to a directory on the same or different
    volume (drive letter) but not to a remote directory. Don't
    need System Administrator privileges.

    Parameters:
        source_path (str): The real file or directory that the link points to.
            Must be absolute OR relative to the link.
        link_path (str): The path where the link will exist.
        allow_broken_symlinks (bool): On Linux or Mac, don't raise an exception if the source_path
            doesn't exist. This will still raise an exception on Windows.
    """
    source_path = os.path.normpath(source_path)
    win_source_path = source_path
    link_path = os.path.normpath(dst)
    link_path = os.path.normpath(link_path)

    # Perform basic checks to make sure symlinking will succeed
    if os.path.lexists(link_path):
        raise AlreadyExistsError(f"Link path ({link_path}) already exists. Cannot create link.")
    # Never allow broken links on Windows.
    if sys.platform == "win32" and allow_broken_symlinks:
        raise ValueError("allow_broken_symlinks parameter cannot be True on Windows.")

    if not os.path.exists(source_path):
        if os.path.isabs(source_path):
            # An absolute source path that does not exist will result in a broken link.
            raise SymlinkError(
                f"Source path ({source_path}) is absolute but does not exist. Resulting "
                f"link would be broken so not making link."
    if not allow_broken_symlinks:
        # Perform basic checks to make sure symlinking will succeed
        if os.path.lexists(link_path):
            raise AlreadyExistsError(
                f"Link path ({link_path}) already exists. Cannot create link."
            )
        else:
            # os.symlink can create a link when the given source path is relative to
            # the link path. Emulate this behavior and check to see if the source exists
            # relative to the link path ahead of link creation to prevent broken
            # links from being made.
            link_parent_dir = os.path.dirname(link_path)
            relative_path = os.path.join(link_parent_dir, source_path)
            if os.path.exists(relative_path):
                # In order to work on windows, the source path needs to be modified to be
                # relative because hardlink/junction don't resolve relative paths the same
                # way as os.symlink. This is ignored on other operating systems.
                win_source_path = relative_path
            else:

        if not os.path.exists(source_path):
            if os.path.isabs(source_path) and not allow_broken_symlinks:
                # An absolute source path that does not exist will result in a broken link.
                raise SymlinkError(
                    f"The source path ({source_path}) is not relative to the link path "
                    f"({link_path}). Resulting link would be broken so not making link."
                    f"Source path ({source_path}) is absolute but does not exist. Resulting "
                    f"link would be broken so not making link."
                )
            else:
                # os.symlink can create a link when the given source path is relative to
                # the link path. Emulate this behavior and check to see if the source exists
                # relative to the link path ahead of link creation to prevent broken
                # links from being made.
                link_parent_dir = os.path.dirname(link_path)
                relative_path = os.path.join(link_parent_dir, source_path)
                if os.path.exists(relative_path):
                    # In order to work on windows, the source path needs to be modified to be
                    # relative because hardlink/junction don't resolve relative paths the same
                    # way as os.symlink. This is ignored on other operating systems.
                    win_source_path = relative_path
                elif not allow_broken_symlinks:
                    raise SymlinkError(
                        f"The source path ({source_path}) is not relative to the link path "
                        f"({link_path}). Resulting link would be broken so not making link."
                    )

    # Create the symlink
    if not _windows_can_symlink():
    if sys.platform == "win32" and not _windows_can_symlink():
        _windows_create_link(win_source_path, link_path)
    else:
        os.symlink(source_path, link_path, target_is_directory=os.path.isdir(source_path))


def _windows_islink(path: str) -> bool:
def islink(path: str) -> bool:
    """Override os.islink to give correct answer for spack logic.

    For Non-Windows: a link can be determined with the os.path.islink method.
@@ -222,9 +247,9 @@ def _windows_create_junction(source: str, link: str):
    out, err = proc.communicate()
    tty.debug(out.decode())
    if proc.returncode != 0:
        err_str = err.decode()
        tty.error(err_str)
        raise SymlinkError("Make junction command returned a non-zero return code.", err_str)
        err = err.decode()
        tty.error(err)
        raise SymlinkError("Make junction command returned a non-zero return code.", err)


def _windows_create_hard_link(path: str, link: str):
@@ -244,14 +269,14 @@ def _windows_create_hard_link(path: str, link: str):
    CreateHardLink(link, path)


def _windows_readlink(path: str, *, dir_fd=None):
def readlink(path: str):
    """Spack utility to override os.readlink to work cross platform"""
    if _windows_is_hardlink(path):
        return _windows_read_hard_link(path)
    elif _windows_is_junction(path):
        return _windows_read_junction(path)
    else:
        return sanitize_win_longpath(os.readlink(path, dir_fd=dir_fd))
        return os.readlink(path)


def _windows_read_hard_link(link: str) -> str:
@@ -313,16 +338,6 @@ def resolve_link_target_relative_to_the_link(link):
    return os.path.join(link_dir, target)


if sys.platform == "win32":
    symlink = _windows_symlink
    readlink = _windows_readlink
    islink = _windows_islink
else:
    symlink = os.symlink
    readlink = os.readlink
    islink = os.path.islink


class SymlinkError(RuntimeError):
    """Exception class for errors raised while creating symlinks,
    junctions and hard links
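A sketch of how the ``symlink`` helper above is meant to be called (paths are hypothetical; on Windows without administrator rights the call transparently falls back to a junction or hard link):

```python
from llnl.util.symlink import AlreadyExistsError, symlink

try:
    # The source may be absolute, or relative to the link's own directory.
    symlink("pkg-1.0", "/opt/spack/pkg-current")  # hypothetical paths
except AlreadyExistsError:
    # The link path is already occupied; callers decide whether to replace it.
    pass
```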
@@ -33,23 +33,8 @@
    pass


esc, bell, lbracket, bslash, newline = r"\x1b", r"\x07", r"\[", r"\\", r"\n"
# Ansi Control Sequence Introducers (CSI) are a well-defined format
# Standard ECMA-48: Control Functions for Character-Imaging I/O Devices, section 5.4
# https://www.ecma-international.org/wp-content/uploads/ECMA-48_5th_edition_june_1991.pdf
csi_pre = f"{esc}{lbracket}"
csi_param, csi_inter, csi_post = r"[0-?]", r"[ -/]", r"[@-~]"
ansi_csi = f"{csi_pre}{csi_param}*{csi_inter}*{csi_post}"
# General ansi escape sequences have well-defined prefixes,
# but content and suffixes are less reliable.
# Conservatively assume they end with either "<ESC>\" or "<BELL>",
# with no intervening "<ESC>"/"<BELL>" keys or newlines
esc_pre = f"{esc}[@-_]"
esc_content = f"[^{esc}{bell}{newline}]"
esc_post = f"(?:{esc}{bslash}|{bell})"
ansi_esc = f"{esc_pre}{esc_content}*{esc_post}"
# Use this to strip escape sequences
_escape = re.compile(f"{ansi_csi}|{ansi_esc}")
_escape = re.compile(r"\x1b[^m]*m|\x1b\[?1034h|\x1b\][0-9]+;[^\x07]*\x07")

# control characters for enabling/disabling echo
#
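A small self-contained sketch of the CSI-based pattern introduced above, rebuilt standalone so it can run outside Spack:

```python
import re

esc, bell, lbracket, bslash, newline = r"\x1b", r"\x07", r"\[", r"\\", r"\n"
# CSI: ESC [ <params> <intermediates> <final byte>, per ECMA-48 section 5.4
ansi_csi = f"{esc}{lbracket}[0-?]*[ -/]*[@-~]"
# Other escapes: ESC <prefix byte>, content, ended by "ESC \" or BELL
ansi_esc = f"{esc}[@-_][^{esc}{bell}{newline}]*(?:{esc}{bslash}|{bell})"
_escape = re.compile(f"{ansi_csi}|{ansi_esc}")

colored = "\x1b[1;31merror:\x1b[0m build failed"
assert _escape.sub("", colored) == "error: build failed"
```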
@@ -4,7 +4,7 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

#: PEP440 canonical <major>.<minor>.<micro>.<devN> string
__version__ = "0.23.0.dev0"
__version__ = "0.22.0"
spack_version = __version__
@@ -351,22 +351,6 @@ def _wrongly_named_spec(error_cls):
    return errors


@config_packages
def _ensure_all_virtual_packages_have_default_providers(error_cls):
    """All virtual packages must have a default provider explicitly set."""
    configuration = spack.config.create()
    defaults = configuration.get("packages", scope="defaults")
    default_providers = defaults["all"]["providers"]
    virtuals = spack.repo.PATH.provider_index.providers
    default_providers_filename = configuration.scopes["defaults"].get_section_filename("packages")

    return [
        error_cls(f"'{virtual}' must have a default provider in {default_providers_filename}", [])
        for virtual in virtuals
        if virtual not in default_providers
    ]


def _make_config_error(config_data, summary, error_cls):
    s = io.StringIO()
    s.write("Occurring in the following file:\n")
@@ -437,10 +421,6 @@ def _check_patch_urls(pkgs, error_cls):
        r"^https?://(?:patch-diff\.)?github(?:usercontent)?\.com/"
        r".+/.+/(?:commit|pull)/[a-fA-F0-9]+\.(?:patch|diff)"
    )
    github_pull_commits_re = (
        r"^https?://(?:patch-diff\.)?github(?:usercontent)?\.com/"
        r".+/.+/pull/\d+/commits/[a-fA-F0-9]+\.(?:patch|diff)"
    )
    # Only .diff URLs have stable/full hashes:
    # https://forum.gitlab.com/t/patches-with-full-index/29313
    gitlab_patch_url_re = (
@@ -456,24 +436,14 @@ def _check_patch_urls(pkgs, error_cls):
            if not isinstance(patch, spack.patch.UrlPatch):
                continue

            if re.match(github_pull_commits_re, patch.url):
                url = re.sub(r"/pull/\d+/commits/", r"/commit/", patch.url)
                url = re.sub(r"^(.*)(?<!full_index=1)$", r"\1?full_index=1", url)
                errors.append(
                    error_cls(
                        f"patch URL in package {pkg_cls.name} "
                        + "must not be a pull request commit; "
                        + f"instead use {url}",
                        [patch.url],
                    )
                )
            elif re.match(github_patch_url_re, patch.url):
            if re.match(github_patch_url_re, patch.url):
                full_index_arg = "?full_index=1"
                if not patch.url.endswith(full_index_arg):
                    errors.append(
                        error_cls(
                            f"patch URL in package {pkg_cls.name} "
                            + f"must end with {full_index_arg}",
                            "patch URL in package {0} must end with {1}".format(
                                pkg_cls.name, full_index_arg
                            ),
                            [patch.url],
                        )
                    )
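A standalone sketch of the URL rewrite performed by the audit check above (the example URL is made up):

```python
import re

url = "https://github.com/org/repo/pull/123/commits/abc123def456.patch"  # hypothetical

# Point at the commit itself rather than the pull-request commit ...
fixed = re.sub(r"/pull/\d+/commits/", r"/commit/", url)
# ... and request stable full-index hashes unless already present.
fixed = re.sub(r"^(.*)(?<!full_index=1)$", r"\1?full_index=1", fixed)

assert fixed == "https://github.com/org/repo/commit/abc123def456.patch?full_index=1"
```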
@@ -481,7 +451,9 @@ def _check_patch_urls(pkgs, error_cls):
                if not patch.url.endswith(".diff"):
                    errors.append(
                        error_cls(
                            f"patch URL in package {pkg_cls.name} must end with .diff",
                            "patch URL in package {0} must end with .diff".format(
                                pkg_cls.name
                            ),
                            [patch.url],
                        )
                    )
@@ -807,7 +779,7 @@ def check_virtual_with_variants(spec, msg):
        return
    error = error_cls(
        f"{pkg_name}: {msg}",
        [f"remove variants from '{spec}' in depends_on directive in {filename}"],
        f"remove variants from '{spec}' in depends_on directive in {filename}",
    )
    errors.append(error)
File diff suppressed because it is too large
@@ -213,18 +213,15 @@ def _root_spec(spec_str: str) -> str:
    Args:
        spec_str: spec to be bootstrapped. Must be without compiler and target.
    """
    # Add a compiler and platform requirement to the root spec.
    # Add a compiler requirement to the root spec.
    platform = str(spack.platforms.host())

    if platform == "darwin":
        spec_str += " %apple-clang"
    elif platform == "windows":
        spec_str += " %msvc"
    elif platform == "linux":
        spec_str += " %gcc"
    elif platform == "freebsd":
        spec_str += " %clang"
    spec_str += f" platform={platform}"

    target = archspec.cpu.host().family
    spec_str += f" target={target}"
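A toy re-creation of the string composition above (the platform and target are hard-coded here; the real code derives them from the running host):

```python
platform = "linux"   # as spack.platforms.host() would report
spec_str = "zlib"    # hypothetical root package

if platform == "linux":
    spec_str += " %gcc"              # compiler requirement
spec_str += f" platform={platform}"  # platform requirement (the removed line)
spec_str += " target=x86_64"         # target family from archspec.cpu.host().family

print(spec_str)  # -> "zlib %gcc platform=linux target=x86_64"
```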
@@ -1,154 +0,0 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Bootstrap concrete specs for clingo

Spack uses clingo to concretize specs. When clingo itself needs to be bootstrapped from sources,
we need to rely on another mechanism to get a concrete spec that fits the current host.

This module contains the logic to get a concrete spec for clingo, starting from a prototype
JSON file for a similar platform.
"""
import pathlib
import sys
from typing import Dict, Optional, Tuple

import archspec.cpu

import spack.compiler
import spack.compilers
import spack.platforms
import spack.spec
import spack.traverse

from .config import spec_for_current_python


class ClingoBootstrapConcretizer:
    def __init__(self, configuration):
        self.host_platform = spack.platforms.host()
        self.host_os = self.host_platform.operating_system("frontend")
        self.host_target = archspec.cpu.host().family
        self.host_architecture = spack.spec.ArchSpec.frontend_arch()
        self.host_architecture.target = str(self.host_target)
        self.host_compiler = self._valid_compiler_or_raise()
        self.host_python = self.python_external_spec()
        if str(self.host_platform) == "linux":
            self.host_libc = self.libc_external_spec()

        self.external_cmake, self.external_bison = self._externals_from_yaml(configuration)

    def _valid_compiler_or_raise(self) -> "spack.compiler.Compiler":
        if str(self.host_platform) == "linux":
            compiler_name = "gcc"
        elif str(self.host_platform) == "darwin":
            compiler_name = "apple-clang"
        elif str(self.host_platform) == "windows":
            compiler_name = "msvc"
        elif str(self.host_platform) == "freebsd":
            compiler_name = "clang"
        else:
            raise RuntimeError(f"Cannot bootstrap clingo from sources on {self.host_platform}")
        candidates = spack.compilers.compilers_for_spec(
            compiler_name, arch_spec=self.host_architecture
        )
        if not candidates:
            raise RuntimeError(
                f"Cannot find any version of {compiler_name} to bootstrap clingo from sources"
            )
        candidates.sort(key=lambda x: x.spec.version, reverse=True)
        return candidates[0]

    def _externals_from_yaml(
        self, configuration: "spack.config.Configuration"
    ) -> Tuple[Optional["spack.spec.Spec"], Optional["spack.spec.Spec"]]:
        packages_yaml = configuration.get("packages")
        requirements = {"cmake": "@3.20:", "bison": "@2.5:"}
        selected: Dict[str, Optional["spack.spec.Spec"]] = {"cmake": None, "bison": None}
        for pkg_name in ["cmake", "bison"]:
            if pkg_name not in packages_yaml:
                continue

            candidates = packages_yaml[pkg_name].get("externals", [])
            for candidate in candidates:
                s = spack.spec.Spec(candidate["spec"], external_path=candidate["prefix"])
                if not s.satisfies(requirements[pkg_name]):
                    continue

                if not s.intersects(f"%{self.host_compiler.spec}"):
                    continue

                if not s.intersects(f"arch={self.host_architecture}"):
                    continue

                selected[pkg_name] = self._external_spec(s)
                break
        return selected["cmake"], selected["bison"]

    def prototype_path(self) -> pathlib.Path:
        """Path to a prototype concrete specfile for clingo"""
        parent_dir = pathlib.Path(__file__).parent
        result = parent_dir / "prototypes" / f"clingo-{self.host_platform}-{self.host_target}.json"
        if str(self.host_platform) == "linux":
            # Using aarch64 as a fallback, since it has gnuconfig (x86_64 doesn't have it)
            if not result.exists():
                result = parent_dir / "prototypes" / f"clingo-{self.host_platform}-aarch64.json"

        elif str(self.host_platform) == "freebsd":
            result = parent_dir / "prototypes" / f"clingo-{self.host_platform}-amd64.json"

        elif not result.exists():
            raise RuntimeError(f"Cannot bootstrap clingo from sources on {self.host_platform}")

        return result

    def concretize(self) -> "spack.spec.Spec":
        # Read the prototype and mark it NOT concrete
        s = spack.spec.Spec.from_specfile(str(self.prototype_path()))
        s._mark_concrete(False)

        # Tweak it to conform to the host architecture
        for node in s.traverse():
            node.architecture.os = str(self.host_os)
            node.compiler = self.host_compiler.spec
            node.architecture = self.host_architecture

            if node.name == "gcc-runtime":
                node.versions = self.host_compiler.spec.versions

        for edge in spack.traverse.traverse_edges([s], cover="edges"):
            if edge.spec.name == "python":
                edge.spec = self.host_python

            if edge.spec.name == "bison" and self.external_bison:
                edge.spec = self.external_bison

            if edge.spec.name == "cmake" and self.external_cmake:
                edge.spec = self.external_cmake

            if "libc" in edge.virtuals:
                edge.spec = self.host_libc

        s._finalize_concretization()

        # Work around the fact that the installer calls Spec.dependents() and
        # we modified edges inconsistently
        return s.copy()

    def python_external_spec(self) -> "spack.spec.Spec":
        """Python external spec corresponding to the current running interpreter"""
        result = spack.spec.Spec(spec_for_current_python(), external_path=sys.exec_prefix)
        return self._external_spec(result)

    def libc_external_spec(self) -> "spack.spec.Spec":
        result = self.host_compiler.default_libc
        return self._external_spec(result)

    def _external_spec(self, initial_spec) -> "spack.spec.Spec":
        initial_spec.namespace = "builtin"
        initial_spec.compiler = self.host_compiler.spec
        initial_spec.architecture = self.host_architecture
        for flag_type in spack.spec.FlagMap.valid_compiler_flags():
            initial_spec.compiler_flags[flag_type] = []
        return spack.spec.parse_with_version_concrete(initial_spec)
@@ -129,10 +129,10 @@ def _bootstrap_config_scopes() -> Sequence["spack.config.ConfigScope"]:
    configuration_paths = (spack.config.CONFIGURATION_DEFAULTS_PATH, ("bootstrap", _config_path()))
    for name, path in configuration_paths:
        platform = spack.platforms.host().name
        platform_scope = spack.config.DirectoryConfigScope(
            f"{name}/{platform}", os.path.join(path, platform)
        platform_scope = spack.config.ConfigScope(
            "/".join([name, platform]), os.path.join(path, platform)
        )
        generic_scope = spack.config.DirectoryConfigScope(name, path)
        generic_scope = spack.config.ConfigScope(name, path)
        config_scopes.extend([generic_scope, platform_scope])
        msg = "[BOOTSTRAP CONFIG SCOPE] name={0}, path={1}"
        tty.debug(msg.format(generic_scope.name, generic_scope.path))
@@ -54,7 +54,6 @@
import spack.version

from ._common import _executables_in_store, _python_import, _root_spec, _try_import_from_store
from .clingo import ClingoBootstrapConcretizer
from .config import spack_python_interpreter, spec_for_current_python

#: Name of the file containing metadata about the bootstrapping source
@@ -269,13 +268,15 @@ def try_import(self, module: str, abstract_spec_str: str) -> bool:

        # Try to build and install from sources
        with spack_python_interpreter():
            # Add hint to use frontend operating system on Cray
            concrete_spec = spack.spec.Spec(abstract_spec_str + " ^" + spec_for_current_python())

            if module == "clingo":
                bootstrapper = ClingoBootstrapConcretizer(configuration=spack.config.CONFIG)
                concrete_spec = bootstrapper.concretize()
            else:
                concrete_spec = spack.spec.Spec(
                    abstract_spec_str + " ^" + spec_for_current_python()
                # TODO: remove when the old concretizer is deprecated  # pylint: disable=fixme
                concrete_spec._old_concretize(  # pylint: disable=protected-access
                    deprecation_warning=False
                )
            else:
                concrete_spec.concretize()

        msg = "[BOOTSTRAP MODULE {0}] Try installing '{1}' from sources"
@@ -302,7 +303,14 @@ def try_search_path(self, executables: Tuple[str], abstract_spec_str: str) -> bo
        # might reduce compilation time by a fair amount
        _add_externals_if_missing()

        concrete_spec = spack.spec.Spec(abstract_spec_str).concretized()
        concrete_spec = spack.spec.Spec(abstract_spec_str)
        if concrete_spec.name == "patchelf":
            concrete_spec._old_concretize(  # pylint: disable=protected-access
                deprecation_warning=False
            )
        else:
            concrete_spec.concretize()

        msg = "[BOOTSTRAP] Try installing '{0}' from sources"
        tty.debug(msg.format(abstract_spec_str))
        with spack.config.override(self.mirror_scope):
File diff suppressed because one or more lines are too long (repeated for 7 files)
@@ -43,7 +43,7 @@
from collections import defaultdict
from enum import Flag, auto
from itertools import chain
from typing import Dict, List, Set, Tuple
from typing import List, Set, Tuple

import llnl.util.tty as tty
from llnl.string import plural
@@ -72,7 +72,6 @@
import spack.store
import spack.subprocess_context
import spack.user_environment
import spack.util.executable
import spack.util.path
import spack.util.pattern
from spack import traverse
@@ -92,7 +91,7 @@
)
from spack.util.executable import Executable
from spack.util.log_parse import make_log_context, parse_log_events
from spack.util.module_cmd import load_module, path_from_modules
from spack.util.module_cmd import load_module, module, path_from_modules

#
# This can be set by the user to globally disable parallel builds.
@@ -191,6 +190,14 @@ def __call__(self, *args, **kwargs):
        return super().__call__(*args, **kwargs)


def _on_cray():
    host_platform = spack.platforms.host()
    host_os = host_platform.operating_system("default_os")
    on_cray = str(host_platform) == "cray"
    using_cnl = re.match(r"cnl\d+", str(host_os))
    return on_cray, using_cnl


def clean_environment():
    # Stuff in here sanitizes the build environment to eliminate
    # anything the user has set that may interfere. We apply it immediately
@@ -234,6 +241,17 @@ def clean_environment():
        if varname.endswith("_ROOT") and varname != "SPACK_ROOT":
            env.unset(varname)

    # On Cray "cluster" systems, unset CRAY_LD_LIBRARY_PATH to avoid
    # interference with Spack dependencies.
    # CNL requires these variables to be set (or at least some of them,
    # depending on the CNL version).
    on_cray, using_cnl = _on_cray()
    if on_cray and not using_cnl:
        env.unset("CRAY_LD_LIBRARY_PATH")
        for varname in os.environ.keys():
            if "PKGCONF" in varname:
                env.unset(varname)

    # Unset the following variables because they can affect installation of
    # Autotools and CMake packages.
    build_system_vars = [
@@ -363,7 +381,11 @@ def set_compiler_environment_variables(pkg, env):
        _add_werror_handling(keep_werror, env)

    # Set the target parameters that the compiler will add
    isa_arg = spec.architecture.target.optimization_flags(compiler)
    # Don't set on cray platform because the targeting module handles this
    if spec.satisfies("platform=cray"):
        isa_arg = ""
    else:
        isa_arg = spec.architecture.target.optimization_flags(compiler)
    env.set("SPACK_TARGET_ARGS", isa_arg)

    # Trap spack-tracked compiler flags as appropriate.
@@ -459,7 +481,10 @@ def set_wrapper_variables(pkg, env):

    # Find ccache binary and hand it to build environment
    if spack.config.get("config:ccache"):
        env.set(SPACK_CCACHE_BINARY, spack.util.executable.which_string("ccache", required=True))
        ccache = Executable("ccache")
        if not ccache:
            raise RuntimeError("No ccache binary found in PATH")
        env.set(SPACK_CCACHE_BINARY, ccache)

    # Gather information about various types of dependencies
    link_deps = set(pkg.spec.traverse(root=False, deptype=("link")))
@@ -705,28 +730,12 @@ def _static_to_shared_library(arch, compiler, static_lib, shared_lib=None, **kwa
    return compiler(*compiler_args, output=compiler_output)


def _get_rpath_deps_from_spec(
    spec: spack.spec.Spec, transitive_rpaths: bool
) -> List[spack.spec.Spec]:
    if not transitive_rpaths:
        return spec.dependencies(deptype=dt.LINK)

    by_name: Dict[str, spack.spec.Spec] = {}

    for dep in spec.traverse(root=False, deptype=dt.LINK):
        lookup = by_name.get(dep.name)
        if lookup is None:
            by_name[dep.name] = dep
        elif lookup.version < dep.version:
            by_name[dep.name] = dep

    return list(by_name.values())


def get_rpath_deps(pkg: spack.package_base.PackageBase) -> List[spack.spec.Spec]:
    """Return immediate or transitive dependencies (depending on the package) that need to be
    rpath'ed. If a package occurs multiple times, the newest version is kept."""
    return _get_rpath_deps_from_spec(pkg.spec, pkg.transitive_rpaths)
def get_rpath_deps(pkg):
    """Return immediate or transitive RPATHs depending on the package."""
    if pkg.transitive_rpaths:
        return [d for d in pkg.spec.traverse(root=False, deptype=("link"))]
    else:
        return pkg.spec.dependencies(deptype="link")
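The removed ``_get_rpath_deps_from_spec`` above keeps only the newest occurrence of each link dependency; a generic sketch of that keep-newest-by-name pass, with tuples standing in for spec objects:

```python
from typing import Dict, List, Tuple

# (name, version) pairs standing in for link-dependency specs
deps: List[Tuple[str, Tuple[int, int]]] = [
    ("zlib", (1, 2)),
    ("openssl", (3, 0)),
    ("zlib", (1, 3)),  # newer duplicate: should win
]

by_name: Dict[str, Tuple[str, Tuple[int, int]]] = {}
for dep in deps:
    lookup = by_name.get(dep[0])
    if lookup is None or lookup[1] < dep[1]:
        by_name[dep[0]] = dep

assert list(by_name.values()) == [("zlib", (1, 3)), ("openssl", (3, 0))]
```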
def get_rpaths(pkg):
@@ -738,9 +747,7 @@ def get_rpaths(pkg):
    # Second module is our compiler mod name. We use that to get rpaths from
    # module show output.
    if pkg.compiler.modules and len(pkg.compiler.modules) > 1:
        mod_rpath = path_from_modules([pkg.compiler.modules[1]])
        if mod_rpath:
            rpaths.append(mod_rpath)
        rpaths.append(path_from_modules([pkg.compiler.modules[1]]))
    return list(dedupe(filter_system_paths(rpaths)))


@@ -810,6 +817,14 @@ def setup_package(pkg, dirty, context: Context = Context.BUILD):
        for mod in pkg.compiler.modules:
            load_module(mod)

    # kludge to handle cray mpich and libsci being automatically loaded by
    # PrgEnv modules on cray platform. Module unload does no damage when
    # unnecessary
    on_cray, _ = _on_cray()
    if on_cray and not dirty:
        for mod in ["cray-mpich", "cray-libsci"]:
            module("unload", mod)

    if target and target.module_name:
        load_module(target.module_name)

@@ -1473,7 +1488,7 @@ def long_message(self):
            out.write("  {0}\n".format(self.log_name))

        # Also output the test log path IF it exists
        if self.context != "test" and have_log:
        if self.context != "test":
            test_log = join_path(os.path.dirname(self.log_name), spack_install_test_log)
            if os.path.isfile(test_log):
                out.write("\nSee test log for details:\n")
@@ -162,9 +162,7 @@ def initconfig_compiler_entries(self):
        ld_flags = " ".join(flags["ldflags"])
        ld_format_string = "CMAKE_{0}_LINKER_FLAGS"
        # CMake has separate linker arguments for types of builds.
        # 'ldflags' should not be used with CMAKE_STATIC_LINKER_FLAGS which
        # is used by the archiver, so don't include "STATIC" in this loop:
        for ld_type in ["EXE", "MODULE", "SHARED"]:
        for ld_type in ["EXE", "MODULE", "SHARED", "STATIC"]:
            ld_string = ld_format_string.format(ld_type)
            entries.append(cmake_cache_string(ld_string, ld_flags))
@@ -110,8 +110,9 @@ def cuda_flags(arch_list):
    # From the NVIDIA install guide we know of conflicts for particular
    # platforms (linux, darwin), architectures (x86, powerpc) and compilers
    # (gcc, clang). We don't restrict %gcc and %clang conflicts to
    # platform=linux, since they may apply to platform=darwin. We currently
    # do not provide conflicts for platform=darwin with %apple-clang.
    # platform=linux, since they should also apply to platform=cray, and may
    # apply to platform=darwin. We currently do not provide conflicts for
    # platform=darwin with %apple-clang.

    # Linux x86_64 compiler conflicts from here:
    # https://gist.github.com/ax3l/9489132
@@ -124,8 +125,6 @@ def cuda_flags(arch_list):
    # minimum supported versions
    conflicts("%gcc@:4", when="+cuda ^cuda@11.0:")
    conflicts("%gcc@:5", when="+cuda ^cuda@11.4:")
    conflicts("%gcc@:7.2", when="+cuda ^cuda@12.4:")
    conflicts("%clang@:6", when="+cuda ^cuda@12.2:")

    # maximum supported version
    # NOTE:
@@ -138,14 +137,11 @@ def cuda_flags(arch_list):
    conflicts("%gcc@11.2:", when="+cuda ^cuda@:11.5")
    conflicts("%gcc@12:", when="+cuda ^cuda@:11.8")
    conflicts("%gcc@13:", when="+cuda ^cuda@:12.3")
    conflicts("%gcc@14:", when="+cuda ^cuda@:12.5")
    conflicts("%clang@12:", when="+cuda ^cuda@:11.4.0")
    conflicts("%clang@13:", when="+cuda ^cuda@:11.5")
    conflicts("%clang@14:", when="+cuda ^cuda@:11.7")
    conflicts("%clang@15:", when="+cuda ^cuda@:12.0")
    conflicts("%clang@16:", when="+cuda ^cuda@:12.1")
    conflicts("%clang@17:", when="+cuda ^cuda@:12.3")
    conflicts("%clang@18:", when="+cuda ^cuda@:12.5")
    conflicts("%clang@16:", when="+cuda ^cuda@:12.3")

    # https://gist.github.com/ax3l/9489132#gistcomment-3860114
    conflicts("%gcc@10", when="+cuda ^cuda@:11.4.0")
@@ -213,16 +209,12 @@ def cuda_flags(arch_list):
    conflicts("%intel@19.0:", when="+cuda ^cuda@:10.0")
    conflicts("%intel@19.1:", when="+cuda ^cuda@:10.1")
    conflicts("%intel@19.2:", when="+cuda ^cuda@:11.1.0")
    conflicts("%intel@2021:", when="+cuda ^cuda@:11.4.0")

    # XL is mostly relevant for ppc64le Linux
    conflicts("%xl@:12,14:", when="+cuda ^cuda@:9.1")
    conflicts("%xl@:12,14:15,17:", when="+cuda ^cuda@9.2")
    conflicts("%xl@:12,17:", when="+cuda ^cuda@:11.1.0")

    # PowerPC.
    conflicts("target=ppc64le", when="+cuda ^cuda@12.5:")

    # Darwin.
    # TODO: add missing conflicts for %apple-clang cuda@:10
    conflicts("platform=darwin", when="+cuda ^cuda@11.0.2:")
    conflicts("platform=darwin", when="+cuda ^cuda@11.0.2: ")
@@ -72,7 +72,7 @@ def build_directory(self):
    def build_args(self):
        """Arguments for ``go build``."""
        # Pass ldflags -s = --strip-all and -w = --no-warnings by default
        return ["-modcacherw", "-ldflags", "-s -w", "-o", f"{self.pkg.name}"]
        return ["-ldflags", "-s -w", "-o", f"{self.pkg.name}"]

    @property
    def check_args(self):
@@ -846,7 +846,6 @@ def scalapack_libs(self):
            "^mpich@2:" in spec_root
            or "^cray-mpich" in spec_root
            or "^mvapich2" in spec_root
            or "^mvapich" in spec_root
            or "^intel-mpi" in spec_root
            or "^intel-oneapi-mpi" in spec_root
            or "^intel-parallel-studio" in spec_root
@@ -937,15 +936,32 @@ def mpi_setup_dependent_build_environment(self, env, dependent_spec, compilers_o
            "I_MPI_ROOT": self.normalize_path("mpi"),
        }

        compiler_wrapper_commands = self.mpi_compiler_wrappers
        wrapper_vars.update(
            {
                "MPICC": compiler_wrapper_commands["MPICC"],
                "MPICXX": compiler_wrapper_commands["MPICXX"],
                "MPIF77": compiler_wrapper_commands["MPIF77"],
                "MPIF90": compiler_wrapper_commands["MPIF90"],
            }
        )
        # CAUTION - SIMILAR code in:
        #   var/spack/repos/builtin/packages/mpich/package.py
        #   var/spack/repos/builtin/packages/openmpi/package.py
        #   var/spack/repos/builtin/packages/mvapich2/package.py
        #
        # On Cray, the regular compiler wrappers *are* the MPI wrappers.
        if "platform=cray" in self.spec:
            # TODO: Confirm
            wrapper_vars.update(
                {
                    "MPICC": compilers_of_client["CC"],
                    "MPICXX": compilers_of_client["CXX"],
                    "MPIF77": compilers_of_client["F77"],
                    "MPIF90": compilers_of_client["F90"],
                }
            )
        else:
            compiler_wrapper_commands = self.mpi_compiler_wrappers
            wrapper_vars.update(
                {
                    "MPICC": compiler_wrapper_commands["MPICC"],
                    "MPICXX": compiler_wrapper_commands["MPICXX"],
                    "MPIF77": compiler_wrapper_commands["MPIF77"],
                    "MPIF90": compiler_wrapper_commands["MPIF90"],
                }
            )

        # Ensure that the directory containing the compiler wrappers is in the
        # PATH. Spack packages add `prefix.bin` to their dependents' paths,
@@ -24,6 +24,7 @@ class MSBuildPackage(spack.package_base.PackageBase):
    build_system("msbuild")
    conflicts("platform=linux", when="build_system=msbuild")
    conflicts("platform=darwin", when="build_system=msbuild")
    conflicts("platform=cray", when="build_system=msbuild")


@spack.builder.builder("msbuild")
@@ -24,6 +24,7 @@ class NMakePackage(spack.package_base.PackageBase):
    build_system("nmake")
    conflicts("platform=linux", when="build_system=nmake")
    conflicts("platform=darwin", when="build_system=nmake")
    conflicts("platform=cray", when="build_system=nmake")


@spack.builder.builder("nmake")
@@ -144,7 +145,7 @@ def install(self, pkg, spec, prefix):
        opts += self.nmake_install_args()
        if self.makefile_name:
            opts.append("/F{}".format(self.makefile_name))
        opts.append(self.define("PREFIX", fs.windows_sfn(prefix)))
        opts.append(self.define("PREFIX", prefix))
        with fs.working_dir(self.build_directory):
            inspect.getmodule(self.pkg).nmake(
                *opts, *self.install_targets, ignore_quotes=self.ignore_quotes
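The change here is how PREFIX reaches nmake: one side first shortens the install prefix with fs.windows_sfn (8.3 short file names), which sidesteps quoting problems in long paths. A standalone sketch of the final command line, with an illustrative makefile name and prefix, and assuming define() renders KEY=value:

    makefile_name = "win.mak"
    prefix = r"C:\spack\opt\mypkg"  # illustrative install prefix

    opts = ["/F{}".format(makefile_name), "PREFIX={}".format(prefix), "install"]
    print("nmake " + " ".join(opts))
    # nmake /Fwin.mak PREFIX=C:\spack\opt\mypkg install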
@@ -3,6 +3,7 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Common utilities for managing intel oneapi packages."""
import getpass
import os
import platform
import shutil
@@ -12,7 +13,6 @@
from llnl.util.filesystem import HeaderList, LibraryList, find_libraries, join_path, mkdirp
from llnl.util.link_tree import LinkTree

import spack.util.path
from spack.build_environment import dso_suffix
from spack.directives import conflicts, license, redistribute, variant
from spack.package_base import InstallError
@@ -36,8 +36,9 @@ class IntelOneApiPackage(Package):
        "target=ppc64:",
        "target=ppc64le:",
        "target=aarch64:",
        "platform=darwin",
        "platform=windows",
        "platform=darwin:",
        "platform=cray:",
        "platform=windows:",
    ]:
        conflicts(c, msg="This package in only available for x86_64 and Linux")

@@ -99,7 +100,7 @@ def install_component(self, installer_path):
        # with other install depends on the userid. For root, we
        # delete the installercache before and after install. For
        # non root we redefine the HOME environment variable.
        if spack.util.path.get_user() == "root":
        if getpass.getuser() == "root":
            shutil.rmtree("/var/intel/installercache", ignore_errors=True)

        bash = Executable("bash")
@@ -122,7 +123,7 @@ def install_component(self, installer_path):
            self.prefix,
        )

        if spack.util.path.get_user() == "root":
        if getpass.getuser() == "root":
            shutil.rmtree("/var/intel/installercache", ignore_errors=True)

        # Some installers have a bug and do not return an error code when failing
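Both sides of the swapped root check resolve the current user name; the difference is only which helper is used. The standard-library getpass.getuser() consults the LOGNAME, USER, LNAME and USERNAME environment variables before falling back to the password database, so the check behaves predictably in containers and CI:

    import getpass

    # Mirrors the check above: Intel's shared installer cache is keyed to
    # the installing user, so it is cleared when running as root.
    if getpass.getuser() == "root":
        print("would remove /var/intel/installercache before and after install")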
@@ -139,10 +139,6 @@ def configure(self, pkg, spec, prefix):
        args = ["--verbose", "--target-dir", inspect.getmodule(self.pkg).python_platlib]
        args.extend(self.configure_args())

        # https://github.com/Python-SIP/sip/commit/cb0be6cb6e9b756b8b0db3136efb014f6fb9b766
        if spec["py-sip"].satisfies("@6.1.0:"):
            args.extend(["--scripts-dir", pkg.prefix.bin])

        sip_build = Executable(spec["py-sip"].prefix.bin.join("sip-build"))
        sip_build(*args)

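The removed block passed --scripts-dir only for py-sip 6.1.0 and newer, the release that introduced the option (see the linked upstream commit). A sketch of the invocation this builds, with illustrative paths:

    args = [
        "--verbose",
        "--target-dir", "/prefix/lib/python3.11/site-packages",  # python_platlib
        "--scripts-dir", "/prefix/bin",  # only added for py-sip >= 6.1.0
    ]
    print("sip-build " + " ".join(args))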
@@ -34,8 +34,6 @@ def _misc_cache():
    return spack.util.file_cache.FileCache(path)


FileCacheType = Union[spack.util.file_cache.FileCache, llnl.util.lang.Singleton]

#: Spack's cache for small data
MISC_CACHE: Union[spack.util.file_cache.FileCache, llnl.util.lang.Singleton] = (
    llnl.util.lang.Singleton(_misc_cache)
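Either way, MISC_CACHE stays a lazily constructed singleton: the FileCache is only created on first use. A dependency-free sketch of that proxy pattern (llnl.util.lang.Singleton works along these lines, but this is not its actual implementation):

    class LazySingleton:
        """Proxy that builds the wrapped object on first attribute access."""

        def __init__(self, factory):
            self._factory = factory
            self._instance = None

        def __getattr__(self, name):
            if self._instance is None:
                self._instance = self._factory()
            return getattr(self._instance, name)

    MISC_CACHE = LazySingleton(dict)  # dict stands in for FileCache(path)
    MISC_CACHE.update({"key": "value"})  # the factory runs here, on first access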
@@ -22,8 +22,6 @@
from urllib.parse import urlencode
from urllib.request import HTTPHandler, Request, build_opener

import ruamel.yaml

import llnl.util.filesystem as fs
import llnl.util.tty as tty
from llnl.util.lang import memoized
@@ -38,7 +36,6 @@
import spack.paths
import spack.repo
import spack.spec
import spack.stage
import spack.util.git
import spack.util.gpg as gpg_util
import spack.util.spack_yaml as syaml
@@ -47,7 +44,6 @@
from spack import traverse
from spack.error import SpackError
from spack.reporters import CDash, CDashConfiguration
from spack.reporters.cdash import SPACK_CDASH_TIMEOUT
from spack.reporters.cdash import build_stamp as cdash_build_stamp

# See https://docs.gitlab.com/ee/ci/yaml/#retry for descriptions of conditions
@@ -72,7 +68,7 @@
# TODO: Remove this in Spack 0.23
SHARED_PR_MIRROR_URL = "s3://spack-binaries-prs/shared_pr_mirror"
JOB_NAME_FORMAT = (
    "{name}{@version} {/hash:7} {%compiler.name}{@compiler.version}{ arch=architecture}"
    "{name}{@version} {/hash:7} {%compiler.name}{@compiler.version}{arch=architecture}"
)
IS_WINDOWS = sys.platform == "win32"
spack_gpg = spack.main.SpackCommand("gpg")
@@ -554,9 +550,10 @@ def generate_gitlab_ci_yaml(
    env,
    print_summary,
    output_file,
    *,
    prune_dag=False,
    check_index_only=False,
    run_optimizer=False,
    use_dependencies=False,
    artifacts_root=None,
    remote_mirror_override=None,
):
@@ -577,6 +574,12 @@ def generate_gitlab_ci_yaml(
            this mode results in faster yaml generation time). Otherwise, also
            check each spec directly by url (useful if there is no index or it
            might be out of date).
        run_optimizer (bool): If True, post-process the generated yaml to try
            to reduce the size (attempts to collect repeated configuration
            and replace with definitions).
        use_dependencies (bool): If true, use "dependencies" rather than "needs"
            ("needs" allows DAG scheduling). Useful if gitlab instance cannot
            be configured to handle more than a few "needs" per job.
        artifacts_root (str): Path where artifacts like logs, environment
            files (spack.yaml, spack.lock), etc should be written. GitLab
            requires this to be within the project directory.
@@ -680,22 +683,6 @@ def generate_gitlab_ci_yaml(
        "instead.",
    )

    def ensure_expected_target_path(path):
        """Returns passed paths with all Windows path separators exchanged
        for posix separators only if copy_only_pipeline is enabled

        This is required as copy_only_pipelines are a unique scenario where
        the generate job and child pipelines are run on different platforms.
        To make this compatible w/ Windows, we cannot write Windows style path separators
        that will be consumed on by the Posix copy job runner.

        TODO (johnwparent): Refactor config + cli read/write to deal only in posix
        style paths
        """
        if copy_only_pipeline and path:
            path = path.replace("\\", "/")
        return path

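The removed helper is small enough to illustrate standalone: it rewrites Windows path separators to POSIX ones only when the pipeline is generated on Windows but consumed by a POSIX copy-job runner. A sketch with the enclosing copy_only_pipeline flag captured as an argument instead of a closure:

    def ensure_expected_target_path(path: str, copy_only_pipeline: bool = True) -> str:
        # Only copy-only pipelines cross the Windows/POSIX boundary.
        if copy_only_pipeline and path:
            path = path.replace("\\", "/")
        return path

    assert ensure_expected_target_path("jobs_scratch_dir\\logs") == "jobs_scratch_dir/logs"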
    pipeline_mirrors = spack.mirror.MirrorCollection(binary=True)
    deprecated_mirror_config = False
    buildcache_destination = None
@@ -810,8 +797,7 @@ def ensure_expected_target_path(path):
    cli_scopes = [
        os.path.relpath(s.path, concrete_env_dir)
        for s in cfg.scopes().values()
        if not s.writable
        and isinstance(s, (cfg.DirectoryConfigScope))
        if isinstance(s, cfg.ImmutableConfigScope)
        and s.path not in env_includes
        and os.path.exists(s.path)
    ]
@@ -820,7 +806,7 @@ def ensure_expected_target_path(path):
        if scope not in include_scopes and scope not in env_includes:
            include_scopes.insert(0, scope)
    env_includes.extend(include_scopes)
    env_yaml_root["spack"]["include"] = [ensure_expected_target_path(i) for i in env_includes]
    env_yaml_root["spack"]["include"] = env_includes

    if "gitlab-ci" in env_yaml_root["spack"] and "ci" not in env_yaml_root["spack"]:
        env_yaml_root["spack"]["ci"] = env_yaml_root["spack"].pop("gitlab-ci")
@@ -1108,7 +1094,7 @@ def main_script_replacements(cmd):
    if cdash_handler and cdash_handler.auth_token:
        try:
            cdash_handler.populate_buildgroup(all_job_names)
        except (SpackError, HTTPError, URLError, TimeoutError) as err:
        except (SpackError, HTTPError, URLError) as err:
            tty.warn(f"Problem populating buildgroup: {err}")
    else:
        tty.warn("Unable to populate buildgroup without CDash credentials")
@@ -1241,9 +1227,6 @@ def main_script_replacements(cmd):
        "SPACK_REBUILD_EVERYTHING": str(rebuild_everything),
        "SPACK_REQUIRE_SIGNING": os.environ.get("SPACK_REQUIRE_SIGNING", "False"),
    }
    output_vars = output_object["variables"]
    for item, val in output_vars.items():
        output_vars[item] = ensure_expected_target_path(val)

    # TODO: Remove this block in Spack 0.23
    if deprecated_mirror_config and remote_mirror_override:
@@ -1268,6 +1251,17 @@ def main_script_replacements(cmd):
        with open(copy_specs_file, "w") as fd:
            fd.write(json.dumps(buildcache_copies))

        # TODO(opadron): remove this or refactor
        if run_optimizer:
            import spack.ci_optimization as ci_opt

            output_object = ci_opt.optimizer(output_object)

        # TODO(opadron): remove this or refactor
        if use_dependencies:
            import spack.ci_needs_workaround as cinw

            output_object = cinw.needs_to_dependencies(output_object)
    else:
        # No jobs were generated
        noop_job = spack_ci_ir["jobs"]["noop"]["attributes"]
@@ -1289,6 +1283,7 @@ def main_script_replacements(cmd):
    sorted_output = {}
    for output_key, output_value in sorted(output_object.items()):
        sorted_output[output_key] = output_value

    if known_broken_specs_encountered:
        tty.error("This pipeline generated hashes known to be broken on develop:")
        display_broken_spec_messages(broken_specs_url, known_broken_specs_encountered)
@@ -1296,11 +1291,8 @@ def main_script_replacements(cmd):
        if not rebuild_everything:
            sys.exit(1)

    # Minimize yaml output size through use of anchors
    syaml.anchorify(sorted_output)

    with open(output_file, "w") as f:
        ruamel.yaml.YAML().dump(sorted_output, f)
    with open(output_file, "w") as outf:
        outf.write(syaml.dump(sorted_output, default_flow_style=True))

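One side anchorifies the mapping and dumps it with ruamel directly; the other serializes through Spack's syaml wrapper in flow style. The anchor trick works because YAML emitters turn repeated references to the same object into one &anchor plus *alias pairs. A small sketch (assumes ruamel.yaml is importable, as it is in a Spack checkout):

    import io

    import ruamel.yaml

    # The same dict object referenced twice serializes as one anchored node
    # plus an alias, which is how anchorified pipelines shrink.
    shared = {"tags": ["spack", "x86_64"], "retry": 2}
    doc = {"job-a": shared, "job-b": shared}

    buf = io.StringIO()
    ruamel.yaml.YAML().dump(doc, buf)
    print(buf.getvalue())  # job-a: &id001 ...  job-b: *id001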
def _url_encode_string(input_string):
@@ -1371,6 +1363,15 @@ def can_verify_binaries():
    return len(gpg_util.public_keys()) >= 1


def _push_to_build_cache(spec: spack.spec.Spec, sign_binaries: bool, mirror_url: str) -> None:
    """Unchecked version of the public API, for easier mocking"""
    bindist.push_or_raise(
        spec,
        spack.mirror.Mirror.from_url(mirror_url).push_url,
        bindist.PushOptions(force=True, unsigned=not sign_binaries),
    )


def push_to_build_cache(spec: spack.spec.Spec, mirror_url: str, sign_binaries: bool) -> bool:
    """Push one or more binary packages to the mirror.

@@ -1381,13 +1382,20 @@ def push_to_build_cache(spec: spack.spec.Spec, mirror_url: str, sign_binaries: b
        sign_binaries: If True, spack will attempt to sign binary package before pushing.
    """
    tty.debug(f"Pushing to build cache ({'signed' if sign_binaries else 'unsigned'})")
    signing_key = bindist.select_signing_key() if sign_binaries else None
    try:
        bindist.push_or_raise([spec], out_url=mirror_url, signing_key=signing_key)
        _push_to_build_cache(spec, sign_binaries, mirror_url)
        return True
    except bindist.PushToBuildCacheError as e:
        tty.error(f"Problem writing to {mirror_url}: {e}")
        tty.error(str(e))
        return False
    except Exception as e:
        # TODO (zackgalbreath): write an adapter for boto3 exceptions so we can catch a specific
        # exception instead of parsing str(e)...
        msg = str(e)
        if any(x in msg for x in ["Access Denied", "InvalidAccessKeyId"]):
            tty.error(f"Permission problem writing to {mirror_url}: {msg}")
            return False
        raise


def remove_other_mirrors(mirrors_to_keep, scope=None):
@@ -1433,6 +1441,10 @@ def copy_stage_logs_to_artifacts(job_spec: spack.spec.Spec, job_log_dir: str) ->
        job_log_dir: path into which build log should be copied
    """
    tty.debug(f"job spec: {job_spec}")
    if not job_spec:
        msg = f"Cannot copy stage logs: job spec ({job_spec}) is required"
        tty.error(msg)
        return

    try:
        pkg_cls = spack.repo.PATH.get_pkg_class(job_spec.name)
@@ -1466,12 +1478,6 @@ def copy_test_logs_to_artifacts(test_stage, job_test_dir):
    copy_files_to_artifacts(os.path.join(test_stage, "*", "*.txt"), job_test_dir)


def win_quote(quote_str: str) -> str:
    if IS_WINDOWS:
        quote_str = f'"{quote_str}"'
    return quote_str


def download_and_extract_artifacts(url, work_dir):
    """Look for gitlab artifacts.zip at the given url, and attempt to download
    and extract the contents into the given work_dir
@@ -1494,7 +1500,7 @@ def download_and_extract_artifacts(url, work_dir):
    request = Request(url, headers=headers)
    request.get_method = lambda: "GET"

    response = opener.open(request, timeout=SPACK_CDASH_TIMEOUT)
    response = opener.open(request)
    response_code = response.getcode()

    if response_code != 200:
@@ -1936,9 +1942,9 @@ def compose_command_err_handling(args):
    # but we need to handle EXEs (git, etc) ourselves
    catch_exe_failure = (
        """
if ($LASTEXITCODE -ne 0){{
    throw 'Command {} has failed'
}}
if ($LASTEXITCODE -ne 0){
    throw "Command {} has failed"
}
"""
        if IS_WINDOWS
        else ""
@@ -2064,7 +2070,7 @@ def read_broken_spec(broken_spec_url):
    """
    try:
        _, _, fs = web_util.read_from_url(broken_spec_url)
    except web_util.SpackWebError:
    except (URLError, web_util.SpackWebError, HTTPError):
        tty.warn(f"Unable to read broken spec from {broken_spec_url}")
        return None

@@ -2170,13 +2176,13 @@ def __init__(self, ci_cdash):
    def args(self):
        return [
            "--cdash-upload-url",
            win_quote(self.upload_url),
            self.upload_url,
            "--cdash-build",
            win_quote(self.build_name),
            self.build_name,
            "--cdash-site",
            win_quote(self.site),
            self.site,
            "--cdash-buildstamp",
            win_quote(self.build_stamp),
            self.build_stamp,
        ]

    @property  # type: ignore
@@ -2242,7 +2248,7 @@ def create_buildgroup(self, opener, headers, url, group_name, group_type):

    request = Request(url, data=enc_data, headers=headers)

    response = opener.open(request, timeout=SPACK_CDASH_TIMEOUT)
    response = opener.open(request)
    response_code = response.getcode()

    if response_code not in [200, 201]:
@@ -2288,7 +2294,7 @@ def populate_buildgroup(self, job_names):
    request = Request(url, data=enc_data, headers=headers)
    request.get_method = lambda: "PUT"

    response = opener.open(request, timeout=SPACK_CDASH_TIMEOUT)
    response = opener.open(request)
    response_code = response.getcode()

    if response_code != 200:
34 lib/spack/spack/ci_needs_workaround.py Normal file
@@ -0,0 +1,34 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import collections.abc

get_job_name = lambda needs_entry: (
    needs_entry.get("job")
    if (isinstance(needs_entry, collections.abc.Mapping) and needs_entry.get("artifacts", True))
    else needs_entry if isinstance(needs_entry, str) else None
)


def convert_job(job_entry):
    if not isinstance(job_entry, collections.abc.Mapping):
        return job_entry

    needs = job_entry.get("needs")
    if needs is None:
        return job_entry

    new_job = {}
    new_job.update(job_entry)
    del new_job["needs"]

    new_job["dependencies"] = list(
        filter((lambda x: x is not None), (get_job_name(needs_entry) for needs_entry in needs))
    )

    return new_job


def needs_to_dependencies(yaml):
    return dict((k, convert_job(v)) for k, v in yaml.items())
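A usage sketch of the transformation this new module performs on a generated pipeline: every "needs" list is rewritten into a plain "dependencies" list, which older GitLab instances handle better (the job names below are illustrative):

    pipeline = {
        "build-zlib": {
            "script": ["spack ci rebuild"],
            "needs": [{"job": "generate", "artifacts": True}, "no-op"],
        },
        "stages": ["build"],  # non-mapping / needs-less entries pass through
    }
    converted = needs_to_dependencies(pipeline)
    assert converted["build-zlib"]["dependencies"] == ["generate", "no-op"]
    assert "needs" not in converted["build-zlib"]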
363 lib/spack/spack/ci_optimization.py Normal file
@@ -0,0 +1,363 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import collections
import collections.abc
import copy
import hashlib

import spack.util.spack_yaml as syaml


def sort_yaml_obj(obj):
    if isinstance(obj, collections.abc.Mapping):
        return syaml.syaml_dict(
            (k, sort_yaml_obj(v)) for k, v in sorted(obj.items(), key=(lambda item: str(item[0])))
        )

    if isinstance(obj, collections.abc.Sequence) and not isinstance(obj, str):
        return syaml.syaml_list(sort_yaml_obj(x) for x in obj)

    return obj


def matches(obj, proto):
    """Returns True if the test object "obj" matches the prototype object
    "proto".

    If obj and proto are mappings, obj matches proto if (key in obj) and
    (obj[key] matches proto[key]) for every key in proto.

    If obj and proto are sequences, obj matches proto if they are of the same
    length and (a matches b) for every (a,b) in zip(obj, proto).

    Otherwise, obj matches proto if obj == proto.

    Precondition: proto must not have any reference cycles
    """
    if isinstance(obj, collections.abc.Mapping):
        if not isinstance(proto, collections.abc.Mapping):
            return False

        return all((key in obj and matches(obj[key], val)) for key, val in proto.items())

    if isinstance(obj, collections.abc.Sequence) and not isinstance(obj, str):
        if not (isinstance(proto, collections.abc.Sequence) and not isinstance(proto, str)):
            return False

        if len(obj) != len(proto):
            return False

        return all(matches(obj[index], val) for index, val in enumerate(proto))

    return obj == proto


def subkeys(obj, proto):
    """Returns the test mapping "obj" after factoring out the items it has in
    common with the prototype mapping "proto".

    Consider a recursive merge operation, merge(a, b) on mappings a and b, that
    returns a mapping, m, whose keys are the union of the keys of a and b, and
    for every such key, "k", its corresponding value is:

      - merge(a[key], b[key])  if a[key] and b[key] are mappings, or
      - b[key]                 if (key in b) and not matches(a[key], b[key]), or
      - a[key]                 otherwise

    If obj and proto are mappings, the returned object is the smallest object,
    "a", such that merge(a, proto) matches obj.

    Otherwise, obj is returned.
    """
    if not (
        isinstance(obj, collections.abc.Mapping) and isinstance(proto, collections.abc.Mapping)
    ):
        return obj

    new_obj = {}
    for key, value in obj.items():
        if key not in proto:
            new_obj[key] = value
            continue

        if matches(value, proto[key]) and matches(proto[key], value):
            continue

        if isinstance(value, collections.abc.Mapping):
            new_obj[key] = subkeys(value, proto[key])
            continue

        new_obj[key] = value

    return new_obj


def add_extends(yaml, key):
    """Modifies the given object "yaml" so that it includes an "extends" key
    whose value features "key".

    If "extends" is not in yaml, then yaml is modified such that
    yaml["extends"] == key.

    If yaml["extends"] is a str, then yaml is modified such that
    yaml["extends"] == [yaml["extends"], key]

    If yaml["extends"] is a list that does not include key, then key is
    appended to the list.

    Otherwise, yaml is left unchanged.
    """

    has_key = "extends" in yaml
    extends = yaml.get("extends")

    if has_key and not isinstance(extends, (str, collections.abc.Sequence)):
        return

    if extends is None:
        yaml["extends"] = key
        return

    if isinstance(extends, str):
        if extends != key:
            yaml["extends"] = [extends, key]
        return

    if key not in extends:
        extends.append(key)


def common_subobject(yaml, sub):
    """Factor prototype object "sub" out of the values of mapping "yaml".

    Consider a modified copy of yaml, "new", where for each key, "key" in yaml:

      - If yaml[key] matches sub, then new[key] = subkeys(yaml[key], sub).
      - Otherwise, new[key] = yaml[key].

    If the above match criteria is not satisfied for any such key, then (yaml,
    None) is returned. The yaml object is returned unchanged.

    Otherwise, each matching value in new is modified as in
    add_extends(new[key], common_key), and then new[common_key] is set to sub.
    The common_key value is chosen such that it does not match any preexisting
    key in new. In this case, (new, common_key) is returned.
    """
    match_list = set(k for k, v in yaml.items() if matches(v, sub))

    if not match_list:
        return yaml, None

    common_prefix = ".c"
    common_index = 0

    while True:
        common_key = "".join((common_prefix, str(common_index)))
        if common_key not in yaml:
            break
        common_index += 1

    new_yaml = {}

    for key, val in yaml.items():
        new_yaml[key] = copy.deepcopy(val)

        if not matches(val, sub):
            continue

        new_yaml[key] = subkeys(new_yaml[key], sub)
        add_extends(new_yaml[key], common_key)

    new_yaml[common_key] = sub

    return new_yaml, common_key


def print_delta(name, old, new, applied=None):
    delta = new - old
    reldelta = (1000 * delta) // old
    reldelta = (reldelta // 10, reldelta % 10)

    if applied is None:
        applied = new <= old

    print(
        "\n".join(
            (
                "{0} {1}:",
                "  before: {2: 10d}",
                "  after : {3: 10d}",
                "  delta : {4:+10d} ({5:=+3d}.{6}%)",
            )
        ).format(name, ("+" if applied else "x"), old, new, delta, reldelta[0], reldelta[1])
    )


def try_optimization_pass(name, yaml, optimization_pass, *args, **kwargs):
    """Try applying an optimization pass and return information about the
    result

    "name" is a string describing the nature of the pass. If it is a non-empty
    string, summary statistics are also printed to stdout.

    "yaml" is the object to apply the pass to.

    "optimization_pass" is the function implementing the pass to be applied.

    "args" and "kwargs" are the additional arguments to pass to optimization
    pass. The pass is applied as

    >>> (new_yaml, *other_results) = optimization_pass(yaml, *args, **kwargs)

    The pass's results are greedily rejected if it does not modify the original
    yaml document, or if it produces a yaml document that serializes to a
    larger string.

    Returns (new_yaml, yaml, applied, other_results) if applied, or
    (yaml, new_yaml, applied, other_results) otherwise.
    """
    result = optimization_pass(yaml, *args, **kwargs)
    new_yaml, other_results = result[0], result[1:]

    if new_yaml is yaml:
        # pass was not applied
        return (yaml, new_yaml, False, other_results)

    pre_size = len(syaml.dump_config(sort_yaml_obj(yaml), default_flow_style=True))
    post_size = len(syaml.dump_config(sort_yaml_obj(new_yaml), default_flow_style=True))

    # pass makes the size worse: not applying
    applied = post_size <= pre_size
    if applied:
        yaml, new_yaml = new_yaml, yaml

    if name:
        print_delta(name, pre_size, post_size, applied)

    return (yaml, new_yaml, applied, other_results)


def build_histogram(iterator, key):
    """Builds a histogram of values given an iterable of mappings and a key.

    For each mapping "m" with key "key" in iterator, the value m[key] is
    considered.

    Returns a list of tuples (hash, count, proportion, value), where

      - "hash" is a sha1sum hash of the value.
      - "count" is the number of occurences of values that hash to "hash".
      - "proportion" is the proportion of all values considered above that
        hash to "hash".
      - "value" is one of the values considered above that hash to "hash".
        Which value is chosen when multiple values hash to the same "hash" is
        undefined.

    The list is sorted in descending order by count, yielding the most
    frequently occuring hashes first.
    """
    buckets = collections.defaultdict(int)
    values = {}

    num_objects = 0
    for obj in iterator:
        num_objects += 1

        try:
            val = obj[key]
        except (KeyError, TypeError):
            continue

        value_hash = hashlib.sha1()
        value_hash.update(syaml.dump_config(sort_yaml_obj(val)).encode())
        value_hash = value_hash.hexdigest()

        buckets[value_hash] += 1
        values[value_hash] = val

    return [
        (h, buckets[h], float(buckets[h]) / num_objects, values[h])
        for h in sorted(buckets.keys(), key=lambda k: -buckets[k])
    ]


def optimizer(yaml):
    original_size = len(syaml.dump_config(sort_yaml_obj(yaml), default_flow_style=True))

    # try factoring out commonly repeated portions
    common_job = {
        "variables": {"SPACK_COMPILER_ACTION": "NONE"},
        "after_script": ['rm -rf "./spack"'],
        "artifacts": {"paths": ["jobs_scratch_dir", "cdash_report"], "when": "always"},
    }

    # look for a list of tags that appear frequently
    _, count, proportion, tags = next(iter(build_histogram(yaml.values(), "tags")), (None,) * 4)

    # If a list of tags is found, and there are more than one job that uses it,
    # *and* the jobs that do use it represent at least 70% of all jobs, then
    # add the list to the prototype object.
    if tags and count > 1 and proportion >= 0.70:
        common_job["tags"] = tags

    # apply common object factorization
    yaml, other, applied, rest = try_optimization_pass(
        "general common object factorization", yaml, common_subobject, common_job
    )

    # look for a common script, and try factoring that out
    _, count, proportion, script = next(
        iter(build_histogram(yaml.values(), "script")), (None,) * 4
    )

    if script and count > 1 and proportion >= 0.70:
        yaml, other, applied, rest = try_optimization_pass(
            "script factorization", yaml, common_subobject, {"script": script}
        )

    # look for a common before_script, and try factoring that out
    _, count, proportion, script = next(
        iter(build_histogram(yaml.values(), "before_script")), (None,) * 4
    )

    if script and count > 1 and proportion >= 0.70:
        yaml, other, applied, rest = try_optimization_pass(
            "before_script factorization", yaml, common_subobject, {"before_script": script}
        )

    # Look specifically for the SPACK_ROOT_SPEC environment variables.
    # Try to factor them out.
    h = build_histogram(
        (getattr(val, "get", lambda *args: {})("variables") for val in yaml.values()),
        "SPACK_ROOT_SPEC",
    )

    # In this case, we try to factor out *all* instances of the SPACK_ROOT_SPEC
    # environment variable; not just the one that appears with the greatest
    # frequency. We only require that more than 1 job uses a given instance's
    # value, because we expect the value to be very large, and so expect even
    # few-to-one factorizations to yield large space savings.
    counter = 0
    for _, count, proportion, spec in h:
        if count <= 1:
            continue

        counter += 1

        yaml, other, applied, rest = try_optimization_pass(
            "SPACK_ROOT_SPEC factorization ({count})".format(count=counter),
            yaml,
            common_subobject,
            {"variables": {"SPACK_ROOT_SPEC": spec}},
        )

    new_size = len(syaml.dump_config(sort_yaml_obj(yaml), default_flow_style=True))

    print("\n")
    print_delta("overall summary", original_size, new_size)
    print("\n")
    return yaml
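A usage sketch of the core factorization step in this new file: a prototype object shared by several jobs is lifted into a hidden ".c0" key, and the jobs pick it back up via "extends" (job names and script are illustrative):

    jobs = {
        "build-a": {"script": ["spack ci rebuild"], "tags": ["x86_64"]},
        "build-b": {"script": ["spack ci rebuild"], "tags": ["x86_64"]},
    }
    new_yaml, common_key = common_subobject(jobs, {"script": ["spack ci rebuild"]})
    assert common_key == ".c0"
    assert new_yaml[".c0"] == {"script": ["spack ci rebuild"]}
    assert new_yaml["build-a"] == {"tags": ["x86_64"], "extends": ".c0"}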
@@ -237,7 +237,7 @@ def ensure_single_spec_or_die(spec, matching_specs):
    if len(matching_specs) <= 1:
        return

    format_string = "{name}{@version}{%compiler.name}{@compiler.version}{ arch=architecture}"
    format_string = "{name}{@version}{%compiler.name}{@compiler.version}{arch=architecture}"
    args = ["%s matches multiple packages." % spec, "Matching packages:"]
    args += [
        colorize(" @K{%s} " % s.dag_hash(7)) + s.cformat(format_string) for s in matching_specs
@@ -336,7 +336,6 @@ def display_specs(specs, args=None, **kwargs):
        groups (bool): display specs grouped by arch/compiler (default True)
        decorator (typing.Callable): function to call to decorate specs
        all_headers (bool): show headers even when arch/compiler aren't defined
        status_fn (typing.Callable): if provided, prepend install-status info
        output (typing.IO): A file object to write to. Default is ``sys.stdout``

    """
@@ -360,7 +359,6 @@ def get_arg(name, default=None):
    groups = get_arg("groups", True)
    all_headers = get_arg("all_headers", False)
    output = get_arg("output", sys.stdout)
    status_fn = get_arg("status_fn", None)

    decorator = get_arg("decorator", None)
    if decorator is None:
@@ -388,13 +386,6 @@ def get_arg(name, default=None):
    def fmt(s, depth=0):
        """Formatter function for all output specs"""
        string = ""

        if status_fn:
            # This was copied from spec.tree's colorization logic
            # then shortened because it seems like status_fn should
            # always return an InstallStatus
            string += colorize(status_fn(s).value)

        if hashes:
            string += gray_hash(s, hlen) + " "
        string += depth * " "
@@ -453,7 +444,7 @@ def format_list(specs):
def filter_loaded_specs(specs):
    """Filter a list of specs returning only those that are
    currently loaded."""
    hashes = os.environ.get(uenv.spack_loaded_hashes_var, "").split(os.pathsep)
    hashes = os.environ.get(uenv.spack_loaded_hashes_var, "").split(":")
    return [x for x in specs if x.dag_hash() in hashes]
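The two filter_loaded_specs variants only diverge on Windows, where os.pathsep is ";" while the loaded-hashes variable is written with ":" separators:

    import os

    loaded = "abcdef1:0123456"  # illustrative dag-hash prefixes
    print(loaded.split(":"))         # ['abcdef1', '0123456'] on every platform
    print(loaded.split(os.pathsep))  # same on POSIX; a single element on Windows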
@@ -165,7 +165,7 @@ def _reset(args):
    if not ok_to_continue:
        raise RuntimeError("Aborting")

    for scope in spack.config.CONFIG.writable_scopes:
    for scope in spack.config.CONFIG.file_scopes:
        # The default scope should stay untouched
        if scope.name == "defaults":
            continue
@@ -3,24 +3,29 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import argparse
import copy
import glob
import hashlib
import json
import multiprocessing
import multiprocessing.pool
import os
import shutil
import sys
import tempfile
from typing import List, Tuple
import urllib.request
from typing import Dict, List, Optional, Tuple, Union

import llnl.util.tty as tty
from llnl.string import plural
from llnl.util.lang import elide_list, stable_partition
from llnl.util.lang import elide_list

import spack.binary_distribution as bindist
import spack.cmd
import spack.config
import spack.deptypes as dt
import spack.environment as ev
import spack.error
import spack.hash_types as ht
import spack.mirror
import spack.oci.oci
import spack.oci.opener
@@ -31,13 +36,27 @@
import spack.store
import spack.user_environment
import spack.util.crypto
import spack.util.parallel
import spack.util.url as url_util
import spack.util.web as web_util
from spack import traverse
from spack.build_environment import determine_number_of_jobs
from spack.cmd import display_specs
from spack.cmd.common import arguments
from spack.oci.image import ImageReference
from spack.oci.image import (
    Digest,
    ImageReference,
    default_config,
    default_index_tag,
    default_manifest,
    default_tag,
    tag_is_spec,
)
from spack.oci.oci import (
    copy_missing_layers_with_retry,
    get_manifest_and_config_with_retry,
    upload_blob_with_retry,
    upload_manifest_with_retry,
)
from spack.spec import Spec, save_dependency_specfiles

description = "create, download and install binary packages"
@@ -51,6 +70,12 @@ def setup_parser(subparser: argparse.ArgumentParser):

    push = subparsers.add_parser("push", aliases=["create"], help=push_fn.__doc__)
    push.add_argument("-f", "--force", action="store_true", help="overwrite tarball if it exists")
    push.add_argument(
        "--allow-root",
        "-a",
        action="store_true",
        help="allow install root string in binary files after RPATH substitution",
    )
    push_sign = push.add_mutually_exclusive_group(required=False)
    push_sign.add_argument(
        "--unsigned",
@@ -93,17 +118,6 @@ def setup_parser(subparser: argparse.ArgumentParser):
        "Alternatively, one can decide to build a cache for only the package or only the "
        "dependencies",
    )
    with_or_without_build_deps = push.add_mutually_exclusive_group()
    with_or_without_build_deps.add_argument(
        "--with-build-dependencies",
        action="store_true",
        help="include build dependencies in the buildcache",
    )
    with_or_without_build_deps.add_argument(
        "--without-build-dependencies",
        action="store_true",
        help="exclude build dependencies from the buildcache",
    )
    push.add_argument(
        "--fail-fast",
        action="store_true",
@@ -176,6 +190,10 @@ def setup_parser(subparser: argparse.ArgumentParser):
    keys.add_argument("-f", "--force", action="store_true", help="force new download of keys")
    keys.set_defaults(func=keys_fn)

    preview = subparsers.add_parser("preview", help=preview_fn.__doc__)
    arguments.add_common_arguments(preview, ["installed_specs"])
    preview.set_defaults(func=preview_fn)

    # Check if binaries need to be rebuilt on remote mirror
    check = subparsers.add_parser("check", help=check_fn.__doc__)
    check.add_argument(
@@ -321,6 +339,39 @@ def _format_spec(spec: Spec) -> str:
    return spec.cformat("{name}{@version}{/hash:7}")


def _progress(i: int, total: int):
    if total > 1:
        digits = len(str(total))
        return f"[{i+1:{digits}}/{total}] "
    return ""

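A usage sketch of the _progress helper defined just above: the counter is width-padded so multi-spec push messages line up, and it collapses to an empty string for single-spec pushes:

    for i in range(3):
        print(f"{_progress(i, 3)}Pushed example-spec")
    # [1/3] Pushed example-spec
    # [2/3] Pushed example-spec
    # [3/3] Pushed example-spec

    assert _progress(0, 1) == ""  # no prefix when only one spec is pushed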
class NoPool:
    def map(self, func, args):
        return [func(a) for a in args]

    def starmap(self, func, args):
        return [func(*a) for a in args]

    def __enter__(self):
        return self

    def __exit__(self, *args):
        pass


MaybePool = Union[multiprocessing.pool.Pool, NoPool]


def _make_pool() -> MaybePool:
    """Can't use threading because it's unsafe, and can't use spawned processes because of globals.
    That leaves only forking"""
    if multiprocessing.get_start_method() == "fork":
        return multiprocessing.pool.Pool(determine_number_of_jobs(parallel=True))
    else:
        return NoPool()

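_make_pool() degrades to the serial NoPool when fork is unavailable (e.g. Windows, which spawns), so callers keep one map/starmap API regardless of platform. A minimal usage sketch of the fallback class itself:

    with NoPool() as pool:
        # starmap unpacks each tuple into positional arguments, like
        # multiprocessing.pool.Pool.starmap, just without parallelism.
        squares = pool.starmap(pow, [(2, 3), (3, 2)])
    assert squares == [8, 9]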
def _skip_no_redistribute_for_public(specs):
|
||||
remaining_specs = list()
|
||||
removed_specs = list()
|
||||
@@ -340,45 +391,6 @@ def _skip_no_redistribute_for_public(specs):
|
||||
return remaining_specs
|
||||
|
||||
|
||||
class PackagesAreNotInstalledError(spack.error.SpackError):
|
||||
"""Raised when a list of specs is not installed but picked to be packaged."""
|
||||
|
||||
def __init__(self, specs: List[Spec]):
|
||||
super().__init__(
|
||||
"Cannot push non-installed packages",
|
||||
", ".join(elide_list([_format_spec(s) for s in specs], 5)),
|
||||
)
|
||||
|
||||
|
||||
class PackageNotInstalledError(spack.error.SpackError):
|
||||
"""Raised when a spec is not installed but picked to be packaged."""
|
||||
|
||||
|
||||
def _specs_to_be_packaged(
|
||||
requested: List[Spec], things_to_install: str, build_deps: bool
|
||||
) -> List[Spec]:
|
||||
"""Collect all non-external with or without roots and dependencies"""
|
||||
if "dependencies" not in things_to_install:
|
||||
deptype = dt.NONE
|
||||
elif build_deps:
|
||||
deptype = dt.ALL
|
||||
else:
|
||||
deptype = dt.RUN | dt.LINK | dt.TEST
|
||||
specs = [
|
||||
s
|
||||
for s in traverse.traverse_nodes(
|
||||
requested,
|
||||
root="package" in things_to_install,
|
||||
deptype=deptype,
|
||||
order="breadth",
|
||||
key=traverse.by_dag_hash,
|
||||
)
|
||||
if not s.external
|
||||
]
|
||||
specs.reverse()
|
||||
return specs
|
||||
|
||||
|
||||
def push_fn(args):
|
||||
"""create a binary package and push it to a mirror"""
|
||||
if args.spec_file:
|
||||
@@ -392,6 +404,11 @@ def push_fn(args):
|
||||
else:
|
||||
roots = spack.cmd.require_active_env(cmd_name="buildcache push").concrete_roots()
|
||||
|
||||
if args.allow_root:
|
||||
tty.warn(
|
||||
"The flag `--allow-root` is the default in Spack 0.21, will be removed in Spack 0.22"
|
||||
)
|
||||
|
||||
mirror: spack.mirror.Mirror = args.mirror
|
||||
|
||||
# Check if this is an OCI image.
|
||||
@@ -410,84 +427,91 @@ def push_fn(args):
|
||||
|
||||
# For OCI images, we require dependencies to be pushed for now.
|
||||
if target_image:
|
||||
if "dependencies" not in args.things_to_install:
|
||||
tty.die("Dependencies must be pushed for OCI images.")
|
||||
if not unsigned:
|
||||
tty.warn(
|
||||
"Code signing is currently not supported for OCI images. "
|
||||
"Use --unsigned to silence this warning."
|
||||
)
|
||||
unsigned = True
|
||||
|
||||
# Select a signing key, or None if unsigned.
|
||||
signing_key = None if unsigned else (args.key or bindist.select_signing_key())
|
||||
|
||||
specs = _specs_to_be_packaged(
|
||||
# This is a list of installed, non-external specs.
|
||||
specs = bindist.specs_to_be_packaged(
|
||||
roots,
|
||||
things_to_install=args.things_to_install,
|
||||
build_deps=args.with_build_dependencies or not args.without_build_dependencies,
|
||||
root="package" in args.things_to_install,
|
||||
dependencies="dependencies" in args.things_to_install,
|
||||
)
|
||||
|
||||
if not args.private:
|
||||
specs = _skip_no_redistribute_for_public(specs)
|
||||
|
||||
# When pushing multiple specs, print the url once ahead of time, as well as how
|
||||
# many specs are being pushed.
|
||||
if len(specs) > 1:
|
||||
tty.info(f"Selected {len(specs)} specs to push to {push_url}")
|
||||
|
||||
# Pushing not installed specs is an error. Either fail fast or populate the error list and
|
||||
# push installed package in best effort mode.
|
||||
failed: List[Tuple[Spec, BaseException]] = []
|
||||
with spack.store.STORE.db.read_transaction():
|
||||
if any(not s.installed for s in specs):
|
||||
specs, not_installed = stable_partition(specs, lambda s: s.installed)
|
||||
if args.fail_fast:
|
||||
raise PackagesAreNotInstalledError(not_installed)
|
||||
else:
|
||||
failed.extend(
|
||||
(s, PackageNotInstalledError("package not installed")) for s in not_installed
|
||||
)
|
||||
failed = []
|
||||
|
||||
with bindist.default_push_context() as (tmpdir, executor):
|
||||
if target_image:
|
||||
base_image = ImageReference.from_string(args.base_image) if args.base_image else None
|
||||
skipped, base_images, checksums, upload_errors = bindist._push_oci(
|
||||
# TODO: unify this logic in the future.
|
||||
if target_image:
|
||||
base_image = ImageReference.from_string(args.base_image) if args.base_image else None
|
||||
with tempfile.TemporaryDirectory(
|
||||
dir=spack.stage.get_stage_root()
|
||||
) as tmpdir, _make_pool() as pool:
|
||||
skipped, base_images, checksums = _push_oci(
|
||||
target_image=target_image,
|
||||
base_image=base_image,
|
||||
installed_specs_with_deps=specs,
|
||||
force=args.force,
|
||||
tmpdir=tmpdir,
|
||||
executor=executor,
|
||||
pool=pool,
|
||||
)
|
||||
|
||||
if upload_errors:
|
||||
failed.extend(upload_errors)
|
||||
|
||||
# Apart from creating manifests for each individual spec, we allow users to create a
|
||||
# separate image tag for all root specs and their runtime dependencies.
|
||||
elif args.tag:
|
||||
if args.tag:
|
||||
tagged_image = target_image.with_tag(args.tag)
|
||||
# _push_oci may not populate base_images if binaries were already in the registry
|
||||
for spec in roots:
|
||||
bindist._oci_update_base_images(
|
||||
_update_base_images(
|
||||
base_image=base_image,
|
||||
target_image=target_image,
|
||||
spec=spec,
|
||||
base_image_cache=base_images,
|
||||
)
|
||||
bindist._oci_put_manifest(
|
||||
base_images, checksums, tagged_image, tmpdir, None, None, *roots
|
||||
)
|
||||
_put_manifest(base_images, checksums, tagged_image, tmpdir, None, None, *roots)
|
||||
tty.info(f"Tagged {tagged_image}")
|
||||
|
||||
else:
|
||||
skipped, upload_errors = bindist._push(
|
||||
specs,
|
||||
out_url=push_url,
|
||||
force=args.force,
|
||||
update_index=args.update_index,
|
||||
signing_key=signing_key,
|
||||
tmpdir=tmpdir,
|
||||
executor=executor,
|
||||
)
|
||||
failed.extend(upload_errors)
|
||||
else:
|
||||
skipped = []
|
||||
|
||||
for i, spec in enumerate(specs):
|
||||
try:
|
||||
bindist.push_or_raise(
|
||||
spec,
|
||||
push_url,
|
||||
bindist.PushOptions(
|
||||
force=args.force,
|
||||
unsigned=unsigned,
|
||||
key=args.key,
|
||||
regenerate_index=args.update_index,
|
||||
),
|
||||
)
|
||||
|
||||
msg = f"{_progress(i, len(specs))}Pushed {_format_spec(spec)}"
|
||||
if len(specs) == 1:
|
||||
msg += f" to {push_url}"
|
||||
tty.info(msg)
|
||||
|
||||
except bindist.NoOverwriteException:
|
||||
skipped.append(_format_spec(spec))
|
||||
|
||||
# Catch any other exception unless the fail fast option is set
|
||||
except Exception as e:
|
||||
if args.fail_fast or isinstance(
|
||||
e, (bindist.PickKeyException, bindist.NoKeyException)
|
||||
):
|
||||
raise
|
||||
failed.append((_format_spec(spec), e))
|
||||
|
||||
if skipped:
|
||||
if len(specs) == 1:
|
||||
@@ -510,22 +534,392 @@ def push_fn(args):
|
||||
raise spack.error.SpackError(
|
||||
f"The following {len(failed)} errors occurred while pushing specs to the buildcache",
|
||||
"\n".join(
|
||||
elide_list(
|
||||
[
|
||||
f" {_format_spec(spec)}: {e.__class__.__name__}: {e}"
|
||||
for spec, e in failed
|
||||
],
|
||||
5,
|
||||
)
|
||||
elide_list([f" {spec}: {e.__class__.__name__}: {e}" for spec, e in failed], 5)
|
||||
),
|
||||
)
|
||||
|
||||
# Update the OCI index if requested
|
||||
# Update the index if requested
|
||||
# TODO: remove update index logic out of bindist; should be once after all specs are pushed
|
||||
# not once per spec.
|
||||
if target_image and len(skipped) < len(specs) and args.update_index:
|
||||
with tempfile.TemporaryDirectory(
|
||||
dir=spack.stage.get_stage_root()
|
||||
) as tmpdir, spack.util.parallel.make_concurrent_executor() as executor:
|
||||
bindist._oci_update_index(target_image, tmpdir, executor)
|
||||
) as tmpdir, _make_pool() as pool:
|
||||
_update_index_oci(target_image, tmpdir, pool)
|
||||
|
||||
|
||||
def _get_spack_binary_blob(image_ref: ImageReference) -> Optional[spack.oci.oci.Blob]:
|
||||
"""Get the spack tarball layer digests and size if it exists"""
|
||||
try:
|
||||
manifest, config = get_manifest_and_config_with_retry(image_ref)
|
||||
|
||||
return spack.oci.oci.Blob(
|
||||
compressed_digest=Digest.from_string(manifest["layers"][-1]["digest"]),
|
||||
uncompressed_digest=Digest.from_string(config["rootfs"]["diff_ids"][-1]),
|
||||
size=manifest["layers"][-1]["size"],
|
||||
)
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
|
||||
def _push_single_spack_binary_blob(image_ref: ImageReference, spec: spack.spec.Spec, tmpdir: str):
|
||||
filename = os.path.join(tmpdir, f"{spec.dag_hash()}.tar.gz")
|
||||
|
||||
# Create an oci.image.layer aka tarball of the package
|
||||
compressed_tarfile_checksum, tarfile_checksum = spack.oci.oci.create_tarball(spec, filename)
|
||||
|
||||
blob = spack.oci.oci.Blob(
|
||||
Digest.from_sha256(compressed_tarfile_checksum),
|
||||
Digest.from_sha256(tarfile_checksum),
|
||||
os.path.getsize(filename),
|
||||
)
|
||||
|
||||
# Upload the blob
|
||||
upload_blob_with_retry(image_ref, file=filename, digest=blob.compressed_digest)
|
||||
|
||||
# delete the file
|
||||
os.unlink(filename)
|
||||
|
||||
return blob
|
||||
|
||||
|
||||
def _retrieve_env_dict_from_config(config: dict) -> dict:
|
||||
"""Retrieve the environment variables from the image config file.
|
||||
Sets a default value for PATH if it is not present.
|
||||
|
||||
Args:
|
||||
config (dict): The image config file.
|
||||
|
||||
Returns:
|
||||
dict: The environment variables.
|
||||
"""
|
||||
env = {"PATH": "/bin:/usr/bin"}
|
||||
|
||||
if "Env" in config.get("config", {}):
|
||||
for entry in config["config"]["Env"]:
|
||||
key, value = entry.split("=", 1)
|
||||
env[key] = value
|
||||
return env
|
||||
|
||||
|
||||
def _archspec_to_gooarch(spec: spack.spec.Spec) -> str:
|
||||
name = spec.target.family.name
|
||||
name_map = {"aarch64": "arm64", "x86_64": "amd64"}
|
||||
return name_map.get(name, name)
|
||||
|
||||
|
||||
def _put_manifest(
|
||||
base_images: Dict[str, Tuple[dict, dict]],
|
||||
checksums: Dict[str, spack.oci.oci.Blob],
|
||||
image_ref: ImageReference,
|
||||
tmpdir: str,
|
||||
extra_config: Optional[dict],
|
||||
annotations: Optional[dict],
|
||||
*specs: spack.spec.Spec,
|
||||
):
|
||||
architecture = _archspec_to_gooarch(specs[0])
|
||||
|
||||
dependencies = list(
|
||||
reversed(
|
||||
list(
|
||||
s
|
||||
for s in traverse.traverse_nodes(
|
||||
specs, order="topo", deptype=("link", "run"), root=True
|
||||
)
|
||||
if not s.external
|
||||
)
|
||||
)
|
||||
)
|
||||
|
||||
base_manifest, base_config = base_images[architecture]
|
||||
env = _retrieve_env_dict_from_config(base_config)
|
||||
|
||||
# If the base image uses `vnd.docker.distribution.manifest.v2+json`, then we use that too.
|
||||
# This is because Singularity / Apptainer is very strict about not mixing them.
|
||||
base_manifest_mediaType = base_manifest.get(
|
||||
"mediaType", "application/vnd.oci.image.manifest.v1+json"
|
||||
)
|
||||
use_docker_format = (
|
||||
base_manifest_mediaType == "application/vnd.docker.distribution.manifest.v2+json"
|
||||
)
|
||||
|
||||
spack.user_environment.environment_modifications_for_specs(*specs).apply_modifications(env)
|
||||
|
||||
# Create an oci.image.config file
|
||||
config = copy.deepcopy(base_config)
|
||||
|
||||
# Add the diff ids of the dependencies
|
||||
for s in dependencies:
|
||||
config["rootfs"]["diff_ids"].append(str(checksums[s.dag_hash()].uncompressed_digest))
|
||||
|
||||
# Set the environment variables
|
||||
config["config"]["Env"] = [f"{k}={v}" for k, v in env.items()]
|
||||
|
||||
if extra_config:
|
||||
# From the OCI v1.0 spec:
|
||||
# > Any extra fields in the Image JSON struct are considered implementation
|
||||
# > specific and MUST be ignored by any implementations which are unable to
|
||||
# > interpret them.
|
||||
config.update(extra_config)
|
||||
|
||||
config_file = os.path.join(tmpdir, f"{specs[0].dag_hash()}.config.json")
|
||||
|
||||
with open(config_file, "w") as f:
|
||||
json.dump(config, f, separators=(",", ":"))
|
||||
|
||||
config_file_checksum = Digest.from_sha256(
|
||||
spack.util.crypto.checksum(hashlib.sha256, config_file)
|
||||
)
|
||||
|
||||
# Upload the config file
|
||||
upload_blob_with_retry(image_ref, file=config_file, digest=config_file_checksum)
|
||||
|
||||
manifest = {
|
||||
"mediaType": base_manifest_mediaType,
|
||||
"schemaVersion": 2,
|
||||
"config": {
|
||||
"mediaType": base_manifest["config"]["mediaType"],
|
||||
"digest": str(config_file_checksum),
|
||||
"size": os.path.getsize(config_file),
|
||||
},
|
||||
"layers": [
|
||||
*(layer for layer in base_manifest["layers"]),
|
||||
*(
|
||||
{
|
||||
"mediaType": (
|
||||
"application/vnd.docker.image.rootfs.diff.tar.gzip"
|
||||
if use_docker_format
|
||||
else "application/vnd.oci.image.layer.v1.tar+gzip"
|
||||
),
|
||||
"digest": str(checksums[s.dag_hash()].compressed_digest),
|
||||
"size": checksums[s.dag_hash()].size,
|
||||
}
|
||||
for s in dependencies
|
||||
),
|
||||
],
|
||||
}
|
||||
|
||||
if not use_docker_format and annotations:
|
||||
manifest["annotations"] = annotations
|
||||
|
||||
# Finally upload the manifest
|
||||
upload_manifest_with_retry(image_ref, manifest=manifest)
|
||||
|
||||
# delete the config file
|
||||
os.unlink(config_file)
|
||||
|
||||
|
||||
def _update_base_images(
|
||||
*,
|
||||
base_image: Optional[ImageReference],
|
||||
target_image: ImageReference,
|
||||
spec: spack.spec.Spec,
|
||||
base_image_cache: Dict[str, Tuple[dict, dict]],
|
||||
):
|
||||
"""For a given spec and base image, copy the missing layers of the base image with matching
|
||||
arch to the registry of the target image. If no base image is specified, create a dummy
|
||||
manifest and config file."""
|
||||
architecture = _archspec_to_gooarch(spec)
|
||||
if architecture in base_image_cache:
|
||||
return
|
||||
if base_image is None:
|
||||
base_image_cache[architecture] = (
|
||||
default_manifest(),
|
||||
default_config(architecture, "linux"),
|
||||
)
|
||||
else:
|
||||
base_image_cache[architecture] = copy_missing_layers_with_retry(
|
||||
base_image, target_image, architecture
|
||||
)
|
||||
|
||||
|
||||
def _push_oci(
|
||||
*,
|
||||
target_image: ImageReference,
|
||||
base_image: Optional[ImageReference],
|
||||
installed_specs_with_deps: List[Spec],
|
||||
tmpdir: str,
|
||||
pool: MaybePool,
|
||||
force: bool = False,
|
||||
) -> Tuple[List[str], Dict[str, Tuple[dict, dict]], Dict[str, spack.oci.oci.Blob]]:
|
||||
"""Push specs to an OCI registry
|
||||
|
||||
Args:
|
||||
image_ref: The target OCI image
|
||||
base_image: Optional base image, which will be copied to the target registry.
|
||||
installed_specs_with_deps: The installed specs to push, excluding externals,
|
||||
including deps, ordered from roots to leaves.
|
||||
force: Whether to overwrite existing layers and manifests in the buildcache.
|
||||
|
||||
Returns:
|
||||
A tuple consisting of the list of skipped specs already in the build cache,
|
||||
a dictionary mapping architectures to base image manifests and configs,
|
||||
and a dictionary mapping each spec's dag hash to a blob.
|
||||
"""
|
||||
|
||||
# Reverse the order
|
||||
installed_specs_with_deps = list(reversed(installed_specs_with_deps))
|
||||
|
||||
# Spec dag hash -> blob
|
||||
checksums: Dict[str, spack.oci.oci.Blob] = {}
|
||||
|
||||
# arch -> (manifest, config)
|
||||
base_images: Dict[str, Tuple[dict, dict]] = {}
|
||||
|
||||
# Specs not uploaded because they already exist
|
||||
skipped = []
|
||||
|
||||
if not force:
|
||||
tty.info("Checking for existing specs in the buildcache")
|
||||
to_be_uploaded = []
|
||||
|
||||
tags_to_check = (target_image.with_tag(default_tag(s)) for s in installed_specs_with_deps)
|
||||
available_blobs = pool.map(_get_spack_binary_blob, tags_to_check)
|
||||
|
||||
for spec, maybe_blob in zip(installed_specs_with_deps, available_blobs):
|
||||
if maybe_blob is not None:
|
||||
checksums[spec.dag_hash()] = maybe_blob
|
||||
skipped.append(_format_spec(spec))
|
||||
else:
|
||||
to_be_uploaded.append(spec)
|
||||
else:
|
||||
to_be_uploaded = installed_specs_with_deps
|
||||
|
||||
if not to_be_uploaded:
|
||||
return skipped, base_images, checksums
|
||||
|
||||
tty.info(
|
||||
f"{len(to_be_uploaded)} specs need to be pushed to "
|
||||
f"{target_image.domain}/{target_image.name}"
|
||||
)
|
||||
|
||||
# Upload blobs
|
||||
new_blobs = pool.starmap(
|
||||
_push_single_spack_binary_blob, ((target_image, spec, tmpdir) for spec in to_be_uploaded)
|
||||
)
|
||||
|
||||
# And update the spec to blob mapping
|
||||
for spec, blob in zip(to_be_uploaded, new_blobs):
|
||||
checksums[spec.dag_hash()] = blob
|
||||
|
||||
# Copy base images if necessary
|
||||
for spec in to_be_uploaded:
|
||||
_update_base_images(
|
||||
base_image=base_image,
|
||||
target_image=target_image,
|
||||
spec=spec,
|
||||
base_image_cache=base_images,
|
||||
)
|
||||
|
||||
def extra_config(spec: Spec):
|
||||
spec_dict = spec.to_dict(hash=ht.dag_hash)
|
||||
spec_dict["buildcache_layout_version"] = 1
|
||||
spec_dict["binary_cache_checksum"] = {
|
||||
"hash_algorithm": "sha256",
|
||||
"hash": checksums[spec.dag_hash()].compressed_digest.digest,
|
||||
}
|
||||
return spec_dict
|
||||
|
||||
# Upload manifests
|
||||
tty.info("Uploading manifests")
|
||||
pool.starmap(
|
||||
_put_manifest,
|
||||
(
|
||||
(
|
||||
base_images,
|
||||
checksums,
|
||||
target_image.with_tag(default_tag(spec)),
|
||||
tmpdir,
|
||||
extra_config(spec),
|
||||
{"org.opencontainers.image.description": spec.format()},
|
||||
spec,
|
||||
)
|
||||
for spec in to_be_uploaded
|
||||
),
|
||||
)
|
||||
|
||||
# Print the image names of the top-level specs
|
||||
for spec in to_be_uploaded:
|
||||
tty.info(f"Pushed {_format_spec(spec)} to {target_image.with_tag(default_tag(spec))}")
|
||||
|
||||
return skipped, base_images, checksums


def _config_from_tag(image_ref: ImageReference, tag: str) -> Optional[dict]:
    # Don't allow recursion here, since Spack itself always uploads
    # vnd.oci.image.manifest.v1+json, not vnd.oci.image.index.v1+json
    _, config = get_manifest_and_config_with_retry(image_ref.with_tag(tag), tag, recurse=0)

    # Do very basic validation: if "spec" is a key in the config, it
    # must be a Spec object too.
    return config if "spec" in config else None
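For orientation, the per-tag config document retrieved here is the dictionary assembled by `extra_config` earlier in this diff: the spec's own JSON plus buildcache metadata. A rough sketch of its shape (the digest is a made-up placeholder and the nested spec payload is abbreviated):

    config = {
        "spec": {...},  # abbreviated: the full Spec.to_dict() payload
        "buildcache_layout_version": 1,
        "binary_cache_checksum": {
            "hash_algorithm": "sha256",
            "hash": "7f3d9a...",  # placeholder sha256 of the compressed tarball blob
        },
    }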


def _update_index_oci(image_ref: ImageReference, tmpdir: str, pool: MaybePool) -> None:
    request = urllib.request.Request(url=image_ref.tags_url())
    response = spack.oci.opener.urlopen(request)
    spack.oci.opener.ensure_status(request, response, 200)
    tags = json.load(response)["tags"]

    # Fetch all image config files in parallel
    spec_dicts = pool.starmap(
        _config_from_tag, ((image_ref, tag) for tag in tags if tag_is_spec(tag))
    )

    # Populate the database
    db_root_dir = os.path.join(tmpdir, "db_root")
    db = bindist.BuildCacheDatabase(db_root_dir)

    for spec_dict in spec_dicts:
        spec = Spec.from_dict(spec_dict)
        db.add(spec, directory_layout=None)
        db.mark(spec, "in_buildcache", True)

    # Create the index.json file
    index_json_path = os.path.join(tmpdir, "index.json")
    with open(index_json_path, "w") as f:
        db._write_to_file(f)

    # Create an empty config.json file
    empty_config_json_path = os.path.join(tmpdir, "config.json")
    with open(empty_config_json_path, "wb") as f:
        f.write(b"{}")

    # Upload the index.json file
    index_shasum = Digest.from_sha256(spack.util.crypto.checksum(hashlib.sha256, index_json_path))
    upload_blob_with_retry(image_ref, file=index_json_path, digest=index_shasum)

    # Upload the config.json file
    empty_config_digest = Digest.from_sha256(
        spack.util.crypto.checksum(hashlib.sha256, empty_config_json_path)
    )
    upload_blob_with_retry(image_ref, file=empty_config_json_path, digest=empty_config_digest)

    # Push a manifest file that references the index.json file as a layer.
    # Notice that we push this as if it is an image, which it of course is not.
    # When the ORAS spec becomes official, we can use that instead of a fake image.
    # For now we just use the OCI image spec, so that we don't run into issues with
    # automatic garbage collection of blobs that are not referenced by any image manifest.
    oci_manifest = {
        "mediaType": "application/vnd.oci.image.manifest.v1+json",
        "schemaVersion": 2,
        # Config is just an empty {} file for now, and irrelevant
        "config": {
            "mediaType": "application/vnd.oci.image.config.v1+json",
            "digest": str(empty_config_digest),
            "size": os.path.getsize(empty_config_json_path),
        },
        # The buildcache index is the only layer, and is not a tarball; we lie here.
        "layers": [
            {
                "mediaType": "application/vnd.oci.image.layer.v1.tar+gzip",
                "digest": str(index_shasum),
                "size": os.path.getsize(index_json_path),
            }
        ],
    }

    upload_manifest_with_retry(image_ref.with_tag(default_index_tag), oci_manifest)
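As a usage note, a client can recover the index published above by walking the same two OCI distribution endpoints in reverse: fetch the manifest for the index tag, then fetch the single layer blob it references. A minimal standard-library sketch, assuming anonymous access; the registry URL and image name are hypothetical placeholders, and `index.spack` is assumed here to be the value of `default_index_tag`:

    import json
    import urllib.request

    REGISTRY = "https://registry.example.com"  # hypothetical registry
    NAME = "myorg/spack-buildcache"            # hypothetical image name
    TAG = "index.spack"                        # assumed value of default_index_tag

    def fetch_buildcache_index() -> dict:
        # 1. Fetch the "fake image" manifest that was pushed for the index tag.
        request = urllib.request.Request(
            f"{REGISTRY}/v2/{NAME}/manifests/{TAG}",
            headers={"Accept": "application/vnd.oci.image.manifest.v1+json"},
        )
        with urllib.request.urlopen(request) as response:
            manifest = json.load(response)

        # 2. The buildcache index is the manifest's only layer; fetch the blob by digest.
        digest = manifest["layers"][0]["digest"]
        with urllib.request.urlopen(f"{REGISTRY}/v2/{NAME}/blobs/{digest}") as response:
            return json.load(response)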


def install_fn(args):

@@ -569,6 +963,14 @@ def keys_fn(args):
    bindist.get_keys(args.install, args.trust, args.force)


def preview_fn(args):
    """analyze an installed spec and report whether executables and libraries are relocatable"""
    tty.warn(
        "`spack buildcache preview` is deprecated since `spack buildcache push --allow-root` is "
        "now the default. This command will be removed in Spack 0.22"
    )


def check_fn(args: argparse.Namespace):
    """check specs against remote binary mirror(s) to see if any need to be rebuilt

@@ -806,15 +1208,14 @@ def update_index(mirror: spack.mirror.Mirror, update_keys=False):
    if image_ref:
        with tempfile.TemporaryDirectory(
            dir=spack.stage.get_stage_root()
        ) as tmpdir, spack.util.parallel.make_concurrent_executor() as executor:
            bindist._oci_update_index(image_ref, tmpdir, executor)
        ) as tmpdir, _make_pool() as pool:
            _update_index_oci(image_ref, tmpdir, pool)
        return

    # Otherwise, assume a normal mirror.
    url = mirror.push_url

    with tempfile.TemporaryDirectory(dir=spack.stage.get_stage_root()) as tmpdir:
        bindist.generate_package_index(url, tmpdir)
    bindist.generate_package_index(url_util.join(url, bindist.build_cache_relative_path()))

    if update_keys:
        keys_url = url_util.join(
@@ -822,8 +1223,7 @@ def update_index(mirror: spack.mirror.Mirror, update_keys=False):
        )

        try:
            with tempfile.TemporaryDirectory(dir=spack.stage.get_stage_root()) as tmpdir:
                bindist.generate_key_index(keys_url, tmpdir)
            bindist.generate_key_index(keys_url)
        except bindist.CannotListKeys as e:
            # Do not error out if listing keys went wrong. This usually means that the _gpg path
            # does not exist. TODO: distinguish between this and other errors.

@@ -6,7 +6,6 @@
import json
import os
import shutil
import warnings
from urllib.parse import urlparse, urlunparse

import llnl.util.filesystem as fs
@@ -32,6 +31,7 @@
level = "long"

SPACK_COMMAND = "spack"
MAKE_COMMAND = "make"
INSTALL_FAIL_CODE = 1
FAILED_CREATE_BUILDCACHE_CODE = 100

@@ -40,12 +40,6 @@ def deindent(desc):
    return desc.replace("    ", "")


def unicode_escape(path: str) -> str:
    """Returns the transformed path with any unicode
    characters replaced with their corresponding escapes"""
    return path.encode("unicode-escape").decode("utf-8")
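A quick, self-contained check of what `unicode_escape` produces (the input path is an arbitrary example):

    def unicode_escape(path: str) -> str:
        return path.encode("unicode-escape").decode("utf-8")

    # U+00FC becomes its backslash escape, so the path is ASCII-safe
    assert unicode_escape("/home/üser/env") == "/home/\\xfcser/env"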


def setup_parser(subparser):
    setup_parser.parser = subparser
    subparsers = subparser.add_subparsers(help="CI sub-commands")
@@ -74,7 +68,7 @@ def setup_parser(subparser):
        "--optimize",
        action="store_true",
        default=False,
        help="(DEPRECATED) optimize the gitlab yaml file for size\n\n"
        help="(experimental) optimize the gitlab yaml file for size\n\n"
        "run the generated document through a series of optimization passes "
        "designed to reduce the size of the generated file",
    )
@@ -82,7 +76,7 @@ def setup_parser(subparser):
        "--dependencies",
        action="store_true",
        default=False,
        help="(DEPRECATED) disable DAG scheduling (use 'plain' dependencies)",
        help="(experimental) disable DAG scheduling (use 'plain' dependencies)",
    )
    generate.add_argument(
        "--buildcache-destination",
@@ -201,18 +195,6 @@ def ci_generate(args):
    before invoking this command. the value must be the CDash authorization token needed to create
    a build group and register all generated jobs under it
    """
    if args.optimize:
        warnings.warn(
            "The --optimize option has been deprecated, and currently has no effect. "
            "It will be removed in Spack v0.24."
        )

    if args.dependencies:
        warnings.warn(
            "The --dependencies option has been deprecated, and currently has no effect. "
            "It will be removed in Spack v0.24."
        )

    env = spack.cmd.require_active_env(cmd_name="ci generate")

    if args.copy_to:
@@ -225,6 +207,8 @@ def ci_generate(args):

    output_file = args.output_file
    copy_yaml_to = args.copy_to
    run_optimizer = args.optimize
    use_dependencies = args.dependencies
    prune_dag = args.prune_dag
    index_only = args.index_only
    artifacts_root = args.artifacts_root
@@ -245,6 +229,8 @@ def ci_generate(args):
        output_file,
        prune_dag=prune_dag,
        check_index_only=index_only,
        run_optimizer=run_optimizer,
        use_dependencies=use_dependencies,
        artifacts_root=artifacts_root,
        remote_mirror_override=buildcache_destination,
    )
@@ -565,35 +551,75 @@ def ci_rebuild(args):
    # No hash match anywhere means we need to rebuild spec

    # Start with spack arguments
    spack_cmd = [SPACK_COMMAND, "--color=always", "--backtrace", "--verbose", "install"]
    spack_cmd = [SPACK_COMMAND, "--color=always", "--backtrace", "--verbose"]

    config = cfg.get("config")
    if not config["verify_ssl"]:
        spack_cmd.append("-k")

    install_args = [f'--use-buildcache={spack_ci.win_quote("package:never,dependencies:only")}']
    install_args = []

    can_verify = spack_ci.can_verify_binaries()
    verify_binaries = can_verify and spack_is_pr_pipeline is False
    if not verify_binaries:
        install_args.append("--no-check-signature")

    slash_hash = spack_ci.win_quote("/" + job_spec.dag_hash())
    slash_hash = "/{}".format(job_spec.dag_hash())

    # Arguments when installing dependencies from cache
    deps_install_args = install_args

    # Arguments when installing the root from sources
    deps_install_args = install_args + ["--only=dependencies"]
    root_install_args = install_args + ["--keep-stage", "--only=package"]

    root_install_args = install_args + [
        "--keep-stage",
        "--only=package",
        "--use-buildcache=package:never,dependencies:only",
    ]
    if cdash_handler:
        # Add additional arguments to `spack install` for CDash reporting.
        root_install_args.extend(cdash_handler.args())
    root_install_args.append(slash_hash)

    # ["x", "y"] -> "'x' 'y'"
    args_to_string = lambda args: " ".join("'{}'".format(arg) for arg in args)

    commands = [
        # apparently there's a race when spack bootstraps? do it up front once
        [SPACK_COMMAND, "-e", unicode_escape(env.path), "bootstrap", "now"],
        spack_cmd + deps_install_args + [slash_hash],
        spack_cmd + root_install_args + [slash_hash],
        [SPACK_COMMAND, "-e", env.path, "bootstrap", "now"],
        [
            SPACK_COMMAND,
            "-e",
            env.path,
            "env",
            "depfile",
            "-o",
            "Makefile",
            "--use-buildcache=package:never,dependencies:only",
            slash_hash,  # limit to spec we're building
        ],
        [
            # --output-sync requires GNU make 4.x.
            # Old make errors when you pass it a flag it doesn't recognize,
            # but it doesn't error or warn when you set unrecognized flags in
            # this variable.
            "export",
            "GNUMAKEFLAGS=--output-sync=recurse",
        ],
        [
            MAKE_COMMAND,
            "SPACK={}".format(args_to_string(spack_cmd)),
            "SPACK_COLOR=always",
            "SPACK_INSTALL_FLAGS={}".format(args_to_string(deps_install_args)),
            "-j$(nproc)",
            "install-deps/{}".format(
                spack.environment.depfile.MakefileSpec(job_spec).safe_format(
                    "{name}-{version}-{hash}"
                )
            ),
        ],
        spack_cmd + ["install"] + root_install_args,
    ]

    tty.debug("Installing {0} from source".format(job_spec.name))
    install_exit_code = spack_ci.process_command("install", commands, repro_dir)
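As a small illustration of the `args_to_string` helper used above, which single-quotes each argument so the assembled command survives being embedded in a make variable (the argument values are arbitrary):

    args_to_string = lambda args: " ".join("'{}'".format(arg) for arg in args)

    assert args_to_string(["spack", "-j8", "install"]) == "'spack' '-j8' 'install'"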

@@ -106,8 +106,7 @@ def clean(parser, args):

    # Then do the cleaning falling through the cases
    if args.specs:
        specs = spack.cmd.parse_specs(args.specs, concretize=False)
        specs = list(spack.cmd.matching_spec_from_env(x) for x in specs)
        specs = spack.cmd.parse_specs(args.specs, concretize=True)
        for spec in specs:
            msg = "Cleaning build stage [{0}]"
            tty.msg(msg.format(spec.short_spec))

@@ -11,6 +11,7 @@
from argparse import ArgumentParser, Namespace
from typing import IO, Any, Callable, Dict, Iterable, List, Optional, Sequence, Set, Tuple, Union

import llnl.util.filesystem as fs
import llnl.util.tty as tty
from llnl.util.argparsewriter import ArgparseRstWriter, ArgparseWriter, Command
from llnl.util.tty.colify import colify
@@ -866,6 +867,9 @@ def _commands(parser: ArgumentParser, args: Namespace) -> None:
            prepend_header(args, f)
            formatter(args, f)

        if args.update_completion:
            fs.set_executable(args.update)

    else:
        prepend_header(args, sys.stdout)
        formatter(args, sys.stdout)

@@ -3,9 +3,6 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import llnl.util.tty as tty
from llnl.string import plural

import spack.cmd
import spack.cmd.common.arguments
import spack.environment as ev
@@ -46,9 +43,5 @@ def concretize(parser, args):
    with env.write_transaction():
        concretized_specs = env.concretize(force=args.force, tests=tests)
        if not args.quiet:
            if concretized_specs:
                tty.msg(f"Concretized {plural(len(concretized_specs), 'spec')}:")
                ev.display_specs([concrete for _, concrete in concretized_specs])
            else:
                tty.msg("No new specs to concretize.")
        ev.display_specs(concretized_specs)
        env.write()

@@ -156,7 +156,7 @@ def print_flattened_configuration(*, blame: bool) -> None:
    """
    env = ev.active_environment()
    if env is not None:
        pristine = env.manifest.yaml_content
        pristine = env.manifest.pristine_yaml_content
        flattened = pristine.copy()
        flattened[spack.schema.env.TOP_LEVEL_KEY] = pristine[spack.schema.env.TOP_LEVEL_KEY].copy()
    else:
@@ -264,9 +264,7 @@ def config_remove(args):
def _can_update_config_file(scope: spack.config.ConfigScope, cfg_file):
    if isinstance(scope, spack.config.SingleFileScope):
        return fs.can_access(cfg_file)
    elif isinstance(scope, spack.config.DirectoryConfigScope):
        return fs.can_write_to_dir(scope.path) and fs.can_access(cfg_file)
    return False
    return fs.can_write_to_dir(scope.path) and fs.can_access(cfg_file)


def _config_change_requires_scope(path, spec, scope, match_spec=None):
@@ -364,11 +362,14 @@ def config_change(args):
def config_update(args):
    # Read the configuration files
    spack.config.CONFIG.get_config(args.section, scope=args.scope)
    updates: List[spack.config.ConfigScope] = [
        x
        for x in spack.config.CONFIG.format_updates[args.section]
        if not isinstance(x, spack.config.InternalConfigScope) and x.writable
    ]
    updates: List[spack.config.ConfigScope] = list(
        filter(
            lambda s: not isinstance(
                s, (spack.config.InternalConfigScope, spack.config.ImmutableConfigScope)
            ),
            spack.config.CONFIG.format_updates[args.section],
        )
    )

    cannot_overwrite, skip_system_scope = [], False
    for scope in updates:
@@ -446,7 +447,7 @@ def _can_revert_update(scope_dir, cfg_file, bkp_file):


def config_revert(args):
    scopes = [args.scope] if args.scope else [x.name for x in spack.config.CONFIG.writable_scopes]
    scopes = [args.scope] if args.scope else [x.name for x in spack.config.CONFIG.file_scopes]

    # Search for backup files in the configuration scopes
    Entry = collections.namedtuple("Entry", ["scope", "cfg", "bkp"])

@@ -2,6 +2,7 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import os
import re
import sys
@@ -933,7 +934,7 @@ def get_repository(args, name):
    # Figure out where the new package should live
    repo_path = args.repo
    if repo_path is not None:
        repo = spack.repo.from_path(repo_path)
        repo = spack.repo.Repo(repo_path)
        if spec.namespace and spec.namespace != repo.namespace:
            tty.die(
                "Can't create package with namespace {0} in repo with "
@@ -941,7 +942,9 @@ def get_repository(args, name):
            )
    else:
        if spec.namespace:
            repo = spack.repo.PATH.get_repo(spec.namespace)
            repo = spack.repo.PATH.get_repo(spec.namespace, None)
            if not repo:
                tty.die("Unknown namespace: '{0}'".format(spec.namespace))
        else:
            repo = spack.repo.PATH.first_repo()
@@ -6,7 +6,6 @@
import os
import platform
import re
import sys
from datetime import datetime
from glob import glob

@@ -63,10 +62,9 @@ def create_db_tarball(args):

    base = os.path.basename(str(spack.store.STORE.root))
    transform_args = []
    # Currently --transform and -s are not supported by Windows native tar
    if "GNU" in tar("--version", output=str):
        transform_args = ["--transform", "s/^%s/%s/" % (base, tarball_name)]
    elif sys.platform != "win32":
    else:
        transform_args = ["-s", "/^%s/%s/" % (base, tarball_name)]

    wd = os.path.dirname(str(spack.store.STORE.root))
@@ -92,6 +90,7 @@ def report(args):
    print("* **Spack:**", get_version())
    print("* **Python:**", platform.python_version())
    print("* **Platform:**", architecture)
    print("* **Concretizer:**", spack.config.get("config:concretizer"))


def debug(parser, args):
@@ -47,6 +47,16 @@ def inverted_dependencies():
    dependents of, e.g., `mpi`, but virtuals are not included as
    actual dependents.
    """
    dag = {}
    for pkg_cls in spack.repo.PATH.all_package_classes():
        dag.setdefault(pkg_cls.name, set())
        for dep in pkg_cls.dependencies_by_name():
            deps = [dep]

            # expand virtuals if necessary
            if spack.repo.PATH.is_virtual(dep):
                deps += [s.name for s in spack.repo.PATH.providers_for(dep)]

    dag = collections.defaultdict(set)
    for pkg_cls in spack.repo.PATH.all_package_classes():
        for _, deps_by_name in pkg_cls.dependencies.items():
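To make the inversion concrete, here is a toy sketch of the computation described in the docstring above, with a made-up three-package DAG and one virtual (`mpi`); the `dependencies` and `providers` tables are illustrative stand-ins, not Spack's API:

    import collections

    # Toy data: direct dependencies per package, and providers per virtual.
    dependencies = {"hdf5": ["mpi", "zlib"], "openmpi": [], "zlib": []}
    providers = {"mpi": ["openmpi"]}  # virtual name -> packages providing it

    dag = collections.defaultdict(set)
    for pkg, deps in dependencies.items():
        for dep in deps:
            # record the dependent under the virtual name itself...
            targets = [dep]
            # ...and expand virtuals so each provider gains the dependent too
            targets += providers.get(dep, [])
            for target in targets:
                dag[target].add(pkg)

    assert dag["mpi"] == {"hdf5"}      # dependents of the virtual
    assert dag["openmpi"] == {"hdf5"}  # ...and of its provider
    assert dag["zlib"] == {"hdf5"}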

@@ -9,8 +9,6 @@

import spack.cmd
import spack.config
import spack.fetch_strategy
import spack.repo
import spack.spec
import spack.util.path
import spack.version
@@ -71,15 +69,13 @@ def _retrieve_develop_source(spec, abspath):
    # We construct a package class ourselves, rather than asking for
    # Spec.package, since Spec only allows this when it is concrete
    package = pkg_cls(spec)
    source_stage = package.stage[0]
    if isinstance(source_stage.fetcher, spack.fetch_strategy.GitFetchStrategy):
        source_stage.fetcher.get_full_repo = True
    if isinstance(package.stage[0].fetcher, spack.fetch_strategy.GitFetchStrategy):
        package.stage[0].fetcher.get_full_repo = True
        # If we retrieved this version before and cached it, we may have
        # done so without cloning the full git repo; likewise, any
        # mirror might store an instance with truncated history.
    source_stage.disable_mirrors()
    package.stage[0].disable_mirrors()

    source_stage.fetcher.set_package(package)
    package.stage.steal_source(abspath)
@@ -3,7 +3,6 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import errno
import glob
import os

@@ -12,13 +11,43 @@
import spack.cmd
import spack.paths
import spack.repo
import spack.util.editor
from spack.spec import Spec
from spack.util.editor import editor

description = "open package files in $EDITOR"
section = "packaging"
level = "short"


def edit_package(name, repo_path, namespace):
    """Opens the requested package file in your favorite $EDITOR.

    Args:
        name (str): The name of the package
        repo_path (str): The path to the repository containing this package
        namespace (str): A valid namespace registered with Spack
    """
    # Find the location of the package
    if repo_path:
        repo = spack.repo.Repo(repo_path)
    elif namespace:
        repo = spack.repo.PATH.get_repo(namespace)
    else:
        repo = spack.repo.PATH
    path = repo.filename_for_package_name(name)

    spec = Spec(name)
    if os.path.exists(path):
        if not os.path.isfile(path):
            tty.die("Something is wrong. '{0}' is not a file!".format(path))
        if not os.access(path, os.R_OK):
            tty.die("Insufficient permissions on '%s'!" % path)
    else:
        raise spack.repo.UnknownPackageError(spec.name)

    editor(path)


def setup_parser(subparser):
    excl_args = subparser.add_mutually_exclusive_group()

@@ -69,67 +98,41 @@ def setup_parser(subparser):
    excl_args.add_argument("-r", "--repo", default=None, help="path to repo to edit package in")
    excl_args.add_argument("-N", "--namespace", default=None, help="namespace of package to edit")

    subparser.add_argument("package", nargs="*", default=None, help="package name")


def locate_package(name: str, repo: spack.repo.Repo) -> str:
    path = repo.filename_for_package_name(name)

    try:
        with open(path, "r"):
            return path
    except OSError as e:
        if e.errno == errno.ENOENT:
            raise spack.repo.UnknownPackageError(name) from e
        tty.die(f"Cannot edit package: {e}")


def locate_file(name: str, path: str) -> str:
    # convert command names to python module name
    if path == spack.paths.command_path:
        name = spack.cmd.python_name(name)

    file_path = os.path.join(path, name)

    # Try to open direct match.
    try:
        with open(file_path, "r"):
            return file_path
    except OSError as e:
        if e.errno != errno.ENOENT:
            tty.die(f"Cannot edit file: {e}")
        pass

    # Otherwise try to find a file that starts with the name
    candidates = glob.glob(file_path + "*")
    exclude_list = [".pyc", "~"]  # exclude binaries and backups
    files = [f for f in candidates if not any(f.endswith(ext) for ext in exclude_list)]
    if len(files) > 1:
        tty.die(
            f"Multiple files start with `{name}`:\n"
            + "\n".join(f"    {os.path.basename(f)}" for f in files)
        )
    elif not files:
        tty.die(f"No file for '{name}' was found in {path}")
    return files[0]
    subparser.add_argument("package", nargs="?", default=None, help="package name")


def edit(parser, args):
    names = args.package
    name = args.package

    # By default, edit package files
    path = spack.paths.packages_path

    # If `--command`, `--test`, or `--module` is chosen, edit those instead
    if args.path:
        paths = [locate_file(name, args.path) for name in names] if names else [args.path]
        spack.util.editor.editor(*paths)
    elif names:
        if args.repo:
            repo = spack.repo.from_path(args.repo)
        elif args.namespace:
            repo = spack.repo.PATH.get_repo(args.namespace)
        else:
            repo = spack.repo.PATH
        paths = [locate_package(name, repo) for name in names]
        spack.util.editor.editor(*paths)
    path = args.path
    if name:
        # convert command names to python module name
        if path == spack.paths.command_path:
            name = spack.cmd.python_name(name)

        path = os.path.join(path, name)
        if not os.path.exists(path):
            files = glob.glob(path + "*")
            exclude_list = [".pyc", "~"]  # exclude binaries and backups
            files = list(filter(lambda x: all(s not in x for s in exclude_list), files))
            if len(files) > 1:
                m = "Multiple files exist with the name {0}.".format(name)
                m += " Please specify a suffix. Files are:\n\n"
                for f in files:
                    m += "    " + os.path.basename(f) + "\n"
                tty.die(m)
            if not files:
                tty.die("No file for '{0}' was found in {1}".format(name, path))
            path = files[0]  # already confirmed only one entry in files

        editor(path)
    elif name:
        edit_package(name, args.repo, args.namespace)
    else:
        # By default open the directory where packages live
        spack.util.editor.editor(spack.paths.packages_path)
        editor(path)

@@ -468,30 +468,32 @@ def env_remove(args):
    This removes an environment managed by Spack. Directory environments
    and manifests embedded in repositories should be removed manually.
    """
    remove_envs = []
    read_envs = []
    valid_envs = []
    bad_envs = []
    invalid_envs = []

    for env_name in ev.all_environment_names():
        try:
            env = ev.read(env_name)
            valid_envs.append(env)
            valid_envs.append(env_name)

            if env_name in args.rm_env:
                remove_envs.append(env)
                read_envs.append(env)
        except (spack.config.ConfigFormatError, ev.SpackEnvironmentConfigError):
            invalid_envs.append(env_name)

            if env_name in args.rm_env:
                bad_envs.append(env_name)

    # Check if remove_env is included from another env before trying to remove
    for env in valid_envs:
        for remove_env in remove_envs:
    # Check if env is linked to another before trying to remove
    for name in valid_envs:
        # don't check if environment is included to itself
        if env.name == remove_env.name:
        if name == env_name:
            continue

        if remove_env.path in env.included_concrete_envs:
            msg = f'Environment "{remove_env.name}" is being used by environment "{env.name}"'
        environ = ev.Environment(ev.root(name))
        if ev.root(env_name) in environ.included_concrete_envs:
            msg = f'Environment "{env_name}" is being used by environment "{name}"'
        if args.force:
            tty.warn(msg)
        else:
@@ -504,7 +506,7 @@ def env_remove(args):
    if not answer:
        tty.die("Will not remove any environments")

    for env in remove_envs:
    for env in read_envs:
        name = env.name
        if env.active:
            tty.die(f"Environment {name} can't be removed while activated.")

@@ -7,7 +7,7 @@
import os
import re
import sys
from typing import List, Optional, Set
from typing import List, Optional

import llnl.util.tty as tty
import llnl.util.tty.colify as colify
@@ -19,7 +19,6 @@
import spack.detection
import spack.error
import spack.repo
import spack.spec
import spack.util.environment
from spack.cmd.common import arguments

@@ -135,28 +134,18 @@ def external_find(args):
    candidate_packages = packages_to_search_for(
        names=args.packages, tags=args.tags, exclude=args.exclude
    )
    detected_packages = spack.detection.by_path(candidate_packages, path_hints=args.path)

    new_specs = spack.detection.update_configuration(
        detected_packages, scope=args.scope, buildable=not args.not_buildable
    detected_packages = spack.detection.by_path(
        candidate_packages, path_hints=args.path, max_workers=args.jobs
    )

    # If the user runs `spack external find --not-buildable mpich` we also mark `mpi`
    # non-buildable, to avoid having the concretizer pick a different mpi provider.
    if new_specs and args.not_buildable:
        virtuals: Set[str] = {
            virtual.name
            for new_spec in new_specs
            for virtual_specs in spack.repo.PATH.get_pkg_class(new_spec.name).provided.values()
            for virtual in virtual_specs
        }
        new_virtuals = spack.detection.set_virtuals_nonbuildable(virtuals, scope=args.scope)
        new_specs.extend(spack.spec.Spec(name) for name in new_virtuals)

    if new_specs:
        new_entries = spack.detection.update_configuration(
            detected_packages, scope=args.scope, buildable=not args.not_buildable
        )
        if new_entries:
            path = spack.config.CONFIG.get_config_filename(args.scope, "packages")
            tty.msg(f"The following specs have been detected on this system and added to {path}")
            spack.cmd.display_specs(new_specs)
            msg = "The following specs have been detected on this system and added to {0}"
            tty.msg(msg.format(path))
            spack.cmd.display_specs(new_entries)
    else:
        tty.msg("No new external packages detected")

@@ -46,10 +46,6 @@ def setup_parser(subparser):
        help="output specs as machine-readable json records",
    )

    subparser.add_argument(
        "-I", "--install-status", action="store_true", help="show install status of packages"
    )

    subparser.add_argument(
        "-d", "--deps", action="store_true", help="output dependencies along with found specs"
    )
@@ -297,24 +293,25 @@ def root_decorator(spec, string):
    )
    print()

    if args.show_concretized:
        tty.msg("Concretized roots")
        cmd.display_specs(env.specs_by_hash.values(), args, decorator=decorator)
        print()

    # Display a header for the installed packages section IF there are installed
    # packages. If there aren't any, we'll just end up printing "0 installed packages"
    # later.
    if results and not args.only_roots:
        tty.msg("Installed packages")


def find(parser, args):
    env = ev.active_environment()
    q_args = query_arguments(args)
    results = args.specs(**q_args)

    env = ev.active_environment()
    if not env and args.only_roots:
        tty.die("-r / --only-roots requires an active environment")
    if not env and args.show_concretized:
        tty.die("-c / --show-concretized requires an active environment")

    if env:
        if args.constraint:
            init_specs = spack.cmd.parse_specs(args.constraint)
            results = env.all_matching_specs(*init_specs)
        else:
            results = env.all_specs()
    else:
        q_args = query_arguments(args)
        results = args.specs(**q_args)

    decorator = make_env_decorator(env) if env else lambda s, f: f

@@ -335,11 +332,6 @@ def find(parser, args):
    if args.loaded:
        results = spack.cmd.filter_loaded_specs(results)

    if args.install_status or args.show_concretized:
        status_fn = spack.spec.Spec.install_status
    else:
        status_fn = None

    # Display the result
    if args.json:
        cmd.display_specs_as_json(results, deps=args.deps)
@@ -348,34 +340,12 @@ def find(parser, args):
    if env:
        display_env(env, args, decorator, results)

    count_suffix = " (not shown)"
    if not args.only_roots:
        display_results = results
        if not args.show_concretized:
            display_results = list(x for x in results if x.installed)
        cmd.display_specs(
            display_results, args, decorator=decorator, all_headers=True, status_fn=status_fn
        )
        cmd.display_specs(results, args, decorator=decorator, all_headers=True)
        count_suffix = ""

    # print number of installed packages last (as the list may be long)
    if sys.stdout.isatty() and args.groups:
        installed_suffix = ""
        concretized_suffix = " to be installed"

        if args.only_roots:
            installed_suffix += " (not shown)"
            concretized_suffix += " (not shown)"
        else:
            if env and not args.show_concretized:
                concretized_suffix += " (show with `spack find -c`)"

        pkg_type = "loaded" if args.loaded else "installed"
        spack.cmd.print_how_many_pkgs(
            list(x for x in results if x.installed), pkg_type, suffix=installed_suffix
        )

        if env:
            spack.cmd.print_how_many_pkgs(
                list(x for x in results if not x.installed),
                "concretized",
                suffix=concretized_suffix,
            )
        spack.cmd.print_how_many_pkgs(results, pkg_type, suffix=count_suffix)

@@ -56,6 +56,7 @@ def roots_from_environments(args, active_env):

    # -e says "also preserve things needed by this particular env"
    for env_name_or_dir in args.except_environment:
        print("HMM", env_name_or_dir)
        if ev.exists(env_name_or_dir):
            env = ev.read(env_name_or_dir)
        elif ev.is_env_dir(env_name_or_dir):

@@ -5,12 +5,10 @@

import argparse
import os
import tempfile

import spack.binary_distribution
import spack.mirror
import spack.paths
import spack.stage
import spack.util.gpg
import spack.util.url
from spack.cmd.common import arguments
@@ -117,7 +115,6 @@ def setup_parser(subparser):
        help="URL of the mirror where keys will be published",
    )
    publish.add_argument(
        "--update-index",
        "--rebuild-index",
        action="store_true",
        default=False,
@@ -223,10 +220,9 @@ def gpg_publish(args):
    elif args.mirror_url:
        mirror = spack.mirror.Mirror(args.mirror_url, args.mirror_url)

    with tempfile.TemporaryDirectory(dir=spack.stage.get_stage_root()) as tmpdir:
        spack.binary_distribution.push_keys(
            mirror, keys=args.keys, tmpdir=tmpdir, update_index=args.update_index
        )
    spack.binary_distribution.push_keys(
        mirror, keys=args.keys, regenerate_index=args.rebuild_index
    )


def gpg(parser, args):

@@ -50,7 +50,7 @@
    @B{++}, @r{--}, @r{~~}, @B{==}  propagate variants to package dependencies

architecture variants:
    @m{platform=platform}          linux, darwin, freebsd, windows
    @m{platform=platform}          linux, darwin, cray, etc.
    @m{os=operating_system}        specific <operating_system>
    @m{target=target}              specific <target> processor
    @m{arch=platform-os-target}    shortcut for all three above

@@ -502,7 +502,7 @@ def print_licenses(pkg, args):

def info(parser, args):
    spec = spack.spec.Spec(args.package)
    pkg_cls = spack.repo.PATH.get_pkg_class(spec.fullname)
    pkg_cls = spack.repo.PATH.get_pkg_class(spec.name)
    pkg = pkg_cls(spec)

    # Output core package information
@@ -10,7 +10,6 @@
from typing import List

import llnl.util.filesystem as fs
from llnl.string import plural
from llnl.util import lang, tty

import spack.build_environment
@@ -62,6 +61,7 @@ def install_kwargs_from_args(args):
        "dependencies_use_cache": cache_opt(args.use_cache, dep_use_bc),
        "dependencies_cache_only": cache_opt(args.cache_only, dep_use_bc),
        "include_build_deps": args.include_build_deps,
        "explicit": True,  # Use true as a default for install command
        "stop_at": args.until,
        "unsigned": args.unsigned,
        "install_deps": ("dependencies" in args.things_to_install),
@@ -376,9 +376,7 @@ def _maybe_add_and_concretize(args, env, specs):
    # `spack concretize`
    tests = compute_tests_install_kwargs(env.user_specs, args.test)
    concretized_specs = env.concretize(tests=tests)
    if concretized_specs:
        tty.msg(f"Concretized {plural(len(concretized_specs), 'spec')}")
        ev.display_specs([concrete for _, concrete in concretized_specs])
    ev.display_specs(concretized_specs)

    # save view regeneration for later, so that we only do it
    # once, as it can be slow.
@@ -475,7 +473,6 @@ def install_without_active_env(args, install_kwargs, reporter_factory):
    require_user_confirmation_for_overwrite(concrete_specs, args)
    install_kwargs["overwrite"] = [spec.dag_hash() for spec in concrete_specs]

    installs = [s.package for s in concrete_specs]
    install_kwargs["explicit"] = [s.dag_hash() for s in concrete_specs]
    builder = PackageInstaller(installs, install_kwargs)
    installs = [(s.package, install_kwargs) for s in concrete_specs]
    builder = PackageInstaller(installs)
    builder.install()
Some files were not shown because too many files have changed in this diff.