Compare commits

1 commit

Author: Wouter Deconinck
SHA1: 23197b78f9
Message: feat: spack graph --mermaid
Date: 2024-05-04 16:13:12 -07:00

4568 changed files with 20620 additions and 40579 deletions

@@ -5,10 +5,13 @@ updates:
directory: "/"
schedule:
interval: "daily"
# Requirements to run style checks and build documentation
# Requirements to build documentation
- package-ecosystem: "pip"
directories:
- "/.github/workflows/requirements/style/*"
- "/lib/spack/docs"
directory: "/lib/spack/docs"
schedule:
interval: "daily"
# Requirements to run style checks
- package-ecosystem: "pip"
directory: "/.github/workflows/style"
schedule:
interval: "daily"


@@ -28,8 +28,8 @@ jobs:
run:
shell: ${{ matrix.system.shell }}
steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
- uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
with:
python-version: ${{inputs.python_version}}
- name: Install Python packages
@@ -44,7 +44,6 @@ jobs:
run: |
. share/spack/setup-env.sh
coverage run $(which spack) audit packages
coverage run $(which spack) audit configs
coverage run $(which spack) -d audit externals
coverage combine
coverage xml
@@ -53,7 +52,6 @@ jobs:
run: |
. share/spack/setup-env.sh
spack -d audit packages
spack -d audit configs
spack -d audit externals
- name: Package audits (without coverage)
if: ${{ runner.os == 'Windows' }}
@@ -61,11 +59,9 @@ jobs:
. share/spack/setup-env.sh
spack -d audit packages
./share/spack/qa/validate_last_exit.ps1
spack -d audit configs
./share/spack/qa/validate_last_exit.ps1
spack -d audit externals
./share/spack/qa/validate_last_exit.ps1
- uses: codecov/codecov-action@e28ff129e5465c2c0dcc6f003fc735cb6ae0c673
- uses: codecov/codecov-action@84508663e988701840491b86de86b666e8a86bed
if: ${{ inputs.with_coverage == 'true' }}
with:
flags: unittests,audits


@@ -1,8 +1,7 @@
#!/bin/bash
set -e
set -ex
source share/spack/setup-env.sh
$PYTHON bin/spack bootstrap disable github-actions-v0.4
$PYTHON bin/spack bootstrap disable spack-install
$PYTHON bin/spack $SPACK_FLAGS solve zlib
$PYTHON bin/spack -d solve zlib
tree $BOOTSTRAP/store
exit 0


@@ -13,22 +13,118 @@ concurrency:
cancel-in-progress: true
jobs:
distros-clingo-sources:
fedora-clingo-sources:
runs-on: ubuntu-latest
container: ${{ matrix.image }}
strategy:
matrix:
image: ["fedora:latest", "opensuse/leap:latest"]
container: "fedora:latest"
steps:
- name: Setup Fedora
if: ${{ matrix.image == 'fedora:latest' }}
- name: Install dependencies
run: |
dnf install -y \
bzip2 curl file gcc-c++ gcc gcc-gfortran git gzip \
bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
make patch unzip which xz python3 python3-devel tree \
cmake bison bison-devel libstdc++-static
- name: Setup OpenSUSE
if: ${{ matrix.image == 'opensuse/leap:latest' }}
- name: Checkout
uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
with:
fetch-depth: 0
- name: Setup non-root user
run: |
# See [1] below
git config --global --add safe.directory /__w/spack/spack
useradd spack-test && mkdir -p ~spack-test
chown -R spack-test . ~spack-test
- name: Setup repo
shell: runuser -u spack-test -- bash {0}
run: |
git --version
. .github/workflows/setup_git.sh
- name: Bootstrap clingo
shell: runuser -u spack-test -- bash {0}
run: |
source share/spack/setup-env.sh
spack bootstrap disable github-actions-v0.5
spack bootstrap disable github-actions-v0.4
spack external find cmake bison
spack -d solve zlib
tree ~/.spack/bootstrap/store/
ubuntu-clingo-sources:
runs-on: ubuntu-latest
container: "ubuntu:latest"
steps:
- name: Install dependencies
env:
DEBIAN_FRONTEND: noninteractive
run: |
apt-get update -y && apt-get upgrade -y
apt-get install -y \
bzip2 curl file g++ gcc gfortran git gnupg2 gzip \
make patch unzip xz-utils python3 python3-dev tree \
cmake bison
- name: Checkout
uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
with:
fetch-depth: 0
- name: Setup non-root user
run: |
# See [1] below
git config --global --add safe.directory /__w/spack/spack
useradd spack-test && mkdir -p ~spack-test
chown -R spack-test . ~spack-test
- name: Setup repo
shell: runuser -u spack-test -- bash {0}
run: |
git --version
. .github/workflows/setup_git.sh
- name: Bootstrap clingo
shell: runuser -u spack-test -- bash {0}
run: |
source share/spack/setup-env.sh
spack bootstrap disable github-actions-v0.5
spack bootstrap disable github-actions-v0.4
spack external find cmake bison
spack -d solve zlib
tree ~/.spack/bootstrap/store/
ubuntu-clingo-binaries-and-patchelf:
runs-on: ubuntu-latest
container: "ubuntu:latest"
steps:
- name: Install dependencies
env:
DEBIAN_FRONTEND: noninteractive
run: |
apt-get update -y && apt-get upgrade -y
apt-get install -y \
bzip2 curl file g++ gcc gfortran git gnupg2 gzip \
make patch unzip xz-utils python3 python3-dev tree
- name: Checkout
uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
with:
fetch-depth: 0
- name: Setup non-root user
run: |
# See [1] below
git config --global --add safe.directory /__w/spack/spack
useradd spack-test && mkdir -p ~spack-test
chown -R spack-test . ~spack-test
- name: Setup repo
shell: runuser -u spack-test -- bash {0}
run: |
git --version
. .github/workflows/setup_git.sh
- name: Bootstrap clingo
shell: runuser -u spack-test -- bash {0}
run: |
source share/spack/setup-env.sh
spack -d solve zlib
tree ~/.spack/bootstrap/store/
opensuse-clingo-sources:
runs-on: ubuntu-latest
container: "opensuse/leap:latest"
steps:
- name: Install dependencies
run: |
# Harden CI by applying the workaround described here: https://www.suse.com/support/kb/doc/?id=000019505
zypper update -y || zypper update -y
@@ -37,9 +133,15 @@ jobs:
make patch unzip which xz python3 python3-devel tree \
cmake bison
- name: Checkout
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
with:
fetch-depth: 0
- name: Setup repo
run: |
# See [1] below
git config --global --add safe.directory /__w/spack/spack
git --version
. .github/workflows/setup_git.sh
- name: Bootstrap clingo
run: |
source share/spack/setup-env.sh
@@ -49,56 +151,194 @@ jobs:
spack -d solve zlib
tree ~/.spack/bootstrap/store/
clingo-sources:
runs-on: ${{ matrix.runner }}
strategy:
matrix:
runner: ['macos-13', 'macos-14', "ubuntu-latest", "windows-latest"]
macos-clingo-sources:
runs-on: macos-latest
steps:
- name: Setup macOS
if: ${{ matrix.runner != 'ubuntu-latest' && matrix.runner != 'windows-latest' }}
- name: Install dependencies
run: |
brew install cmake bison tree
brew install cmake bison@2.7 tree
- name: Checkout
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
with:
fetch-depth: 0
- uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
with:
python-version: "3.12"
- name: Bootstrap clingo
env:
SETUP_SCRIPT_EXT: ${{ matrix.runner == 'windows-latest' && 'ps1' || 'sh' }}
SETUP_SCRIPT_SOURCE: ${{ matrix.runner == 'windows-latest' && './' || 'source ' }}
USER_SCOPE_PARENT_DIR: ${{ matrix.runner == 'windows-latest' && '$env:userprofile' || '$HOME' }}
VALIDATE_LAST_EXIT: ${{ matrix.runner == 'windows-latest' && './share/spack/qa/validate_last_exit.ps1' || '' }}
run: |
${{ env.SETUP_SCRIPT_SOURCE }}share/spack/setup-env.${{ env.SETUP_SCRIPT_EXT }}
source share/spack/setup-env.sh
export PATH=/usr/local/opt/bison@2.7/bin:$PATH
spack bootstrap disable github-actions-v0.5
spack bootstrap disable github-actions-v0.4
spack external find --not-buildable cmake bison
spack -d solve zlib
${{ env.VALIDATE_LAST_EXIT }}
tree ${{ env.USER_SCOPE_PARENT_DIR }}/.spack/bootstrap/store/
tree ~/.spack/bootstrap/store/
gnupg-sources:
runs-on: ${{ matrix.runner }}
macos-clingo-binaries:
runs-on: ${{ matrix.macos-version }}
strategy:
matrix:
runner: [ 'macos-13', 'macos-14', "ubuntu-latest" ]
macos-version: ['macos-11', 'macos-12']
steps:
- name: Setup macOS
if: ${{ matrix.runner != 'ubuntu-latest' }}
- name: Install dependencies
run: |
brew install tree gawk
sudo rm -rf $(command -v gpg gpg2)
- name: Setup Ubuntu
if: ${{ matrix.runner == 'ubuntu-latest' }}
run: sudo rm -rf $(command -v gpg gpg2 patchelf)
brew install tree
- name: Checkout
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
- name: Bootstrap clingo
run: |
set -ex
for ver in '3.7' '3.8' '3.9' '3.10' '3.11' ; do
not_found=1
ver_dir="$(find $RUNNER_TOOL_CACHE/Python -wholename "*/${ver}.*/*/bin" | grep . || true)"
echo "Testing $ver_dir"
if [[ -d "$ver_dir" ]] ; then
if $ver_dir/python --version ; then
export PYTHON="$ver_dir/python"
not_found=0
old_path="$PATH"
export PATH="$ver_dir:$PATH"
./bin/spack-tmpconfig -b ./.github/workflows/bootstrap-test.sh
export PATH="$old_path"
fi
fi
# NOTE: test all pythons that exist, not all do on 12
done
ubuntu-clingo-binaries:
runs-on: ubuntu-20.04
steps:
- name: Checkout
uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
with:
fetch-depth: 0
- name: Setup repo
run: |
git --version
. .github/workflows/setup_git.sh
- name: Bootstrap clingo
run: |
set -ex
for ver in '3.7' '3.8' '3.9' '3.10' '3.11' ; do
not_found=1
ver_dir="$(find $RUNNER_TOOL_CACHE/Python -wholename "*/${ver}.*/*/bin" | grep . || true)"
echo "Testing $ver_dir"
if [[ -d "$ver_dir" ]] ; then
if $ver_dir/python --version ; then
export PYTHON="$ver_dir/python"
not_found=0
old_path="$PATH"
export PATH="$ver_dir:$PATH"
./bin/spack-tmpconfig -b ./.github/workflows/bootstrap-test.sh
export PATH="$old_path"
fi
fi
if (($not_found)) ; then
echo Required python version $ver not found in runner!
exit 1
fi
done
ubuntu-gnupg-binaries:
runs-on: ubuntu-latest
container: "ubuntu:latest"
steps:
- name: Install dependencies
env:
DEBIAN_FRONTEND: noninteractive
run: |
apt-get update -y && apt-get upgrade -y
apt-get install -y \
bzip2 curl file g++ gcc patchelf gfortran git gzip \
make patch unzip xz-utils python3 python3-dev tree
- name: Checkout
uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
with:
fetch-depth: 0
- name: Setup non-root user
run: |
# See [1] below
git config --global --add safe.directory /__w/spack/spack
useradd spack-test && mkdir -p ~spack-test
chown -R spack-test . ~spack-test
- name: Setup repo
shell: runuser -u spack-test -- bash {0}
run: |
git --version
. .github/workflows/setup_git.sh
- name: Bootstrap GnuPG
shell: runuser -u spack-test -- bash {0}
run: |
source share/spack/setup-env.sh
spack bootstrap disable github-actions-v0.4
spack bootstrap disable spack-install
spack -d gpg list
tree ~/.spack/bootstrap/store/
ubuntu-gnupg-sources:
runs-on: ubuntu-latest
container: "ubuntu:latest"
steps:
- name: Install dependencies
env:
DEBIAN_FRONTEND: noninteractive
run: |
apt-get update -y && apt-get upgrade -y
apt-get install -y \
bzip2 curl file g++ gcc patchelf gfortran git gzip \
make patch unzip xz-utils python3 python3-dev tree \
gawk
- name: Checkout
uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
with:
fetch-depth: 0
- name: Setup non-root user
run: |
# See [1] below
git config --global --add safe.directory /__w/spack/spack
useradd spack-test && mkdir -p ~spack-test
chown -R spack-test . ~spack-test
- name: Setup repo
shell: runuser -u spack-test -- bash {0}
run: |
git --version
. .github/workflows/setup_git.sh
- name: Bootstrap GnuPG
shell: runuser -u spack-test -- bash {0}
run: |
source share/spack/setup-env.sh
spack solve zlib
spack bootstrap disable github-actions-v0.5
spack bootstrap disable github-actions-v0.4
spack -d gpg list
tree ~/.spack/bootstrap/store/
macos-gnupg-binaries:
runs-on: macos-latest
steps:
- name: Install dependencies
run: |
brew install tree
# Remove GnuPG since we want to bootstrap it
sudo rm -rf /usr/local/bin/gpg
- name: Checkout
uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
- name: Bootstrap GnuPG
run: |
source share/spack/setup-env.sh
spack bootstrap disable github-actions-v0.4
spack bootstrap disable spack-install
spack -d gpg list
tree ~/.spack/bootstrap/store/
macos-gnupg-sources:
runs-on: macos-latest
steps:
- name: Install dependencies
run: |
brew install gawk tree
# Remove GnuPG since we want to bootstrap it
sudo rm -rf /usr/local/bin/gpg
- name: Checkout
uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
- name: Bootstrap GnuPG
run: |
source share/spack/setup-env.sh
@@ -108,63 +348,10 @@ jobs:
spack -d gpg list
tree ~/.spack/bootstrap/store/
from-binaries:
runs-on: ${{ matrix.runner }}
strategy:
matrix:
runner: ['macos-13', 'macos-14', "ubuntu-latest"]
steps:
- name: Setup macOS
if: ${{ matrix.runner != 'ubuntu-latest' }}
run: |
brew install tree
# Remove GnuPG since we want to bootstrap it
sudo rm -rf /usr/local/bin/gpg
- name: Setup Ubuntu
if: ${{ matrix.runner == 'ubuntu-latest' }}
run: |
sudo rm -rf $(which gpg) $(which gpg2) $(which patchelf)
- name: Checkout
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
with:
fetch-depth: 0
- uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
with:
python-version: |
3.8
3.9
3.10
3.11
3.12
- name: Set bootstrap sources
run: |
source share/spack/setup-env.sh
spack bootstrap disable github-actions-v0.4
spack bootstrap disable spack-install
- name: Bootstrap clingo
run: |
set -e
for ver in '3.8' '3.9' '3.10' '3.11' '3.12' ; do
not_found=1
ver_dir="$(find $RUNNER_TOOL_CACHE/Python -wholename "*/${ver}.*/*/bin" | grep . || true)"
if [[ -d "$ver_dir" ]] ; then
echo "Testing $ver_dir"
if $ver_dir/python --version ; then
export PYTHON="$ver_dir/python"
not_found=0
old_path="$PATH"
export PATH="$ver_dir:$PATH"
./bin/spack-tmpconfig -b ./.github/workflows/bin/bootstrap-test.sh
export PATH="$old_path"
fi
fi
if (($not_found)) ; then
echo Required python version $ver not found in runner!
exit 1
fi
done
- name: Bootstrap GnuPG
run: |
source share/spack/setup-env.sh
spack -d gpg list
tree ~/.spack/bootstrap/store/
# [1] Distros that have patched git to resolve CVE-2022-24765 (e.g. Ubuntu patching v2.25.1)
# introduce breaking behavior, so we have to set `safe.directory` in gitconfig ourselves.
# See:
# - https://github.blog/2022-04-12-git-security-vulnerability-announced/
# - https://github.com/actions/checkout/issues/760
# - http://changelogs.ubuntu.com/changelogs/pool/main/g/git/git_2.25.1-1ubuntu3.3/changelog


@@ -40,7 +40,8 @@ jobs:
# 1: Platforms to build for
# 2: Base image (e.g. ubuntu:22.04)
dockerfile: [[amazon-linux, 'linux/amd64,linux/arm64', 'amazonlinux:2'],
[centos-stream9, 'linux/amd64,linux/arm64,linux/ppc64le', 'centos:stream9'],
[centos7, 'linux/amd64,linux/arm64,linux/ppc64le', 'centos:7'],
[centos-stream, 'linux/amd64,linux/arm64,linux/ppc64le', 'centos:stream'],
[leap15, 'linux/amd64,linux/arm64,linux/ppc64le', 'opensuse/leap:15'],
[ubuntu-focal, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:20.04'],
[ubuntu-jammy, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:22.04'],
@@ -49,13 +50,15 @@ jobs:
[almalinux9, 'linux/amd64,linux/arm64,linux/ppc64le', 'almalinux:9'],
[rockylinux8, 'linux/amd64,linux/arm64', 'rockylinux:8'],
[rockylinux9, 'linux/amd64,linux/arm64', 'rockylinux:9'],
[fedora37, 'linux/amd64,linux/arm64,linux/ppc64le', 'fedora:37'],
[fedora38, 'linux/amd64,linux/arm64,linux/ppc64le', 'fedora:38'],
[fedora39, 'linux/amd64,linux/arm64,linux/ppc64le', 'fedora:39'],
[fedora40, 'linux/amd64,linux/arm64,linux/ppc64le', 'fedora:40']]
name: Build ${{ matrix.dockerfile[0] }}
if: github.repository == 'spack/spack'
steps:
- name: Checkout
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
- uses: docker/metadata-action@8e5442c4ef9f78752691e2d8f8d19755c6f78e81
id: docker_meta
@@ -76,7 +79,7 @@ jobs:
env:
SPACK_YAML_OS: "${{ matrix.dockerfile[2] }}"
run: |
.github/workflows/bin/generate_spack_yaml_containerize.sh
.github/workflows/generate_spack_yaml_containerize.sh
. share/spack/setup-env.sh
mkdir -p dockerfiles/${{ matrix.dockerfile[0] }}
spack containerize --last-stage=bootstrap | tee dockerfiles/${{ matrix.dockerfile[0] }}/Dockerfile
@@ -87,19 +90,19 @@ jobs:
fi
- name: Upload Dockerfile
uses: actions/upload-artifact@834a144ee995460fba8ed112a2fc961b36a5ec5a
uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808
with:
name: dockerfiles_${{ matrix.dockerfile[0] }}
path: dockerfiles
- name: Set up QEMU
uses: docker/setup-qemu-action@49b3bc8e6bdd4a60e6116a5414239cba5943d3cf
uses: docker/setup-qemu-action@68827325e0b33c7199eb31dd4e31fbe9023e06e3
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@988b5a0280414f521da01fcc63a27aeeb4b104db
uses: docker/setup-buildx-action@d70bba72b1f3fd22344832f00baa16ece964efeb
- name: Log in to GitHub Container Registry
uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567
uses: docker/login-action@e92390c5fb421da1463c202d546fed0ec5c39f20
with:
registry: ghcr.io
username: ${{ github.actor }}
@@ -107,13 +110,13 @@ jobs:
- name: Log in to DockerHub
if: github.event_name != 'pull_request'
uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567
uses: docker/login-action@e92390c5fb421da1463c202d546fed0ec5c39f20
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Build & Deploy ${{ matrix.dockerfile[0] }}
uses: docker/build-push-action@16ebe778df0e7752d2cfcbd924afdbbd89c1a755
uses: docker/build-push-action@2cdde995de11925a030ce8070c3d77a52ffcf1c0
with:
context: dockerfiles/${{ matrix.dockerfile[0] }}
platforms: ${{ matrix.dockerfile[1] }}
@@ -126,7 +129,7 @@ jobs:
needs: deploy-images
steps:
- name: Merge Artifacts
uses: actions/upload-artifact/merge@834a144ee995460fba8ed112a2fc961b36a5ec5a
uses: actions/upload-artifact/merge@65462800fd760344b1a7b4382951275a0abb4808
with:
name: dockerfiles
pattern: dockerfiles_*


@@ -36,7 +36,7 @@ jobs:
core: ${{ steps.filter.outputs.core }}
packages: ${{ steps.filter.outputs.packages }}
steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
if: ${{ github.event_name == 'push' }}
with:
fetch-depth: 0
@@ -53,13 +53,6 @@ jobs:
- 'var/spack/repos/builtin/packages/clingo/**'
- 'var/spack/repos/builtin/packages/python/**'
- 'var/spack/repos/builtin/packages/re2c/**'
- 'var/spack/repos/builtin/packages/gnupg/**'
- 'var/spack/repos/builtin/packages/libassuan/**'
- 'var/spack/repos/builtin/packages/libgcrypt/**'
- 'var/spack/repos/builtin/packages/libgpg-error/**'
- 'var/spack/repos/builtin/packages/libksba/**'
- 'var/spack/repos/builtin/packages/npth/**'
- 'var/spack/repos/builtin/packages/pinentry/**'
- 'lib/spack/**'
- 'share/spack/**'
- '.github/workflows/bootstrap.yml'
@@ -84,8 +77,13 @@ jobs:
needs: [ prechecks, changes ]
uses: ./.github/workflows/unit_tests.yaml
secrets: inherit
windows:
if: ${{ github.repository == 'spack/spack' && needs.changes.outputs.core == 'true' }}
needs: [ prechecks ]
uses: ./.github/workflows/windows_python.yml
secrets: inherit
all:
needs: [ unit-tests, bootstrap ]
needs: [ windows, unit-tests, bootstrap ]
runs-on: ubuntu-latest
steps:
- name: Success

.github/workflows/install_spack.sh (vendored executable file, 8 additions)

@@ -0,0 +1,8 @@
#!/usr/bin/env sh
. share/spack/setup-env.sh
echo -e "config:\n build_jobs: 2" > etc/spack/config.yaml
spack config add "packages:all:target:[x86_64]"
spack compiler find
spack compiler info apple-clang
spack debug report
spack solve zlib


@@ -14,10 +14,10 @@ jobs:
build-paraview-deps:
runs-on: windows-latest
steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
with:
fetch-depth: 0
- uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
with:
python-version: 3.9
- name: Install Python packages


@@ -1,7 +0,0 @@
black==24.8.0
clingo==5.7.1
flake8==7.1.1
isort==5.13.2
mypy==1.8.0
types-six==1.16.21.20240513
vermin==1.6.0


@@ -0,0 +1,7 @@
black==24.4.2
clingo==5.7.1
flake8==7.0.0
isort==5.13.2
mypy==1.8.0
types-six==1.16.21.9
vermin==1.6.0


@@ -51,10 +51,10 @@ jobs:
on_develop: false
steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
with:
fetch-depth: 0
- uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
with:
python-version: ${{ matrix.python-version }}
- name: Install System packages
@@ -72,7 +72,7 @@ jobs:
run: |
# Need this for the git tests to succeed.
git --version
. .github/workflows/bin/setup_git.sh
. .github/workflows/setup_git.sh
- name: Bootstrap clingo
if: ${{ matrix.concretizer == 'clingo' }}
env:
@@ -91,7 +91,7 @@ jobs:
UNIT_TEST_COVERAGE: ${{ matrix.python-version == '3.11' }}
run: |
share/spack/qa/run-unit-tests
- uses: codecov/codecov-action@e28ff129e5465c2c0dcc6f003fc735cb6ae0c673
- uses: codecov/codecov-action@84508663e988701840491b86de86b666e8a86bed
with:
flags: unittests,linux,${{ matrix.concretizer }}
token: ${{ secrets.CODECOV_TOKEN }}
@@ -100,10 +100,10 @@ jobs:
shell:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
with:
fetch-depth: 0
- uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
with:
python-version: '3.11'
- name: Install System packages
@@ -118,13 +118,13 @@ jobs:
run: |
# Need this for the git tests to succeed.
git --version
. .github/workflows/bin/setup_git.sh
. .github/workflows/setup_git.sh
- name: Run shell tests
env:
COVERAGE: true
run: |
share/spack/qa/run-shell-tests
- uses: codecov/codecov-action@e28ff129e5465c2c0dcc6f003fc735cb6ae0c673
- uses: codecov/codecov-action@84508663e988701840491b86de86b666e8a86bed
with:
flags: shelltests,linux
token: ${{ secrets.CODECOV_TOKEN }}
@@ -141,13 +141,13 @@ jobs:
dnf install -y \
bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
make patch tcl unzip which xz
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
- name: Setup repo and non-root user
run: |
git --version
git config --global --add safe.directory /__w/spack/spack
git fetch --unshallow
. .github/workflows/bin/setup_git.sh
. .github/workflows/setup_git.sh
useradd spack-test
chown -R spack-test .
- name: Run unit tests
@@ -160,10 +160,10 @@ jobs:
clingo-cffi:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
with:
fetch-depth: 0
- uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
with:
python-version: '3.11'
- name: Install System packages
@@ -178,14 +178,14 @@ jobs:
run: |
# Need this for the git tests to succeed.
git --version
. .github/workflows/bin/setup_git.sh
. .github/workflows/setup_git.sh
- name: Run unit tests (full suite with coverage)
env:
COVERAGE: true
SPACK_TEST_SOLVER: clingo
run: |
share/spack/qa/run-unit-tests
- uses: codecov/codecov-action@e28ff129e5465c2c0dcc6f003fc735cb6ae0c673
- uses: codecov/codecov-action@84508663e988701840491b86de86b666e8a86bed
with:
flags: unittests,linux,clingo
token: ${{ secrets.CODECOV_TOKEN }}
@@ -195,13 +195,13 @@ jobs:
runs-on: ${{ matrix.os }}
strategy:
matrix:
os: [macos-13, macos-14]
os: [macos-latest, macos-14]
python-version: ["3.11"]
steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
with:
fetch-depth: 0
- uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
with:
python-version: ${{ matrix.python-version }}
- name: Install Python packages
@@ -217,45 +217,14 @@ jobs:
SPACK_TEST_PARALLEL: 4
run: |
git --version
. .github/workflows/bin/setup_git.sh
. .github/workflows/setup_git.sh
. share/spack/setup-env.sh
$(which spack) bootstrap disable spack-install
$(which spack) solve zlib
common_args=(--dist loadfile --tx '4*popen//python=./bin/spack-tmpconfig python -u ./bin/spack python' -x)
$(which spack) unit-test --verbose --cov --cov-config=pyproject.toml --cov-report=xml:coverage.xml "${common_args[@]}"
- uses: codecov/codecov-action@e28ff129e5465c2c0dcc6f003fc735cb6ae0c673
- uses: codecov/codecov-action@84508663e988701840491b86de86b666e8a86bed
with:
flags: unittests,macos
token: ${{ secrets.CODECOV_TOKEN }}
verbose: true
# Run unit tests on Windows
windows:
defaults:
run:
shell:
powershell Invoke-Expression -Command "./share/spack/qa/windows_test_setup.ps1"; {0}
runs-on: windows-latest
steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
with:
fetch-depth: 0
- uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
with:
python-version: 3.9
- name: Install Python packages
run: |
python -m pip install --upgrade pip pywin32 setuptools pytest-cov clingo
- name: Create local develop
run: |
./.github/workflows/bin/setup_git.ps1
- name: Unit Test
run: |
spack unit-test -x --verbose --cov --cov-config=pyproject.toml
./share/spack/qa/validate_last_exit.ps1
coverage combine -a
coverage xml
- uses: codecov/codecov-action@e28ff129e5465c2c0dcc6f003fc735cb6ae0c673
with:
flags: unittests,windows
token: ${{ secrets.CODECOV_TOKEN }}
verbose: true


@@ -18,15 +18,15 @@ jobs:
validate:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
- uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
with:
python-version: '3.11'
cache: 'pip'
- name: Install Python Packages
run: |
pip install --upgrade pip setuptools
pip install -r .github/workflows/requirements/style/requirements.txt
pip install -r .github/workflows/style/requirements.txt
- name: vermin (Spack's Core)
run: vermin --backport importlib --backport argparse --violations --backport typing -t=3.6- -vvv lib/spack/spack/ lib/spack/llnl/ bin/
- name: vermin (Repositories)
@@ -35,22 +35,22 @@ jobs:
style:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
with:
fetch-depth: 0
- uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
with:
python-version: '3.11'
cache: 'pip'
- name: Install Python packages
run: |
pip install --upgrade pip setuptools
pip install -r .github/workflows/requirements/style/requirements.txt
pip install -r .github/workflows/style/requirements.txt
- name: Setup git configuration
run: |
# Need this for the git tests to succeed.
git --version
. .github/workflows/bin/setup_git.sh
. .github/workflows/setup_git.sh
- name: Run style tests
run: |
share/spack/qa/run-style-tests
@@ -70,13 +70,13 @@ jobs:
dnf install -y \
bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
make patch tcl unzip which xz
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
- name: Setup repo and non-root user
run: |
git --version
git config --global --add safe.directory /__w/spack/spack
git fetch --unshallow
. .github/workflows/bin/setup_git.sh
. .github/workflows/setup_git.sh
useradd spack-test
chown -R spack-test .
- name: Bootstrap Spack development environment

.github/workflows/windows_python.yml (vendored normal file, 83 additions)

@@ -0,0 +1,83 @@
name: windows
on:
workflow_call:
concurrency:
group: windows-${{github.ref}}-${{github.event.pull_request.number || github.run_number}}
cancel-in-progress: true
defaults:
run:
shell:
powershell Invoke-Expression -Command "./share/spack/qa/windows_test_setup.ps1"; {0}
jobs:
unit-tests:
runs-on: windows-latest
steps:
- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
with:
fetch-depth: 0
- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
with:
python-version: 3.9
- name: Install Python packages
run: |
python -m pip install --upgrade pip pywin32 setuptools pytest-cov clingo
- name: Create local develop
run: |
./.github/workflows/setup_git.ps1
- name: Unit Test
run: |
spack unit-test -x --verbose --cov --cov-config=pyproject.toml --ignore=lib/spack/spack/test/cmd
./share/spack/qa/validate_last_exit.ps1
coverage combine -a
coverage xml
- uses: codecov/codecov-action@84508663e988701840491b86de86b666e8a86bed
with:
flags: unittests,windows
token: ${{ secrets.CODECOV_TOKEN }}
verbose: true
unit-tests-cmd:
runs-on: windows-latest
steps:
- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
with:
fetch-depth: 0
- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
with:
python-version: 3.9
- name: Install Python packages
run: |
python -m pip install --upgrade pip pywin32 setuptools coverage pytest-cov clingo
- name: Create local develop
run: |
./.github/workflows/setup_git.ps1
- name: Command Unit Test
run: |
spack unit-test -x --verbose --cov --cov-config=pyproject.toml lib/spack/spack/test/cmd
./share/spack/qa/validate_last_exit.ps1
coverage combine -a
coverage xml
- uses: codecov/codecov-action@84508663e988701840491b86de86b666e8a86bed
with:
flags: unittests,windows
token: ${{ secrets.CODECOV_TOKEN }}
verbose: true
build-abseil:
runs-on: windows-latest
steps:
- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
with:
fetch-depth: 0
- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
with:
python-version: 3.9
- name: Install Python packages
run: |
python -m pip install --upgrade pip pywin32 setuptools coverage
- name: Build Test
run: |
spack compiler find
spack -d external find cmake ninja
spack -d install abseil-cpp


@@ -1,369 +1,3 @@
# v0.22.0 (2024-05-12)
`v0.22.0` is a major feature release.
## Features in this release
1. **Compiler dependencies**
We are in the process of making compilers proper dependencies in Spack, and a number
of changes in `v0.22` support that effort. You may notice nodes in your dependency
graphs for compiler runtime libraries like `gcc-runtime` or `libgfortran`, and you
may notice that Spack graphs now include `libc`. We've also begun moving compiler
configuration from `compilers.yaml` to `packages.yaml` to make it consistent with
other externals. We are trying to do this with the least disruption possible, so
your existing `compilers.yaml` files should still work. We expect to be done with
this transition by the `v0.23` release in November.
* #41104: Packages compiled with `%gcc` on Linux, macOS and FreeBSD now depend on a
new package `gcc-runtime`, which contains a copy of the shared compiler runtime
libraries. This enables gcc runtime libraries to be installed and relocated when
using a build cache. When building minimal Spack-generated container images it is
no longer necessary to install libgfortran, libgomp etc. using the system package
manager.
* #42062: Packages compiled with `%oneapi` now depend on a new package
`intel-oneapi-runtime`. This is similar to `gcc-runtime`, and the runtimes can
provide virtuals and compilers can inject dependencies on virtuals into compiled
packages. This allows us to model library soname compatibility and allows
compilers like `%oneapi` to provide virtuals like `sycl` (which can also be
provided by standalone libraries). Note that until we have an agreement in place
with Intel, Intel packages are marked `redistribute(source=False, binary=False)`
and must be downloaded outside of Spack.
* #43272: changes to the optimization criteria of the solver improve the hit-rate of
buildcaches by a fair amount. The solver now has more relaxed compatibility rules and will
not try to strictly match compilers or targets of reused specs. Users can still
enforce the previous strict behavior with `require:` sections in `packages.yaml`
(a configuration sketch follows this list).
Note that to enforce correct linking, Spack will *not* reuse old `%gcc` and
`%oneapi` specs that do not have the runtime libraries as a dependency.
* #43539: Spack will reuse specs built with compilers that are *not* explicitly
configured in `compilers.yaml`. Because we can now keep runtime libraries in build
cache, we do not require you to also have a local configured compiler to *use* the
runtime libraries. This improves reuse in buildcaches and avoids conflicts with OS
updates that happen underneath Spack.
* #43190: binary compatibility on `linux` is now based on the `libc` version,
instead of on the `os` tag. Spack builds now detect the host `libc` (`glibc` or
`musl`) and add it as an implicit external node in the dependency graph. Binaries
with a `libc` with the same name and a version less than or equal to that of the
detected `libc` can be reused. This is only on `linux`, not `macos` or `Windows`.
* #43464: each package that can provide a compiler is now detectable using `spack
external find`. External packages defining compiler paths are effectively used as
compilers, and `spack external find -t compiler` can be used as a substitute for
`spack compiler find`. More details on this transition are in
[the docs](https://spack.readthedocs.io/en/latest/getting_started.html#manual-compiler-configuration)
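As a concrete (and purely illustrative) `packages.yaml` sketch of the points above: the `gcc` version and prefix are hypothetical placeholders, and the `require:` entry shows the strict-matching workaround mentioned for #43272:
```yaml
packages:
  # A compiler configured as an ordinary external package
  # (version and prefix are illustrative placeholders).
  gcc:
    externals:
    - spec: gcc@12.3.0
      prefix: /usr
  # Restore strict compiler matching for one package; hdf5 is
  # only an example package name.
  hdf5:
    require:
    - "%gcc@12.3.0"
```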
2. **Improved `spack find` UI for Environments**
If you're working in an environment, you likely care about:
* What are the roots
* Which ones are installed / not installed
* What's been added that still needs to be concretized
We've tweaked `spack find` in environments to show this information much more
clearly. Installation status is shown next to each root, so you can see what is
installed. Roots are also shown in bold in the list of installed packages. There is
also a new option for `spack find -r` / `--only-roots` that will only show env
roots, if you don't want to look at all the installed specs.
More details in #42334.
3. **Improved command-line string quoting**
We are making some breaking changes to how Spack parses specs on the CLI in order to
respect shell quoting instead of trying to fight it. If you (sadly) had to write
something like this on the command line:
```
spack install zlib cflags=\"-O2 -g\"
```
That will now result in an error, but you can now write what you probably expected
to work in the first place:
```
spack install zlib cflags="-O2 -g"
```
Quoted strings can now also include special characters, so you can supply flags like:
```
spack install zlib ldflags='-Wl,-rpath=$ORIGIN/_libs'
```
To reduce ambiguity in parsing, we now require that you *not* put spaces around `=`
and `==` for flags or variants. This would not have broken before but will now
result in an error:
```
spack install zlib cflags = "-O2 -g"
```
More details and discussion in #30634.
4. **Revert default `spack install` behavior to `--reuse`**
We changed the default concretizer behavior from `--reuse` to `--reuse-deps` in
#30990 (in `v0.20`), which meant that *every* `spack install` invocation would
attempt to build a new version of the requested package / any environment roots.
While this is a common ask for *upgrading* and for *developer* workflows, we don't
think it should be the default for a package manager.
We are going to try to stick to this policy:
1. Prioritize reuse and build as little as possible by default.
2. Only upgrade or install duplicates if they are explicitly asked for, or if there
is a known security issue that necessitates an upgrade.
With the install command you now have three options:
* `--reuse` (default): reuse as many existing installations as possible.
* `--reuse-deps` / `--fresh-roots`: upgrade (freshen) roots but reuse dependencies if possible.
* `--fresh`: install fresh versions of requested packages (roots) and their dependencies.
We've also introduced `--fresh-roots` as an alias for `--reuse-deps` to make it clearer
that it may give you fresh versions. More details in #41302 and #43988.
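A rough sketch of how these defaults can be pinned in configuration, assuming the install flags map onto the `concretizer:reuse` setting covered in the next item:
```yaml
concretizer:
  # true         -> behave like `spack install --reuse` (the default)
  # dependencies -> behave like `--reuse-deps` / `--fresh-roots`
  # false        -> behave like `--fresh`
  reuse: true
```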
5. **More control over reused specs**
You can now control which packages to reuse and how. There is a new
`concretizer:reuse` config option, which accepts the following properties:
- `roots`: `true` to reuse roots, `false` to reuse just dependencies
- `exclude`: list of constraints used to select which specs *not* to reuse
- `include`: list of constraints used to select which specs *to* reuse
- `from`: list of sources for reused specs (some combination of `local`,
`buildcache`, or `external`)
For example, to reuse only specs compiled with GCC, you could write:
```yaml
concretizer:
reuse:
roots: true
include:
- "%gcc"
```
Or, if `openmpi` must be used from externals, and it must be the only external used:
```yaml
concretizer:
reuse:
roots: true
from:
- type: local
exclude: ["openmpi"]
- type: buildcache
exclude: ["openmpi"]
- type: external
include: ["openmpi"]
```
6. **New `redistribute()` directive**
Some packages can't be redistributed in source or binary form. We need an explicit
way to say that in a package.
Now there is a `redistribute()` directive so that package authors can write:
```python
class MyPackage(Package):
redistribute(source=False, binary=False)
```
Like other directives, this works with `when=`:
```python
class MyPackage(Package):
# 12.0 and higher are proprietary
redistribute(source=False, binary=False, when="@12.0:")
# can't redistribute when we depend on some proprietary dependency
redistribute(source=False, binary=False, when="^proprietary-dependency")
```
More in #20185.
7. **New `conflict:` and `prefer:` syntax for package preferences**
Previously, you could express conflicts and preferences in `packages.yaml` through
some contortions with `require:`:
```yaml
packages:
zlib-ng:
require:
- one_of: ["%clang", "@:"] # conflict on %clang
- any_of: ["+shared", "@:"] # strong preference for +shared
```
You can now use `conflict:` and `prefer:` for a much more readable configuration:
```yaml
packages:
zlib-ng:
conflict:
- "%clang"
prefer:
- "+shared"
```
See [the documentation](https://spack.readthedocs.io/en/latest/packages_yaml.html#conflicts-and-strong-preferences)
and #41832 for more details.
8. **`include_concrete` in environments**
You may want to build on the *concrete* contents of another environment without
changing that environment. You can now include the concrete specs from another
environment's `spack.lock` with `include_concrete`:
```yaml
spack:
specs: []
concretizer:
unify: true
include_concrete:
- /path/to/environment1
- /path/to/environment2
```
Now, when *this* environment is concretized, it will bring in the already concrete
specs from `environment1` and `environment2`, and build on top of them without
changing them. This is useful if you have phased deployments, where old deployments
should not be modified but you want to use as many of them as possible. More details
in #33768.
9. **`python-venv` isolation**
Spack has unique requirements for Python because it:
1. installs every package in its own independent directory, and
2. allows users to register *external* python installations.
External installations may contain their own installed packages that can interfere
with Spack installations, and some distributions (Debian and Ubuntu) even change the
`sysconfig` in ways that alter the installation layout of installed Python packages
(e.g., with the addition of a `/local` prefix on Debian or Ubuntu). To isolate Spack
from these and other issues, we now insert a small `python-venv` package in between
`python` and packages that need to install Python code. This isolates Spack's build
environment, isolates Spack from any issues with an external python, and resolves a
large number of issues we've had with Python installations.
See #40773 for further details.
## New commands, options, and directives
* Allow packages to be pushed to build cache after install from source (#42423)
* `spack develop`: stage build artifacts in same root as non-dev builds (#41373)
* Don't delete `spack develop` build artifacts after install (#43424)
* `spack find`: add options for local/upstream only (#42999)
* `spack logs`: print log files for packages (either partially built or installed) (#42202)
* `patch`: support reversing patches (#43040)
* `develop`: Add -b/--build-directory option to set build_directory package attribute (#39606)
* `spack list`: add `--namespace` / `--repo` option (#41948)
* directives: add `checked_by` field to `license()`, add some license checks
* `spack gc`: add options for environments and build dependencies (#41731)
* Add `--create` to `spack env activate` (#40896)
## Performance improvements
* environment.py: fix excessive re-reads (#43746)
* ruamel yaml: fix quadratic complexity bug (#43745)
* Refactor to improve `spec format` speed (#43712)
* Do not acquire a write lock on the env post install if no views (#43505)
* asp.py: fewer calls to `spec.copy()` (#43715)
* spec.py: early return in `__str__`
* avoid `jinja2` import at startup unless needed (#43237)
## Other new features of note
* `archspec`: update to `v0.2.4`: support for Windows, bugfixes for `neoverse-v1` and
`neoverse-v2` detection.
* `spack config get`/`blame`: with no args, show entire config
* `spack env create <env>`: dir if dir-like (#44024)
* ASP-based solver: update os compatibility for macOS (#43862)
* Add handling of custom ssl certs in urllib ops (#42953)
* Add ability to rename environments (#43296)
* Add config option and compiler support to reuse across OS's (#42693)
* Support for prereleases (#43140)
* Only reuse externals when configured (#41707)
* Environments: Add support for including views (#42250)
## Binary caches
* Build cache: make signed/unsigned a mirror property (#41507)
* tools stack
## Removals, deprecations, and syntax changes
* remove `dpcpp` compiler and package (#43418)
* spack load: remove --only argument (#42120)
## Notable Bugfixes
* repo.py: drop deleted packages from provider cache (#43779)
* Allow `+` in module file names (#41999)
* `cmd/python`: use runpy to allow multiprocessing in scripts (#41789)
* Show extension commands with spack -h (#41726)
* Support environment variable expansion inside module projections (#42917)
* Alert user to failed concretizations (#42655)
* shell: fix zsh color formatting for PS1 in environments (#39497)
* spack mirror create --all: include patches (#41579)
## Spack community stats
* 7,994 total packages; 525 since `v0.21.0`
* 178 new Python packages, 5 new R packages
* 358 people contributed to this release
* 344 committers to packages
* 45 committers to core
# v0.21.2 (2024-03-01)
## Bugfixes
- Containerize: accommodate nested or pre-existing spack-env paths (#41558)
- Fix setup-env script, when going back and forth between instances (#40924)
- Fix using fully-qualified namespaces from root specs (#41957)
- Fix a bug when a required provider is requested for multiple virtuals (#42088)
- OCI buildcaches:
- only push in parallel when forking (#42143)
- use pickleable errors (#42160)
- Fix using sticky variants in externals (#42253)
- Fix a rare issue with conditional requirements and multi-valued variants (#42566)
## Package updates
- rust: add v1.75, rework a few variants (#41161,#41903)
- py-transformers: add v4.35.2 (#41266)
- mgard: fix OpenMP on AppleClang (#42933)
# v0.21.1 (2024-01-11)
## New features
- Add support for reading buildcaches created by Spack v0.22 (#41773)
## Bugfixes
- spack graph: fix coloring with environments (#41240)
- spack info: sort variants in --variants-by-name (#41389)
- Spec.format: error on old style format strings (#41934)
- ASP-based solver:
- fix infinite recursion when computing concretization errors (#41061)
- don't error for type mismatch on preferences (#41138)
- don't emit spurious debug output (#41218)
- Improve the error message for deprecated preferences (#41075)
- Fix MSVC preview version breaking clingo build on Windows (#41185)
- Fix multi-word aliases (#41126)
- Add a warning for unconfigured compiler (#41213)
- environment: fix an issue with deconcretization/reconcretization of specs (#41294)
- buildcache: don't error if a patch is missing, when installing from binaries (#41986)
- Multiple improvements to unit-tests (#41215,#41369,#41495,#41359,#41361,#41345,#41342,#41308,#41226)
## Package updates
- root: add a webgui patch to address security issue (#41404)
- BerkeleyGW: update source urls (#38218)
# v0.21.0 (2023-11-11)
`v0.21.0` is a major feature release.


@@ -32,7 +32,7 @@
Spack is a multi-platform package manager that builds and installs
multiple versions and configurations of software. It works on Linux,
macOS, Windows, and many supercomputers. Spack is non-destructive: installing a
macOS, and many supercomputers. Spack is non-destructive: installing a
new version of a package does not break existing installations, so many
configurations of the same package can coexist.


@@ -22,4 +22,4 @@
#
# This is compatible across platforms.
#
exec spack python "$@"
exec /usr/bin/env spack python "$@"


@@ -188,27 +188,25 @@ if NOT "%_sp_args%"=="%_sp_args:--help=%" (
goto :end_switch
:case_load
if NOT defined _sp_args (
exit /B 0
)
:: If args contain --bat, or -h/--help: just execute.
if NOT "%_sp_args%"=="%_sp_args:--help=%" (
goto :default_case
) else if NOT "%_sp_args%"=="%_sp_args:-h=%" (
goto :default_case
) else if NOT "%_sp_args%"=="%_sp_args:--bat=%" (
goto :default_case
) else if NOT "%_sp_args%"=="%_sp_args:--list=%" (
goto :default_case
:: If args contain --sh, --csh, or -h/--help: just execute.
if defined _sp_args (
if NOT "%_sp_args%"=="%_sp_args:--help=%" (
goto :default_case
) else if NOT "%_sp_args%"=="%_sp_args:-h=%" (
goto :default_case
) else if NOT "%_sp_args%"=="%_sp_args:--bat=%" (
goto :default_case
)
)
for /f "tokens=* USEBACKQ" %%I in (
`python "%spack%" %_sp_flags% %_sp_subcommand% --bat %_sp_args%`
) do %%I
`python "%spack%" %_sp_flags% %_sp_subcommand% --bat %_sp_args%`) do %%I
goto :end_switch
:case_unload
goto :case_load
:default_case
python "%spack%" %_sp_flags% %_sp_subcommand% %_sp_args%
goto :end_switch


@@ -144,5 +144,3 @@ switch($SpackSubCommand)
"unload" {Invoke-SpackLoad}
default {python "$Env:SPACK_ROOT/bin/spack" $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs}
}
exit $LASTEXITCODE


@@ -0,0 +1,16 @@
# -------------------------------------------------------------------------
# This is the default configuration for Spack's module file generation.
#
# Settings here are versioned with Spack and are intended to provide
# sensible defaults out of the box. Spack maintainers should edit this
# file to keep it current.
#
# Users can override these settings by editing the following files.
#
# Per-spack-instance settings (overrides defaults):
# $SPACK_ROOT/etc/spack/modules.yaml
#
# Per-user settings (overrides default and site settings):
# ~/.spack/modules.yaml
# -------------------------------------------------------------------------
modules: {}


@@ -0,0 +1,19 @@
# -------------------------------------------------------------------------
# This file controls default concretization preferences for Spack.
#
# Settings here are versioned with Spack and are intended to provide
# sensible defaults out of the box. Spack maintainers should edit this
# file to keep it current.
#
# Users can override these settings by editing the following files.
#
# Per-spack-instance settings (overrides defaults):
# $SPACK_ROOT/etc/spack/packages.yaml
#
# Per-user settings (overrides default and site settings):
# ~/.spack/packages.yaml
# -------------------------------------------------------------------------
packages:
all:
providers:
iconv: [glibc, musl, libiconv]


@@ -0,0 +1,19 @@
# -------------------------------------------------------------------------
# This file controls default concretization preferences for Spack.
#
# Settings here are versioned with Spack and are intended to provide
# sensible defaults out of the box. Spack maintainers should edit this
# file to keep it current.
#
# Users can override these settings by editing the following files.
#
# Per-spack-instance settings (overrides defaults):
# $SPACK_ROOT/etc/spack/packages.yaml
#
# Per-user settings (overrides default and site settings):
# ~/.spack/packages.yaml
# -------------------------------------------------------------------------
packages:
all:
providers:
iconv: [glibc, musl, libiconv]


@@ -20,7 +20,6 @@ packages:
awk: [gawk]
armci: [armcimpi]
blas: [openblas, amdblis]
cxx: [gcc]
D: [ldc]
daal: [intel-oneapi-daal]
elf: [elfutils]
@@ -39,9 +38,10 @@ packages:
lapack: [openblas, amdlibflame]
libc: [glibc, musl]
libgfortran: [ gcc-runtime ]
libglx: [mesa+glx]
libglx: [mesa+glx, mesa18+glx]
libifcore: [ intel-oneapi-runtime ]
libllvm: [llvm]
libosmesa: [mesa+osmesa, mesa18+osmesa]
lua-lang: [lua, lua-luajit-openresty, lua-luajit]
luajit: [lua-luajit-openresty, lua-luajit]
mariadb-client: [mariadb-c-client, mariadb]
@@ -62,7 +62,6 @@ packages:
tbb: [intel-tbb]
unwind: [libunwind]
uuid: [util-linux-uuid, libuuid]
wasi-sdk: [wasi-sdk-prebuilt]
xxd: [xxd-standalone, vim]
yacc: [bison, byacc]
ziglang: [zig]


@@ -865,7 +865,7 @@ There are several different ways to use Spack packages once you have
installed them. As you've seen, spack packages are installed into long
paths with hashes, and you need a way to get them into your path. The
easiest way is to use :ref:`spack load <cmd-spack-load>`, which is
described in this section.
described in the next section.
Some more advanced ways to use Spack packages include:
@@ -959,86 +959,7 @@ use ``spack find --loaded``.
You can also use ``spack load --list`` to get the same output, but it
does not have the full set of query options that ``spack find`` offers.
We'll learn more about Spack's spec syntax in :ref:`a later section <sec-specs>`.
.. _extensions:
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Python packages and virtual environments
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Spack can install a large number of Python packages. Their names are
typically prefixed with ``py-``. Installing and using them is no
different from any other package:
.. code-block:: console
$ spack install py-numpy
$ spack load py-numpy
$ python3
>>> import numpy
The ``spack load`` command sets the ``PATH`` variable so that the right Python
executable is used, and makes sure that ``numpy`` and its dependencies can be
located in the ``PYTHONPATH``.
Spack is different from other Python package managers in that it installs
every package into its *own* prefix. This is in contrast to ``pip``, which
installs all packages into the same prefix, be it in a virtual environment
or not.
For many users, **virtual environments** are more convenient than repeated
``spack load`` commands, particularly when working with multiple Python
packages. Fortunately Spack supports environments itself, which together
with a view are no different from Python virtual environments.
The recommended way of working with Python extensions such as ``py-numpy``
is through :ref:`Environments <environments>`. The following example creates
a Spack environment with ``numpy`` in the current working directory. It also
puts a filesystem view in ``./view``, which is a more traditional combined
prefix for all packages in the environment.
.. code-block:: console
$ spack env create --with-view view --dir .
$ spack -e . add py-numpy
$ spack -e . concretize
$ spack -e . install
Now you can activate the environment and start using the packages:
.. code-block:: console
$ spack env activate .
$ python3
>>> import numpy
The environment view is also a virtual environment, which is useful if you are
sharing the environment with others who are unfamiliar with Spack. They can
either use the Python executable directly:
.. code-block:: console
$ ./view/bin/python3
>>> import numpy
or use the activation script:
.. code-block:: console
$ source ./view/bin/activate
$ python3
>>> import numpy
In general, there should not be much difference between ``spack env activate``
and using the virtual environment. The main advantage of ``spack env activate``
is that it knows about more packages than just Python packages, and it may set
additional runtime variables that are not covered by the virtual environment
activation script.
See :ref:`environments` for a more in-depth description of Spack
environments and customizations to views.
We'll learn more about Spack's spec syntax in the next section.
.. _sec-specs:
@@ -1433,12 +1354,22 @@ the reserved keywords ``platform``, ``os`` and ``target``:
$ spack install libelf os=ubuntu18.04
$ spack install libelf target=broadwell
or together by using the reserved keyword ``arch``:
.. code-block:: console
$ spack install libelf arch=cray-CNL10-haswell
Normally users don't have to bother specifying the architecture if they
are installing software for their current host, as in that case the
values will be detected automatically. If you need fine-grained control
over which packages use which targets (or over *all* packages' default
target), see :ref:`package-preferences`.
.. admonition:: Cray machines
The situation is a little bit different for Cray machines and a detailed
explanation on how the architecture can be set on them can be found at :ref:`cray-support`
.. _support-for-microarchitectures:
@@ -1774,6 +1705,165 @@ check only local packages (as opposed to those used transparently from
``upstream`` spack instances) and the ``-j,--json`` option to output
machine-readable json data for any errors.
.. _extensions:
---------------------------
Extensions & Python support
---------------------------
Spack's installation model assumes that each package will live in its
own install prefix. However, certain packages are typically installed
*within* the directory hierarchy of other packages. For example,
`Python <https://www.python.org>`_ packages are typically installed in the
``$prefix/lib/python-2.7/site-packages`` directory.
In Spack, installation prefixes are immutable, so this type of installation
is not directly supported. However, it is possible to create views that
allow you to merge install prefixes of multiple packages into a single new prefix.
Views are a convenient way to get a more traditional filesystem structure.
Using *extensions*, you can ensure that Python packages always share the
same prefix in the view as Python itself. Suppose you have
Python installed like so:
.. code-block:: console
$ spack find python
==> 1 installed packages.
-- linux-debian7-x86_64 / gcc@4.4.7 --------------------------------
python@2.7.8
.. _cmd-spack-extensions:
^^^^^^^^^^^^^^^^^^^^
``spack extensions``
^^^^^^^^^^^^^^^^^^^^
You can find extensions for your Python installation like this:
.. code-block:: console
$ spack extensions python
==> python@2.7.8%gcc@4.4.7 arch=linux-debian7-x86_64-703c7a96
==> 36 extensions:
geos py-ipython py-pexpect py-pyside py-sip
py-basemap py-libxml2 py-pil py-pytz py-six
py-biopython py-mako py-pmw py-rpy2 py-sympy
py-cython py-matplotlib py-pychecker py-scientificpython py-virtualenv
py-dateutil py-mpi4py py-pygments py-scikit-learn
py-epydoc py-mx py-pylint py-scipy
py-gnuplot py-nose py-pyparsing py-setuptools
py-h5py py-numpy py-pyqt py-shiboken
==> 12 installed:
-- linux-debian7-x86_64 / gcc@4.4.7 --------------------------------
py-dateutil@2.4.0 py-nose@1.3.4 py-pyside@1.2.2
py-dateutil@2.4.0 py-numpy@1.9.1 py-pytz@2014.10
py-ipython@2.3.1 py-pygments@2.0.1 py-setuptools@11.3.1
py-matplotlib@1.4.2 py-pyparsing@2.0.3 py-six@1.9.0
The extensions are a subset of what's returned by ``spack list``, and
they are packages like any other. They are installed into their own
prefixes, and you can see this with ``spack find --paths``:
.. code-block:: console
$ spack find --paths py-numpy
==> 1 installed packages.
-- linux-debian7-x86_64 / gcc@4.4.7 --------------------------------
py-numpy@1.9.1 ~/spack/opt/linux-debian7-x86_64/gcc@4.4.7/py-numpy@1.9.1-66733244
However, even though this package is installed, you cannot use it
directly when you run ``python``:
.. code-block:: console
$ spack load python
$ python
Python 2.7.8 (default, Feb 17 2015, 01:35:25)
[GCC 4.4.7 20120313 (Red Hat 4.4.7-11)] on linux2
Type "help", "copyright", "credits" or "license" for more information.
>>> import numpy
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
ImportError: No module named numpy
>>>
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Using Extensions in Environments
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
The recommended way of working with extensions such as ``py-numpy``
above is through :ref:`Environments <environments>`. For example,
the following creates an environment in the current working directory
with a filesystem view in the ``./view`` directory:
.. code-block:: console
$ spack env create --with-view view --dir .
$ spack -e . add py-numpy
$ spack -e . concretize
$ spack -e . install
We recommend environments for two reasons. Firstly, environments
can be activated (requires :ref:`shell-support`):
.. code-block:: console
$ spack env activate .
which sets all the right environment variables such as ``PATH`` and
``PYTHONPATH``. This ensures that
.. code-block:: console
$ python
>>> import numpy
works. Secondly, even without shell support, the view ensures
that Python can locate its extensions:
.. code-block:: console
$ ./view/bin/python
>>> import numpy
See :ref:`environments` for a more in-depth description of Spack
environments and customizations to views.
^^^^^^^^^^^^^^^^^^^^
Using ``spack load``
^^^^^^^^^^^^^^^^^^^^
A more traditional way of using Spack and extensions is ``spack load``
(requires :ref:`shell-support`). This will add the extension to ``PYTHONPATH``
in your current shell, and Python itself will be available in the ``PATH``:
.. code-block:: console
$ spack load py-numpy
$ python
>>> import numpy
The loaded packages can be checked using ``spack find --loaded``.
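For example:

.. code-block:: console

   $ spack find --loaded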
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Loading Extensions via Modules
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Apart from ``spack env activate`` and ``spack load``, you can load numpy
through your environment modules (using ``environment-modules`` or
``lmod``). This will also add the extension to the ``PYTHONPATH`` in
your current shell.
.. code-block:: console
$ module load <name of numpy module>
If you do not know the name of the specific numpy module you wish to
load, you can use the ``spack module tcl|lmod loads`` command to get
the name of the module from the Spack spec.
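For example (the module name shown is hypothetical; the exact name depends on
your system and module configuration):

.. code-block:: console

   $ spack module tcl loads py-numpy
   module load py-numpy-1.9.1-gcc-4.4.7-66733244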
-----------------------
Filesystem requirements
-----------------------

View File

@@ -147,15 +147,6 @@ example, the ``bash`` shell is used to run the ``autogen.sh`` script.
def autoreconf(self, spec, prefix):
which("bash")("autogen.sh")
If the ``package.py`` has build instructions in a separate
:ref:`builder class <multiple_build_systems>`, the signature for a phase changes slightly:
.. code-block:: python
class AutotoolsBuilder(AutotoolsBuilder):
def autoreconf(self, pkg, spec, prefix):
which("bash")("autogen.sh")
"""""""""""""""""""""""""""""""""""""""
patching configure or Makefile.in files
"""""""""""""""""""""""""""""""""""""""

View File

@@ -25,7 +25,7 @@ use Spack to build packages with the tools.
The Spack Python class ``IntelOneapiPackage`` is a base class that is
used by ``IntelOneapiCompilers``, ``IntelOneapiMkl``,
``IntelOneapiTbb`` and other classes to implement the oneAPI
packages. Search for ``oneAPI`` at `packages.spack.io <https://packages.spack.io>`_ for the full
packages. Search for ``oneAPI`` at `<packages.spack.io>`_ for the full
list of available oneAPI packages, or use::
spack list -d oneAPI

View File

@@ -718,45 +718,23 @@ command-line tool, or C/C++/Fortran program with optional Python
modules? The former should be prepended with ``py-``, while the
latter should not.
""""""""""""""""""""""""""""""
``extends`` vs. ``depends_on``
""""""""""""""""""""""""""""""
""""""""""""""""""""""
extends vs. depends_on
""""""""""""""""""""""
This is very similar to the naming dilemma above, with a slight twist.
As mentioned in the :ref:`Packaging Guide <packaging_extensions>`,
``extends`` and ``depends_on`` are very similar, but ``extends`` ensures
that the extension and extendee share the same prefix in views.
This allows the user to import a Python module without
having to add that module to ``PYTHONPATH``.
Additionally, ``extends("python")`` adds a dependency on the package
``python-venv``. This improves isolation from the system, whether
it's during the build or at runtime: user and system site packages
cannot accidentally be used by any package that ``extends("python")``.
As a rule of thumb: if a package does not install any Python modules
of its own, and merely puts a Python script in the ``bin`` directory,
then there is no need for ``extends``. If the package installs modules
in the ``site-packages`` directory, it requires ``extends``.
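As an illustration, here is a sketch with two hypothetical packages: one
installs Python modules into ``site-packages`` and therefore uses ``extends``,
while the other only installs a script into ``bin`` and uses a plain dependency:

.. code-block:: python

   class PyExample(Package):
       """Hypothetical package that installs modules into site-packages."""

       # Shares the installation prefix with python in views and adds
       # a dependency on python-venv for build/runtime isolation.
       extends("python")


   class ExampleTool(Package):
       """Hypothetical tool that only installs a script into bin."""

       # No modules are installed, so a plain dependency suffices.
       depends_on("python", type=("build", "run"))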
"""""""""""""""""""""""""""""""""""""
Executing ``python`` during the build
"""""""""""""""""""""""""""""""""""""
Whenever you need to execute a Python command or pass the path of the
Python interpreter to the build system, it is best to use the global
variable ``python`` directly. For example:
.. code-block:: python
@run_before("install")
def recythonize(self):
python("setup.py", "clean") # use the `python` global
As mentioned in the previous section, ``extends("python")`` adds an
automatic dependency on ``python-venv``, which is a virtual environment
that guarantees build isolation. The ``python`` global always refers to
the correct Python interpreter, whether the package uses ``extends("python")``
or ``depends_on("python")``.
When deciding between ``extends`` and ``depends_on``, the best rule of
thumb is to check the installation prefix. If Python libraries are
installed to ``<prefix>/lib/pythonX.Y/site-packages``, then you
should use ``extends``. If Python libraries are installed elsewhere
or the only files that get installed reside in ``<prefix>/bin``, then
don't use ``extends``.
^^^^^^^^^^^^^^^^^^^^^
Alternatives to Spack

View File

@@ -11,8 +11,7 @@ Chaining Spack Installations
You can point your Spack installation to another installation to use any
packages that are installed there. To register the other Spack instance,
you can add it as an entry to ``upstreams.yaml`` at any of the
:ref:`configuration-scopes`:
you can add it as an entry to ``upstreams.yaml``:
.. code-block:: yaml
@@ -23,8 +22,7 @@ you can add it as an entry to ``upstreams.yaml`` at any of the
install_tree: /path/to/another/spack/opt/spack
``install_tree`` must point to the ``opt/spack`` directory inside of the
Spack base directory, or the location of the ``install_tree`` defined
in :ref:`config.yaml <config-yaml>`.
Spack base directory.
Once the upstream Spack instance has been added, ``spack find`` will
automatically check the upstream instance when querying installed packages,

View File

@@ -150,7 +150,7 @@ this can expose you to attacks. Use at your own risk.
--------------------
Path to custom certificates for SSL verification. The value can be a
filesystem path, or an environment variable that expands to an absolute file path.
filesystem path, or an environment variable that expands to a file path.
The default value is set to the environment variable ``SSL_CERT_FILE``
to use the same syntax used by many other applications that automatically
detect custom certificates.
@@ -160,9 +160,6 @@ in the subprocess calling ``curl``.
If ``url_fetch_method:urllib`` then files and directories are supported i.e.
``config:ssl_certs:$SSL_CERT_FILE`` or ``config:ssl_certs:$SSL_CERT_DIR``
will work.
In all cases the expanded path must be absolute for Spack to use the certificates.
A certificate path relative to an environment can be specified by prepending the
path with the Spack configuration variable ``$env``.
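For example, to reference a certificate stored alongside an environment (the
filename is illustrative):

.. code-block:: yaml

   config:
     ssl_certs: $env/cacert.pem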
--------------------
``checksum``

View File

@@ -194,6 +194,9 @@ The OS that are currently supported are summarized in the table below:
* - Operating System
- Base Image
- Spack Image
* - Ubuntu 18.04
- ``ubuntu:18.04``
- ``spack/ubuntu-bionic``
* - Ubuntu 20.04
- ``ubuntu:20.04``
- ``spack/ubuntu-focal``
@@ -203,9 +206,12 @@ The OS that are currently supported are summarized in the table below:
* - Ubuntu 24.04
- ``ubuntu:24.04``
- ``spack/ubuntu-noble``
* - CentOS Stream9
- ``quay.io/centos/centos:stream9``
- ``spack/centos-stream9``
* - CentOS 7
- ``centos:7``
- ``spack/centos7``
* - CentOS Stream
- ``quay.io/centos/centos:stream``
- ``spack/centos-stream``
* - openSUSE Leap
- ``opensuse/leap``
- ``spack/leap15``
@@ -224,6 +230,12 @@ The OS that are currently supported are summarized in the table below:
* - Rocky Linux 9
- ``rockylinux:9``
- ``spack/rockylinux9``
* - Fedora Linux 37
- ``fedora:37``
- ``spack/fedora37``
* - Fedora Linux 38
- ``fedora:38``
- ``spack/fedora38``
* - Fedora Linux 39
- ``fedora:39``
- ``spack/fedora39``

View File

@@ -142,8 +142,12 @@ user's prompt to begin with the environment name in brackets.
$ spack env activate -p myenv
[myenv] $ ...
The ``activate`` command can also be used to create a new environment if it does not already
exist.
The ``activate`` command can also be used to create a new environment, if it is
not already defined, by adding the ``--create`` flag. Managed and anonymous
environments (anonymous environments are explained in the next section)
can both be created using the same flags that ``spack env create`` accepts.
If an environment already exists, Spack will simply activate it and ignore the
creation-specific flags.
.. code-block:: console
@@ -172,36 +176,21 @@ environment will remove the view from the user environment.
Anonymous Environments
^^^^^^^^^^^^^^^^^^^^^^
Apart from managed environments, Spack also supports anonymous environments.
Anonymous environments can be placed in any directory of choice.
.. note::
When uninstalling packages, Spack asks the user to confirm the removal of packages
that are still used in a managed environment. This is not the case for anonymous
environments.
To create an anonymous environment, use one of the following commands:
Any directory can be treated as an environment if it contains a file
``spack.yaml``. To load an anonymous environment, use:
.. code-block:: console
$ spack env create --dir my_env
$ spack env create ./my_env
$ spack env activate -d /path/to/directory
As a shorthand, you can also create an anonymous environment upon activation if it does not
already exist:
Anonymous specs can be created in place using the command:
.. code-block:: console
$ spack env activate --create ./my_env
For convenience, Spack can also place an anonymous environment in a temporary directory for you:
.. code-block:: console
$ spack env activate --temp
$ spack env create -d .
In this case Spack simply creates a ``spack.yaml`` file in the requested
directory.
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Environment Sensitive Commands
@@ -460,125 +449,6 @@ Sourcing that file in Bash will make the environment available to the
user; and can be included in ``.bashrc`` files, etc. The ``loads``
file may also be copied out of the environment, renamed, etc.
.. _environment_include_concrete:
------------------------------
Included Concrete Environments
------------------------------
Spack environments can be created from information in already
established environments. You can think of this as a combination of existing
environments. Spack gathers information from the existing environments'
``spack.lock`` files and uses it during the creation of the included concrete
environment. When an included concrete environment is created, it generates
a ``spack.lock`` file for the newly created environment.
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Creating included environments
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
To create a combined concrete environment, you must have at least one existing
concrete environment. You will use the command ``spack env create`` with the
argument ``--include-concrete`` followed by the name or path of the environment
you'd like to include. Here is an example of how to create a combined environment
from the command line.
.. code-block:: console
$ spack env create myenv
$ spack -e myenv add python
$ spack -e myenv concretize
$ spack env create --include-concrete myenv included_env
You can also include an environment directly in the ``spack.yaml`` file. This
involves adding the ``include_concrete`` key to the YAML, followed by the
absolute paths to the independent environments.
.. code-block:: yaml
spack:
specs: []
concretizer:
unify: true
include_concrete:
- /absolute/path/to/environment1
- /absolute/path/to/environment2
Once the ``spack.yaml`` has been updated you must concretize the environment to
get the concrete specs from the included environments.
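For example:

.. code-block:: console

   $ spack -e included_env concretize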
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Updating an included environment
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
If changes were made to the base environment and you want them reflected in the
included environment, you will need to reconcretize both the base environment and the
included environment for the change to take effect. For example:
.. code-block:: console
$ spack env create myenv
$ spack -e myenv add python
$ spack -e myenv concretize
$ spack env create --include-concrete myenv included_env
$ spack -e myenv find
==> In environment myenv
==> Root specs
python
==> 0 installed packages
$ spack -e included_env find
==> In environment included_env
==> No root specs
==> Included specs
python
==> 0 installed packages
Here we see that ``included_env`` has access to the python package through
the ``myenv`` environment. But if we were to add another spec to ``myenv``,
``included_env`` will not be able to access the new information.
.. code-block:: console
$ spack -e myenv add perl
$ spack -e myenv concretize
$ spack -e myenv find
==> In environment myenv
==> Root specs
perl python
==> 0 installed packages
$ spack -e included_env find
==> In environment included_env
==> No root specs
==> Included specs
python
==> 0 installed packages
It isn't until you run the ``spack concretize`` command that the combined
environment will get the updated information from the reconcretized base environment.
.. code-block:: console
$ spack -e included_env concretize
$ spack -e included_env find
==> In environment included_env
==> No root specs
==> Included specs
perl python
==> 0 installed packages
.. _environment-configuration:
------------------------
@@ -930,85 +800,32 @@ For example, the following environment has three root packages:
This allows for a much-needed reduction in redundancy between packages
and constraints.
----------------
Filesystem Views
----------------
-----------------
Environment Views
-----------------
Spack Environments can have an associated filesystem view, which is a directory
with a more traditional structure ``<view>/bin``, ``<view>/lib``, ``<view>/include``
in which all files of the installed packages are linked.
By default a view is created for each environment, thanks to the ``view: true``
option in the ``spack.yaml`` manifest file:
.. code-block:: yaml
spack:
specs: [perl, python]
view: true
The view is created in a hidden directory ``.spack-env/view`` relative to the environment.
If you've used ``spack env activate``, you may have already interacted with this view. Spack
prepends its ``<view>/bin`` dir to ``PATH`` when the environment is activated, so that
you can directly run executables from all installed packages in the environment.
Views are highly customizable: you can control where they are put, modify their structure,
include and exclude specs, change how files are linked, and you can even generate multiple
views for a single environment.
Spack Environments can define filesystem views, which provide a direct access point
for software similar to the directory hierarchy that might exist under ``/usr/local``.
Filesystem views are updated every time the environment is written out to the lock
file ``spack.lock``, so the concrete environment and the view are always compatible.
The files of the view's installed packages are brought into the view by symbolic or
hard links, referencing the original Spack installation, or by copy.
.. _configuring_environment_views:
^^^^^^^^^^^^^^^^^^^^^^^^^^
Minimal view configuration
^^^^^^^^^^^^^^^^^^^^^^^^^^
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Configuration in ``spack.yaml``
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
The minimal configuration
.. code-block:: yaml
spack:
# ...
view: true
lets Spack generate a single view with default settings under the
``.spack-env/view`` directory of the environment.
Another short way to configure a view is to specify just where to put it:
.. code-block:: yaml
spack:
# ...
view: /path/to/view
Views can also be disabled by setting ``view: false``.
^^^^^^^^^^^^^^^^^^^^^^^^^^^
Advanced view configuration
^^^^^^^^^^^^^^^^^^^^^^^^^^^
One or more **view descriptors** can be defined under ``view``, keyed by a name.
The example from the previous section with ``view: /path/to/view`` is equivalent
to defining a view descriptor named ``default`` with a ``root`` attribute:
.. code-block:: yaml
spack:
# ...
view:
default: # name of the view
root: /path/to/view # view descriptor attribute
The ``default`` view descriptor name is special: when you ``spack env activate`` your
environment, this view will be used to update (among other things) your ``PATH``
variable.
View descriptors must contain the root of the view, and optionally projections,
``select`` and ``exclude`` lists and link information via ``link`` and
The Spack Environment manifest file has a top-level keyword
``view``. Each entry under that heading is a **view descriptor**, headed
by a name. Any number of views may be defined under the ``view`` heading.
The view descriptor contains the root of the view, and
optionally the projections for the view, ``select`` and
``exclude`` lists for the view and link information via ``link`` and
``link_type``.
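Here is a sketch of a single descriptor using these attributes (the root path,
specs, and link settings are illustrative):

.. code-block:: yaml

   spack:
     # ...
     view:
       default:
         root: /path/to/basic_view
         select: [mpich]
         exclude: ['%gcc@4.4.7']
         link: roots
         link_type: symlink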
As a more advanced example, in the following manifest
For example, in the following manifest
file snippet we define a view named ``mpis``, rooted at
``/path/to/view`` in which all projections use the package name,
version, and compiler name to determine the path for a given
@@ -1053,10 +870,59 @@ of ``hardlink`` or ``copy``.
when the environment is not activated, and linked libraries will be located
*outside* of the view thanks to rpaths.
There are two shorthands for environments with a single view. If the
environment at ``/path/to/env`` has a single view, with a root at
``/path/to/env/.spack-env/view``, with default selection and exclusion
and the default projection, we can put ``view: True`` in the
environment manifest. Similarly, if the environment has a view with a
different root, but default selection, exclusion, and projections, the
manifest can say ``view: /path/to/view``. These views are
automatically named ``default``, so that
.. code-block:: yaml
spack:
# ...
view: True
is equivalent to
.. code-block:: yaml
spack:
# ...
view:
default:
root: .spack-env/view
and
.. code-block:: yaml
spack:
# ...
view: /path/to/view
is equivalent to
.. code-block:: yaml
spack:
# ...
view:
default:
root: /path/to/view
By default, Spack environments are configured with ``view: True`` in
the manifest. Environments can be configured without views using
``view: False``. For backwards compatibility reasons, environments
with no ``view`` key are treated the same as ``view: True``.
From the command line, the ``spack env create`` command takes an
argument ``--with-view [PATH]`` that sets the path for a single, default
view. If no path is specified, the default path is used (``view:
true``). The argument ``--without-view`` can be used to create an
True``). The argument ``--without-view`` can be used to create an
environment without any view configured.
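For example (environment names are illustrative):

.. code-block:: console

   $ spack env create --with-view /path/to/view myenv
   $ spack env create --without-view myenv2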
The ``spack env view`` command can be used to manage the views
@@ -1122,18 +988,11 @@ the projection under ``all`` before reaching those entries.
Activating environment views
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
The ``spack env activate <env>`` has two effects:
1. It activates the environment so that further Spack commands such
as ``spack install`` will run in the context of the environment.
2. It activates the view so that environment variables such as
``PATH`` are updated to include the view.
Without further arguments, the ``default`` view of the environment is
activated. If a view with a different name has to be activated,
``spack env activate --with-view <name> <env>`` can be
used instead. You can also activate the environment without modifying
further environment variables using ``--without-view``.
The ``spack env activate`` command will put the default view for the
environment into the user's path, in addition to activating the
environment for Spack commands. The arguments ``-v,--with-view`` and
``-V,--without-view`` can be used to tune this behavior. The default
behavior is to activate with the environment view if there is one.
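For example, to activate an environment together with a view named ``mpis``
(the names are illustrative):

.. code-block:: console

   $ spack env activate --with-view mpis myenv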
The environment variables affected by the ``spack env activate``
command and the paths that are used to update them are determined by
@@ -1156,8 +1015,8 @@ relevant variable if the path exists. For this reason, it is not
recommended to use non-default projections with the default view of an
environment.
The ``spack env deactivate`` command will remove the active view of
the Spack environment from the user's environment variables.
The ``spack env deactivate`` command will remove the default view of
the environment from the user's path.
.. _env-generate-depfile:
@@ -1174,7 +1033,7 @@ other targets to depend on the environment installation.
A typical workflow is as follows:
.. code-block:: console
.. code:: console
spack env create -d .
spack -e . add perl
@@ -1267,7 +1126,7 @@ its dependencies. This can be useful when certain flags should only apply to
dependencies. Below we show a use case where a spec is installed with verbose
output (``spack install --verbose``) while its dependencies are installed silently:
.. code-block:: console
.. code:: console
$ spack env depfile -o Makefile
@@ -1289,7 +1148,7 @@ This can be accomplished through the generated ``[<prefix>/]SPACK_PACKAGE_IDS``
variable. Assuming we have an active and concrete environment, we generate the
associated ``Makefile`` with a prefix ``example``:
.. code-block:: console
.. code:: console
$ spack env depfile -o env.mk --make-prefix example
@@ -1316,7 +1175,7 @@ index once every package is pushed. Note how this target uses the generated
example/push/%: example/install/%
@mkdir -p $(dir $@)
$(info About to push $(SPEC) to a buildcache)
$(SPACK) -e . buildcache push --only=package $(BUILDCACHE_DIR) /$(HASH)
$(SPACK) -e . buildcache push --allow-root --only=package $(BUILDCACHE_DIR) /$(HASH)
@touch $@
push: $(addprefix example/push/,$(example/SPACK_PACKAGE_IDS))

View File

@@ -478,13 +478,6 @@ prefix, you can add them to the ``extra_attributes`` field. Similarly,
all other fields from the compilers config can be added to the
``extra_attributes`` field for an external representing a compiler.
Note that the format for the ``paths`` field in the
``extra_attributes`` section is different than in the ``compilers``
config. For compilers configured as external packages, the section is
named ``compilers`` and the dictionary maps language names (``c``,
``cxx``, ``fortran``) to paths, rather than using the names ``cc``,
``fc``, and ``f77``.
.. code-block:: yaml
packages:
@@ -500,10 +493,11 @@ named ``compilers`` and the dictionary maps language names (``c``,
- spec: llvm+clang@15.0.0 arch=linux-rhel8-skylake
prefix: /usr
extra_attributes:
compilers:
c: /usr/bin/clang-with-suffix
paths:
cc: /usr/bin/clang-with-suffix
cxx: /usr/bin/clang++-with-extra-info
fortran: /usr/bin/gfortran
fc: /usr/bin/gfortran
f77: /usr/bin/gfortran
extra_rpaths:
- /usr/lib/llvm/
@@ -1364,6 +1358,187 @@ This will write the private key to the file `dinosaur.priv`.
or for help on an issue or the Spack slack.
.. _cray-support:
-------------
Spack on Cray
-------------
Spack differs slightly when used on a Cray system. The architecture spec
can differentiate between the front-end and back-end processor and operating system.
For example, on Edison at NERSC, the back-end target processor
is "Ivy Bridge", so you can specify to use the back-end this way:
.. code-block:: console
$ spack install zlib target=ivybridge
You can also use the operating system to build against the back-end:
.. code-block:: console
$ spack install zlib os=CNL10
Notice that the name includes both the operating system name and the major
version number concatenated together.
Alternatively, if you want to build something for the front-end,
you can specify the front-end target processor. The processor for a login node
on Edison is "Sandy bridge" so we specify on the command line like so:
.. code-block:: console
$ spack install zlib target=sandybridge
And the front-end operating system is:
.. code-block:: console
$ spack install zlib os=SuSE11
^^^^^^^^^^^^^^^^^^^^^^^
Cray compiler detection
^^^^^^^^^^^^^^^^^^^^^^^
Spack can detect compilers using two methods. For the front-end, we treat
everything the same. The difference lies in back-end compiler detection.
Back-end compiler detection is made via the Tcl module avail command.
Once it detects the compiler it writes the appropriate PrgEnv and compiler
module name to compilers.yaml and sets the paths to each compiler with Cray's
compiler wrapper names (i.e. cc, CC, ftn). During build time, Spack will load
the correct PrgEnv and compiler module and will call the appropriate wrapper.
The compilers.yaml config file will also differ. There is a
modules section that is filled with the compiler's Programming Environment
and module name. On other systems, this field is empty []:
.. code-block:: yaml
- compiler:
modules:
- PrgEnv-intel
- intel/15.0.109
As mentioned earlier, the compiler paths will look different on a Cray system.
Since most compilers are invoked using cc, CC and ftn, the paths for each
compiler are replaced with their respective Cray compiler wrapper names:
.. code-block:: yaml
paths:
cc: cc
cxx: CC
f77: ftn
fc: ftn
These wrapper names are used in place of explicit paths to the compiler
executables. This allows Spack to call the Cray compiler wrappers during build time.
For more on compiler configuration, check out :ref:`compiler-config`.
Spack sets the default Cray link type to dynamic, to better match other
platforms. Individual packages can enable static linking (which is the
default outside of Spack on Cray systems) using the ``-static`` flag.
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Setting defaults and using Cray modules
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
If you want to use the default compilers for each PrgEnv and also be able
to load Cray external modules, you will need to set up a ``packages.yaml``.
Here's an example of an external configuration for Cray modules:
.. code-block:: yaml
packages:
mpich:
externals:
- spec: "mpich@7.3.1%gcc@5.2.0 arch=cray_xc-haswell-CNL10"
modules:
- cray-mpich
- spec: "mpich@7.3.1%intel@16.0.0.109 arch=cray_xc-haswell-CNL10"
modules:
- cray-mpich
all:
providers:
mpi: [mpich]
This tells Spack to load the cray-mpich module into the environment for any
package that depends on mpi. You can then use whatever environment variables,
libraries, etc., are brought into the environment via module load.
.. note::
For Cray-provided packages, it is best to use ``modules:`` instead of ``prefix:``
in ``packages.yaml``, because the Cray Programming Environment heavily relies on
modules (e.g., loading the ``cray-mpich`` module adds MPI libraries to the
compiler wrapper link line).
You can set the default compiler that Spack can use for each compiler type.
If you want to use the Cray defaults, then set them under ``all:`` in packages.yaml.
In the compiler field, set the compiler specs in your order of preference.
Whenever you build with that compiler type, Spack will concretize to that version.
Here is an example of a full ``packages.yaml`` used at NERSC:
.. code-block:: yaml
packages:
mpich:
externals:
- spec: "mpich@7.3.1%gcc@5.2.0 arch=cray_xc-CNL10-ivybridge"
modules:
- cray-mpich
- spec: "mpich@7.3.1%intel@16.0.0.109 arch=cray_xc-SuSE11-ivybridge"
modules:
- cray-mpich
buildable: False
netcdf:
externals:
- spec: "netcdf@4.3.3.1%gcc@5.2.0 arch=cray_xc-CNL10-ivybridge"
modules:
- cray-netcdf
- spec: "netcdf@4.3.3.1%intel@16.0.0.109 arch=cray_xc-CNL10-ivybridge"
modules:
- cray-netcdf
buildable: False
hdf5:
externals:
- spec: "hdf5@1.8.14%gcc@5.2.0 arch=cray_xc-CNL10-ivybridge"
modules:
- cray-hdf5
- spec: "hdf5@1.8.14%intel@16.0.0.109 arch=cray_xc-CNL10-ivybridge"
modules:
- cray-hdf5
buildable: False
all:
compiler: [gcc@5.2.0, intel@16.0.0.109]
providers:
mpi: [mpich]
Here we tell Spack to use version 5.2.0 whenever we build with gcc, and
version 16.0.0.109 whenever we build with the Intel compilers. We add a spec
for each compiler type for each of the Cray modules. This ensures that for each
compiler on our system we can use the corresponding external module.
For more on external packages check out the section :ref:`sec-external-packages`.
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Using Linux containers on Cray machines
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Spack uses environment variables particular to the Cray programming
environment to determine which systems are Cray platforms. These
environment variables may be propagated into containers that are not
using the Cray programming environment.
To ensure that Spack does not autodetect the Cray programming
environment, unset the environment variable ``MODULEPATH``. This
will cause Spack to treat a linux container on a Cray system as a base
linux distro.
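For example, inside the container (a sketch; ``spack arch --platform`` prints
the platform Spack detected):

.. code-block:: console

   $ unset MODULEPATH
   $ spack arch --platform
   linux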
.. _windows_support:
----------------

View File

@@ -2344,27 +2344,6 @@ you set ``parallel`` to ``False`` at the package level, then each call
to ``make()`` will be sequential by default, but packagers can call
``make(parallel=True)`` to override it.
Note that the ``--jobs`` option works out of the box for all standard
build systems. If you are using a non-standard build system instead, you
can use the variable ``make_jobs`` to extract the number of jobs specified
by the ``--jobs`` option:
.. code-block:: python
:emphasize-lines: 7, 11
:linenos:
class Xios(Package):
...
def install(self, spec, prefix):
...
options = [
...
'--jobs', str(make_jobs),
]
...
make_xios = Executable("./make_xios")
make_xios(*options)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Install-level build parallelism
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -5194,6 +5173,12 @@ installed executable. The check is implemented as follows:
reframe = Executable(self.prefix.bin.reframe)
reframe("-l")
.. warning::
The API for adding tests is not yet considered stable and may change
in future releases.
""""""""""""""""""""""""""""""""
Checking build-time test results
""""""""""""""""""""""""""""""""
@@ -5231,42 +5216,38 @@ be left in the build stage directory as illustrated below:
Stand-alone tests
^^^^^^^^^^^^^^^^^
While build-time tests are integrated with the installation process, stand-alone
While build-time tests are integrated with the build process, stand-alone
tests are expected to run days, weeks, even months after the software is
installed. The goal is to provide a mechanism for gaining confidence that
packages work as installed **and** *continue* to work as the underlying
software evolves. Packages can add and inherit stand-alone tests. The
``spack test`` command is used for stand-alone testing.
``spack test`` command is used to manage stand-alone testing.
.. admonition:: Stand-alone test methods should complete within a few minutes.
.. note::
Execution speed is important since these tests are intended to quickly
assess whether installed specs work on the system. Spack cannot spare
resources for more extensive testing of packages included in CI stacks.
assess whether installed specs work on the system. Consequently, they
should run relatively quickly -- as in on the order of at most a few
minutes -- while ideally executing all, or at least key aspects of the
installed software.
Consequently, stand-alone tests should run relatively quickly -- as in
on the order of at most a few minutes -- while testing at least key aspects
of the installed software. Save more extensive testing for other tools.
.. note::
Failing stand-alone tests indicate problems with the installation and,
therefore, there is no reason to proceed with more resource-intensive
tests until those have been investigated.
Passing stand-alone tests indicate that more thorough testing, such
as running extensive unit or regression tests, or tests that run at
scale can proceed without wasting resources on a problematic installation.
Tests are defined in the package using methods with names beginning ``test_``.
This allows Spack to support multiple independent checks, or parts. Files
needed for testing, such as source, data, and expected outputs, may be saved
from the build and or stored with the package in the repository. Regardless
of origin, these files are automatically copied to the spec's test stage
directory prior to execution of the test method(s). Spack also provides helper
functions to facilitate common processing.
.. tip::
**The status of stand-alone tests can be used to guide follow-up testing efforts.**
Passing stand-alone tests justify performing more thorough testing, such
as running extensive unit or regression tests or tests that run at scale,
when available. These tests are outside of the scope of Spack packaging.
Failing stand-alone tests indicate problems with the installation and,
therefore, no reason to proceed with more resource-intensive tests until
the failures have been investigated.
directory prior to execution of the test method(s). Spack also provides some
helper functions to facilitate processing.
.. _configure-test-stage:
@@ -5274,26 +5255,30 @@ functions to facilitate common processing.
Configuring the test stage directory
""""""""""""""""""""""""""""""""""""
Stand-alone tests utilize a test stage directory to build, run, and track
tests in the same way Spack uses a build stage directory to install software.
The default test stage root directory, ``$HOME/.spack/test``, is defined in
:ref:`config.yaml <config-yaml>`. This location is customizable by adding or
changing the ``test_stage`` path such that:
Stand-alone tests utilize a test stage directory for building, running,
and tracking results in the same way Spack uses a build stage directory.
The default test stage root directory, ``~/.spack/test``, is defined in
:ref:`etc/spack/defaults/config.yaml <config-yaml>`. This location is
customizable by adding or changing the ``test_stage`` path in the high-level
``config`` of the appropriate ``config.yaml`` file such that:
.. code-block:: yaml
config:
test_stage: /path/to/test/stage
Packages can use the ``self.test_suite.stage`` property to access the path.
Packages can use the ``self.test_suite.stage`` property to access this setting.
Other package properties that provide access to spec-specific subdirectories
and files are described in :ref:`accessing staged files <accessing-files>`.
.. admonition:: Each spec being tested has its own test stage directory.
.. note::
The ``config:test_stage`` option is the path to the root of a
**test suite**'s stage directories.
The test stage path is the root directory for the **entire suite**.
In other words, it is the root directory for **all specs** being
tested by the ``spack test run`` command. Each spec gets its own
stage subdirectory. Use ``self.test_suite.test_dir_for_spec(self.spec)``
to access the spec-specific test stage directory.
Other package properties that provide paths to spec-specific subdirectories
and files are described in :ref:`accessing-files`.
.. _adding-standalone-tests:
@@ -5306,144 +5291,61 @@ Test recipes are defined in the package using methods with names beginning
Each method has access to the information Spack tracks on the package, such
as options, compilers, and dependencies, supporting the customization of tests
to the build. Standard python ``assert`` statements and other error reporting
mechanisms can be used. These exceptions are automatically caught and reported
mechanisms are available. Such exceptions are automatically caught and reported
as test failures.
Each test method is an *implicit test part* named by the method. Its purpose
is the method's docstring. Providing a meaningful purpose for the test gives
context that can aid debugging. Spack outputs both the name and purpose at the
start of test execution so it's also important that the docstring/purpose be
brief.
.. tip::
We recommend naming test methods so it is clear *what* is being tested.
For example, if a test method is building and or running an executable
called ``example``, then call the method ``test_example``. This, together
with a similarly meaningful test purpose, will aid test comprehension,
debugging, and maintainability.
Stand-alone tests run in an environment that provides access to information
on the installed software, such as build options, dependencies, and compilers.
Build options and dependencies are accessed using the same spec checks used
by build recipes. Examples of checking :ref:`variant settings <variants>` and
:ref:`spec constraints <testing-specs>` can be found at the provided links.
.. admonition:: Spack automatically sets up the test stage directory and environment.
Spack automatically creates the test stage directory and copies
relevant files *prior to* running tests. It can also ensure build
dependencies are available **if** necessary.
The path to the test stage is configurable (see :ref:`configure-test-stage`).
Files that Spack knows to copy are those saved from the build (see
:ref:`cache_extra_test_sources`) and those added to the package repository
(see :ref:`cache_custom_files`).
Spack will use the value of the ``test_requires_compiler`` property to
determine whether it needs to also set up build dependencies (see
:ref:`test-build-tests`).
The ``MyPackage`` package below provides two basic test examples:
``test_example`` and ``test_example2``. The first runs the installed
``example`` and ensures its output contains an expected string. The second
runs ``example2`` without checking output so is only concerned with confirming
the executable runs successfully. If the installed spec is not expected to have
``example2``, then the check at the top of the method will raise a special
``SkipTest`` exception, which is captured to facilitate reporting skipped test
parts to tools like CDash.
Each test method is an implicit test part named by the method and whose
purpose is the method's docstring. Providing a purpose gives context for
aiding debugging. A test method may contain embedded test parts. Spack
outputs the test name and purpose prior to running each test method and
any embedded test parts. For example, ``MyPackage`` below provides two basic
examples of installation tests: ``test_always_fails`` and ``test_example``.
As the name indicates, the first always fails. The second simply runs the
installed example.
.. code-block:: python
class MyPackage(Package):
...
def test_always_fails(self):
"""use assert to always fail"""
assert False
def test_example(self):
"""ensure installed example works"""
expected = "Done."
"""run installed example"""
example = which(self.prefix.bin.example)
# Capture stdout and stderr from running the Executable
# and check that the expected output was produced.
out = example(output=str.split, error=str.split)
assert expected in out, f"Expected '{expected}' in the output"
def test_example2(self):
"""run installed example2"""
if self.spec.satisfies("@:1.0"):
# Raise SkipTest to ensure flagging the test as skipped for
# test reporting purposes.
raise SkipTest("Test is only available for v1.1 on")
example2 = which(self.prefix.bin.example2)
example2()
example()
Output showing the identification of each test part after running the tests
is illustrated below.
.. code-block:: console
$ spack test run --alias mypackage mypackage@2.0
$ spack test run --alias mypackage mypackage@1.0
==> Spack test mypackage
...
$ spack test results -l mypackage
==> Results for test suite 'mypackage':
...
==> [2024-03-10-16:03:56.625439] test: test_example: ensure installed example works
==> [2023-03-10-16:03:56.625204] test: test_always_fails: use assert to always fail
...
PASSED: MyPackage::test_example
==> [2024-03-10-16:03:56.625439] test: test_example2: run installed example2
FAILED
==> [2023-03-10-16:03:56.625439] test: test_example: run installed example
...
PASSED: MyPackage::test_example2
PASSED
.. admonition:: Do NOT implement tests that must run in the installation prefix.
Use of the package spec's installation prefix for building and running
tests is **strongly discouraged**. Doing so causes permission errors for
shared spack instances *and* facilities that install the software in
read-only file systems or directories.
.. note::
Instead, start these test methods by explicitly copying the needed files
from the installation prefix to the test stage directory. Note the test
stage directory is the current directory when the test is executed with
the ``spack test run`` command.
If ``MyPackage`` were a recipe for a library, the tests should build
an example or test program that is then executed.
.. admonition:: Test methods for library packages should build test executables.
A test method can include test parts using the ``test_part`` context manager.
Each part is treated as an independent check to allow subsequent test parts
to execute even after a test part fails.
Stand-alone tests for library packages *should* build test executables
that utilize the *installed* library. Doing so ensures the tests follow
a similar build process that users of the library would follow.
For more information on how to do this, see :ref:`test-build-tests`.
.. tip::
If you want to see more examples from packages with stand-alone tests, run
``spack pkg grep "def\stest" | sed "s/\/package.py.*//g" | sort -u``
from the command line to get a list of the packages.
.. _adding-standalone-test-parts:
"""""""""""""""""""""""""""""
Adding stand-alone test parts
"""""""""""""""""""""""""""""
Sometimes dependencies between steps of a test lend themselves to being
broken into parts. Tracking the pass/fail status of each part may aid
debugging. Spack provides a ``test_part`` context manager for use within
test methods.
Each test part is independently run, tracked, and reported. Test parts are
executed in the order they appear. If one fails, subsequent test parts are
still performed even if they would also fail. This allows tools like CDash
to track and report the status of test parts across runs. The pass/fail status
of the enclosing test is derived from the statuses of the embedded test parts.
.. admonition:: Test method and test part names **must** be unique.
Test results reporting requires that test methods and embedded test parts
within a package have unique names.
.. _test-part:
The signature for ``test_part`` is:
@@ -5465,68 +5367,40 @@ where each argument has the following meaning:
* ``work_dir`` is the path to the directory in which the test will run.
The default of ``None``, or ``"."``, corresponds to the spec's test
stage (i.e., ``self.test_suite.test_dir_for_spec(self.spec)``).
stage (i.e., ``self.test_suite.test_dir_for_spec(self.spec)``).
.. admonition:: Start test part names with the name of the enclosing test.
.. admonition:: Tests should **not** run under the installation directory.
We **highly recommend** starting the names of test parts with the name
of the enclosing test. Doing so helps with the comprehension, readability
and debugging of test results.
Use of the package spec's installation directory for building and running
tests is **strongly** discouraged. Doing so causes permission errors for
shared spack instances *and* facilities that install the software in
read-only file systems or directories.
Suppose ``MyPackage`` installs multiple executables that need to run in a
specific order since the outputs from one are inputs of others. Further suppose
we want to add an integration test that runs the executables in order. We can
accomplish this goal by implementing a stand-alone test method consisting of
test parts for each executable as follows:
Suppose ``MyPackage`` actually installs two examples we want to use for tests.
These checks can be implemented as separate checks or, as illustrated below,
embedded test parts.
.. code-block:: python
class MyPackage(Package):
...
def test_series(self):
"""run setup, perform, and report"""
def test_example(self):
"""run installed examples"""
for example in ["ex1", "ex2"]:
with test_part(
self,
f"test_example_{example}",
purpose=f"run installed {example}",
):
exe = which(join_path(self.prefix.bin, example))
exe()
with test_part(self, "test_series_setup", purpose="setup operation"):
exe = which(self.prefix.bin.setup)
exe()
with test_part(self, "test_series_run", purpose="perform operation"):
exe = which(self.prefix.bin.run)
exe()
with test_part(self, "test_series_report", purpose="generate report"):
exe = which(self.prefix.bin.report)
exe()
The result is ``test_series`` runs the following executable in order: ``setup``,
``run``, and ``report``. In this case no options are passed to any of the
executables and no outputs from running them are checked. Consequently, the
implementation could be simplified with a for-loop as follows:
.. code-block:: python
class MyPackage(Package):
...
def test_series(self):
"""execute series setup, run, and report"""
for exe, reason in [
("setup", "setup operation"),
("run", "perform operation"),
("report", "generate report")
]:
with test_part(self, f"test_series_{exe}", purpose=reason):
exe = which(self.prefix.bin.join(exe))
exe()
In both cases, since we're using a context manager, each test part in
``test_series`` will execute regardless of the status of the other test
parts.
Now let's look at the output from running the stand-alone tests where
the second test part, ``test_series_run``, fails.
In this case, there will be an implicit test part for ``test_example``
and separate sub-parts for ``ex1`` and ``ex2``. The second sub-part
will be executed regardless of whether the first passes. The test
log for a run where the first executable fails and the second passes
is illustrated below.
.. code-block:: console
@@ -5536,68 +5410,50 @@ the second test part, ``test_series_run``, fails.
$ spack test results -l mypackage
==> Results for test suite 'mypackage':
...
==> [2024-03-10-16:03:56.625204] test: test_series: execute series setup, run, and report
==> [2024-03-10-16:03:56.625439] test: test_series_setup: setup operation
==> [2023-03-10-16:03:56.625204] test: test_example: run installed examples
==> [2023-03-10-16:03:56.625439] test: test_example_ex1: run installed ex1
...
PASSED: MyPackage::test_series_setup
==> [2024-03-10-16:03:56.625555] test: test_series_run: perform operation
FAILED
==> [2023-03-10-16:03:56.625555] test: test_example_ex2: run installed ex2
...
FAILED: MyPackage::test_series_run
==> [2024-03-10-16:03:57.003456] test: test_series_report: generate report
...
FAILED: MyPackage::test_series_report
FAILED: MyPackage::test_series
PASSED
...
Since test parts depended on the success of previous parts, we see that the
failure of one results in the failure of subsequent checks and the overall
result of the test method, ``test_series``, is failure.
.. warning::
.. tip::
Test results reporting requires that each test method and embedded
test part for a package have a unique name.
If you want to see more examples from packages using ``test_part``, run
``spack pkg grep "test_part(" | sed "s/\/package.py.*//g" | sort -u``
from the command line to get a list of the packages.
Stand-alone tests run in an environment that provides access to information
Spack has on how the software was built, such as build options, dependencies,
and compilers. Build options and dependencies are accessed with the normal
spec checks. Examples of checking :ref:`variant settings <variants>` and
:ref:`spec constraints <testing-specs>` can be found at the provided links.
Accessing compilers in stand-alone tests that are used by the build requires
setting a package property as described :ref:`below <test-compilation>`.
.. _test-build-tests:
"""""""""""""""""""""""""""""""""""""
Building and running test executables
"""""""""""""""""""""""""""""""""""""
.. _test-compilation:
.. admonition:: Re-use build-time sources and (small) input data sets when possible.
"""""""""""""""""""""""""
Enabling test compilation
"""""""""""""""""""""""""
We **highly recommend** re-using build-time test sources and pared down
input files for testing installed software. These files are easier
to keep synchronized with software capabilities when they reside
within the software's repository. More information on saving files from
the installation process can be found at :ref:`cache_extra_test_sources`.
If you want to build and run binaries in tests, then you'll need to tell
Spack to load the package's compiler configuration. This is accomplished
by setting the package's ``test_requires_compiler`` property to ``True``.
If that is not possible, you can add test-related files to the package
repository (see :ref:`cache_custom_files`). It will be important to
remember to maintain them so they work across listed or supported versions
of the package.
Setting the property to ``True`` ensures access to the compiler through
canonical environment variables (e.g., ``CC``, ``CXX``, ``FC``, ``F77``).
It also gives access to build dependencies like ``cmake`` through their
``spec objects`` (e.g., ``self.spec["cmake"].prefix.bin.cmake``).
Packages that build libraries are good examples of cases where you'll want
to build test executables from the installed software before running them.
Doing so requires you to let Spack know it needs to load the package's
compiler configuration. This is accomplished by setting the package's
``test_requires_compiler`` property to ``True``.
.. note::
.. admonition:: ``test_requires_compiler = True`` is required to build test executables.
The ``test_requires_compiler`` property should be added at the top of
the package near other attributes, such as the ``homepage`` and ``url``.
Setting the property to ``True`` ensures access to the compiler through
canonical environment variables (e.g., ``CC``, ``CXX``, ``FC``, ``F77``).
It also gives access to build dependencies like ``cmake`` through their
``spec objects`` (e.g., ``self.spec["cmake"].prefix.bin.cmake`` for the
path or ``self.spec["cmake"].command`` for the ``Executable`` instance).
Be sure to add the property at the top of the package class under other
properties like the ``homepage``.
The example below, which ignores how ``cxx-example.cpp`` is acquired,
illustrates the basic process of compiling a test executable using the
installed library before running it.
Below illustrates using this feature to compile an example.
.. code-block:: python
@@ -5621,22 +5477,28 @@ installed library before running it.
cxx_example = which(exe)
cxx_example()
Typically the files used to build and or run test executables are either
cached from the installation (see :ref:`cache_extra_test_sources`) or added
to the package repository (see :ref:`cache_custom_files`). There is nothing
preventing the use of both.
.. _cache_extra_test_sources:
""""""""""""""""""""""""""""""""""""
Saving build- and install-time files
""""""""""""""""""""""""""""""""""""
"""""""""""""""""""""""
Saving build-time files
"""""""""""""""""""""""
You can use the ``cache_extra_test_sources`` helper routine to copy
directories and or files from the source build stage directory to the
package's installation directory. Spack will automatically copy these
files for you when it sets up the test stage directory and before it
begins running the tests.
.. note::
We highly recommend re-using build-time test sources and pared down
input files for testing installed software. These files are easier
to keep synchronized with software capabilities since they reside
within the software's repository.
If that is not possible, you can add test-related files to the package
repository (see :ref:`adding custom files <cache_custom_files>`). It
will be important to maintain them so they work across listed or supported
versions of the package.
You can use the ``cache_extra_test_sources`` helper to copy directories
and or files from the source build stage directory to the package's
installation directory.
The signature for ``cache_extra_test_sources`` is:
@@ -5651,69 +5513,46 @@ where each argument has the following meaning:
* ``srcs`` is a string *or* a list of strings corresponding to the
paths of subdirectories and or files needed for stand-alone testing.
.. warning::
The paths must be relative to the staged source directory. Contents of
subdirectories and files are copied to a special test cache subdirectory
of the installation prefix. They are automatically copied to the appropriate
relative paths under the test stage directory prior to executing stand-alone
tests.
Paths provided in the ``srcs`` argument **must be relative** to the
staged source directory. They will be copied to the equivalent relative
location under the test stage directory prior to test execution.
Contents of subdirectories and files are copied to a special test cache
subdirectory of the installation prefix. They are automatically copied to
the appropriate relative paths under the test stage directory prior to
executing stand-alone tests.
.. tip::
*Perform test-related conversions once when copying files.*
If one or more of the copied files needs to be modified to reference
the installed software, it is recommended that those changes be made
to the cached files **once** in the post-``install`` copy method
**after** the call to ``cache_extra_test_sources``. This will reduce
the amount of unnecessary work in the test method **and** avoid problems
running stand-alone tests in shared instances and facility deployments.
The ``filter_file`` function can be quite useful for such changes
(see :ref:`file-filtering`).
Below is a basic example of a test that relies on files from the installation.
This package method re-uses the contents of the ``examples`` subdirectory,
which is assumed to have all of the files implemented to allow ``make`` to
compile and link ``foo.c`` and ``bar.c`` against the package's installed
library.
For example, a package method for copying everything in the ``tests``
subdirectory plus the ``foo.c`` and ``bar.c`` files from ``examples``
and using ``foo.c`` in a test method is illustrated below.
.. code-block:: python
class MyLibPackage(MakefilePackage):
class MyLibPackage(Package):
...
@run_after("install")
def copy_test_files(self):
cache_extra_test_sources(self, "examples")
srcs = ["tests",
join_path("examples", "foo.c"),
join_path("examples", "bar.c")]
cache_extra_test_sources(self, srcs)
def test_example(self):
"""build and run the examples"""
examples_dir = self.test_suite.current_test_cache_dir.examples
with working_dir(examples_dir):
make = which("make")
make()
def test_foo(self):
exe = "foo"
src_dir = self.test_suite.current_test_cache_dir.examples
with working_dir(src_dir):
cc = which(os.environ["CC"])
cc(
f"-L{self.prefix.lib}",
f"-I{self.prefix.include}",
f"{exe}.c",
"-o", exe
)
foo = which(exe)
foo()
for program in ["foo", "bar"]:
with test_part(
self,
f"test_example_{program}",
purpose=f"ensure {program} runs"
):
exe = Executable(program)
exe()
In this case, ``copy_test_files`` copies the associated files from the
build stage to the package's test cache directory under the installation
prefix. Running ``spack test run`` for the package results in Spack copying
the directory and its contents to the the test stage directory. The
``working_dir`` context manager ensures the commands within it are executed
from the ``examples_dir``. The test builds the software using ``make`` before
running each executable, ``foo`` and ``bar``, as independent test parts.
In this case, the method copies the associated files from the build
stage, **after** the software is installed, to the package's test
cache directory. Then ``test_foo`` builds ``foo`` using ``foo.c``
before running the program.
.. note::
@@ -5722,18 +5561,43 @@ running each executable, ``foo`` and ``bar``, as independent test parts.
The key to copying files for stand-alone testing at build time is use
of the ``run_after`` directive, which ensures the associated files are
copied **after** the provided build stage (``install``) when the installation
prefix **and** files are available.
copied **after** the provided build stage where the files **and**
installation prefix are available.
The test method uses the path contained in the package's
``self.test_suite.current_test_cache_dir`` property for the root directory
of the copied files. In this case, that's the ``examples`` subdirectory.
These paths are **automatically copied** from cache to the test stage
directory prior to the execution of any stand-alone tests. Tests access
the files using the ``self.test_suite.current_test_cache_dir`` property.
In our example above, test methods can use the following paths to reference
the copy of each entry listed in ``srcs``, respectively:
.. admonition:: Library packages should build stand-alone tests

   Library developers will want to build the associated tests
   against their **installed** libraries before running them.

.. note::

   While source and input files are generally recommended, binaries
   **may** also be cached by the build process. Only you, as the package
   writer or maintainer, know whether these files would be appropriate
   for testing the installed software weeks to months later.

.. tip::

   If you want to see more examples from packages that cache build files, run
   ``spack pkg grep cache_extra_test_sources | sed "s/\/package.py.*//g" | sort -u``
   from the command line to get a list of the packages.
.. _cache_custom_files:
Adding custom files
"""""""""""""""""""
Sometimes it is helpful or necessary to include custom files for building
and/or checking the results of tests as part of the package. Examples of
the types of files that might be useful are:
- test source files
- test input files
- expected test outputs
While obtaining such files from the software repository is preferred (see
:ref:`cache_extra_test_sources`), there are circumstances where doing so is
not feasible, such as when the software is not being actively maintained.
When test files cannot be obtained from the repository or there is a need
to supplement files that can, Spack supports the inclusion of additional
files under the ``test`` subdirectory of the package in the Spack repository.
The following example assumes a ``custom-example.cpp`` is saved in the
``MyLibrary`` package's ``test`` subdirectory. It also assumes the program
simply needs to be compiled and linked against the installed ``MyLibrary``
software.
.. code-block:: python
   class MyLibrary(Package):
       ...

       test_requires_compiler = True
       ...

       def test_custom_example(self):
           """build and run custom-example"""
           src_dir = self.test_suite.current_test_data_dir
           exe = "custom-example"

           with working_dir(src_dir):
               cc = which(os.environ["CC"])
               cc(
                   f"-L{self.prefix.lib}",
                   f"-I{self.prefix.include}",
                   f"{exe}.cpp",
                   "-o", exe
               )

               custom_example = Executable(exe)
               custom_example()
In this case, ``spack test run`` for the package results in Spack copying
the contents of the ``test`` subdirectory to the test stage directory path
in ``self.test_suite.current_test_data_dir`` before calling
``test_custom_example``. Use of the ``working_dir`` context manager
ensures the commands to build and run the program are performed from
within the appropriate subdirectory of the test stage.
.. _expected_test_output_from_file:
Reading expected output from a file
"""""""""""""""""""""""""""""""""""
The helper function ``get_escaped_text_output`` is available for packages
to retrieve properly formatted text from a file potentially containing
special characters.
The signature for ``get_escaped_text_output`` is, in sketch form (the exact
annotations may differ):
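
.. code-block:: python

   # Sketch of the helper in ``spack.install_test``; the annotations are
   # our assumption and may differ from the actual source.
   def get_escaped_text_output(filename: str) -> List[str]:
       """Retrieve and escape the expected text output from the file."""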
where ``filename`` is the path to the file containing the expected output.
The path provided to ``filename`` for one of the copied custom files
(:ref:`custom file <cache_custom_files>`) is in the path rooted at
``self.test_suite.current_test_data_dir``.

The example below shows how to reference both the custom database
(``packages.db``) and expected output (``dump.out``) files Spack copies
to the test stage:
.. code-block:: python

   def test_example(self):
       """check example table dump"""
       test_data_dir = self.test_suite.current_test_data_dir
       db_filename = test_data_dir.join("packages.db")
       ..
       expected = get_escaped_text_output(test_data_dir.join("dump.out"))
       for exp in expected:
           assert re.search(exp, out), f"Expected '{exp}' in output"
If the files were instead cached from installing the software, the paths to the
two files would be found under the ``self.test_suite.current_test_cache_dir``
directory as shown below:
.. code-block:: python

   def test_example(self):
       """check example table dump"""
       test_cache_dir = self.test_suite.current_test_cache_dir
       db_filename = test_cache_dir.join("packages.db")
       ..
       expected = get_escaped_text_output(test_cache_dir.join("dump.out"))
       ...
Alternatively, if both files had been installed by the software into the
``share/tests`` subdirectory of the installation prefix, the paths to the
two files would be referenced as follows:
.. code-block:: python

   def test_example(self):
       """check example table dump"""
       db_filename = self.prefix.share.tests.join("packages.db")
       ..
       expected = get_escaped_text_output(
           self.prefix.share.tests.join("dump.out")
       )
       ...
.. _check_outputs:
Comparing expected to actual outputs
""""""""""""""""""""""""""""""""""""
The ``check_outputs`` helper routine is available for packages to ensure
multiple expected outputs from running an executable are contained within
the actual outputs.
The signature for ``check_outputs`` is, again in sketch form:
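
.. code-block:: python

   # Sketch of the helper in ``spack.install_test``; the annotations are
   # our assumption and may differ from the actual source.
   def check_outputs(expected: Union[list, str], actual: str) -> None:
       """Ensure the expected outputs are contained within the actual outputs."""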
Invoking the method is the equivalent of:

.. code-block:: python

   errors = []
   for check in expected:
       if not re.search(check, actual):
           errors.append(f"Expected '{check}' in output '{actual}'")

   if errors:
       raise RuntimeError("\n ".join(errors))
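
For instance, a test might combine it with ``get_escaped_text_output`` along
these lines (a sketch; ``data_dir`` and ``out`` are placeholders for values
computed by the test):

.. code-block:: python

   expected = get_escaped_text_output(data_dir.join("expected.out"))
   check_outputs(expected, out)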
.. tip::
   If you want to see more examples from packages that use this helper, run
   ``spack pkg grep check_outputs | sed "s/\/package.py.*//g" | sort -u``
   from the command line to get a list of the packages.
.. _accessing-files:
"""""""""""""""""""""""""""""""""""""""""
Finding package- and test-related files
Accessing package- and test-related files
"""""""""""""""""""""""""""""""""""""""""
You may need to access files from one or more locations when writing
stand-alone tests. This can happen if the software's repository does not
include test source files or includes them but has no way to build the
executables using the installed headers and libraries. In these cases,
you may need to reference the files relative to one or more root directories.

The table below lists relevant path properties and provides additional
examples of their use. See :ref:`expected_test_output_from_file` for
examples of accessing files saved from the software repository, package
repository, and installation.
- ``self.test_suite.current_test_data_dir``
- ``join_path(self.test_suite.current_test_data_dir, "hello.f90")``
.. _inheriting-tests:
""""""""""""""""""""""""""""
@@ -5992,7 +5831,7 @@ maintainers provide additional stand-alone tests customized to the package.
.. warning::
   Any package that implements a test method with the same name as an
   inherited method will override the inherited method. If that is not the
   goal and you are not explicitly calling and adding functionality to
   the inherited method for the test, then make sure that all test methods
   and embedded test parts have unique test names.
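
For example (a sketch with illustrative names), giving package-specific tests
distinct names sidesteps accidental overrides:

.. code-block:: python

   class PyMyPackage(PythonPackage):
       # A distinct name preserves the inherited ``test_imports`` check.
       def test_py_my_package_cli(self):
           """run a package-specific check of the command-line tool"""
           ...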
This is already part of the boilerplate for packages created with
``spack create``.
.. _file-filtering:
^^^^^^^^^^^^^^^^^^^
Filtering functions
^^^^^^^^^^^^^^^^^^^
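
For example, ``filter_file`` rewrites matching lines in place. A minimal
sketch, assuming a cached ``Makefile`` with a hard-coded install prefix:

.. code-block:: python

   # Point the cached Makefile at the installed prefix (file name and
   # pattern are illustrative).
   filter_file(r"^PREFIX\s*=.*", f"PREFIX = {self.prefix}", "Makefile")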

View File

@@ -253,6 +253,17 @@ can easily happen if it is not updated frequently, this behavior ensures that
spack has a way to know for certain about the status of any concrete spec on
the remote mirror, but can slow down pipeline generation significantly.
The ``--optimize`` argument is experimental and runs the generated pipeline
document through a series of optimization passes designed to reduce the size
of the generated file.
The ``--dependencies`` argument is also experimental and disables what in Gitlab is
referred to as DAG scheduling, internally using the ``dependencies`` keyword
rather than ``needs`` to list dependency jobs. The drawback of using this option
is that before any job can begin, all jobs in previous stages must first
complete. The benefit is that Gitlab allows more dependencies to be listed
when using ``dependencies`` instead of ``needs``.
The optional ``--output-file`` argument should be an absolute path (including
file name) to the generated pipeline, and if not given, the default is
``./.gitlab-ci.yml``.
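
For instance, with an active environment the pipeline can be generated along
these lines (the environment name and output path are placeholders):

.. code-block:: console

   $ spack -e myenv ci generate --output-file /tmp/generated-pipeline.yml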

View File

@@ -476,3 +476,9 @@ implemented using Python's built-in `sys.path
:py:mod:`spack.repo` module implements a custom `Python importer
<https://docs.python.org/2/library/imp.html>`_.
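
For example, a custom repository can be created and registered with commands
along these lines (the path and namespace are placeholders):

.. code-block:: console

   $ spack repo create /path/to/myrepo myrepo
   $ spack repo add /path/to/myrepo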
.. warning::
   The mechanism for extending packages is not yet extensively tested,
   and extending packages across repositories imposes inter-repo
   dependencies, which may be hard to manage. Use this feature at your
   own risk, but let us know if you have a use case for it.

View File

@@ -1,13 +1,13 @@
sphinx==7.4.7
sphinx==7.2.6
sphinxcontrib-programoutput==0.17
sphinx_design==0.6.1
sphinx_design==0.5.0
sphinx-rtd-theme==2.0.0
python-levenshtein==0.25.1
docutils==0.20.1
pygments==2.18.0
urllib3==2.2.2
pytest==8.3.2
pygments==2.17.2
urllib3==2.2.1
pytest==8.2.0
isort==5.13.2
black==24.8.0
flake8==7.1.1
mypy==1.11.1
black==24.4.0
flake8==7.0.0
mypy==1.9.0

96
lib/spack/env/cc vendored
View File

@@ -174,46 +174,6 @@ preextend() {
unset IFS
}
execute() {
# dump the full command if the caller supplies SPACK_TEST_COMMAND=dump-args
if [ -n "${SPACK_TEST_COMMAND=}" ]; then
case "$SPACK_TEST_COMMAND" in
dump-args)
IFS="$lsep"
for arg in $full_command_list; do
echo "$arg"
done
unset IFS
exit
;;
dump-env-*)
var=${SPACK_TEST_COMMAND#dump-env-}
eval "printf '%s\n' \"\$0: \$var: \$$var\""
;;
*)
die "Unknown test command: '$SPACK_TEST_COMMAND'"
;;
esac
fi
#
# Write the input and output commands to debug logs if it's asked for.
#
if [ "$SPACK_DEBUG" = TRUE ]; then
input_log="$SPACK_DEBUG_LOG_DIR/spack-cc-$SPACK_DEBUG_LOG_ID.in.log"
output_log="$SPACK_DEBUG_LOG_DIR/spack-cc-$SPACK_DEBUG_LOG_ID.out.log"
echo "[$mode] $command $input_command" >> "$input_log"
IFS="$lsep"
echo "[$mode] "$full_command_list >> "$output_log"
unset IFS
fi
# Execute the full command, preserving spaces with IFS set
# to the alarm bell separator.
IFS="$lsep"; exec $full_command_list
exit
}
# Fail with a clear message if the input contains any bell characters.
if eval "[ \"\${*#*${lsep}}\" != \"\$*\" ]"; then
die "Compiler command line contains our separator ('${lsep}'). Cannot parse."
@@ -271,17 +231,12 @@ fi
# ld link
# ccld compile & link
# Note. SPACK_ALWAYS_XFLAGS are applied for all compiler invocations,
# including version checks (SPACK_XFLAGS variants are not applied
# for version checks).
command="${0##*/}"
comp="CC"
vcheck_flags=""
case "$command" in
cpp)
mode=cpp
debug_flags="-g"
vcheck_flags="${SPACK_ALWAYS_CPPFLAGS}"
;;
cc|c89|c99|gcc|clang|armclang|icc|icx|pgcc|nvc|xlc|xlc_r|fcc|amdclang|cl.exe|craycc)
command="$SPACK_CC"
@@ -289,7 +244,6 @@ case "$command" in
comp="CC"
lang_flags=C
debug_flags="-g"
vcheck_flags="${SPACK_ALWAYS_CFLAGS}"
;;
c++|CC|g++|clang++|armclang++|icpc|icpx|pgc++|nvc++|xlc++|xlc++_r|FCC|amdclang++|crayCC)
command="$SPACK_CXX"
@@ -297,7 +251,6 @@ case "$command" in
comp="CXX"
lang_flags=CXX
debug_flags="-g"
vcheck_flags="${SPACK_ALWAYS_CXXFLAGS}"
;;
ftn|f90|fc|f95|gfortran|flang|armflang|ifort|ifx|pgfortran|nvfortran|xlf90|xlf90_r|nagfor|frt|amdflang|crayftn)
command="$SPACK_FC"
@@ -305,7 +258,6 @@ case "$command" in
comp="FC"
lang_flags=F
debug_flags="-g"
vcheck_flags="${SPACK_ALWAYS_FFLAGS}"
;;
f77|xlf|xlf_r|pgf77)
command="$SPACK_F77"
@@ -313,7 +265,6 @@ case "$command" in
comp="F77"
lang_flags=F
debug_flags="-g"
vcheck_flags="${SPACK_ALWAYS_FFLAGS}"
;;
ld|ld.gold|ld.lld)
mode=ld
@@ -414,11 +365,7 @@ unset IFS
export PATH="$new_dirs"
if [ "$mode" = vcheck ]; then
full_command_list="$command"
args="$@"
extend full_command_list vcheck_flags
extend full_command_list args
execute
exec "${command}" "$@"
fi
# Darwin's linker has a -r argument that merges object files together.
@@ -775,7 +722,6 @@ case "$mode" in
cc|ccld)
case $lang_flags in
F)
extend spack_flags_list SPACK_ALWAYS_FFLAGS
extend spack_flags_list SPACK_FFLAGS
;;
esac
@@ -785,7 +731,6 @@ esac
# C preprocessor flags come before any C/CXX flags
case "$mode" in
cpp|as|cc|ccld)
extend spack_flags_list SPACK_ALWAYS_CPPFLAGS
extend spack_flags_list SPACK_CPPFLAGS
;;
esac
@@ -796,11 +741,9 @@ case "$mode" in
cc|ccld)
case $lang_flags in
C)
extend spack_flags_list SPACK_ALWAYS_CFLAGS
extend spack_flags_list SPACK_CFLAGS
;;
CXX)
extend spack_flags_list SPACK_ALWAYS_CXXFLAGS
extend spack_flags_list SPACK_CXXFLAGS
;;
esac
@@ -990,4 +933,39 @@ if [ -n "$SPACK_CCACHE_BINARY" ]; then
esac
fi
execute
# dump the full command if the caller supplies SPACK_TEST_COMMAND=dump-args
if [ -n "${SPACK_TEST_COMMAND=}" ]; then
case "$SPACK_TEST_COMMAND" in
dump-args)
IFS="$lsep"
for arg in $full_command_list; do
echo "$arg"
done
unset IFS
exit
;;
dump-env-*)
var=${SPACK_TEST_COMMAND#dump-env-}
eval "printf '%s\n' \"\$0: \$var: \$$var\""
;;
*)
die "Unknown test command: '$SPACK_TEST_COMMAND'"
;;
esac
fi
#
# Write the input and output commands to debug logs if it's asked for.
#
if [ "$SPACK_DEBUG" = TRUE ]; then
input_log="$SPACK_DEBUG_LOG_DIR/spack-cc-$SPACK_DEBUG_LOG_ID.in.log"
output_log="$SPACK_DEBUG_LOG_DIR/spack-cc-$SPACK_DEBUG_LOG_ID.out.log"
echo "[$mode] $command $input_command" >> "$input_log"
IFS="$lsep"
echo "[$mode] "$full_command_list >> "$output_log"
unset IFS
fi
# Execute the full command, preserving spaces with IFS set
# to the alarm bell separator.
IFS="$lsep"; exec $full_command_list

View File

@@ -18,7 +18,7 @@
* Homepage: https://pypi.python.org/pypi/archspec
* Usage: Labeling, comparison and detection of microarchitectures
* Version: 0.2.4 (commit 48b92512b9ce203ded0ebd1ac41b42593e931f7c)
* Version: 0.2.3 (commit 7b8fe60b69e2861e7dac104bc1c183decfcd3daf)
astunparse
----------------

View File

@@ -1,3 +1,3 @@
"""Init file to avoid namespace packages"""
__version__ = "0.2.4"
__version__ = "0.2.3"

View File

@@ -5,10 +5,9 @@
"""The "cpu" package permits to query and compare different
CPU microarchitectures.
"""
from .detect import brand_string, host
from .detect import host
from .microarchitecture import (
TARGETS,
InvalidCompilerVersion,
Microarchitecture,
UnsupportedMicroarchitecture,
generic_microarchitecture,
@@ -16,12 +15,10 @@
)
__all__ = [
"brand_string",
"host",
"TARGETS",
"InvalidCompilerVersion",
"Microarchitecture",
"UnsupportedMicroarchitecture",
"TARGETS",
"generic_microarchitecture",
"host",
"version_components",
]

View File

@@ -155,31 +155,6 @@ def _is_bit_set(self, register: int, bit: int) -> bool:
mask = 1 << bit
return register & mask > 0
def brand_string(self) -> Optional[str]:
"""Returns the brand string, if available."""
if self.highest_extension_support < 0x80000004:
return None
r1 = self.cpuid.registers_for(eax=0x80000002, ecx=0)
r2 = self.cpuid.registers_for(eax=0x80000003, ecx=0)
r3 = self.cpuid.registers_for(eax=0x80000004, ecx=0)
result = struct.pack(
"IIIIIIIIIIII",
r1.eax,
r1.ebx,
r1.ecx,
r1.edx,
r2.eax,
r2.ebx,
r2.ecx,
r2.edx,
r3.eax,
r3.ebx,
r3.ecx,
r3.edx,
).decode("utf-8")
return result.strip("\x00")
@detection(operating_system="Windows")
def cpuid_info():
@@ -199,8 +174,8 @@ def _check_output(args, env):
WINDOWS_MAPPING = {
"AMD64": X86_64,
"ARM64": AARCH64,
"AMD64": "x86_64",
"ARM64": "aarch64",
}
@@ -434,16 +409,3 @@ def compatibility_check_for_riscv64(info, target):
return (target == arch_root or arch_root in target.ancestors) and (
target.name == info.name or target.vendor == "generic"
)
def brand_string() -> Optional[str]:
"""Returns the brand string of the host, if detected, or None."""
if platform.system() == "Darwin":
return _check_output(
["sysctl", "-n", "machdep.cpu.brand_string"], env=_ensure_bin_usrbin_in_path()
).strip()
if host().family == X86_64:
return CpuidInfoCollector().brand_string()
return None

View File

@@ -208,8 +208,6 @@ def optimization_flags(self, compiler, version):
"""Returns a string containing the optimization flags that needs
to be used to produce code optimized for this micro-architecture.
The version is expected to be a string of dot separated digits.
If there is no information on the compiler passed as argument the
function returns an empty string. If it is known that the compiler
version we want to use does not support this architecture the function
@@ -218,11 +216,6 @@ def optimization_flags(self, compiler, version):
Args:
compiler (str): name of the compiler to be used
version (str): version of the compiler to be used
Raises:
UnsupportedMicroarchitecture: if the requested compiler does not support
this micro-architecture.
ValueError: if the version doesn't match the expected format
"""
# If we don't have information on compiler at all return an empty string
if compiler not in self.family.compilers:
@@ -239,14 +232,6 @@ def optimization_flags(self, compiler, version):
msg = msg.format(compiler, best_target, best_target.family)
raise UnsupportedMicroarchitecture(msg)
# Check that the version matches the expected format
if not re.match(r"^(?:\d+\.)*\d+$", version):
msg = (
"invalid format for the compiler version argument. "
"Only dot separated digits are allowed."
)
raise InvalidCompilerVersion(msg)
# If we have information on this compiler we need to check the
# version being used
compiler_info = self.compilers[compiler]
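# Illustrative usage sketch (the compiler name and version here are
# hypothetical values, not part of the source above):
#
#     import archspec.cpu
#     target = archspec.cpu.host()
#     flags = target.optimization_flags("gcc", "12.3.0")
#
# which raises UnsupportedMicroarchitecture or InvalidCompilerVersion on
# unsupported or malformed input.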
@@ -307,7 +292,7 @@ def generic_microarchitecture(name):
Args:
name (str): name of the micro-architecture
"""
return Microarchitecture(name, parents=[], vendor="generic", features=set(), compilers={})
return Microarchitecture(name, parents=[], vendor="generic", features=[], compilers={})
def version_components(version):
@@ -382,15 +367,7 @@ def fill_target_from_dict(name, data, targets):
TARGETS = LazyDictionary(_known_microarchitectures)
class ArchspecError(Exception):
"""Base class for errors within archspec"""
class UnsupportedMicroarchitecture(ArchspecError, ValueError):
class UnsupportedMicroarchitecture(ValueError):
"""Raised if a compiler version does not support optimization for a given
micro-architecture.
"""
class InvalidCompilerVersion(ArchspecError, ValueError):
"""Raised when an invalid format is used for compiler versions in archspec."""

View File

@@ -2937,6 +2937,8 @@
"ilrcpc",
"flagm",
"ssbs",
"paca",
"pacg",
"dcpodp",
"svei8mm",
"svebf16",
@@ -3064,6 +3066,8 @@
"flagm",
"ssbs",
"sb",
"paca",
"pacg",
"dcpodp",
"sve2",
"sveaes",
@@ -3077,7 +3081,8 @@
"svebf16",
"i8mm",
"bf16",
"dgh"
"dgh",
"bti"
],
"compilers" : {
"gcc": [

View File

@@ -98,10 +98,3 @@ def path_filter_caller(*args, **kwargs):
if _func:
return holder_func(_func)
return holder_func
def sanitize_win_longpath(path: str) -> str:
"""Strip Windows extended path prefix from strings
Returns sanitized string.
no-op if extended path prefix is not present"""
return path.lstrip("\\\\?\\")

View File

@@ -187,18 +187,12 @@ def polite_filename(filename: str) -> str:
return _polite_antipattern().sub("_", filename)
def getuid() -> Union[str, int]:
"""Returns os getuid on non Windows
On Windows returns 0 for admin users, login string otherwise
This is in line with behavior from get_owner_uid which
always returns the login string on Windows
"""
def getuid():
if sys.platform == "win32":
import ctypes
# If not admin, use the string name of the login as a unique ID
if ctypes.windll.shell32.IsUserAnAdmin() == 0:
return os.getlogin()
return 1
return 0
else:
return os.getuid()
@@ -219,15 +213,6 @@ def _win_rename(src, dst):
os.replace(src, dst)
@system_path_filter
def msdos_escape_parens(path):
"""MS-DOS interprets parens as grouping parameters even in a quoted string"""
if sys.platform == "win32":
return path.replace("(", "^(").replace(")", "^)")
else:
return path
@system_path_filter
def rename(src, dst):
# On Windows, os.rename will fail if the destination file already exists
@@ -568,13 +553,7 @@ def exploding_archive_handler(tarball_container, stage):
@system_path_filter(arg_slice=slice(1))
def get_owner_uid(path, err_msg=None) -> Union[str, int]:
"""Returns owner UID of path destination
On non Windows this is the value of st_uid
On Windows this is the login string associated with the
owning user.
"""
def get_owner_uid(path, err_msg=None):
if not os.path.exists(path):
mkdirp(path, mode=stat.S_IRWXU)
@@ -766,6 +745,7 @@ def copy_tree(
src: str,
dest: str,
symlinks: bool = True,
allow_broken_symlinks: bool = sys.platform != "win32",
ignore: Optional[Callable[[str], bool]] = None,
_permissions: bool = False,
):
@@ -788,6 +768,8 @@ def copy_tree(
src (str): the directory to copy
dest (str): the destination directory
symlinks (bool): whether or not to preserve symlinks
allow_broken_symlinks (bool): whether or not to allow broken (dangling) symlinks,
On Windows, setting this to True will raise an exception. Defaults to true on unix.
ignore (typing.Callable): function indicating which files to ignore
_permissions (bool): for internal use only
@@ -795,6 +777,8 @@ def copy_tree(
IOError: if *src* does not match any files or directories
ValueError: if *src* is a parent directory of *dest*
"""
if allow_broken_symlinks and sys.platform == "win32":
raise llnl.util.symlink.SymlinkError("Cannot allow broken symlinks on Windows!")
if _permissions:
tty.debug("Installing {0} to {1}".format(src, dest))
else:
@@ -838,7 +822,7 @@ def copy_tree(
if islink(s):
link_target = resolve_link_target_relative_to_the_link(s)
if symlinks:
target = readlink(s)
target = os.readlink(s)
if os.path.isabs(target):
def escaped_path(path):
@@ -867,14 +851,16 @@ def escaped_path(path):
copy_mode(s, d)
for target, d, s in links:
symlink(target, d)
symlink(target, d, allow_broken_symlinks=allow_broken_symlinks)
if _permissions:
set_install_permissions(d)
copy_mode(s, d)
@system_path_filter
def install_tree(src, dest, symlinks=True, ignore=None):
def install_tree(
src, dest, symlinks=True, ignore=None, allow_broken_symlinks=sys.platform != "win32"
):
"""Recursively install an entire directory tree rooted at *src*.
Same as :py:func:`copy_tree` with the addition of setting proper
@@ -885,12 +871,21 @@ def install_tree(src, dest, symlinks=True, ignore=None):
dest (str): the destination directory
symlinks (bool): whether or not to preserve symlinks
ignore (typing.Callable): function indicating which files to ignore
allow_broken_symlinks (bool): whether or not to allow broken (dangling) symlinks,
On Windows, setting this to True will raise an exception.
Raises:
IOError: if *src* does not match any files or directories
ValueError: if *src* is a parent directory of *dest*
"""
copy_tree(src, dest, symlinks=symlinks, ignore=ignore, _permissions=True)
copy_tree(
src,
dest,
symlinks=symlinks,
allow_broken_symlinks=allow_broken_symlinks,
ignore=ignore,
_permissions=True,
)
@system_path_filter
@@ -2434,10 +2429,9 @@ def add_library_dependent(self, *dest):
"""
for pth in dest:
if os.path.isfile(pth):
new_pth = pathlib.Path(pth).parent
self._additional_library_dependents.add(pathlib.Path(pth).parent)
else:
new_pth = pathlib.Path(pth)
self._additional_library_dependents.add(new_pth)
self._additional_library_dependents.add(pathlib.Path(pth))
@property
def rpaths(self):
@@ -2515,14 +2509,8 @@ def establish_link(self):
# for each binary install dir in self.pkg (i.e. pkg.prefix.bin, pkg.prefix.lib)
# install a symlink to each dependent library
# do not rpath for system libraries included in the dag
# we should not be modifying libraries managed by the Windows system
# as this will negatively impact linker behavior and can result in permission
# errors if those system libs are not modifiable by Spack
if "windows-system" not in getattr(self.pkg, "tags", []):
for library, lib_dir in itertools.product(self.rpaths, self.library_dependents):
self._link(library, lib_dir)
for library, lib_dir in itertools.product(self.rpaths, self.library_dependents):
self._link(library, lib_dir)
@system_path_filter

View File

@@ -8,75 +8,100 @@
import subprocess
import sys
import tempfile
from typing import Union
from llnl.util import lang, tty
from ..path import sanitize_win_longpath, system_path_filter
from ..path import system_path_filter
if sys.platform == "win32":
from win32file import CreateHardLink
is_windows = sys.platform == "win32"
def _windows_symlink(
src: str, dst: str, target_is_directory: bool = False, *, dir_fd: Union[int, None] = None
):
"""On Windows with System Administrator privileges this will be a normal symbolic link via
os.symlink. On Windows without privileges the link will be a junction for a directory and a
hardlink for a file. On Windows the various link types are:
Symbolic Link: A link to a file or directory on the same or different volume (drive letter) or
even to a remote file or directory (using UNC in its path). Need System Administrator
privileges to make these.
def symlink(source_path: str, link_path: str, allow_broken_symlinks: bool = not is_windows):
"""
Create a link.
Hard Link: A link to a file on the same volume (drive letter) only. Every file (file's data)
has at least 1 hard link (file's name). But when this method creates a new hard link there will
be 2. Deleting all hard links effectively deletes the file. Don't need System Administrator
privileges.
On non-Windows and Windows with System Administrator
privleges this will be a normal symbolic link via
os.symlink.
Junction: A link to a directory on the same or different volume (drive letter) but not to a
remote directory. Don't need System Administrator privileges."""
source_path = os.path.normpath(src)
On Windows without privileges the link will be a
junction for a directory and a hardlink for a file.
On Windows the various link types are:
Symbolic Link: A link to a file or directory on the
same or different volume (drive letter) or even to
a remote file or directory (using UNC in its path).
Need System Administrator privileges to make these.
Hard Link: A link to a file on the same volume (drive
letter) only. Every file (file's data) has at least 1
hard link (file's name). But when this method creates
a new hard link there will be 2. Deleting all hard
links effectively deletes the file. Don't need System
Administrator privileges.
Junction: A link to a directory on the same or different
volume (drive letter) but not to a remote directory. Don't
need System Administrator privileges.
Parameters:
source_path (str): The real file or directory that the link points to.
Must be absolute OR relative to the link.
link_path (str): The path where the link will exist.
allow_broken_symlinks (bool): On Linux or Mac, don't raise an exception if the source_path
doesn't exist. This will still raise an exception on Windows.
"""
source_path = os.path.normpath(source_path)
win_source_path = source_path
link_path = os.path.normpath(dst)
link_path = os.path.normpath(link_path)
# Perform basic checks to make sure symlinking will succeed
if os.path.lexists(link_path):
raise AlreadyExistsError(f"Link path ({link_path}) already exists. Cannot create link.")
# Never allow broken links on Windows.
if sys.platform == "win32" and allow_broken_symlinks:
raise ValueError("allow_broken_symlinks parameter cannot be True on Windows.")
if not os.path.exists(source_path):
if os.path.isabs(source_path):
# An absolute source path that does not exist will result in a broken link.
raise SymlinkError(
f"Source path ({source_path}) is absolute but does not exist. Resulting "
f"link would be broken so not making link."
if not allow_broken_symlinks:
# Perform basic checks to make sure symlinking will succeed
if os.path.lexists(link_path):
raise AlreadyExistsError(
f"Link path ({link_path}) already exists. Cannot create link."
)
else:
# os.symlink can create a link when the given source path is relative to
# the link path. Emulate this behavior and check to see if the source exists
# relative to the link path ahead of link creation to prevent broken
# links from being made.
link_parent_dir = os.path.dirname(link_path)
relative_path = os.path.join(link_parent_dir, source_path)
if os.path.exists(relative_path):
# In order to work on windows, the source path needs to be modified to be
# relative because hardlink/junction don't resolve relative paths the same
# way as os.symlink. This is ignored on other operating systems.
win_source_path = relative_path
else:
if not os.path.exists(source_path):
if os.path.isabs(source_path) and not allow_broken_symlinks:
# An absolute source path that does not exist will result in a broken link.
raise SymlinkError(
f"The source path ({source_path}) is not relative to the link path "
f"({link_path}). Resulting link would be broken so not making link."
f"Source path ({source_path}) is absolute but does not exist. Resulting "
f"link would be broken so not making link."
)
else:
# os.symlink can create a link when the given source path is relative to
# the link path. Emulate this behavior and check to see if the source exists
# relative to the link path ahead of link creation to prevent broken
# links from being made.
link_parent_dir = os.path.dirname(link_path)
relative_path = os.path.join(link_parent_dir, source_path)
if os.path.exists(relative_path):
# In order to work on windows, the source path needs to be modified to be
# relative because hardlink/junction don't resolve relative paths the same
# way as os.symlink. This is ignored on other operating systems.
win_source_path = relative_path
elif not allow_broken_symlinks:
raise SymlinkError(
f"The source path ({source_path}) is not relative to the link path "
f"({link_path}). Resulting link would be broken so not making link."
)
# Create the symlink
if not _windows_can_symlink():
if sys.platform == "win32" and not _windows_can_symlink():
_windows_create_link(win_source_path, link_path)
else:
os.symlink(source_path, link_path, target_is_directory=os.path.isdir(source_path))
def _windows_islink(path: str) -> bool:
def islink(path: str) -> bool:
"""Override os.islink to give correct answer for spack logic.
For Non-Windows: a link can be determined with the os.path.islink method.
@@ -222,9 +247,9 @@ def _windows_create_junction(source: str, link: str):
out, err = proc.communicate()
tty.debug(out.decode())
if proc.returncode != 0:
err_str = err.decode()
tty.error(err_str)
raise SymlinkError("Make junction command returned a non-zero return code.", err_str)
err = err.decode()
tty.error(err)
raise SymlinkError("Make junction command returned a non-zero return code.", err)
def _windows_create_hard_link(path: str, link: str):
@@ -244,14 +269,14 @@ def _windows_create_hard_link(path: str, link: str):
CreateHardLink(link, path)
def _windows_readlink(path: str, *, dir_fd=None):
def readlink(path: str):
"""Spack utility to override of os.readlink method to work cross platform"""
if _windows_is_hardlink(path):
return _windows_read_hard_link(path)
elif _windows_is_junction(path):
return _windows_read_junction(path)
else:
return sanitize_win_longpath(os.readlink(path, dir_fd=dir_fd))
return os.readlink(path)
def _windows_read_hard_link(link: str) -> str:
@@ -313,16 +338,6 @@ def resolve_link_target_relative_to_the_link(link):
return os.path.join(link_dir, target)
if sys.platform == "win32":
symlink = _windows_symlink
readlink = _windows_readlink
islink = _windows_islink
else:
symlink = os.symlink
readlink = os.readlink
islink = os.path.islink
class SymlinkError(RuntimeError):
"""Exception class for errors raised while creating symlinks,
junctions and hard links

View File

@@ -59,7 +59,6 @@
To output an @, use '@@'. To output a } inside braces, use '}}'.
"""
import os
import re
import sys
from contextlib import contextmanager
@@ -102,29 +101,9 @@ def __init__(self, message):
# Mapping from color arguments to values for tty.set_color
color_when_values = {"always": True, "auto": None, "never": False}
def _color_when_value(when):
"""Raise a ValueError for an invalid color setting.
Valid values are 'always', 'never', and 'auto', or equivalently,
True, False, and None.
"""
if when in color_when_values:
return color_when_values[when]
elif when not in color_when_values.values():
raise ValueError("Invalid color setting: %s" % when)
return when
def _color_from_environ() -> Optional[bool]:
try:
return _color_when_value(os.environ.get("SPACK_COLOR", "auto"))
except ValueError:
return None
#: When `None` colorize when stdout is tty, when `True` or `False` always or never colorize resp.
_force_color = _color_from_environ()
# Force color; None: Only color if stdout is a tty
# True: Always colorize output, False: Never colorize output
_force_color = None
def try_enable_terminal_color_on_windows():
@@ -185,6 +164,19 @@ def _err_check(result, func, args):
debug("Unable to support color on Windows terminal")
def _color_when_value(when):
"""Raise a ValueError for an invalid color setting.
Valid values are 'always', 'never', and 'auto', or equivalently,
True, False, and None.
"""
if when in color_when_values:
return color_when_values[when]
elif when not in color_when_values.values():
raise ValueError("Invalid color setting: %s" % when)
return when
def get_color_when():
"""Return whether commands should print color or not."""
if _force_color is not None:

View File

@@ -33,23 +33,8 @@
pass
esc, bell, lbracket, bslash, newline = r"\x1b", r"\x07", r"\[", r"\\", r"\n"
# Ansi Control Sequence Introducers (CSI) are a well-defined format
# Standard ECMA-48: Control Functions for Character-Imaging I/O Devices, section 5.4
# https://www.ecma-international.org/wp-content/uploads/ECMA-48_5th_edition_june_1991.pdf
csi_pre = f"{esc}{lbracket}"
csi_param, csi_inter, csi_post = r"[0-?]", r"[ -/]", r"[@-~]"
ansi_csi = f"{csi_pre}{csi_param}*{csi_inter}*{csi_post}"
# General ansi escape sequences have well-defined prefixes,
# but content and suffixes are less reliable.
# Conservatively assume they end with either "<ESC>\" or "<BELL>",
# with no intervening "<ESC>"/"<BELL>" keys or newlines
esc_pre = f"{esc}[@-_]"
esc_content = f"[^{esc}{bell}{newline}]"
esc_post = f"(?:{esc}{bslash}|{bell})"
ansi_esc = f"{esc_pre}{esc_content}*{esc_post}"
# Use this to strip escape sequences
_escape = re.compile(f"{ansi_csi}|{ansi_esc}")
_escape = re.compile(r"\x1b[^m]*m|\x1b\[?1034h|\x1b\][0-9]+;[^\x07]*\x07")
# control characters for enabling/disabling echo
#

View File

@@ -4,7 +4,7 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
#: PEP440 canonical <major>.<minor>.<micro>.<devN> string
__version__ = "0.23.0.dev0"
__version__ = "0.22.0.dev0"
spack_version = __version__

View File

@@ -254,8 +254,8 @@ def _search_duplicate_specs_in_externals(error_cls):
@config_packages
def _deprecated_preferences(error_cls):
"""Search package preferences deprecated in v0.21 (and slated for removal in v0.23)"""
# TODO (v0.23): remove this audit as the attributes will not be allowed in config
"""Search package preferences deprecated in v0.21 (and slated for removal in v0.22)"""
# TODO (v0.22): remove this audit as the attributes will not be allowed in config
errors = []
packages_yaml = spack.config.CONFIG.get_config("packages")
@@ -351,22 +351,6 @@ def _wrongly_named_spec(error_cls):
return errors
@config_packages
def _ensure_all_virtual_packages_have_default_providers(error_cls):
"""All virtual packages must have a default provider explicitly set."""
configuration = spack.config.create()
defaults = configuration.get("packages", scope="defaults")
default_providers = defaults["all"]["providers"]
virtuals = spack.repo.PATH.provider_index.providers
default_providers_filename = configuration.scopes["defaults"].get_section_filename("packages")
return [
error_cls(f"'{virtual}' must have a default provider in {default_providers_filename}", [])
for virtual in virtuals
if virtual not in default_providers
]
def _make_config_error(config_data, summary, error_cls):
s = io.StringIO()
s.write("Occurring in the following file:\n")
@@ -437,10 +421,6 @@ def _check_patch_urls(pkgs, error_cls):
r"^https?://(?:patch-diff\.)?github(?:usercontent)?\.com/"
r".+/.+/(?:commit|pull)/[a-fA-F0-9]+\.(?:patch|diff)"
)
github_pull_commits_re = (
r"^https?://(?:patch-diff\.)?github(?:usercontent)?\.com/"
r".+/.+/pull/\d+/commits/[a-fA-F0-9]+\.(?:patch|diff)"
)
# Only .diff URLs have stable/full hashes:
# https://forum.gitlab.com/t/patches-with-full-index/29313
gitlab_patch_url_re = (
@@ -456,24 +436,14 @@ def _check_patch_urls(pkgs, error_cls):
if not isinstance(patch, spack.patch.UrlPatch):
continue
if re.match(github_pull_commits_re, patch.url):
url = re.sub(r"/pull/\d+/commits/", r"/commit/", patch.url)
url = re.sub(r"^(.*)(?<!full_index=1)$", r"\1?full_index=1", url)
errors.append(
error_cls(
f"patch URL in package {pkg_cls.name} "
+ "must not be a pull request commit; "
+ f"instead use {url}",
[patch.url],
)
)
elif re.match(github_patch_url_re, patch.url):
if re.match(github_patch_url_re, patch.url):
full_index_arg = "?full_index=1"
if not patch.url.endswith(full_index_arg):
errors.append(
error_cls(
f"patch URL in package {pkg_cls.name} "
+ f"must end with {full_index_arg}",
"patch URL in package {0} must end with {1}".format(
pkg_cls.name, full_index_arg
),
[patch.url],
)
)
@@ -481,7 +451,9 @@ def _check_patch_urls(pkgs, error_cls):
if not patch.url.endswith(".diff"):
errors.append(
error_cls(
f"patch URL in package {pkg_cls.name} must end with .diff",
"patch URL in package {0} must end with .diff".format(
pkg_cls.name
),
[patch.url],
)
)
@@ -807,7 +779,7 @@ def check_virtual_with_variants(spec, msg):
return
error = error_cls(
f"{pkg_name}: {msg}",
[f"remove variants from '{spec}' in depends_on directive in {filename}"],
f"remove variants from '{spec}' in depends_on directive in {filename}",
)
errors.append(error)

View File

@@ -29,7 +29,6 @@
import llnl.util.lang
import llnl.util.tty as tty
from llnl.util.filesystem import BaseDirectoryVisitor, mkdirp, visit_directory_tree
from llnl.util.symlink import readlink
import spack.caches
import spack.cmd
@@ -659,7 +658,7 @@ def get_buildfile_manifest(spec):
# 2. paths are used as strings.
for rel_path in visitor.symlinks:
abs_path = os.path.join(root, rel_path)
link = readlink(abs_path)
link = os.readlink(abs_path)
if os.path.isabs(link) and link.startswith(spack.store.STORE.layout.root):
data["link_to_relocate"].append(rel_path)
@@ -2002,7 +2001,6 @@ def install_root_node(spec, unsigned=False, force=False, sha256=None):
with spack.util.path.filter_padding():
tty.msg('Installing "{0}" from a buildcache'.format(spec.format()))
extract_tarball(spec, download_result, force)
spec.package.windows_establish_runtime_linkage()
spack.hooks.post_install(spec, False)
spack.store.STORE.db.add(spec, spack.store.STORE.layout)

View File

@@ -5,13 +5,7 @@
"""Function and classes needed to bootstrap Spack itself."""
from .config import ensure_bootstrap_configuration, is_bootstrapping, store_path
from .core import (
all_core_root_specs,
ensure_clingo_importable_or_raise,
ensure_core_dependencies,
ensure_gpg_in_path_or_raise,
ensure_patchelf_in_path_or_raise,
)
from .core import all_core_root_specs, ensure_core_dependencies, ensure_patchelf_in_path_or_raise
from .environment import BootstrapEnvironment, ensure_environment_dependencies
from .status import status_message
@@ -19,8 +13,6 @@
"is_bootstrapping",
"ensure_bootstrap_configuration",
"ensure_core_dependencies",
"ensure_gpg_in_path_or_raise",
"ensure_clingo_importable_or_raise",
"ensure_patchelf_in_path_or_raise",
"all_core_root_specs",
"ensure_environment_dependencies",

View File

@@ -54,14 +54,10 @@ def _try_import_from_store(
installed_specs = spack.store.STORE.db.query(query_spec, installed=True)
for candidate_spec in installed_specs:
# previously bootstrapped specs may not have a python-venv dependency.
if candidate_spec.dependencies("python-venv"):
python, *_ = candidate_spec.dependencies("python-venv")
else:
python, *_ = candidate_spec.dependencies("python")
pkg = candidate_spec["python"].package
module_paths = [
os.path.join(candidate_spec.prefix, python.package.purelib),
os.path.join(candidate_spec.prefix, python.package.platlib),
os.path.join(candidate_spec.prefix, pkg.purelib),
os.path.join(candidate_spec.prefix, pkg.platlib),
]
path_before = list(sys.path)
@@ -213,18 +209,15 @@ def _root_spec(spec_str: str) -> str:
Args:
spec_str: spec to be bootstrapped. Must be without compiler and target.
"""
# Add a compiler and platform requirement to the root spec.
# Add a compiler requirement to the root spec.
platform = str(spack.platforms.host())
if platform == "darwin":
spec_str += " %apple-clang"
elif platform == "windows":
spec_str += " %msvc"
elif platform == "linux":
spec_str += " %gcc"
elif platform == "freebsd":
spec_str += " %clang"
spec_str += f" platform={platform}"
target = archspec.cpu.host().family
spec_str += f" target={target}"

View File

@@ -129,10 +129,10 @@ def _bootstrap_config_scopes() -> Sequence["spack.config.ConfigScope"]:
configuration_paths = (spack.config.CONFIGURATION_DEFAULTS_PATH, ("bootstrap", _config_path()))
for name, path in configuration_paths:
platform = spack.platforms.host().name
platform_scope = spack.config.DirectoryConfigScope(
f"{name}/{platform}", os.path.join(path, platform)
platform_scope = spack.config.ConfigScope(
"/".join([name, platform]), os.path.join(path, platform)
)
generic_scope = spack.config.DirectoryConfigScope(name, path)
generic_scope = spack.config.ConfigScope(name, path)
config_scopes.extend([generic_scope, platform_scope])
msg = "[BOOTSTRAP CONFIG SCOPE] name={0}, path={1}"
tty.debug(msg.format(generic_scope.name, generic_scope.path))

View File

@@ -270,6 +270,10 @@ def try_import(self, module: str, abstract_spec_str: str) -> bool:
with spack_python_interpreter():
# Add hint to use frontend operating system on Cray
concrete_spec = spack.spec.Spec(abstract_spec_str + " ^" + spec_for_current_python())
# This is needed to help the old concretizer taking the `setuptools` dependency
# only when bootstrapping from sources on Python 3.12
if spec_for_current_python() == "python@3.12":
concrete_spec.constrain("+force_setuptools")
if module == "clingo":
# TODO: remove when the old concretizer is deprecated # pylint: disable=fixme

View File

@@ -3,11 +3,13 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Bootstrap non-core Spack dependencies from an environment."""
import glob
import hashlib
import os
import pathlib
import sys
from typing import Iterable, List
import warnings
from typing import List
import archspec.cpu
@@ -26,16 +28,6 @@
class BootstrapEnvironment(spack.environment.Environment):
"""Environment to install dependencies of Spack for a given interpreter and architecture"""
def __init__(self) -> None:
if not self.spack_yaml().exists():
self._write_spack_yaml_file()
super().__init__(self.environment_root())
# Remove python package roots created before python-venv was introduced
for s in self.concrete_roots():
if "python" in s.package.extendees and not s.dependencies("python-venv"):
self.deconcretize(s)
@classmethod
def spack_dev_requirements(cls) -> List[str]:
"""Spack development requirements"""
@@ -67,19 +59,31 @@ def view_root(cls) -> pathlib.Path:
return cls.environment_root().joinpath("view")
@classmethod
def bin_dir(cls) -> pathlib.Path:
"""Paths to be added to PATH"""
return cls.view_root().joinpath("bin")
def pythonpaths(cls) -> List[str]:
"""Paths to be added to sys.path or PYTHONPATH"""
python_dir_part = f"python{'.'.join(str(x) for x in sys.version_info[:2])}"
glob_expr = str(cls.view_root().joinpath("**", python_dir_part, "**"))
result = glob.glob(glob_expr)
if not result:
msg = f"Cannot find any Python path in {cls.view_root()}"
warnings.warn(msg)
return result
def python_dirs(self) -> Iterable[pathlib.Path]:
python = next(s for s in self.all_specs_generator() if s.name == "python-venv").package
return {self.view_root().joinpath(p) for p in (python.platlib, python.purelib)}
@classmethod
def bin_dirs(cls) -> List[pathlib.Path]:
"""Paths to be added to PATH"""
return [cls.view_root().joinpath("bin")]
@classmethod
def spack_yaml(cls) -> pathlib.Path:
"""Environment spack.yaml file"""
return cls.environment_root().joinpath("spack.yaml")
def __init__(self) -> None:
if not self.spack_yaml().exists():
self._write_spack_yaml_file()
super().__init__(self.environment_root())
def update_installations(self) -> None:
"""Update the installations of this environment."""
log_enabled = tty.is_debug() or tty.is_verbose()
@@ -96,13 +100,21 @@ def update_installations(self) -> None:
self.install_all()
self.write(regenerate=True)
def load(self) -> None:
"""Update PATH and sys.path."""
# Make executables available (shouldn't need PYTHONPATH)
os.environ["PATH"] = f"{self.bin_dir()}{os.pathsep}{os.environ.get('PATH', '')}"
# Spack itself imports pytest
sys.path.extend(str(p) for p in self.python_dirs())
def update_syspath_and_environ(self) -> None:
"""Update ``sys.path`` and the PATH, PYTHONPATH environment variables to point to
the environment view.
"""
# Do minimal modifications to sys.path and environment variables. In particular, pay
# attention to have the smallest PYTHONPATH / sys.path possible, since that may impact
# the performance of the current interpreter
sys.path.extend(self.pythonpaths())
os.environ["PATH"] = os.pathsep.join(
[str(x) for x in self.bin_dirs()] + os.environ.get("PATH", "").split(os.pathsep)
)
os.environ["PYTHONPATH"] = os.pathsep.join(
os.environ.get("PYTHONPATH", "").split(os.pathsep)
+ [str(x) for x in self.pythonpaths()]
)
def _write_spack_yaml_file(self) -> None:
tty.msg(
@@ -152,4 +164,4 @@ def ensure_environment_dependencies() -> None:
_add_externals_if_missing()
with BootstrapEnvironment() as env:
env.update_installations()
env.load()
env.update_syspath_and_environ()

View File

@@ -43,7 +43,7 @@
from collections import defaultdict
from enum import Flag, auto
from itertools import chain
from typing import Dict, List, Set, Tuple
from typing import List, Set, Tuple
import llnl.util.tty as tty
from llnl.string import plural
@@ -72,7 +72,6 @@
import spack.store
import spack.subprocess_context
import spack.user_environment
import spack.util.executable
import spack.util.path
import spack.util.pattern
from spack import traverse
@@ -92,7 +91,7 @@
)
from spack.util.executable import Executable
from spack.util.log_parse import make_log_context, parse_log_events
from spack.util.module_cmd import load_module, path_from_modules
from spack.util.module_cmd import load_module, module, path_from_modules
#
# This can be set by the user to globally disable parallel builds.
@@ -191,6 +190,14 @@ def __call__(self, *args, **kwargs):
return super().__call__(*args, **kwargs)
def _on_cray():
host_platform = spack.platforms.host()
host_os = host_platform.operating_system("default_os")
on_cray = str(host_platform) == "cray"
using_cnl = re.match(r"cnl\d+", str(host_os))
return on_cray, using_cnl
def clean_environment():
# Stuff in here sanitizes the build environment to eliminate
# anything the user has set that may interfere. We apply it immediately
@@ -234,6 +241,17 @@ def clean_environment():
if varname.endswith("_ROOT") and varname != "SPACK_ROOT":
env.unset(varname)
# On Cray "cluster" systems, unset CRAY_LD_LIBRARY_PATH to avoid
# interference with Spack dependencies.
# CNL requires these variables to be set (or at least some of them,
# depending on the CNL version).
on_cray, using_cnl = _on_cray()
if on_cray and not using_cnl:
env.unset("CRAY_LD_LIBRARY_PATH")
for varname in os.environ.keys():
if "PKGCONF" in varname:
env.unset(varname)
# Unset the following variables because they can affect installation of
# Autotools and CMake packages.
build_system_vars = [
@@ -363,7 +381,11 @@ def set_compiler_environment_variables(pkg, env):
_add_werror_handling(keep_werror, env)
# Set the target parameters that the compiler will add
isa_arg = spec.architecture.target.optimization_flags(compiler)
# Don't set on cray platform because the targeting module handles this
if spec.satisfies("platform=cray"):
isa_arg = ""
else:
isa_arg = spec.architecture.target.optimization_flags(compiler)
env.set("SPACK_TARGET_ARGS", isa_arg)
# Trap spack-tracked compiler flags as appropriate.
@@ -459,7 +481,10 @@ def set_wrapper_variables(pkg, env):
# Find ccache binary and hand it to build environment
if spack.config.get("config:ccache"):
env.set(SPACK_CCACHE_BINARY, spack.util.executable.which_string("ccache", required=True))
ccache = Executable("ccache")
if not ccache:
raise RuntimeError("No ccache binary found in PATH")
env.set(SPACK_CCACHE_BINARY, ccache)
# Gather information about various types of dependencies
link_deps = set(pkg.spec.traverse(root=False, deptype=("link")))
@@ -705,28 +730,12 @@ def _static_to_shared_library(arch, compiler, static_lib, shared_lib=None, **kwa
return compiler(*compiler_args, output=compiler_output)
def _get_rpath_deps_from_spec(
spec: spack.spec.Spec, transitive_rpaths: bool
) -> List[spack.spec.Spec]:
if not transitive_rpaths:
return spec.dependencies(deptype=dt.LINK)
by_name: Dict[str, spack.spec.Spec] = {}
for dep in spec.traverse(root=False, deptype=dt.LINK):
lookup = by_name.get(dep.name)
if lookup is None:
by_name[dep.name] = dep
elif lookup.version < dep.version:
by_name[dep.name] = dep
return list(by_name.values())
def get_rpath_deps(pkg: spack.package_base.PackageBase) -> List[spack.spec.Spec]:
"""Return immediate or transitive dependencies (depending on the package) that need to be
rpath'ed. If a package occurs multiple times, the newest version is kept."""
return _get_rpath_deps_from_spec(pkg.spec, pkg.transitive_rpaths)
def get_rpath_deps(pkg):
"""Return immediate or transitive RPATHs depending on the package."""
if pkg.transitive_rpaths:
return [d for d in pkg.spec.traverse(root=False, deptype=("link"))]
else:
return pkg.spec.dependencies(deptype="link")
def get_rpaths(pkg):
@@ -738,9 +747,7 @@ def get_rpaths(pkg):
# Second module is our compiler mod name. We use that to get rpaths from
# module show output.
if pkg.compiler.modules and len(pkg.compiler.modules) > 1:
mod_rpath = path_from_modules([pkg.compiler.modules[1]])
if mod_rpath:
rpaths.append(mod_rpath)
rpaths.append(path_from_modules([pkg.compiler.modules[1]]))
return list(dedupe(filter_system_paths(rpaths)))
@@ -810,6 +817,14 @@ def setup_package(pkg, dirty, context: Context = Context.BUILD):
for mod in pkg.compiler.modules:
load_module(mod)
# kludge to handle cray mpich and libsci being automatically loaded by
# PrgEnv modules on cray platform. Module unload does no damage when
# unnecessary
on_cray, _ = _on_cray()
if on_cray and not dirty:
for mod in ["cray-mpich", "cray-libsci"]:
module("unload", mod)
if target and target.module_name:
load_module(target.module_name)
@@ -1473,7 +1488,7 @@ def long_message(self):
out.write(" {0}\n".format(self.log_name))
# Also output the test log path IF it exists
if self.context != "test" and have_log:
if self.context != "test":
test_log = join_path(os.path.dirname(self.log_name), spack_install_test_log)
if os.path.isfile(test_log):
out.write("\nSee test log for details:\n")

View File

@@ -162,9 +162,7 @@ def initconfig_compiler_entries(self):
ld_flags = " ".join(flags["ldflags"])
ld_format_string = "CMAKE_{0}_LINKER_FLAGS"
# CMake has separate linker arguments for types of builds.
# 'ldflags' should not be used with CMAKE_STATIC_LINKER_FLAGS which
# is used by the archiver, so don't include "STATIC" in this loop:
for ld_type in ["EXE", "MODULE", "SHARED"]:
for ld_type in ["EXE", "MODULE", "SHARED", "STATIC"]:
ld_string = ld_format_string.format(ld_type)
entries.append(cmake_cache_string(ld_string, ld_flags))

View File

@@ -39,11 +39,16 @@ def _maybe_set_python_hints(pkg: spack.package_base.PackageBase, args: List[str]
"""Set the PYTHON_EXECUTABLE, Python_EXECUTABLE, and Python3_EXECUTABLE CMake variables
if the package has Python as build or link dep and ``find_python_hints`` is set to True. See
``find_python_hints`` for context."""
if not getattr(pkg, "find_python_hints", False) or not pkg.spec.dependencies(
"python", dt.BUILD | dt.LINK
):
if not getattr(pkg, "find_python_hints", False):
return
python_executable = pkg.spec["python"].command.path
pythons = pkg.spec.dependencies("python", dt.BUILD | dt.LINK)
if len(pythons) != 1:
return
try:
python_executable = pythons[0].package.command.path
except RuntimeError:
return
args.extend(
[
CMakeBuilder.define("PYTHON_EXECUTABLE", python_executable),

View File

@@ -1,144 +0,0 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import itertools
import os
import pathlib
import re
import sys
from typing import Dict, List, Sequence, Tuple, Union
import llnl.util.tty as tty
from llnl.util.lang import classproperty
import spack.compiler
import spack.package_base
# Local "type" for type hints
Path = Union[str, pathlib.Path]
class CompilerPackage(spack.package_base.PackageBase):
"""A Package mixin for all common logic for packages that implement compilers"""
# TODO: how do these play nicely with other tags
tags: Sequence[str] = ["compiler"]
#: Optional suffix regexes for searching for this type of compiler.
#: Suffixes are used by some frameworks, e.g. macports uses an '-mp-X.Y'
#: version suffix for gcc.
compiler_suffixes: List[str] = [r"-.*"]
#: Optional prefix regexes for searching for this compiler
compiler_prefixes: List[str] = []
#: Compiler argument(s) that produces version information
#: If multiple arguments, the earlier arguments must produce errors when invalid
compiler_version_argument: Union[str, Tuple[str]] = "-dumpversion"
#: Regex used to extract version from compiler's output
compiler_version_regex: str = "(.*)"
#: Static definition of languages supported by this class
compiler_languages: Sequence[str] = ["c", "cxx", "fortran"]
def __init__(self, spec: "spack.spec.Spec"):
super().__init__(spec)
msg = f"Supported languages for {spec} are not a subset of possible supported languages"
msg += f" supports: {self.supported_languages}, valid values: {self.compiler_languages}"
assert set(self.supported_languages) <= set(self.compiler_languages), msg
@property
def supported_languages(self) -> Sequence[str]:
"""Dynamic definition of languages supported by this package"""
return self.compiler_languages
@classproperty
def compiler_names(cls) -> Sequence[str]:
"""Construct list of compiler names from per-language names"""
names = []
for language in cls.compiler_languages:
names.extend(getattr(cls, f"{language}_names"))
return names
@classproperty
def executables(cls) -> Sequence[str]:
"""Construct executables for external detection from names, prefixes, and suffixes."""
regexp_fmt = r"^({0}){1}({2})$"
prefixes = [""] + cls.compiler_prefixes
suffixes = [""] + cls.compiler_suffixes
if sys.platform == "win32":
ext = r"\.(?:exe|bat)"
suffixes += [suf + ext for suf in suffixes]
return [
regexp_fmt.format(prefix, re.escape(name), suffix)
for prefix, name, suffix in itertools.product(prefixes, cls.compiler_names, suffixes)
]
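A minimal sketch of the regex construction above, using a single made-up compiler name and the default "-.*" suffix; a real subclass supplies its own per-language name lists:

import itertools
import re

regexp_fmt = r"^({0}){1}({2})$"
prefixes = [""]                     # [""] + compiler_prefixes
suffixes = ["", "-.*"]              # [""] + compiler_suffixes
patterns = [
    regexp_fmt.format(p, re.escape("gcc"), s)
    for p, s in itertools.product(prefixes, suffixes)
]
# -> ["^()gcc()$", "^()gcc(-.*)$"], matching e.g. "gcc" and "gcc-13"
assert re.match(patterns[1], "gcc-13")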
@classmethod
def determine_version(cls, exe: Path):
version_argument = cls.compiler_version_argument
if isinstance(version_argument, str):
version_argument = (version_argument,)
for va in version_argument:
try:
output = spack.compiler.get_compiler_version_output(exe, va)
match = re.search(cls.compiler_version_regex, output)
if match:
return ".".join(match.groups())
except spack.util.executable.ProcessError:
pass
except Exception as e:
tty.debug(
f"[{__file__}] Cannot detect a valid version for the executable "
f"{str(exe)}, for package '{cls.name}': {e}"
)
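A hedged sketch of how the version argument and regex combine, using plain re and made-up compiler output; real subclasses typically override both attributes:

import re

version_argument = "-dumpversion"        # mirrors compiler_version_argument
version_regex = r"(\d+)\.(\d+)\.(\d+)"   # hypothetical, narrower than the default "(.*)"
output = "13.2.0"                        # made-up `<exe> -dumpversion` output

match = re.search(version_regex, output)
if match:
    print(".".join(match.groups()))      # -> "13.2.0"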
@classmethod
def compiler_bindir(cls, prefix: Path) -> Path:
"""Overridable method for the location of the compiler bindir within the preifx"""
return os.path.join(prefix, "bin")
@classmethod
def determine_compiler_paths(cls, exes: Sequence[Path]) -> Dict[str, Path]:
"""Compute the paths to compiler executables associated with this package
This is a helper method for ``determine_variants`` to compute the ``extra_attributes``
to include with each spec object."""
# There are often at least two copies (not symlinks) of each compiler executable in the
# same directory: one with a canonical name, e.g. "gfortran", and another one with the
# target prefix, e.g. "x86_64-pc-linux-gnu-gfortran". There also might be a copy of "gcc"
# with the version suffix, e.g. "x86_64-pc-linux-gnu-gcc-6.3.0". To ensure the consistency
# of values in the "paths" dictionary (i.e. we prefer all of them to reference copies
# with canonical names if possible), we iterate over the executables in the reversed sorted
# order:
# First pass over languages identifies exes that are perfect matches for canonical names
# Second pass checks for names with prefix/suffix
# Second pass is sorted by language name length because longer named languages
# e.g. cxx can often contain the names of shorter named languages
# e.g. c (e.g. clang/clang++)
paths = {}
exes = sorted(exes, reverse=True)
languages = {
lang: getattr(cls, f"{lang}_names")
for lang in sorted(cls.compiler_languages, key=len, reverse=True)
}
for exe in exes:
for lang, names in languages.items():
if os.path.basename(exe) in names:
paths[lang] = exe
break
else:
for lang, names in languages.items():
if any(name in os.path.basename(exe) for name in names):
paths[lang] = exe
break
return paths
@classmethod
def determine_variants(cls, exes: Sequence[Path], version_str: str) -> Tuple:
# path determination is separated so it can be reused in subclasses
return "", {"compilers": cls.determine_compiler_paths(exes=exes)}


@@ -110,8 +110,9 @@ def cuda_flags(arch_list):
# From the NVIDIA install guide we know of conflicts for particular
# platforms (linux, darwin), architectures (x86, powerpc) and compilers
# (gcc, clang). We don't restrict %gcc and %clang conflicts to
# platform=linux, since they may apply to platform=darwin. We currently
# do not provide conflicts for platform=darwin with %apple-clang.
# platform=linux, since they should also apply to platform=cray, and may
# apply to platform=darwin. We currently do not provide conflicts for
# platform=darwin with %apple-clang.
# Linux x86_64 compiler conflicts from here:
# https://gist.github.com/ax3l/9489132
@@ -124,8 +125,6 @@ def cuda_flags(arch_list):
# minimum supported versions
conflicts("%gcc@:4", when="+cuda ^cuda@11.0:")
conflicts("%gcc@:5", when="+cuda ^cuda@11.4:")
conflicts("%gcc@:7.2", when="+cuda ^cuda@12.4:")
conflicts("%clang@:6", when="+cuda ^cuda@12.2:")
# maximum supported version
# NOTE:
@@ -138,14 +137,11 @@ def cuda_flags(arch_list):
conflicts("%gcc@11.2:", when="+cuda ^cuda@:11.5")
conflicts("%gcc@12:", when="+cuda ^cuda@:11.8")
conflicts("%gcc@13:", when="+cuda ^cuda@:12.3")
conflicts("%gcc@14:", when="+cuda ^cuda@:12.5")
conflicts("%clang@12:", when="+cuda ^cuda@:11.4.0")
conflicts("%clang@13:", when="+cuda ^cuda@:11.5")
conflicts("%clang@14:", when="+cuda ^cuda@:11.7")
conflicts("%clang@15:", when="+cuda ^cuda@:12.0")
conflicts("%clang@16:", when="+cuda ^cuda@:12.1")
conflicts("%clang@17:", when="+cuda ^cuda@:12.3")
conflicts("%clang@18:", when="+cuda ^cuda@:12.5")
conflicts("%clang@16:", when="+cuda ^cuda@:12.3")
# https://gist.github.com/ax3l/9489132#gistcomment-3860114
conflicts("%gcc@10", when="+cuda ^cuda@:11.4.0")
@@ -213,16 +209,12 @@ def cuda_flags(arch_list):
conflicts("%intel@19.0:", when="+cuda ^cuda@:10.0")
conflicts("%intel@19.1:", when="+cuda ^cuda@:10.1")
conflicts("%intel@19.2:", when="+cuda ^cuda@:11.1.0")
conflicts("%intel@2021:", when="+cuda ^cuda@:11.4.0")
# XL is mostly relevant for ppc64le Linux
conflicts("%xl@:12,14:", when="+cuda ^cuda@:9.1")
conflicts("%xl@:12,14:15,17:", when="+cuda ^cuda@9.2")
conflicts("%xl@:12,17:", when="+cuda ^cuda@:11.1.0")
# PowerPC.
conflicts("target=ppc64le", when="+cuda ^cuda@12.5:")
# Darwin.
# TODO: add missing conflicts for %apple-clang cuda@:10
conflicts("platform=darwin", when="+cuda ^cuda@11.0.2:")
conflicts("platform=darwin", when="+cuda ^cuda@11.0.2: ")


@@ -72,7 +72,7 @@ def build_directory(self):
def build_args(self):
"""Arguments for ``go build``."""
# Pass ldflags -s = --strip-all and -w = --no-warnings by default
return ["-modcacherw", "-ldflags", "-s -w", "-o", f"{self.pkg.name}"]
return ["-ldflags", "-s -w", "-o", f"{self.pkg.name}"]
@property
def check_args(self):


@@ -846,7 +846,6 @@ def scalapack_libs(self):
"^mpich@2:" in spec_root
or "^cray-mpich" in spec_root
or "^mvapich2" in spec_root
or "^mvapich" in spec_root
or "^intel-mpi" in spec_root
or "^intel-oneapi-mpi" in spec_root
or "^intel-parallel-studio" in spec_root
@@ -937,15 +936,32 @@ def mpi_setup_dependent_build_environment(self, env, dependent_spec, compilers_o
"I_MPI_ROOT": self.normalize_path("mpi"),
}
compiler_wrapper_commands = self.mpi_compiler_wrappers
wrapper_vars.update(
{
"MPICC": compiler_wrapper_commands["MPICC"],
"MPICXX": compiler_wrapper_commands["MPICXX"],
"MPIF77": compiler_wrapper_commands["MPIF77"],
"MPIF90": compiler_wrapper_commands["MPIF90"],
}
)
# CAUTION - SIMILAR code in:
# var/spack/repos/builtin/packages/mpich/package.py
# var/spack/repos/builtin/packages/openmpi/package.py
# var/spack/repos/builtin/packages/mvapich2/package.py
#
# On Cray, the regular compiler wrappers *are* the MPI wrappers.
if "platform=cray" in self.spec:
# TODO: Confirm
wrapper_vars.update(
{
"MPICC": compilers_of_client["CC"],
"MPICXX": compilers_of_client["CXX"],
"MPIF77": compilers_of_client["F77"],
"MPIF90": compilers_of_client["F90"],
}
)
else:
compiler_wrapper_commands = self.mpi_compiler_wrappers
wrapper_vars.update(
{
"MPICC": compiler_wrapper_commands["MPICC"],
"MPICXX": compiler_wrapper_commands["MPICXX"],
"MPIF77": compiler_wrapper_commands["MPIF77"],
"MPIF90": compiler_wrapper_commands["MPIF90"],
}
)
# Ensure that the directory containing the compiler wrappers is in the
# PATH. Spack packages add `prefix.bin` to their dependents' paths,


@@ -24,6 +24,7 @@ class MSBuildPackage(spack.package_base.PackageBase):
build_system("msbuild")
conflicts("platform=linux", when="build_system=msbuild")
conflicts("platform=darwin", when="build_system=msbuild")
conflicts("platform=cray", when="build_system=msbuild")
@spack.builder.builder("msbuild")


@@ -24,6 +24,7 @@ class NMakePackage(spack.package_base.PackageBase):
build_system("nmake")
conflicts("platform=linux", when="build_system=nmake")
conflicts("platform=darwin", when="build_system=nmake")
conflicts("platform=cray", when="build_system=nmake")
@spack.builder.builder("nmake")
@@ -144,7 +145,7 @@ def install(self, pkg, spec, prefix):
opts += self.nmake_install_args()
if self.makefile_name:
opts.append("/F{}".format(self.makefile_name))
opts.append(self.define("PREFIX", fs.windows_sfn(prefix)))
opts.append(self.define("PREFIX", prefix))
with fs.working_dir(self.build_directory):
inspect.getmodule(self.pkg).nmake(
*opts, *self.install_targets, ignore_quotes=self.ignore_quotes


@@ -36,8 +36,9 @@ class IntelOneApiPackage(Package):
"target=ppc64:",
"target=ppc64le:",
"target=aarch64:",
"platform=darwin",
"platform=windows",
"platform=darwin:",
"platform=cray:",
"platform=windows:",
]:
conflicts(c, msg="This package is only available for x86_64 and Linux")


@@ -138,21 +138,16 @@ def view_file_conflicts(self, view, merge_map):
return conflicts
def add_files_to_view(self, view, merge_map, skip_if_exists=True):
# Patch up shebangs if the package extends Python and we put a Python interpreter in the
# view.
if not self.extendee_spec:
return super().add_files_to_view(view, merge_map, skip_if_exists)
python, *_ = self.spec.dependencies("python-venv") or self.spec.dependencies("python")
if python.external:
# Patch up shebangs to the python linked in the view only if python is built by Spack.
if not self.extendee_spec or self.extendee_spec.external:
return super().add_files_to_view(view, merge_map, skip_if_exists)
# We only patch shebangs in the bin directory.
copied_files: Dict[Tuple[int, int], str] = {} # File identifier -> source
delayed_links: List[Tuple[str, str]] = [] # List of symlinks from merge map
bin_dir = self.spec.prefix.bin
python_prefix = self.extendee_spec.prefix
for src, dst in merge_map.items():
if skip_if_exists and os.path.lexists(dst):
continue
@@ -173,7 +168,7 @@ def add_files_to_view(self, view, merge_map, skip_if_exists=True):
copied_files[(s.st_dev, s.st_ino)] = dst
shutil.copy2(src, dst)
fs.filter_file(
python.prefix, os.path.abspath(view.get_projection_for_spec(self.spec)), dst
python_prefix, os.path.abspath(view.get_projection_for_spec(self.spec)), dst
)
else:
view.link(src, dst)
@@ -204,13 +199,14 @@ def remove_files_from_view(self, view, merge_map):
ignore_namespace = True
bin_dir = self.spec.prefix.bin
global_view = self.extendee_spec.prefix == view.get_projection_for_spec(self.spec)
to_remove = []
for src, dst in merge_map.items():
if ignore_namespace and namespace_init(dst):
continue
if not fs.path_contains_subdirectory(src, bin_dir):
if global_view or not fs.path_contains_subdirectory(src, bin_dir):
to_remove.append(dst)
else:
os.remove(dst)
@@ -366,12 +362,6 @@ def list_url(cls) -> Optional[str]: # type: ignore[override]
return f"https://pypi.org/simple/{name}/"
return None
@property
def python_spec(self):
"""Get python-venv if it exists or python otherwise."""
python, *_ = self.spec.dependencies("python-venv") or self.spec.dependencies("python")
return python
@property
def headers(self) -> HeaderList:
"""Discover header files in platlib."""
@@ -381,9 +371,8 @@ def headers(self) -> HeaderList:
# Headers should only be in include or platlib, but no harm in checking purelib too
include = self.prefix.join(self.spec["python"].package.include).join(name)
python = self.python_spec
platlib = self.prefix.join(python.package.platlib).join(name)
purelib = self.prefix.join(python.package.purelib).join(name)
platlib = self.prefix.join(self.spec["python"].package.platlib).join(name)
purelib = self.prefix.join(self.spec["python"].package.purelib).join(name)
headers_list = map(fs.find_all_headers, [include, platlib, purelib])
headers = functools.reduce(operator.add, headers_list)
@@ -402,9 +391,8 @@ def libs(self) -> LibraryList:
name = self.spec.name[3:]
# Libraries should only be in platlib, but no harm in checking purelib too
python = self.python_spec
platlib = self.prefix.join(python.package.platlib).join(name)
purelib = self.prefix.join(python.package.purelib).join(name)
platlib = self.prefix.join(self.spec["python"].package.platlib).join(name)
purelib = self.prefix.join(self.spec["python"].package.purelib).join(name)
find_all_libraries = functools.partial(fs.find_all_libraries, recursive=True)
libs_list = map(find_all_libraries, [platlib, purelib])
@@ -516,8 +504,6 @@ def global_options(self, spec: Spec, prefix: Prefix) -> Iterable[str]:
def install(self, pkg: PythonPackage, spec: Spec, prefix: Prefix) -> None:
"""Install everything from build directory."""
pip = spec["python"].command
pip.add_default_arg("-m", "pip")
args = PythonPipBuilder.std_args(pkg) + [f"--prefix={prefix}"]
@@ -533,6 +519,14 @@ def install(self, pkg: PythonPackage, spec: Spec, prefix: Prefix) -> None:
else:
args.append(".")
pip = spec["python"].command
# Hide user packages, since we don't have build isolation. This is
# necessary because pip / setuptools may run hooks from arbitrary
# packages during the build. There is no equivalent variable to hide
# system packages, so this is not reliable for external Python.
pip.add_default_env("PYTHONNOUSERSITE", "1")
pip.add_default_arg("-m")
pip.add_default_arg("pip")
with fs.working_dir(self.build_directory):
pip(*args)


@@ -139,10 +139,6 @@ def configure(self, pkg, spec, prefix):
args = ["--verbose", "--target-dir", inspect.getmodule(self.pkg).python_platlib]
args.extend(self.configure_args())
# https://github.com/Python-SIP/sip/commit/cb0be6cb6e9b756b8b0db3136efb014f6fb9b766
if spec["py-sip"].satisfies("@6.1.0:"):
args.extend(["--scripts-dir", pkg.prefix.bin])
sip_build = Executable(spec["py-sip"].prefix.bin.join("sip-build"))
sip_build(*args)


@@ -34,8 +34,6 @@ def _misc_cache():
return spack.util.file_cache.FileCache(path)
FileCacheType = Union[spack.util.file_cache.FileCache, llnl.util.lang.Singleton]
#: Spack's cache for small data
MISC_CACHE: Union[spack.util.file_cache.FileCache, llnl.util.lang.Singleton] = (
llnl.util.lang.Singleton(_misc_cache)


@@ -22,8 +22,6 @@
from urllib.parse import urlencode
from urllib.request import HTTPHandler, Request, build_opener
import ruamel.yaml
import llnl.util.filesystem as fs
import llnl.util.tty as tty
from llnl.util.lang import memoized
@@ -46,7 +44,6 @@
from spack import traverse
from spack.error import SpackError
from spack.reporters import CDash, CDashConfiguration
from spack.reporters.cdash import SPACK_CDASH_TIMEOUT
from spack.reporters.cdash import build_stamp as cdash_build_stamp
# See https://docs.gitlab.com/ee/ci/yaml/#retry for descriptions of conditions
@@ -71,7 +68,7 @@
# TODO: Remove this in Spack 0.23
SHARED_PR_MIRROR_URL = "s3://spack-binaries-prs/shared_pr_mirror"
JOB_NAME_FORMAT = (
"{name}{@version} {/hash:7} {%compiler.name}{@compiler.version}{ arch=architecture}"
"{name}{@version} {/hash:7} {%compiler.name}{@compiler.version}{arch=architecture}"
)
IS_WINDOWS = sys.platform == "win32"
spack_gpg = spack.main.SpackCommand("gpg")
@@ -553,9 +550,10 @@ def generate_gitlab_ci_yaml(
env,
print_summary,
output_file,
*,
prune_dag=False,
check_index_only=False,
run_optimizer=False,
use_dependencies=False,
artifacts_root=None,
remote_mirror_override=None,
):
@@ -576,6 +574,12 @@ def generate_gitlab_ci_yaml(
this mode results in faster yaml generation time). Otherwise, also
check each spec directly by url (useful if there is no index or it
might be out of date).
run_optimizer (bool): If True, post-process the generated yaml to try
to reduce the size (attempts to collect repeated configuration
and replace it with definitions).
use_dependencies (bool): If true, use "dependencies" rather than "needs"
("needs" allows DAG scheduling). Useful if the gitlab instance cannot
be configured to handle more than a few "needs" per job.
artifacts_root (str): Path where artifacts like logs, environment
files (spack.yaml, spack.lock), etc should be written. GitLab
requires this to be within the project directory.
@@ -679,22 +683,6 @@ def generate_gitlab_ci_yaml(
"instead.",
)
def ensure_expected_target_path(path):
"""Returns passed paths with all Windows path separators exchanged
for posix separators only if copy_only_pipeline is enabled
This is required as copy_only_pipelines are a unique scenario where
the generate job and child pipelines are run on different platforms.
To make this compatible w/ Windows, we cannot write Windows style path separators
that will be consumed on by the Posix copy job runner.
TODO (johnwparent): Refactor config + cli read/write to deal only in posix
style paths
"""
if copy_only_pipeline and path:
path = path.replace("\\", "/")
return path
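For illustration, the conversion this helper performs when copy_only_pipeline is enabled (the path is made up):

# ensure_expected_target_path("concrete_env\\spack.yaml") -> "concrete_env/spack.yaml"
assert "concrete_env\\spack.yaml".replace("\\", "/") == "concrete_env/spack.yaml"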
pipeline_mirrors = spack.mirror.MirrorCollection(binary=True)
deprecated_mirror_config = False
buildcache_destination = None
@@ -809,8 +797,7 @@ def ensure_expected_target_path(path):
cli_scopes = [
os.path.relpath(s.path, concrete_env_dir)
for s in cfg.scopes().values()
if not s.writable
and isinstance(s, (cfg.DirectoryConfigScope))
if isinstance(s, cfg.ImmutableConfigScope)
and s.path not in env_includes
and os.path.exists(s.path)
]
@@ -819,7 +806,7 @@ def ensure_expected_target_path(path):
if scope not in include_scopes and scope not in env_includes:
include_scopes.insert(0, scope)
env_includes.extend(include_scopes)
env_yaml_root["spack"]["include"] = [ensure_expected_target_path(i) for i in env_includes]
env_yaml_root["spack"]["include"] = env_includes
if "gitlab-ci" in env_yaml_root["spack"] and "ci" not in env_yaml_root["spack"]:
env_yaml_root["spack"]["ci"] = env_yaml_root["spack"].pop("gitlab-ci")
@@ -1240,9 +1227,6 @@ def main_script_replacements(cmd):
"SPACK_REBUILD_EVERYTHING": str(rebuild_everything),
"SPACK_REQUIRE_SIGNING": os.environ.get("SPACK_REQUIRE_SIGNING", "False"),
}
output_vars = output_object["variables"]
for item, val in output_vars.items():
output_vars[item] = ensure_expected_target_path(val)
# TODO: Remove this block in Spack 0.23
if deprecated_mirror_config and remote_mirror_override:
@@ -1267,6 +1251,17 @@ def main_script_replacements(cmd):
with open(copy_specs_file, "w") as fd:
fd.write(json.dumps(buildcache_copies))
# TODO(opadron): remove this or refactor
if run_optimizer:
import spack.ci_optimization as ci_opt
output_object = ci_opt.optimizer(output_object)
# TODO(opadron): remove this or refactor
if use_dependencies:
import spack.ci_needs_workaround as cinw
output_object = cinw.needs_to_dependencies(output_object)
else:
# No jobs were generated
noop_job = spack_ci_ir["jobs"]["noop"]["attributes"]
@@ -1288,6 +1283,7 @@ def main_script_replacements(cmd):
sorted_output = {}
for output_key, output_value in sorted(output_object.items()):
sorted_output[output_key] = output_value
if known_broken_specs_encountered:
tty.error("This pipeline generated hashes known to be broken on develop:")
display_broken_spec_messages(broken_specs_url, known_broken_specs_encountered)
@@ -1295,11 +1291,8 @@ def main_script_replacements(cmd):
if not rebuild_everything:
sys.exit(1)
# Minimize yaml output size through use of anchors
syaml.anchorify(sorted_output)
with open(output_file, "w") as f:
ruamel.yaml.YAML().dump(sorted_output, f)
with open(output_file, "w") as outf:
outf.write(syaml.dump(sorted_output, default_flow_style=True))
def _url_encode_string(input_string):
@@ -1485,12 +1478,6 @@ def copy_test_logs_to_artifacts(test_stage, job_test_dir):
copy_files_to_artifacts(os.path.join(test_stage, "*", "*.txt"), job_test_dir)
def win_quote(quote_str: str) -> str:
if IS_WINDOWS:
quote_str = f'"{quote_str}"'
return quote_str
def download_and_extract_artifacts(url, work_dir):
"""Look for gitlab artifacts.zip at the given url, and attempt to download
and extract the contents into the given work_dir
@@ -1513,7 +1500,7 @@ def download_and_extract_artifacts(url, work_dir):
request = Request(url, headers=headers)
request.get_method = lambda: "GET"
response = opener.open(request, timeout=SPACK_CDASH_TIMEOUT)
response = opener.open(request)
response_code = response.getcode()
if response_code != 200:
@@ -1955,9 +1942,9 @@ def compose_command_err_handling(args):
# but we need to handle EXEs (git, etc) ourselves
catch_exe_failure = (
"""
if ($LASTEXITCODE -ne 0){{
throw 'Command {} has failed'
}}
if ($LASTEXITCODE -ne 0){
throw "Command {} has failed"
}
"""
if IS_WINDOWS
else ""
@@ -2189,13 +2176,13 @@ def __init__(self, ci_cdash):
def args(self):
return [
"--cdash-upload-url",
win_quote(self.upload_url),
self.upload_url,
"--cdash-build",
win_quote(self.build_name),
self.build_name,
"--cdash-site",
win_quote(self.site),
self.site,
"--cdash-buildstamp",
win_quote(self.build_stamp),
self.build_stamp,
]
@property # type: ignore
@@ -2261,7 +2248,7 @@ def create_buildgroup(self, opener, headers, url, group_name, group_type):
request = Request(url, data=enc_data, headers=headers)
response = opener.open(request, timeout=SPACK_CDASH_TIMEOUT)
response = opener.open(request)
response_code = response.getcode()
if response_code not in [200, 201]:
@@ -2307,7 +2294,7 @@ def populate_buildgroup(self, job_names):
request = Request(url, data=enc_data, headers=headers)
request.get_method = lambda: "PUT"
response = opener.open(request, timeout=SPACK_CDASH_TIMEOUT)
response = opener.open(request)
response_code = response.getcode()
if response_code != 200:


@@ -0,0 +1,34 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import collections.abc
get_job_name = lambda needs_entry: (
needs_entry.get("job")
if (isinstance(needs_entry, collections.abc.Mapping) and needs_entry.get("artifacts", True))
else needs_entry if isinstance(needs_entry, str) else None
)
def convert_job(job_entry):
if not isinstance(job_entry, collections.abc.Mapping):
return job_entry
needs = job_entry.get("needs")
if needs is None:
return job_entry
new_job = {}
new_job.update(job_entry)
del new_job["needs"]
new_job["dependencies"] = list(
filter((lambda x: x is not None), (get_job_name(needs_entry) for needs_entry in needs))
)
return new_job
def needs_to_dependencies(yaml):
return dict((k, convert_job(v)) for k, v in yaml.items())
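A small example of the conversion this module performs, assuming the spack.ci_needs_workaround import path used elsewhere in this diff (job names and scripts are made up):

from spack.ci_needs_workaround import needs_to_dependencies

jobs = {
    "build-zlib": {"needs": [{"job": "generate", "artifacts": True}], "script": ["spack install"]},
    "stages": ["generate", "build"],  # non-mapping values pass through unchanged
}
converted = needs_to_dependencies(jobs)
assert converted["build-zlib"] == {"dependencies": ["generate"], "script": ["spack install"]}
assert converted["stages"] == ["generate", "build"]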


@@ -0,0 +1,363 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import collections
import collections.abc
import copy
import hashlib
import spack.util.spack_yaml as syaml
def sort_yaml_obj(obj):
if isinstance(obj, collections.abc.Mapping):
return syaml.syaml_dict(
(k, sort_yaml_obj(v)) for k, v in sorted(obj.items(), key=(lambda item: str(item[0])))
)
if isinstance(obj, collections.abc.Sequence) and not isinstance(obj, str):
return syaml.syaml_list(sort_yaml_obj(x) for x in obj)
return obj
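A quick check of the recursive ordering, assuming the spack.ci_optimization import path used elsewhere in this diff:

from spack.ci_optimization import sort_yaml_obj

out = sort_yaml_obj({"b": [2, 1], "a": {"y": 0, "x": 0}})
assert list(out.keys()) == ["a", "b"]  # mappings are key-sorted, recursively
assert out["b"] == [2, 1]              # sequences keep their element order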
def matches(obj, proto):
"""Returns True if the test object "obj" matches the prototype object
"proto".
If obj and proto are mappings, obj matches proto if (key in obj) and
(obj[key] matches proto[key]) for every key in proto.
If obj and proto are sequences, obj matches proto if they are of the same
length and (a matches b) for every (a,b) in zip(obj, proto).
Otherwise, obj matches proto if obj == proto.
Precondition: proto must not have any reference cycles
"""
if isinstance(obj, collections.abc.Mapping):
if not isinstance(proto, collections.abc.Mapping):
return False
return all((key in obj and matches(obj[key], val)) for key, val in proto.items())
if isinstance(obj, collections.abc.Sequence) and not isinstance(obj, str):
if not (isinstance(proto, collections.abc.Sequence) and not isinstance(proto, str)):
return False
if len(obj) != len(proto):
return False
return all(matches(obj[index], val) for index, val in enumerate(proto))
return obj == proto
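Some illustrative cases of these rules (the job fragments are made up):

from spack.ci_optimization import matches

assert matches({"tags": ["x86_64"], "script": ["make"]}, {"tags": ["x86_64"]})
assert not matches({"tags": ["x86_64", "large"]}, {"tags": ["x86_64"]})  # lengths differ
assert matches("always", "always")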
def subkeys(obj, proto):
"""Returns the test mapping "obj" after factoring out the items it has in
common with the prototype mapping "proto".
Consider a recursive merge operation, merge(a, b) on mappings a and b, that
returns a mapping, m, whose keys are the union of the keys of a and b, and
for every such key, "k", its corresponding value is:
- merge(a[key], b[key]) if a[key] and b[key] are mappings, or
- b[key] if (key in b) and not matches(a[key], b[key]),
or
- a[key] otherwise
If obj and proto are mappings, the returned object is the smallest object,
"a", such that merge(a, proto) matches obj.
Otherwise, obj is returned.
"""
if not (
isinstance(obj, collections.abc.Mapping) and isinstance(proto, collections.abc.Mapping)
):
return obj
new_obj = {}
for key, value in obj.items():
if key not in proto:
new_obj[key] = value
continue
if matches(value, proto[key]) and matches(proto[key], value):
continue
if isinstance(value, collections.abc.Mapping):
new_obj[key] = subkeys(value, proto[key])
continue
new_obj[key] = value
return new_obj
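For example, factoring a prototype out of a job leaves only the job-specific keys (values are made up):

from spack.ci_optimization import subkeys

job = {"tags": ["x86_64"], "retry": 2, "script": ["spack install"]}
assert subkeys(job, {"tags": ["x86_64"], "retry": 2}) == {"script": ["spack install"]}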
def add_extends(yaml, key):
"""Modifies the given object "yaml" so that it includes an "extends" key
whose value features "key".
If "extends" is not in yaml, then yaml is modified such that
yaml["extends"] == key.
If yaml["extends"] is a str, then yaml is modified such that
yaml["extends"] == [yaml["extends"], key]
If yaml["extends"] is a list that does not include key, then key is
appended to the list.
Otherwise, yaml is left unchanged.
"""
has_key = "extends" in yaml
extends = yaml.get("extends")
if has_key and not isinstance(extends, (str, collections.abc.Sequence)):
return
if extends is None:
yaml["extends"] = key
return
if isinstance(extends, str):
if extends != key:
yaml["extends"] = [extends, key]
return
if key not in extends:
extends.append(key)
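The three cases in sequence (the key names are made up):

from spack.ci_optimization import add_extends

job = {}
add_extends(job, ".c0")
assert job == {"extends": ".c0"}           # no "extends" yet: set as a string
add_extends(job, ".c1")
assert job == {"extends": [".c0", ".c1"]}  # str: promoted to a list
add_extends(job, ".c1")
assert job == {"extends": [".c0", ".c1"]}  # already present: unchanged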
def common_subobject(yaml, sub):
"""Factor prototype object "sub" out of the values of mapping "yaml".
Consider a modified copy of yaml, "new", where for each key, "key" in yaml:
- If yaml[key] matches sub, then new[key] = subkeys(yaml[key], sub).
- Otherwise, new[key] = yaml[key].
If the above match criterion is not satisfied for any such key, then (yaml,
None) is returned and the yaml object is left unchanged.
Otherwise, each matching value in new is modified as in
add_extends(new[key], common_key), and then new[common_key] is set to sub.
The common_key value is chosen such that it does not match any preexisting
key in new. In this case, (new, common_key) is returned.
"""
match_list = set(k for k, v in yaml.items() if matches(v, sub))
if not match_list:
return yaml, None
common_prefix = ".c"
common_index = 0
while True:
common_key = "".join((common_prefix, str(common_index)))
if common_key not in yaml:
break
common_index += 1
new_yaml = {}
for key, val in yaml.items():
new_yaml[key] = copy.deepcopy(val)
if not matches(val, sub):
continue
new_yaml[key] = subkeys(new_yaml[key], sub)
add_extends(new_yaml[key], common_key)
new_yaml[common_key] = sub
return new_yaml, common_key
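A small end-to-end example of the factorization (job contents are made up):

from spack.ci_optimization import common_subobject

pipeline = {
    "job-a": {"tags": ["x86_64"], "script": ["make a"]},
    "job-b": {"tags": ["x86_64"], "script": ["make b"]},
}
new, key = common_subobject(pipeline, {"tags": ["x86_64"]})
assert key == ".c0"
assert new[".c0"] == {"tags": ["x86_64"]}
assert new["job-a"] == {"script": ["make a"], "extends": ".c0"}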
def print_delta(name, old, new, applied=None):
delta = new - old
reldelta = (1000 * delta) // old
reldelta = (reldelta // 10, reldelta % 10)
if applied is None:
applied = new <= old
print(
"\n".join(
(
"{0} {1}:",
" before: {2: 10d}",
" after : {3: 10d}",
" delta : {4:+10d} ({5:=+3d}.{6}%)",
)
).format(name, ("+" if applied else "x"), old, new, delta, reldelta[0], reldelta[1])
)
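A worked instance of the arithmetic above, for old=2000 and new=1500:

old, new = 2000, 1500
reldelta = (1000 * (new - old)) // old              # -250, i.e. tenths of a percent
assert (reldelta // 10, reldelta % 10) == (-25, 0)  # rendered as "-25.0%"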
def try_optimization_pass(name, yaml, optimization_pass, *args, **kwargs):
"""Try applying an optimization pass and return information about the
result
"name" is a string describing the nature of the pass. If it is a non-empty
string, summary statistics are also printed to stdout.
"yaml" is the object to apply the pass to.
"optimization_pass" is the function implementing the pass to be applied.
"args" and "kwargs" are the additional arguments to pass to optimization
pass. The pass is applied as
>>> (new_yaml, *other_results) = optimization_pass(yaml, *args, **kwargs)
The pass's results are greedily rejected if it does not modify the original
yaml document, or if it produces a yaml document that serializes to a
larger string.
Returns (new_yaml, yaml, applied, other_results) if applied, or
(yaml, new_yaml, applied, other_results) otherwise.
"""
result = optimization_pass(yaml, *args, **kwargs)
new_yaml, other_results = result[0], result[1:]
if new_yaml is yaml:
# pass was not applied
return (yaml, new_yaml, False, other_results)
pre_size = len(syaml.dump_config(sort_yaml_obj(yaml), default_flow_style=True))
post_size = len(syaml.dump_config(sort_yaml_obj(new_yaml), default_flow_style=True))
# pass makes the size worse: not applying
applied = post_size <= pre_size
if applied:
yaml, new_yaml = new_yaml, yaml
if name:
print_delta(name, pre_size, post_size, applied)
return (yaml, new_yaml, applied, other_results)
def build_histogram(iterator, key):
"""Builds a histogram of values given an iterable of mappings and a key.
For each mapping "m" with key "key" in iterator, the value m[key] is
considered.
Returns a list of tuples (hash, count, proportion, value), where
- "hash" is a sha1sum hash of the value.
- "count" is the number of occurences of values that hash to "hash".
- "proportion" is the proportion of all values considered above that
hash to "hash".
- "value" is one of the values considered above that hash to "hash".
Which value is chosen when multiple values hash to the same "hash" is
undefined.
The list is sorted in descending order by count, yielding the most
frequently occurring hashes first.
"""
buckets = collections.defaultdict(int)
values = {}
num_objects = 0
for obj in iterator:
num_objects += 1
try:
val = obj[key]
except (KeyError, TypeError):
continue
value_hash = hashlib.sha1()
value_hash.update(syaml.dump_config(sort_yaml_obj(val)).encode())
value_hash = value_hash.hexdigest()
buckets[value_hash] += 1
values[value_hash] = val
return [
(h, buckets[h], float(buckets[h]) / num_objects, values[h])
for h in sorted(buckets.keys(), key=lambda k: -buckets[k])
]
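A sketch of the histogram on a made-up job list, assuming the spack.ci_optimization import path used elsewhere in this diff:

from spack.ci_optimization import build_histogram

jobs = [{"tags": ["x86_64"]}, {"tags": ["x86_64"]}, {"tags": ["aarch64"]}, {"script": ["make"]}]
top_hash, count, proportion, value = build_histogram(jobs, "tags")[0]
assert count == 2 and proportion == 0.5 and value == ["x86_64"]
# the job without "tags" is skipped, but still counts toward the denominator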
def optimizer(yaml):
original_size = len(syaml.dump_config(sort_yaml_obj(yaml), default_flow_style=True))
# try factoring out commonly repeated portions
common_job = {
"variables": {"SPACK_COMPILER_ACTION": "NONE"},
"after_script": ['rm -rf "./spack"'],
"artifacts": {"paths": ["jobs_scratch_dir", "cdash_report"], "when": "always"},
}
# look for a list of tags that appear frequently
_, count, proportion, tags = next(iter(build_histogram(yaml.values(), "tags")), (None,) * 4)
# If a list of tags is found, and more than one job uses it,
# *and* the jobs that do use it represent at least 70% of all jobs, then
# add the list to the prototype object.
if tags and count > 1 and proportion >= 0.70:
common_job["tags"] = tags
# apply common object factorization
yaml, other, applied, rest = try_optimization_pass(
"general common object factorization", yaml, common_subobject, common_job
)
# look for a common script, and try factoring that out
_, count, proportion, script = next(
iter(build_histogram(yaml.values(), "script")), (None,) * 4
)
if script and count > 1 and proportion >= 0.70:
yaml, other, applied, rest = try_optimization_pass(
"script factorization", yaml, common_subobject, {"script": script}
)
# look for a common before_script, and try factoring that out
_, count, proportion, script = next(
iter(build_histogram(yaml.values(), "before_script")), (None,) * 4
)
if script and count > 1 and proportion >= 0.70:
yaml, other, applied, rest = try_optimization_pass(
"before_script factorization", yaml, common_subobject, {"before_script": script}
)
# Look specifically for the SPACK_ROOT_SPEC environment variables.
# Try to factor them out.
h = build_histogram(
(getattr(val, "get", lambda *args: {})("variables") for val in yaml.values()),
"SPACK_ROOT_SPEC",
)
# In this case, we try to factor out *all* instances of the SPACK_ROOT_SPEC
# environment variable; not just the one that appears with the greatest
# frequency. We only require that more than 1 job uses a given instance's
# value, because we expect the value to be very large, and so expect even
# few-to-one factorizations to yield large space savings.
counter = 0
for _, count, proportion, spec in h:
if count <= 1:
continue
counter += 1
yaml, other, applied, rest = try_optimization_pass(
"SPACK_ROOT_SPEC factorization ({count})".format(count=counter),
yaml,
common_subobject,
{"variables": {"SPACK_ROOT_SPEC": spec}},
)
new_size = len(syaml.dump_config(sort_yaml_obj(yaml), default_flow_style=True))
print("\n")
print_delta("overall summary", original_size, new_size)
print("\n")
return yaml


@@ -237,7 +237,7 @@ def ensure_single_spec_or_die(spec, matching_specs):
if len(matching_specs) <= 1:
return
format_string = "{name}{@version}{%compiler.name}{@compiler.version}{ arch=architecture}"
format_string = "{name}{@version}{%compiler.name}{@compiler.version}{arch=architecture}"
args = ["%s matches multiple packages." % spec, "Matching packages:"]
args += [
colorize(" @K{%s} " % s.dag_hash(7)) + s.cformat(format_string) for s in matching_specs
@@ -336,7 +336,6 @@ def display_specs(specs, args=None, **kwargs):
groups (bool): display specs grouped by arch/compiler (default True)
decorator (typing.Callable): function to call to decorate specs
all_headers (bool): show headers even when arch/compiler aren't defined
status_fn (typing.Callable): if provided, prepend install-status info
output (typing.IO): A file object to write to. Default is ``sys.stdout``
"""
@@ -360,7 +359,6 @@ def get_arg(name, default=None):
groups = get_arg("groups", True)
all_headers = get_arg("all_headers", False)
output = get_arg("output", sys.stdout)
status_fn = get_arg("status_fn", None)
decorator = get_arg("decorator", None)
if decorator is None:
@@ -388,13 +386,6 @@ def get_arg(name, default=None):
def fmt(s, depth=0):
"""Formatter function for all output specs"""
string = ""
if status_fn:
# This was copied from spec.tree's colorization logic
# then shortened because it seems like status_fn should
# always return an InstallStatus
string += colorize(status_fn(s).value)
if hashes:
string += gray_hash(s, hlen) + " "
string += depth * " "
@@ -453,7 +444,7 @@ def format_list(specs):
def filter_loaded_specs(specs):
"""Filter a list of specs returning only those that are
currently loaded."""
hashes = os.environ.get(uenv.spack_loaded_hashes_var, "").split(os.pathsep)
hashes = os.environ.get(uenv.spack_loaded_hashes_var, "").split(":")
return [x for x in specs if x.dag_hash() in hashes]
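The switch from a literal ":" to os.pathsep matters on Windows, where the separator differs (presumably matching how the loaded-hashes variable is written):

import os

print(os.pathsep)  # ":" on POSIX, ";" on Windows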


@@ -165,7 +165,7 @@ def _reset(args):
if not ok_to_continue:
raise RuntimeError("Aborting")
for scope in spack.config.CONFIG.writable_scopes:
for scope in spack.config.CONFIG.file_scopes:
# The default scope should stay untouched
if scope.name == "defaults":
continue


@@ -13,6 +13,7 @@
import shutil
import sys
import tempfile
import urllib.request
from typing import Dict, List, Optional, Tuple, Union
import llnl.util.tty as tty
@@ -53,7 +54,6 @@
from spack.oci.oci import (
copy_missing_layers_with_retry,
get_manifest_and_config_with_retry,
list_tags,
upload_blob_with_retry,
upload_manifest_with_retry,
)
@@ -70,6 +70,12 @@ def setup_parser(subparser: argparse.ArgumentParser):
push = subparsers.add_parser("push", aliases=["create"], help=push_fn.__doc__)
push.add_argument("-f", "--force", action="store_true", help="overwrite tarball if it exists")
push.add_argument(
"--allow-root",
"-a",
action="store_true",
help="allow install root string in binary files after RPATH substitution",
)
push_sign = push.add_mutually_exclusive_group(required=False)
push_sign.add_argument(
"--unsigned",
@@ -184,6 +190,10 @@ def setup_parser(subparser: argparse.ArgumentParser):
keys.add_argument("-f", "--force", action="store_true", help="force new download of keys")
keys.set_defaults(func=keys_fn)
preview = subparsers.add_parser("preview", help=preview_fn.__doc__)
arguments.add_common_arguments(preview, ["installed_specs"])
preview.set_defaults(func=preview_fn)
# Check if binaries need to be rebuilt on remote mirror
check = subparsers.add_parser("check", help=check_fn.__doc__)
check.add_argument(
@@ -394,6 +404,11 @@ def push_fn(args):
else:
roots = spack.cmd.require_active_env(cmd_name="buildcache push").concrete_roots()
if args.allow_root:
tty.warn(
"The flag `--allow-root` is the default in Spack 0.21, will be removed in Spack 0.22"
)
mirror: spack.mirror.Mirror = args.mirror
# Check if this is an OCI image.
@@ -841,7 +856,10 @@ def _config_from_tag(image_ref: ImageReference, tag: str) -> Optional[dict]:
def _update_index_oci(image_ref: ImageReference, tmpdir: str, pool: MaybePool) -> None:
tags = list_tags(image_ref)
request = urllib.request.Request(url=image_ref.tags_url())
response = spack.oci.opener.urlopen(request)
spack.oci.opener.ensure_status(request, response, 200)
tags = json.load(response)["tags"]
# Fetch all image config files in parallel
spec_dicts = pool.starmap(
@@ -945,6 +963,14 @@ def keys_fn(args):
bindist.get_keys(args.install, args.trust, args.force)
def preview_fn(args):
"""analyze an installed spec and reports whether executables and libraries are relocatable"""
tty.warn(
"`spack buildcache preview` is deprecated since `spack buildcache push --allow-root` is "
"now the default. This command will be removed in Spack 0.22"
)
def check_fn(args: argparse.Namespace):
"""check specs against remote binary mirror(s) to see if any need to be rebuilt


@@ -6,7 +6,6 @@
import json
import os
import shutil
import warnings
from urllib.parse import urlparse, urlunparse
import llnl.util.filesystem as fs
@@ -32,6 +31,7 @@
level = "long"
SPACK_COMMAND = "spack"
MAKE_COMMAND = "make"
INSTALL_FAIL_CODE = 1
FAILED_CREATE_BUILDCACHE_CODE = 100
@@ -40,12 +40,6 @@ def deindent(desc):
return desc.replace("    ", "")
def unicode_escape(path: str) -> str:
"""Returns transformed path with any unicode
characters replaced with their corresponding escapes"""
return path.encode("unicode-escape").decode("utf-8")
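For example, non-ASCII characters become backslash escapes while ASCII is untouched (the path is made up):

assert "päth".encode("unicode-escape").decode("utf-8") == "p\\xe4th"
assert "plain".encode("unicode-escape").decode("utf-8") == "plain"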
def setup_parser(subparser):
setup_parser.parser = subparser
subparsers = subparser.add_subparsers(help="CI sub-commands")
@@ -74,7 +68,7 @@ def setup_parser(subparser):
"--optimize",
action="store_true",
default=False,
help="(DEPRECATED) optimize the gitlab yaml file for size\n\n"
help="(experimental) optimize the gitlab yaml file for size\n\n"
"run the generated document through a series of optimization passes "
"designed to reduce the size of the generated file",
)
@@ -82,7 +76,7 @@ def setup_parser(subparser):
"--dependencies",
action="store_true",
default=False,
help="(DEPRECATED) disable DAG scheduling (use 'plain' dependencies)",
help="(experimental) disable DAG scheduling (use 'plain' dependencies)",
)
generate.add_argument(
"--buildcache-destination",
@@ -201,18 +195,6 @@ def ci_generate(args):
before invoking this command. the value must be the CDash authorization token needed to create
a build group and register all generated jobs under it
"""
if args.optimize:
warnings.warn(
"The --optimize option has been deprecated, and currently has no effect. "
"It will be removed in Spack v0.24."
)
if args.dependencies:
warnings.warn(
"The --dependencies option has been deprecated, and currently has no effect. "
"It will be removed in Spack v0.24."
)
env = spack.cmd.require_active_env(cmd_name="ci generate")
if args.copy_to:
@@ -225,6 +207,8 @@ def ci_generate(args):
output_file = args.output_file
copy_yaml_to = args.copy_to
run_optimizer = args.optimize
use_dependencies = args.dependencies
prune_dag = args.prune_dag
index_only = args.index_only
artifacts_root = args.artifacts_root
@@ -245,6 +229,8 @@ def ci_generate(args):
output_file,
prune_dag=prune_dag,
check_index_only=index_only,
run_optimizer=run_optimizer,
use_dependencies=use_dependencies,
artifacts_root=artifacts_root,
remote_mirror_override=buildcache_destination,
)
@@ -565,35 +551,75 @@ def ci_rebuild(args):
# No hash match anywhere means we need to rebuild spec
# Start with spack arguments
spack_cmd = [SPACK_COMMAND, "--color=always", "--backtrace", "--verbose", "install"]
spack_cmd = [SPACK_COMMAND, "--color=always", "--backtrace", "--verbose"]
config = cfg.get("config")
if not config["verify_ssl"]:
spack_cmd.append("-k")
install_args = [f'--use-buildcache={spack_ci.win_quote("package:never,dependencies:only")}']
install_args = []
can_verify = spack_ci.can_verify_binaries()
verify_binaries = can_verify and spack_is_pr_pipeline is False
if not verify_binaries:
install_args.append("--no-check-signature")
slash_hash = spack_ci.win_quote("/" + job_spec.dag_hash())
slash_hash = "/{}".format(job_spec.dag_hash())
# Arguments when installing dependencies from cache
deps_install_args = install_args
# Arguments when installing the root from sources
deps_install_args = install_args + ["--only=dependencies"]
root_install_args = install_args + ["--keep-stage", "--only=package"]
root_install_args = install_args + [
"--keep-stage",
"--only=package",
"--use-buildcache=package:never,dependencies:only",
]
if cdash_handler:
# Add additional arguments to `spack install` for CDash reporting.
root_install_args.extend(cdash_handler.args())
root_install_args.append(slash_hash)
# ["x", "y"] -> "'x' 'y'"
args_to_string = lambda args: " ".join("'{}'".format(arg) for arg in args)
commands = [
# apparently there's a race when spack bootstraps? do it up front once
[SPACK_COMMAND, "-e", unicode_escape(env.path), "bootstrap", "now"],
spack_cmd + deps_install_args + [slash_hash],
spack_cmd + root_install_args + [slash_hash],
[SPACK_COMMAND, "-e", env.path, "bootstrap", "now"],
[
SPACK_COMMAND,
"-e",
env.path,
"env",
"depfile",
"-o",
"Makefile",
"--use-buildcache=package:never,dependencies:only",
slash_hash, # limit to spec we're building
],
[
# --output-sync requires GNU make 4.x.
# Old make errors when you pass it a flag it doesn't recognize,
# but it doesn't error or warn when you set unrecognized flags in
# this variable.
"export",
"GNUMAKEFLAGS=--output-sync=recurse",
],
[
MAKE_COMMAND,
"SPACK={}".format(args_to_string(spack_cmd)),
"SPACK_COLOR=always",
"SPACK_INSTALL_FLAGS={}".format(args_to_string(deps_install_args)),
"-j$(nproc)",
"install-deps/{}".format(
spack.environment.depfile.MakefileSpec(job_spec).safe_format(
"{name}-{version}-{hash}"
)
),
],
spack_cmd + ["install"] + root_install_args,
]
tty.debug("Installing {0} from source".format(job_spec.name))
install_exit_code = spack_ci.process_command("install", commands, repro_dir)


@@ -106,8 +106,7 @@ def clean(parser, args):
# Then do the cleaning falling through the cases
if args.specs:
specs = spack.cmd.parse_specs(args.specs, concretize=False)
specs = list(spack.cmd.matching_spec_from_env(x) for x in specs)
specs = spack.cmd.parse_specs(args.specs, concretize=True)
for spec in specs:
msg = "Cleaning build stage [{0}]"
tty.msg(msg.format(spec.short_spec))


@@ -11,6 +11,7 @@
from argparse import ArgumentParser, Namespace
from typing import IO, Any, Callable, Dict, Iterable, List, Optional, Sequence, Set, Tuple, Union
import llnl.util.filesystem as fs
import llnl.util.tty as tty
from llnl.util.argparsewriter import ArgparseRstWriter, ArgparseWriter, Command
from llnl.util.tty.colify import colify
@@ -866,6 +867,9 @@ def _commands(parser: ArgumentParser, args: Namespace) -> None:
prepend_header(args, f)
formatter(args, f)
if args.update_completion:
fs.set_executable(args.update)
else:
prepend_header(args, sys.stdout)
formatter(args, sys.stdout)


@@ -3,9 +3,6 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import llnl.util.tty as tty
from llnl.string import plural
import spack.cmd
import spack.cmd.common.arguments
import spack.environment as ev
@@ -46,9 +43,5 @@ def concretize(parser, args):
with env.write_transaction():
concretized_specs = env.concretize(force=args.force, tests=tests)
if not args.quiet:
if concretized_specs:
tty.msg(f"Concretized {plural(len(concretized_specs), 'spec')}:")
ev.display_specs([concrete for _, concrete in concretized_specs])
else:
tty.msg("No new specs to concretize.")
ev.display_specs(concretized_specs)
env.write()


@@ -156,7 +156,7 @@ def print_flattened_configuration(*, blame: bool) -> None:
"""
env = ev.active_environment()
if env is not None:
pristine = env.manifest.yaml_content
pristine = env.manifest.pristine_yaml_content
flattened = pristine.copy()
flattened[spack.schema.env.TOP_LEVEL_KEY] = pristine[spack.schema.env.TOP_LEVEL_KEY].copy()
else:
@@ -264,9 +264,7 @@ def config_remove(args):
def _can_update_config_file(scope: spack.config.ConfigScope, cfg_file):
if isinstance(scope, spack.config.SingleFileScope):
return fs.can_access(cfg_file)
elif isinstance(scope, spack.config.DirectoryConfigScope):
return fs.can_write_to_dir(scope.path) and fs.can_access(cfg_file)
return False
return fs.can_write_to_dir(scope.path) and fs.can_access(cfg_file)
def _config_change_requires_scope(path, spec, scope, match_spec=None):
@@ -364,11 +362,14 @@ def config_change(args):
def config_update(args):
# Read the configuration files
spack.config.CONFIG.get_config(args.section, scope=args.scope)
updates: List[spack.config.ConfigScope] = [
x
for x in spack.config.CONFIG.format_updates[args.section]
if not isinstance(x, spack.config.InternalConfigScope) and x.writable
]
updates: List[spack.config.ConfigScope] = list(
filter(
lambda s: not isinstance(
s, (spack.config.InternalConfigScope, spack.config.ImmutableConfigScope)
),
spack.config.CONFIG.format_updates[args.section],
)
)
cannot_overwrite, skip_system_scope = [], False
for scope in updates:
@@ -446,7 +447,7 @@ def _can_revert_update(scope_dir, cfg_file, bkp_file):
def config_revert(args):
scopes = [args.scope] if args.scope else [x.name for x in spack.config.CONFIG.writable_scopes]
scopes = [args.scope] if args.scope else [x.name for x in spack.config.CONFIG.file_scopes]
# Search for backup files in the configuration scopes
Entry = collections.namedtuple("Entry", ["scope", "cfg", "bkp"])


@@ -2,6 +2,7 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
import re
import sys
@@ -933,7 +934,7 @@ def get_repository(args, name):
# Figure out where the new package should live
repo_path = args.repo
if repo_path is not None:
repo = spack.repo.from_path(repo_path)
repo = spack.repo.Repo(repo_path)
if spec.namespace and spec.namespace != repo.namespace:
tty.die(
"Can't create package with namespace {0} in repo with "
@@ -941,7 +942,9 @@ def get_repository(args, name):
)
else:
if spec.namespace:
repo = spack.repo.PATH.get_repo(spec.namespace)
repo = spack.repo.PATH.get_repo(spec.namespace, None)
if not repo:
tty.die("Unknown namespace: '{0}'".format(spec.namespace))
else:
repo = spack.repo.PATH.first_repo()


@@ -47,6 +47,16 @@ def inverted_dependencies():
dependents of, e.g., `mpi`, but virtuals are not included as
actual dependents.
"""
dag = {}
for pkg_cls in spack.repo.PATH.all_package_classes():
dag.setdefault(pkg_cls.name, set())
for dep in pkg_cls.dependencies_by_name():
deps = [dep]
# expand virtuals if necessary
if spack.repo.PATH.is_virtual(dep):
deps += [s.name for s in spack.repo.PATH.providers_for(dep)]
dag = collections.defaultdict(set)
for pkg_cls in spack.repo.PATH.all_package_classes():
for _, deps_by_name in pkg_cls.dependencies.items():


@@ -9,8 +9,6 @@
import spack.cmd
import spack.config
import spack.fetch_strategy
import spack.repo
import spack.spec
import spack.util.path
import spack.version
@@ -71,15 +69,13 @@ def _retrieve_develop_source(spec, abspath):
# We construct a package class ourselves, rather than asking for
# Spec.package, since Spec only allows this when it is concrete
package = pkg_cls(spec)
source_stage = package.stage[0]
if isinstance(source_stage.fetcher, spack.fetch_strategy.GitFetchStrategy):
source_stage.fetcher.get_full_repo = True
if isinstance(package.stage[0].fetcher, spack.fetch_strategy.GitFetchStrategy):
package.stage[0].fetcher.get_full_repo = True
# If we retrieved this version before and cached it, we may have
# done so without cloning the full git repo; likewise, any
# mirror might store an instance with truncated history.
source_stage.disable_mirrors()
package.stage[0].disable_mirrors()
source_stage.fetcher.set_package(package)
package.stage.steal_source(abspath)


@@ -3,7 +3,6 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import errno
import glob
import os
@@ -12,13 +11,43 @@
import spack.cmd
import spack.paths
import spack.repo
import spack.util.editor
from spack.spec import Spec
from spack.util.editor import editor
description = "open package files in $EDITOR"
section = "packaging"
level = "short"
def edit_package(name, repo_path, namespace):
"""Opens the requested package file in your favorite $EDITOR.
Args:
name (str): The name of the package
repo_path (str): The path to the repository containing this package
namespace (str): A valid namespace registered with Spack
"""
# Find the location of the package
if repo_path:
repo = spack.repo.Repo(repo_path)
elif namespace:
repo = spack.repo.PATH.get_repo(namespace)
else:
repo = spack.repo.PATH
path = repo.filename_for_package_name(name)
spec = Spec(name)
if os.path.exists(path):
if not os.path.isfile(path):
tty.die("Something is wrong. '{0}' is not a file!".format(path))
if not os.access(path, os.R_OK):
tty.die("Insufficient permissions on '%s'!" % path)
else:
raise spack.repo.UnknownPackageError(spec.name)
editor(path)
def setup_parser(subparser):
excl_args = subparser.add_mutually_exclusive_group()
@@ -69,67 +98,41 @@ def setup_parser(subparser):
excl_args.add_argument("-r", "--repo", default=None, help="path to repo to edit package in")
excl_args.add_argument("-N", "--namespace", default=None, help="namespace of package to edit")
subparser.add_argument("package", nargs="*", default=None, help="package name")
def locate_package(name: str, repo: spack.repo.Repo) -> str:
path = repo.filename_for_package_name(name)
try:
with open(path, "r"):
return path
except OSError as e:
if e.errno == errno.ENOENT:
raise spack.repo.UnknownPackageError(name) from e
tty.die(f"Cannot edit package: {e}")
def locate_file(name: str, path: str) -> str:
# convert command names to python module name
if path == spack.paths.command_path:
name = spack.cmd.python_name(name)
file_path = os.path.join(path, name)
# Try to open direct match.
try:
with open(file_path, "r"):
return file_path
except OSError as e:
if e.errno != errno.ENOENT:
tty.die(f"Cannot edit file: {e}")
pass
# Otherwise try to find a file that starts with the name
candidates = glob.glob(file_path + "*")
exclude_list = [".pyc", "~"] # exclude binaries and backups
files = [f for f in candidates if not any(f.endswith(ext) for ext in exclude_list)]
if len(files) > 1:
tty.die(
f"Multiple files start with `{name}`:\n"
+ "\n".join(f" {os.path.basename(f)}" for f in files)
)
elif not files:
tty.die(f"No file for '{name}' was found in {path}")
return files[0]
subparser.add_argument("package", nargs="?", default=None, help="package name")
def edit(parser, args):
names = args.package
name = args.package
# By default, edit package files
path = spack.paths.packages_path
# If `--command`, `--test`, or `--module` is chosen, edit those instead
if args.path:
paths = [locate_file(name, args.path) for name in names] if names else [args.path]
spack.util.editor.editor(*paths)
elif names:
if args.repo:
repo = spack.repo.from_path(args.repo)
elif args.namespace:
repo = spack.repo.PATH.get_repo(args.namespace)
else:
repo = spack.repo.PATH
paths = [locate_package(name, repo) for name in names]
spack.util.editor.editor(*paths)
path = args.path
if name:
# convert command names to python module name
if path == spack.paths.command_path:
name = spack.cmd.python_name(name)
path = os.path.join(path, name)
if not os.path.exists(path):
files = glob.glob(path + "*")
exclude_list = [".pyc", "~"] # exclude binaries and backups
files = list(filter(lambda x: all(s not in x for s in exclude_list), files))
if len(files) > 1:
m = "Multiple files exist with the name {0}.".format(name)
m += " Please specify a suffix. Files are:\n\n"
for f in files:
m += " " + os.path.basename(f) + "\n"
tty.die(m)
if not files:
tty.die("No file for '{0}' was found in {1}".format(name, path))
path = files[0] # already confirmed only one entry in files
editor(path)
elif name:
edit_package(name, args.repo, args.namespace)
else:
# By default open the directory where packages live
spack.util.editor.editor(spack.paths.packages_path)
editor(path)


@@ -10,13 +10,13 @@
import sys
import tempfile
from pathlib import Path
from typing import List, Optional
from typing import Optional
import llnl.string as string
import llnl.util.filesystem as fs
import llnl.util.tty as tty
from llnl.util.tty.colify import colify
from llnl.util.tty.color import cescape, colorize
from llnl.util.tty.color import colorize
import spack.cmd
import spack.cmd.common
@@ -61,7 +61,14 @@
#
def env_create_setup_parser(subparser):
"""create a new environment"""
subparser.add_argument("env_name", metavar="env", help="name or directory of environment")
subparser.add_argument(
"env_name",
metavar="env",
help=(
"name of managed environment or directory of the anonymous env "
"(when using --dir/-d) to activate"
),
)
subparser.add_argument(
"-d", "--dir", action="store_true", help="create an environment in a specific directory"
)
@@ -87,9 +94,6 @@ def env_create_setup_parser(subparser):
default=None,
help="either a lockfile (must end with '.json' or '.lock') or a manifest file",
)
subparser.add_argument(
"--include-concrete", action="append", help="name of old environment to copy specs from"
)
def env_create(args):
@@ -107,32 +111,19 @@ def env_create(args):
# the environment should not include a view.
with_view = None
include_concrete = None
if hasattr(args, "include_concrete"):
include_concrete = args.include_concrete
env = _env_create(
args.env_name,
init_file=args.envfile,
dir=args.dir or os.path.sep in args.env_name or args.env_name in (".", ".."),
dir=args.dir,
with_view=with_view,
keep_relative=args.keep_relative,
include_concrete=include_concrete,
)
# Generate views, only really useful for environments created from spack.lock files.
env.regenerate_views()
def _env_create(
name_or_path: str,
*,
init_file: Optional[str] = None,
dir: bool = False,
with_view: Optional[str] = None,
keep_relative: bool = False,
include_concrete: Optional[List[str]] = None,
):
def _env_create(name_or_path, *, init_file=None, dir=False, with_view=None, keep_relative=False):
"""Create a new environment, with an optional yaml description.
Arguments:
@@ -144,31 +135,22 @@ def _env_create(
keep_relative (bool): if True, develop paths are copied verbatim into
the new environment file, otherwise they may be made absolute if the
new environment is in a different location
include_concrete (list): list of the included concrete environments
"""
if not dir:
env = ev.create(
name_or_path,
init_file=init_file,
with_view=with_view,
keep_relative=keep_relative,
include_concrete=include_concrete,
name_or_path, init_file=init_file, with_view=with_view, keep_relative=keep_relative
)
tty.msg(
colorize(
f"Created environment @c{{{cescape(name_or_path)}}} in: @c{{{cescape(env.path)}}}"
)
)
else:
env = ev.create_in_dir(
name_or_path,
init_file=init_file,
with_view=with_view,
keep_relative=keep_relative,
include_concrete=include_concrete,
)
tty.msg(colorize(f"Created independent environment in: @c{{{cescape(env.path)}}}"))
tty.msg(f"Activate with: {colorize(f'@c{{spack env activate {cescape(name_or_path)}}}')}")
tty.msg("Created environment '%s' in %s" % (name_or_path, env.path))
tty.msg("You can activate this environment with:")
tty.msg(" spack env activate %s" % (name_or_path))
return env
env = ev.create_in_dir(
name_or_path, init_file=init_file, with_view=with_view, keep_relative=keep_relative
)
tty.msg("Created environment in %s" % env.path)
tty.msg("You can activate this environment with:")
tty.msg(" spack env activate %s" % env.path)
return env
@@ -454,12 +436,6 @@ def env_remove_setup_parser(subparser):
"""remove an existing environment"""
subparser.add_argument("rm_env", metavar="env", nargs="+", help="environment(s) to remove")
arguments.add_common_arguments(subparser, ["yes_to_all"])
subparser.add_argument(
"-f",
"--force",
action="store_true",
help="remove the environment even if it is included in another environment",
)
def env_remove(args):
@@ -469,35 +445,13 @@ def env_remove(args):
and manifests embedded in repositories should be removed manually.
"""
read_envs = []
valid_envs = []
bad_envs = []
invalid_envs = []
for env_name in ev.all_environment_names():
for env_name in args.rm_env:
try:
env = ev.read(env_name)
valid_envs.append(env_name)
if env_name in args.rm_env:
read_envs.append(env)
read_envs.append(env)
except (spack.config.ConfigFormatError, ev.SpackEnvironmentConfigError):
invalid_envs.append(env_name)
if env_name in args.rm_env:
bad_envs.append(env_name)
# Check if env is linked to another before trying to remove
for name in valid_envs:
# don't check if environment is included to itself
if name == env_name:
continue
environ = ev.Environment(ev.root(name))
if ev.root(env_name) in environ.included_concrete_envs:
msg = f'Environment "{env_name}" is being used by environment "{name}"'
if args.force:
tty.warn(msg)
else:
tty.die(msg)
bad_envs.append(env_name)
if not args.yes_to_all:
environments = string.plural(len(args.rm_env), "environment", show_n=False)


@@ -7,7 +7,7 @@
 import os
 import re
 import sys
-from typing import List, Optional, Set
+from typing import List, Optional

 import llnl.util.tty as tty
 import llnl.util.tty.colify as colify
@@ -19,7 +19,6 @@
 import spack.detection
 import spack.error
 import spack.repo
-import spack.spec
 import spack.util.environment
 from spack.cmd.common import arguments
@@ -139,26 +138,14 @@ def external_find(args):
         candidate_packages, path_hints=args.path, max_workers=args.jobs
     )

-    new_specs = spack.detection.update_configuration(
+    new_entries = spack.detection.update_configuration(
         detected_packages, scope=args.scope, buildable=not args.not_buildable
     )

-    # If the user runs `spack external find --not-buildable mpich` we also mark `mpi` non-buildable
-    # to avoid that the concretizer picks a different mpi provider.
-    if new_specs and args.not_buildable:
-        virtuals: Set[str] = {
-            virtual.name
-            for new_spec in new_specs
-            for virtual_specs in spack.repo.PATH.get_pkg_class(new_spec.name).provided.values()
-            for virtual in virtual_specs
-        }
-        new_virtuals = spack.detection.set_virtuals_nonbuildable(virtuals, scope=args.scope)
-        new_specs.extend(spack.spec.Spec(name) for name in new_virtuals)
-
-    if new_specs:
+    if new_entries:
         path = spack.config.CONFIG.get_config_filename(args.scope, "packages")
-        tty.msg(f"The following specs have been detected on this system and added to {path}")
-        spack.cmd.display_specs(new_specs)
+        msg = "The following specs have been detected on this system and added to {0}"
+        tty.msg(msg.format(path))
+        spack.cmd.display_specs(new_entries)
     else:
         tty.msg("No new external packages detected")
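
The nested set comprehension removed above collects the names of every virtual package provided by the newly detected specs, so that `spack external find --not-buildable mpich` also marks `mpi` non-buildable. A self-contained toy with the same comprehension shape (the provided mapping below is invented; in Spack it comes from the package class):

    # Invented data mimicking pkg_class.provided: {when-condition: [virtual names]}
    provided = {
        "mpich": {"mpich@3:": ["mpi"]},
        "openblas": {"openblas": ["blas", "lapack"]},
    }
    new_specs = ["mpich", "openblas"]

    virtuals = {
        virtual
        for name in new_specs
        for virtual_specs in provided[name].values()
        for virtual in virtual_specs
    }
    print(sorted(virtuals))  # ['blas', 'lapack', 'mpi']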

View File

@@ -3,7 +3,6 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
-import copy
 import sys

 import llnl.util.lang
@@ -46,10 +45,6 @@ def setup_parser(subparser):
         help="output specs as machine-readable json records",
     )
-    subparser.add_argument(
-        "-I", "--install-status", action="store_true", help="show install status of packages"
-    )
     subparser.add_argument(
         "-d", "--deps", action="store_true", help="output dependencies along with found specs"
     )
@@ -276,45 +271,25 @@ def root_decorator(spec, string):
         print()

-    if env.included_concrete_envs:
-        tty.msg("Included specs")
-
-        # Root specs cannot be displayed with prefixes, since those are not
-        # set for abstract specs. Same for hashes
-        root_args = copy.copy(args)
-        root_args.paths = False
-
-        # Roots are displayed with variants, etc. so that we can see
-        # specifically what the user asked for.
-        cmd.display_specs(
-            env.included_user_specs,
-            root_args,
-            decorator=lambda s, f: color.colorize("@*{%s}" % f),
-            namespace=True,
-            show_flags=True,
-            show_full_compiler=True,
-            variants=True,
-        )
-
     if args.show_concretized:
         tty.msg("Concretized roots")
         cmd.display_specs(env.specs_by_hash.values(), args, decorator=decorator)
         print()

     # Display a header for the installed packages section IF there are installed
     # packages. If there aren't any, we'll just end up printing "0 installed packages"
     # later.
     if results and not args.only_roots:
         tty.msg("Installed packages")


 def find(parser, args):
-    env = ev.active_environment()
-
+    q_args = query_arguments(args)
+    results = args.specs(**q_args)
+
+    env = ev.active_environment()
     if not env and args.only_roots:
         tty.die("-r / --only-roots requires an active environment")
-    if not env and args.show_concretized:
-        tty.die("-c / --show-concretized requires an active environment")
-
-    if env:
-        if args.constraint:
-            init_specs = spack.cmd.parse_specs(args.constraint)
-            results = env.all_matching_specs(*init_specs)
-        else:
-            results = env.all_specs()
-    else:
-        q_args = query_arguments(args)
-        results = args.specs(**q_args)

     decorator = make_env_decorator(env) if env else lambda s, f: f
@@ -335,11 +310,6 @@ def find(parser, args):
     if args.loaded:
         results = spack.cmd.filter_loaded_specs(results)

-    if args.install_status or args.show_concretized:
-        status_fn = spack.spec.Spec.install_status
-    else:
-        status_fn = None
-
     # Display the result
     if args.json:
         cmd.display_specs_as_json(results, deps=args.deps)
@@ -348,34 +318,12 @@ def find(parser, args):
         if env:
             display_env(env, args, decorator, results)

+    count_suffix = " (not shown)"
     if not args.only_roots:
-        display_results = results
-        if not args.show_concretized:
-            display_results = list(x for x in results if x.installed)
-        cmd.display_specs(
-            display_results, args, decorator=decorator, all_headers=True, status_fn=status_fn
-        )
+        cmd.display_specs(results, args, decorator=decorator, all_headers=True)
+        count_suffix = ""

     # print number of installed packages last (as the list may be long)
     if sys.stdout.isatty() and args.groups:
-        installed_suffix = ""
-        concretized_suffix = " to be installed"
-
-        if args.only_roots:
-            installed_suffix += " (not shown)"
-            concretized_suffix += " (not shown)"
-        else:
-            if env and not args.show_concretized:
-                concretized_suffix += " (show with `spack find -c`)"
-
         pkg_type = "loaded" if args.loaded else "installed"
-        spack.cmd.print_how_many_pkgs(
-            list(x for x in results if x.installed), pkg_type, suffix=installed_suffix
-        )
-
-        if env:
-            spack.cmd.print_how_many_pkgs(
-                list(x for x in results if not x.installed),
-                "concretized",
-                suffix=concretized_suffix,
-            )
+        spack.cmd.print_how_many_pkgs(results, pkg_type, suffix=count_suffix)
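
On the develop side of the last hunk, results are partitioned into installed specs and concretized-but-not-yet-installed specs before the counts are printed. A toy version of that partition, using SimpleNamespace stand-ins for concrete specs:

    from types import SimpleNamespace

    # Stand-ins for concrete specs; only the .installed attribute matters here.
    results = [
        SimpleNamespace(name="zlib", installed=True),
        SimpleNamespace(name="hdf5", installed=False),
    ]

    installed = [x for x in results if x.installed]
    concretized = [x for x in results if not x.installed]
    print(f"{len(installed)} installed; {len(concretized)} to be installed")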

View File

@@ -56,6 +56,7 @@ def roots_from_environments(args, active_env):

     # -e says "also preserve things needed by this particular env"
     for env_name_or_dir in args.except_environment:
+        print("HMM", env_name_or_dir)
         if ev.exists(env_name_or_dir):
             env = ev.read(env_name_or_dir)
         elif ev.is_env_dir(env_name_or_dir):
View File

@@ -9,7 +9,7 @@
 import spack.environment as ev
 import spack.store
 from spack.cmd.common import arguments
-from spack.graph import DAGWithDependencyTypes, SimpleDAG, graph_ascii, graph_dot, static_graph_dot
+from spack.graph import DotGraph, MermaidGraph, DAGWithDependencyTypes, SimpleDAG, graph_ascii, graph_dot, static_graph_dot

 description = "generate graphs of package dependency relationships"
 section = "basic"
@@ -33,6 +33,9 @@ def setup_parser(subparser):
     method.add_argument(
         "-d", "--dot", action="store_true", help="generate graph in dot format and print to stdout"
     )
+    method.add_argument(
+        "-m", "--mermaid", action="store_true", help="generate graph in mermaid format and print to stdout"
+    )

     subparser.add_argument(
         "-s",
@@ -85,10 +88,14 @@ def graph(parser, args):
         static_graph_dot(specs, depflag=args.deptype)
         return

-    if args.dot:
-        builder = SimpleDAG()
+    if args.dot or args.mermaid:
+        if args.dot:
+            graph = DotGraph()
+        if args.mermaid:
+            graph = MermaidGraph()
+        builder = SimpleDAG(graph=graph)
         if args.color:
-            builder = DAGWithDependencyTypes()
+            builder = DAGWithDependencyTypes(graph=graph)
         graph_dot(specs, builder=builder, depflag=args.deptype)
         return
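
The MermaidGraph and DotGraph classes referenced here live in spack/graph.py, which is not part of this excerpt. As a rough sketch of what a Mermaid emitter paired with a DOT-style writer could look like (the header/node/edge/footer interface is an assumption for illustration, not the actual spack.graph API):

    # Hypothetical sketch of a Mermaid emitter; the method names are assumptions.
    class MermaidGraph:
        def header(self) -> str:
            # Mermaid flowcharts open with a direction declaration, not "digraph {"
            return "graph TD\n"

        def node(self, node_id: str, label: str) -> str:
            # A node is an identifier with a bracketed label
            return f'  {node_id}["{label}"]\n'

        def edge(self, parent_id: str, child_id: str) -> str:
            # Dependency edge from parent to child
            return f"  {parent_id} --> {child_id}\n"

        def footer(self) -> str:
            # Unlike DOT, Mermaid needs no closing brace
            return ""

    # Usage sketch: a two-node dependency graph
    g = MermaidGraph()
    out = g.header()
    out += g.node("hdf5", "hdf5@1.14")
    out += g.node("zlib", "zlib@1.3")
    out += g.edge("hdf5", "zlib")
    out += g.footer()
    print(out, end="")

Pasting the printed block into any Mermaid renderer draws the same DAG that the --dot path describes in DOT syntax.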

View File

@@ -50,7 +50,7 @@
     @B{++}, @r{--}, @r{~~}, @B{==}  propagate variants to package dependencies

 architecture variants:
-    @m{platform=platform}          linux, darwin, freebsd, windows
+    @m{platform=platform}          linux, darwin, cray, etc.
     @m{os=operating_system}        specific <operating_system>
     @m{target=target}              specific <target> processor
     @m{arch=platform-os-target}    shortcut for all three above

View File

@@ -10,7 +10,6 @@
 from typing import List

 import llnl.util.filesystem as fs
-from llnl.string import plural
 from llnl.util import lang, tty

 import spack.build_environment
@@ -62,6 +61,7 @@ def install_kwargs_from_args(args):
         "dependencies_use_cache": cache_opt(args.use_cache, dep_use_bc),
         "dependencies_cache_only": cache_opt(args.cache_only, dep_use_bc),
         "include_build_deps": args.include_build_deps,
+        "explicit": True,  # Use true as a default for install command
         "stop_at": args.until,
         "unsigned": args.unsigned,
         "install_deps": ("dependencies" in args.things_to_install),
@@ -376,9 +376,7 @@ def _maybe_add_and_concretize(args, env, specs):
     # `spack concretize`
     tests = compute_tests_install_kwargs(env.user_specs, args.test)
     concretized_specs = env.concretize(tests=tests)
-    if concretized_specs:
-        tty.msg(f"Concretized {plural(len(concretized_specs), 'spec')}")
-        ev.display_specs([concrete for _, concrete in concretized_specs])
+    ev.display_specs(concretized_specs)

     # save view regeneration for later, so that we only do it
     # once, as it can be slow.
@@ -475,7 +473,6 @@ def install_without_active_env(args, install_kwargs, reporter_factory):
         require_user_confirmation_for_overwrite(concrete_specs, args)
         install_kwargs["overwrite"] = [spec.dag_hash() for spec in concrete_specs]

-    installs = [s.package for s in concrete_specs]
-    install_kwargs["explicit"] = [s.dag_hash() for s in concrete_specs]
-    builder = PackageInstaller(installs, install_kwargs)
+    installs = [(s.package, install_kwargs) for s in concrete_specs]
+    builder = PackageInstaller(installs)
     builder.install()
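
The last hunk reflects a constructor change: the branch-side PackageInstaller takes a list of (package, kwargs) tuples, while the develop side passes the package list plus one shared kwargs dict whose "explicit" entry lists DAG hashes. Toy stand-ins contrasting the two call shapes (these classes and the hash strings are illustrative, not the real spack.installer API):

    # Illustrative stand-ins for the two constructor shapes; not real Spack classes.
    class PackageInstallerTuples:
        """Branch-side shape: one (package, per-package kwargs) pair per install."""
        def __init__(self, installs):
            self.installs = installs

    class PackageInstallerShared:
        """Develop-side shape: all packages share one kwargs dict."""
        def __init__(self, packages, install_kwargs):
            self.packages = packages
            self.install_kwargs = install_kwargs  # "explicit" holds dag hashes

    pkgs = ["zlib", "hdf5"]
    old_style = PackageInstallerTuples([(p, {"explicit": True}) for p in pkgs])
    new_style = PackageInstallerShared(pkgs, {"explicit": ["aaaa1111", "bbbb2222"]})
    print(len(old_style.installs), len(new_style.packages))  # 2 2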

View File

@@ -169,9 +169,7 @@ def pkg_hash(args):

 def get_grep(required=False):
     """Get a grep command to use with ``spack pkg grep``."""
-    grep = exe.which(os.environ.get("SPACK_GREP") or "grep", required=required)
-    grep.ignore_quotes = True  # allow `spack pkg grep '"quoted string"'` without warning
-    return grep
+    return exe.which(os.environ.get("SPACK_GREP") or "grep", required=required)


 def pkg_grep(args, unknown_args):

View File

@@ -91,7 +91,7 @@ def repo_add(args):
         tty.die("Not a Spack repository: %s" % path)

     # Make sure it's actually a spack repository by constructing it.
-    repo = spack.repo.from_path(canon_path)
+    repo = spack.repo.Repo(canon_path)

     # If that succeeds, finally add it to the configuration.
     repos = spack.config.get("repos", scope=args.scope)
@@ -124,7 +124,7 @@ def repo_remove(args):
     # If it is a namespace, remove corresponding repo
     for path in repos:
         try:
-            repo = spack.repo.from_path(path)
+            repo = spack.repo.Repo(path)
             if repo.namespace == namespace_or_path:
                 repos.remove(path)
                 spack.config.set("repos", repos, args.scope)
@@ -142,7 +142,7 @@ def repo_list(args):
     repos = []
     for r in roots:
         try:
-            repos.append(spack.repo.from_path(r))
+            repos.append(spack.repo.Repo(r))
         except spack.repo.RepoError:
             continue
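
All three hunks swap the module-level factory spack.repo.from_path(...) (develop) for direct construction with spack.repo.Repo(...) (branch). A minimal sketch of the factory idea, which keeps normalization logic out of every call site (the body below is invented for illustration; it is not the real from_path):

    import os

    class Repo:
        def __init__(self, root):
            self.root = root  # canonical repository root

    def from_path(path):
        # Normalize once here instead of at every call site.
        canon = os.path.abspath(os.path.expanduser(path))
        return Repo(canon)

    print(from_path("~/my-spack-repo").root)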

View File

@@ -114,16 +114,15 @@ def _process_result(result, show, required_format, kwargs):
     # dump the solutions as concretized specs
     if "solutions" in show:
-        if required_format:
-            for spec in result.specs:
-                # With -y, just print YAML to output.
-                if required_format == "yaml":
-                    # use write because to_yaml already has a newline.
-                    sys.stdout.write(spec.to_yaml(hash=ht.dag_hash))
-                elif required_format == "json":
-                    sys.stdout.write(spec.to_json(hash=ht.dag_hash))
-        else:
-            sys.stdout.write(spack.spec.tree(result.specs, color=sys.stdout.isatty(), **kwargs))
+        for spec in result.specs:
+            # With -y, just print YAML to output.
+            if required_format == "yaml":
+                # use write because to_yaml already has a newline.
+                sys.stdout.write(spec.to_yaml(hash=ht.dag_hash))
+            elif required_format == "json":
+                sys.stdout.write(spec.to_json(hash=ht.dag_hash))
+            else:
+                sys.stdout.write(spec.tree(color=sys.stdout.isatty(), **kwargs))
         print()

     if result.unsolved_specs and "solutions" in show:
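
The only change in this hunk is where the else binds: develop pairs it with `if required_format:` and prints one combined tree for all solved specs, while the branch keeps it inside the loop and prints a tree per spec. A toy illustration of that control-flow difference (plain print stands in for the tree/YAML writers):

    specs = ["zlib", "hdf5"]
    required_format = None  # i.e., neither --yaml nor --json was given

    # develop side: the else pairs with `if required_format`, one combined tree
    if required_format:
        for s in specs:
            print(f"{s} rendered as {required_format}")
    else:
        print("one combined tree for", specs)

    # branch side: the else lives inside the loop, one tree per spec
    for s in specs:
        if required_format:
            print(f"{s} rendered as {required_format}")
        else:
            print(f"tree for {s}")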

View File

@@ -105,19 +105,11 @@ def spec(parser, args):
         if env:
             env.concretize()
             specs = env.concretized_specs()
-
-            # environments are printed together in a combined tree() invocation,
-            # except when using --yaml or --json, which we print spec by spec below.
-            if not args.format:
-                tree_kwargs["key"] = spack.traverse.by_dag_hash
-                tree_kwargs["hashes"] = args.long or args.very_long
-                print(spack.spec.tree([concrete for _, concrete in specs], **tree_kwargs))
-                return
         else:
             tty.die("spack spec requires at least one spec or an active environment")

     for input, output in specs:
-        # With --yaml or --json, just print the raw specs to output
+        # With -y, just print YAML to output.
         if args.format:
             if args.format == "yaml":
                 # use write because to_yaml already has a newline.
View File

@@ -23,7 +23,7 @@
 # tutorial configuration parameters
-tutorial_branch = "releases/v0.22"
+tutorial_branch = "releases/v0.21"
 tutorial_mirror = "file:///mirror"
 tutorial_key = os.path.join(spack.paths.share_path, "keys", "tutorial.pub")

Some files were not shown because too many files have changed in this diff.