Compare commits: develop-20...feat/merma (1 commit: 23197b78f9)
.github/dependabot.yml (vendored, 3 changes)

```diff
@@ -12,7 +12,6 @@ updates:
       interval: "daily"
   # Requirements to run style checks
   - package-ecosystem: "pip"
-    directories:
-      - "/.github/workflows/requirements/*"
+    directory: "/.github/workflows/style"
     schedule:
       interval: "daily"
```
.github/workflows/audit.yaml (vendored, 6 changes)

```diff
@@ -28,8 +28,8 @@ jobs:
       run:
        shell: ${{ matrix.system.shell }}
    steps:
-    - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
-    - uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
+    - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
+    - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
      with:
        python-version: ${{inputs.python_version}}
    - name: Install Python packages
@@ -61,7 +61,7 @@ jobs:
        ./share/spack/qa/validate_last_exit.ps1
        spack -d audit externals
        ./share/spack/qa/validate_last_exit.ps1
-    - uses: codecov/codecov-action@e28ff129e5465c2c0dcc6f003fc735cb6ae0c673
+    - uses: codecov/codecov-action@84508663e988701840491b86de86b666e8a86bed
      if: ${{ inputs.with_coverage == 'true' }}
      with:
        flags: unittests,audits
```
.github/workflows/bin/bootstrap-test.sh (at .github/workflows/bootstrap-test.sh on the head branch)

```diff
@@ -1,8 +1,7 @@
 #!/bin/bash
-set -e
+set -ex
 source share/spack/setup-env.sh
-$PYTHON bin/spack bootstrap disable github-actions-v0.4
 $PYTHON bin/spack bootstrap disable spack-install
-$PYTHON bin/spack $SPACK_FLAGS solve zlib
+$PYTHON bin/spack -d solve zlib
 tree $BOOTSTRAP/store
 exit 0
```
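For orientation: the clingo-binaries jobs in bootstrap.yml (below) run this script once per tool-cache Python, exporting `PYTHON` first. A minimal manual invocation, pieced together from the loop in those jobs (the `python3` choice here is a stand-in for the tool-cache interpreter CI actually selects), would be:

```sh
# From the bootstrap.yml loop below: export the interpreter under test,
# then run the script inside a throwaway Spack configuration.
export PYTHON="$(command -v python3)"   # stand-in; CI iterates $RUNNER_TOOL_CACHE pythons
./bin/spack-tmpconfig -b ./.github/workflows/bootstrap-test.sh
```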
.github/workflows/bootstrap.yml (vendored, 391 changes)
```diff
@@ -13,22 +13,118 @@ concurrency:
   cancel-in-progress: true

 jobs:
-  distros-clingo-sources:
+  fedora-clingo-sources:
     runs-on: ubuntu-latest
-    container: ${{ matrix.image }}
-    strategy:
-      matrix:
-        image: ["fedora:latest", "opensuse/leap:latest"]
+    container: "fedora:latest"
     steps:
-      - name: Setup Fedora
-        if: ${{ matrix.image == 'fedora:latest' }}
+      - name: Install dependencies
         run: |
           dnf install -y \
-              bzip2 curl file gcc-c++ gcc gcc-gfortran git gzip \
+              bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
               make patch unzip which xz python3 python3-devel tree \
               cmake bison bison-devel libstdc++-static
-      - name: Setup OpenSUSE
-        if: ${{ matrix.image == 'opensuse/leap:latest' }}
+      - name: Checkout
+        uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
+        with:
+          fetch-depth: 0
+      - name: Setup non-root user
+        run: |
+          # See [1] below
+          git config --global --add safe.directory /__w/spack/spack
+          useradd spack-test && mkdir -p ~spack-test
+          chown -R spack-test . ~spack-test
+      - name: Setup repo
+        shell: runuser -u spack-test -- bash {0}
+        run: |
+          git --version
+          . .github/workflows/setup_git.sh
+      - name: Bootstrap clingo
+        shell: runuser -u spack-test -- bash {0}
+        run: |
+          source share/spack/setup-env.sh
+          spack bootstrap disable github-actions-v0.5
+          spack bootstrap disable github-actions-v0.4
+          spack external find cmake bison
+          spack -d solve zlib
+          tree ~/.spack/bootstrap/store/
+
+  ubuntu-clingo-sources:
+    runs-on: ubuntu-latest
+    container: "ubuntu:latest"
+    steps:
+      - name: Install dependencies
+        env:
+          DEBIAN_FRONTEND: noninteractive
+        run: |
+          apt-get update -y && apt-get upgrade -y
+          apt-get install -y \
+              bzip2 curl file g++ gcc gfortran git gnupg2 gzip \
+              make patch unzip xz-utils python3 python3-dev tree \
+              cmake bison
+      - name: Checkout
+        uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
+        with:
+          fetch-depth: 0
+      - name: Setup non-root user
+        run: |
+          # See [1] below
+          git config --global --add safe.directory /__w/spack/spack
+          useradd spack-test && mkdir -p ~spack-test
+          chown -R spack-test . ~spack-test
+      - name: Setup repo
+        shell: runuser -u spack-test -- bash {0}
+        run: |
+          git --version
+          . .github/workflows/setup_git.sh
+      - name: Bootstrap clingo
+        shell: runuser -u spack-test -- bash {0}
+        run: |
+          source share/spack/setup-env.sh
+          spack bootstrap disable github-actions-v0.5
+          spack bootstrap disable github-actions-v0.4
+          spack external find cmake bison
+          spack -d solve zlib
+          tree ~/.spack/bootstrap/store/
+
+  ubuntu-clingo-binaries-and-patchelf:
+    runs-on: ubuntu-latest
+    container: "ubuntu:latest"
+    steps:
+      - name: Install dependencies
+        env:
+          DEBIAN_FRONTEND: noninteractive
+        run: |
+          apt-get update -y && apt-get upgrade -y
+          apt-get install -y \
+              bzip2 curl file g++ gcc gfortran git gnupg2 gzip \
+              make patch unzip xz-utils python3 python3-dev tree
+      - name: Checkout
+        uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
+        with:
+          fetch-depth: 0
+      - name: Setup non-root user
+        run: |
+          # See [1] below
+          git config --global --add safe.directory /__w/spack/spack
+          useradd spack-test && mkdir -p ~spack-test
+          chown -R spack-test . ~spack-test
+      - name: Setup repo
+        shell: runuser -u spack-test -- bash {0}
+        run: |
+          git --version
+          . .github/workflows/setup_git.sh
+      - name: Bootstrap clingo
+        shell: runuser -u spack-test -- bash {0}
+        run: |
+          source share/spack/setup-env.sh
+          spack -d solve zlib
+          tree ~/.spack/bootstrap/store/
+
+  opensuse-clingo-sources:
+    runs-on: ubuntu-latest
+    container: "opensuse/leap:latest"
+    steps:
+      - name: Install dependencies
         run: |
           # Harden CI by applying the workaround described here: https://www.suse.com/support/kb/doc/?id=000019505
           zypper update -y || zypper update -y
```
```diff
@@ -37,9 +133,15 @@ jobs:
               make patch unzip which xz python3 python3-devel tree \
               cmake bison
       - name: Checkout
-        uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
+        uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
         with:
           fetch-depth: 0
+      - name: Setup repo
+        run: |
+          # See [1] below
+          git config --global --add safe.directory /__w/spack/spack
+          git --version
+          . .github/workflows/setup_git.sh
       - name: Bootstrap clingo
         run: |
           source share/spack/setup-env.sh
```
```diff
@@ -49,56 +151,194 @@ jobs:
           spack -d solve zlib
           tree ~/.spack/bootstrap/store/

-  clingo-sources:
-    runs-on: ${{ matrix.runner }}
-    strategy:
-      matrix:
-        runner: ['macos-13', 'macos-14', "ubuntu-latest", "windows-latest"]
+  macos-clingo-sources:
+    runs-on: macos-latest
     steps:
-      - name: Setup macOS
-        if: ${{ matrix.runner != 'ubuntu-latest' && matrix.runner != 'windows-latest' }}
+      - name: Install dependencies
         run: |
-          brew install cmake bison tree
+          brew install cmake bison@2.7 tree
       - name: Checkout
-        uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
-        with:
-          fetch-depth: 0
-      - uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
+        uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
+      - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
         with:
           python-version: "3.12"
       - name: Bootstrap clingo
-        env:
-          SETUP_SCRIPT_EXT: ${{ matrix.runner == 'windows-latest' && 'ps1' || 'sh' }}
-          SETUP_SCRIPT_SOURCE: ${{ matrix.runner == 'windows-latest' && './' || 'source ' }}
-          USER_SCOPE_PARENT_DIR: ${{ matrix.runner == 'windows-latest' && '$env:userprofile' || '$HOME' }}
-          VALIDATE_LAST_EXIT: ${{ matrix.runner == 'windows-latest' && './share/spack/qa/validate_last_exit.ps1' || '' }}
         run: |
-          ${{ env.SETUP_SCRIPT_SOURCE }}share/spack/setup-env.${{ env.SETUP_SCRIPT_EXT }}
+          source share/spack/setup-env.sh
+          export PATH=/usr/local/opt/bison@2.7/bin:$PATH
           spack bootstrap disable github-actions-v0.5
           spack bootstrap disable github-actions-v0.4
           spack external find --not-buildable cmake bison
           spack -d solve zlib
-          ${{ env.VALIDATE_LAST_EXIT }}
-          tree ${{ env.USER_SCOPE_PARENT_DIR }}/.spack/bootstrap/store/
+          tree ~/.spack/bootstrap/store/

-  gnupg-sources:
-    runs-on: ${{ matrix.runner }}
+  macos-clingo-binaries:
+    runs-on: ${{ matrix.macos-version }}
     strategy:
       matrix:
-        runner: [ 'macos-13', 'macos-14', "ubuntu-latest" ]
+        macos-version: ['macos-11', 'macos-12']
     steps:
-      - name: Setup macOS
-        if: ${{ matrix.runner != 'ubuntu-latest' }}
+      - name: Install dependencies
         run: |
-          brew install tree gawk
-          sudo rm -rf $(command -v gpg gpg2)
-      - name: Setup Ubuntu
-        if: ${{ matrix.runner == 'ubuntu-latest' }}
-        run: sudo rm -rf $(command -v gpg gpg2 patchelf)
+          brew install tree
       - name: Checkout
-        uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
+        uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
+      - name: Bootstrap clingo
+        run: |
+          set -ex
+          for ver in '3.7' '3.8' '3.9' '3.10' '3.11' ; do
+            not_found=1
+            ver_dir="$(find $RUNNER_TOOL_CACHE/Python -wholename "*/${ver}.*/*/bin" | grep . || true)"
+            echo "Testing $ver_dir"
+            if [[ -d "$ver_dir" ]] ; then
+              if $ver_dir/python --version ; then
+                export PYTHON="$ver_dir/python"
+                not_found=0
+                old_path="$PATH"
+                export PATH="$ver_dir:$PATH"
+                ./bin/spack-tmpconfig -b ./.github/workflows/bootstrap-test.sh
+                export PATH="$old_path"
+              fi
+            fi
+            # NOTE: test all pythons that exist, not all do on 12
+          done
+
+  ubuntu-clingo-binaries:
+    runs-on: ubuntu-20.04
+    steps:
+      - name: Checkout
+        uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
         with:
           fetch-depth: 0
+      - name: Setup repo
+        run: |
+          git --version
+          . .github/workflows/setup_git.sh
+      - name: Bootstrap clingo
+        run: |
+          set -ex
+          for ver in '3.7' '3.8' '3.9' '3.10' '3.11' ; do
+            not_found=1
+            ver_dir="$(find $RUNNER_TOOL_CACHE/Python -wholename "*/${ver}.*/*/bin" | grep . || true)"
+            echo "Testing $ver_dir"
+            if [[ -d "$ver_dir" ]] ; then
+              if $ver_dir/python --version ; then
+                export PYTHON="$ver_dir/python"
+                not_found=0
+                old_path="$PATH"
+                export PATH="$ver_dir:$PATH"
+                ./bin/spack-tmpconfig -b ./.github/workflows/bootstrap-test.sh
+                export PATH="$old_path"
+              fi
+            fi
+            if (($not_found)) ; then
+              echo Required python version $ver not found in runner!
+              exit 1
+            fi
+          done
+
+  ubuntu-gnupg-binaries:
+    runs-on: ubuntu-latest
+    container: "ubuntu:latest"
+    steps:
+      - name: Install dependencies
+        env:
+          DEBIAN_FRONTEND: noninteractive
+        run: |
+          apt-get update -y && apt-get upgrade -y
+          apt-get install -y \
+              bzip2 curl file g++ gcc patchelf gfortran git gzip \
+              make patch unzip xz-utils python3 python3-dev tree
+      - name: Checkout
+        uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
+        with:
+          fetch-depth: 0
+      - name: Setup non-root user
+        run: |
+          # See [1] below
+          git config --global --add safe.directory /__w/spack/spack
+          useradd spack-test && mkdir -p ~spack-test
+          chown -R spack-test . ~spack-test
+      - name: Setup repo
+        shell: runuser -u spack-test -- bash {0}
+        run: |
+          git --version
+          . .github/workflows/setup_git.sh
+      - name: Bootstrap GnuPG
+        shell: runuser -u spack-test -- bash {0}
+        run: |
+          source share/spack/setup-env.sh
+          spack bootstrap disable github-actions-v0.4
+          spack bootstrap disable spack-install
+          spack -d gpg list
+          tree ~/.spack/bootstrap/store/
+
+  ubuntu-gnupg-sources:
+    runs-on: ubuntu-latest
+    container: "ubuntu:latest"
+    steps:
+      - name: Install dependencies
+        env:
+          DEBIAN_FRONTEND: noninteractive
+        run: |
+          apt-get update -y && apt-get upgrade -y
+          apt-get install -y \
+              bzip2 curl file g++ gcc patchelf gfortran git gzip \
+              make patch unzip xz-utils python3 python3-dev tree \
+              gawk
+      - name: Checkout
+        uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
+        with:
+          fetch-depth: 0
+      - name: Setup non-root user
+        run: |
+          # See [1] below
+          git config --global --add safe.directory /__w/spack/spack
+          useradd spack-test && mkdir -p ~spack-test
+          chown -R spack-test . ~spack-test
+      - name: Setup repo
+        shell: runuser -u spack-test -- bash {0}
+        run: |
+          git --version
+          . .github/workflows/setup_git.sh
+      - name: Bootstrap GnuPG
+        shell: runuser -u spack-test -- bash {0}
+        run: |
+          source share/spack/setup-env.sh
+          spack solve zlib
+          spack bootstrap disable github-actions-v0.5
+          spack bootstrap disable github-actions-v0.4
+          spack -d gpg list
+          tree ~/.spack/bootstrap/store/
+
+  macos-gnupg-binaries:
+    runs-on: macos-latest
+    steps:
+      - name: Install dependencies
+        run: |
+          brew install tree
+          # Remove GnuPG since we want to bootstrap it
+          sudo rm -rf /usr/local/bin/gpg
+      - name: Checkout
+        uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
+      - name: Bootstrap GnuPG
+        run: |
+          source share/spack/setup-env.sh
+          spack bootstrap disable github-actions-v0.4
+          spack bootstrap disable spack-install
+          spack -d gpg list
+          tree ~/.spack/bootstrap/store/
+
+  macos-gnupg-sources:
+    runs-on: macos-latest
+    steps:
+      - name: Install dependencies
+        run: |
+          brew install gawk tree
+          # Remove GnuPG since we want to bootstrap it
+          sudo rm -rf /usr/local/bin/gpg
+      - name: Checkout
+        uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
       - name: Bootstrap GnuPG
         run: |
           source share/spack/setup-env.sh
```
```diff
@@ -108,63 +348,10 @@ jobs:
           spack -d gpg list
           tree ~/.spack/bootstrap/store/

-  from-binaries:
-    runs-on: ${{ matrix.runner }}
-    strategy:
-      matrix:
-        runner: ['macos-13', 'macos-14', "ubuntu-latest"]
-    steps:
-      - name: Setup macOS
-        if: ${{ matrix.runner != 'ubuntu-latest' }}
-        run: |
-          brew install tree
-          # Remove GnuPG since we want to bootstrap it
-          sudo rm -rf /usr/local/bin/gpg
-      - name: Setup Ubuntu
-        if: ${{ matrix.runner == 'ubuntu-latest' }}
-        run: |
-          sudo rm -rf $(which gpg) $(which gpg2) $(which patchelf)
-      - name: Checkout
-        uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
-        with:
-          fetch-depth: 0
-      - uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
-        with:
-          python-version: |
-            3.8
-            3.9
-            3.10
-            3.11
-            3.12
-      - name: Set bootstrap sources
-        run: |
-          source share/spack/setup-env.sh
-          spack bootstrap disable github-actions-v0.4
-          spack bootstrap disable spack-install
-      - name: Bootstrap clingo
-        run: |
-          set -e
-          for ver in '3.8' '3.9' '3.10' '3.11' '3.12' ; do
-            not_found=1
-            ver_dir="$(find $RUNNER_TOOL_CACHE/Python -wholename "*/${ver}.*/*/bin" | grep . || true)"
-            if [[ -d "$ver_dir" ]] ; then
-              echo "Testing $ver_dir"
-              if $ver_dir/python --version ; then
-                export PYTHON="$ver_dir/python"
-                not_found=0
-                old_path="$PATH"
-                export PATH="$ver_dir:$PATH"
-                ./bin/spack-tmpconfig -b ./.github/workflows/bin/bootstrap-test.sh
-                export PATH="$old_path"
-              fi
-            fi
-            if (($not_found)) ; then
-              echo Required python version $ver not found in runner!
-              exit 1
-            fi
-          done
-      - name: Bootstrap GnuPG
-        run: |
-          source share/spack/setup-env.sh
-          spack -d gpg list
-          tree ~/.spack/bootstrap/store/
+
+  # [1] Distros that have patched git to resolve CVE-2022-24765 (e.g. Ubuntu patching v2.25.1)
+  # introduce breaking behaviorso we have to set `safe.directory` in gitconfig ourselves.
+  # See:
+  # - https://github.blog/2022-04-12-git-security-vulnerability-announced/
+  # - https://github.com/actions/checkout/issues/760
+  # - http://changelogs.ubuntu.com/changelogs/pool/main/g/git/git_2.25.1-1ubuntu3.3/changelog
```
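A pattern worth noting in the container jobs above: the containers run as root, so each job creates a `spack-test` user, marks the checkout as a `safe.directory` (footnote [1]), and re-runs the Git and Spack steps through `runuser`. Condensed into a standalone sketch (assuming a container shell at the repository root, checked out at `/__w/spack/spack` as on GitHub runners):

```sh
# Condensed from the "Setup non-root user" / "Setup repo" steps above.
git config --global --add safe.directory /__w/spack/spack  # see footnote [1]
useradd spack-test && mkdir -p ~spack-test
chown -R spack-test . ~spack-test
# subsequent steps run as the unprivileged user:
runuser -u spack-test -- bash -c '
  source share/spack/setup-env.sh
  spack -d solve zlib
'
```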
.github/workflows/build-containers.yml (vendored, 23 changes)

```diff
@@ -40,7 +40,8 @@ jobs:
           # 1: Platforms to build for
           # 2: Base image (e.g. ubuntu:22.04)
           dockerfile: [[amazon-linux, 'linux/amd64,linux/arm64', 'amazonlinux:2'],
-                       [centos-stream9, 'linux/amd64,linux/arm64,linux/ppc64le', 'centos:stream9'],
+                       [centos7, 'linux/amd64,linux/arm64,linux/ppc64le', 'centos:7'],
+                       [centos-stream, 'linux/amd64,linux/arm64,linux/ppc64le', 'centos:stream'],
                        [leap15, 'linux/amd64,linux/arm64,linux/ppc64le', 'opensuse/leap:15'],
                        [ubuntu-focal, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:20.04'],
                        [ubuntu-jammy, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:22.04'],
@@ -49,13 +50,15 @@ jobs:
                        [almalinux9, 'linux/amd64,linux/arm64,linux/ppc64le', 'almalinux:9'],
                        [rockylinux8, 'linux/amd64,linux/arm64', 'rockylinux:8'],
                        [rockylinux9, 'linux/amd64,linux/arm64', 'rockylinux:9'],
+                       [fedora37, 'linux/amd64,linux/arm64,linux/ppc64le', 'fedora:37'],
+                       [fedora38, 'linux/amd64,linux/arm64,linux/ppc64le', 'fedora:38'],
                        [fedora39, 'linux/amd64,linux/arm64,linux/ppc64le', 'fedora:39'],
                        [fedora40, 'linux/amd64,linux/arm64,linux/ppc64le', 'fedora:40']]
     name: Build ${{ matrix.dockerfile[0] }}
     if: github.repository == 'spack/spack'
     steps:
       - name: Checkout
-        uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
+        uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633

       - uses: docker/metadata-action@8e5442c4ef9f78752691e2d8f8d19755c6f78e81
         id: docker_meta
@@ -76,7 +79,7 @@ jobs:
         env:
           SPACK_YAML_OS: "${{ matrix.dockerfile[2] }}"
         run: |
-          .github/workflows/bin/generate_spack_yaml_containerize.sh
+          .github/workflows/generate_spack_yaml_containerize.sh
           . share/spack/setup-env.sh
           mkdir -p dockerfiles/${{ matrix.dockerfile[0] }}
           spack containerize --last-stage=bootstrap | tee dockerfiles/${{ matrix.dockerfile[0] }}/Dockerfile
@@ -87,19 +90,19 @@ jobs:
           fi

       - name: Upload Dockerfile
-        uses: actions/upload-artifact@0b2256b8c012f0828dc542b3febcab082c67f72b
+        uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808
         with:
           name: dockerfiles_${{ matrix.dockerfile[0] }}
           path: dockerfiles

       - name: Set up QEMU
-        uses: docker/setup-qemu-action@5927c834f5b4fdf503fca6f4c7eccda82949e1ee
+        uses: docker/setup-qemu-action@68827325e0b33c7199eb31dd4e31fbe9023e06e3

       - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@4fd812986e6c8c2a69e18311145f9371337f27d4
+        uses: docker/setup-buildx-action@d70bba72b1f3fd22344832f00baa16ece964efeb

       - name: Log in to GitHub Container Registry
-        uses: docker/login-action@0d4c9c5ea7693da7b068278f7b52bda2a190a446
+        uses: docker/login-action@e92390c5fb421da1463c202d546fed0ec5c39f20
         with:
           registry: ghcr.io
           username: ${{ github.actor }}
@@ -107,13 +110,13 @@ jobs:

       - name: Log in to DockerHub
         if: github.event_name != 'pull_request'
-        uses: docker/login-action@0d4c9c5ea7693da7b068278f7b52bda2a190a446
+        uses: docker/login-action@e92390c5fb421da1463c202d546fed0ec5c39f20
         with:
           username: ${{ secrets.DOCKERHUB_USERNAME }}
           password: ${{ secrets.DOCKERHUB_TOKEN }}

       - name: Build & Deploy ${{ matrix.dockerfile[0] }}
-        uses: docker/build-push-action@1ca370b3a9802c92e886402e0dd88098a2533b12
+        uses: docker/build-push-action@2cdde995de11925a030ce8070c3d77a52ffcf1c0
         with:
           context: dockerfiles/${{ matrix.dockerfile[0] }}
           platforms: ${{ matrix.dockerfile[1] }}
@@ -126,7 +129,7 @@ jobs:
     needs: deploy-images
     steps:
       - name: Merge Artifacts
-        uses: actions/upload-artifact/merge@0b2256b8c012f0828dc542b3febcab082c67f72b
+        uses: actions/upload-artifact/merge@65462800fd760344b1a7b4382951275a0abb4808
         with:
           name: dockerfiles
           pattern: dockerfiles_*
```
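Each `dockerfile` matrix entry above is a three-element tuple that the rest of the job indexes positionally; the hunks above use all three positions:

```yaml
# Positional use of one matrix triple, e.g. [ubuntu-jammy, 'linux/amd64,...', 'ubuntu:22.04']:
name: Build ${{ matrix.dockerfile[0] }}          # 0: Dockerfile / image name
platforms: ${{ matrix.dockerfile[1] }}           # 1: platforms to build for
SPACK_YAML_OS: "${{ matrix.dockerfile[2] }}"     # 2: base image
```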
.github/workflows/ci.yaml (vendored, 16 changes)

```diff
@@ -36,7 +36,7 @@ jobs:
       core: ${{ steps.filter.outputs.core }}
       packages: ${{ steps.filter.outputs.packages }}
     steps:
-      - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
+      - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
        if: ${{ github.event_name == 'push' }}
        with:
          fetch-depth: 0
@@ -53,13 +53,6 @@ jobs:
           - 'var/spack/repos/builtin/packages/clingo/**'
           - 'var/spack/repos/builtin/packages/python/**'
           - 'var/spack/repos/builtin/packages/re2c/**'
-          - 'var/spack/repos/builtin/packages/gnupg/**'
-          - 'var/spack/repos/builtin/packages/libassuan/**'
-          - 'var/spack/repos/builtin/packages/libgcrypt/**'
-          - 'var/spack/repos/builtin/packages/libgpg-error/**'
-          - 'var/spack/repos/builtin/packages/libksba/**'
-          - 'var/spack/repos/builtin/packages/npth/**'
-          - 'var/spack/repos/builtin/packages/pinentry/**'
           - 'lib/spack/**'
           - 'share/spack/**'
           - '.github/workflows/bootstrap.yml'
@@ -84,8 +77,13 @@ jobs:
     needs: [ prechecks, changes ]
     uses: ./.github/workflows/unit_tests.yaml
     secrets: inherit
+  windows:
+    if: ${{ github.repository == 'spack/spack' && needs.changes.outputs.core == 'true' }}
+    needs: [ prechecks ]
+    uses: ./.github/workflows/windows_python.yml
+    secrets: inherit
   all:
-    needs: [ unit-tests, bootstrap ]
+    needs: [ windows, unit-tests, bootstrap ]
     runs-on: ubuntu-latest
     steps:
     - name: Success
```
.github/workflows/install_spack.sh (vendored, new executable file, 8 additions)

```diff
@@ -0,0 +1,8 @@
+#!/usr/bin/env sh
+. share/spack/setup-env.sh
+echo -e "config:\n build_jobs: 2" > etc/spack/config.yaml
+spack config add "packages:all:target:[x86_64]"
+spack compiler find
+spack compiler info apple-clang
+spack debug report
+spack solve zlib
```
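Expanded for clarity, the two configuration commands in this script correspond to settings like the following (the `packages.yaml` rendering is the standard expansion of the colon-separated path; which scope file `spack config add` actually writes to is not shown in this diff):

```yaml
# etc/spack/config.yaml, as produced by the echo line above:
config:
 build_jobs: 2

# Equivalent YAML for `spack config add "packages:all:target:[x86_64]"`
# (destination scope is an assumption):
packages:
  all:
    target: [x86_64]
```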
.github/workflows/nightly-win-builds.yml (vendored, 4 changes)

```diff
@@ -14,10 +14,10 @@ jobs:
   build-paraview-deps:
     runs-on: windows-latest
     steps:
-      - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
+      - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
        with:
          fetch-depth: 0
-      - uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
+      - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
        with:
          python-version: 3.9
      - name: Install Python packages
```
.github/workflows/requirements/style/requirements.txt (at .github/workflows/style/requirements.txt on the head branch)

```diff
@@ -1,7 +1,7 @@
 black==24.4.2
 clingo==5.7.1
-flake8==7.1.0
+flake8==7.0.0
 isort==5.13.2
 mypy==1.8.0
-types-six==1.16.21.20240513
+types-six==1.16.21.9
 vermin==1.6.0
```
.github/workflows/unit_tests.yaml (vendored, 69 changes)

```diff
@@ -51,10 +51,10 @@ jobs:
             on_develop: false

     steps:
-      - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
+      - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
        with:
          fetch-depth: 0
-      - uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
+      - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install System packages
@@ -72,7 +72,7 @@ jobs:
        run: |
          # Need this for the git tests to succeed.
          git --version
-          . .github/workflows/bin/setup_git.sh
+          . .github/workflows/setup_git.sh
      - name: Bootstrap clingo
        if: ${{ matrix.concretizer == 'clingo' }}
        env:
@@ -91,7 +91,7 @@ jobs:
          UNIT_TEST_COVERAGE: ${{ matrix.python-version == '3.11' }}
        run: |
          share/spack/qa/run-unit-tests
-      - uses: codecov/codecov-action@e28ff129e5465c2c0dcc6f003fc735cb6ae0c673
+      - uses: codecov/codecov-action@84508663e988701840491b86de86b666e8a86bed
        with:
          flags: unittests,linux,${{ matrix.concretizer }}
          token: ${{ secrets.CODECOV_TOKEN }}
@@ -100,10 +100,10 @@ jobs:
  shell:
    runs-on: ubuntu-latest
    steps:
-      - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
+      - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
        with:
          fetch-depth: 0
-      - uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
+      - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
        with:
          python-version: '3.11'
      - name: Install System packages
@@ -118,13 +118,13 @@ jobs:
        run: |
          # Need this for the git tests to succeed.
          git --version
-          . .github/workflows/bin/setup_git.sh
+          . .github/workflows/setup_git.sh
      - name: Run shell tests
        env:
          COVERAGE: true
        run: |
          share/spack/qa/run-shell-tests
-      - uses: codecov/codecov-action@e28ff129e5465c2c0dcc6f003fc735cb6ae0c673
+      - uses: codecov/codecov-action@84508663e988701840491b86de86b666e8a86bed
        with:
          flags: shelltests,linux
          token: ${{ secrets.CODECOV_TOKEN }}
@@ -141,13 +141,13 @@ jobs:
          dnf install -y \
              bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
              make patch tcl unzip which xz
-      - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
+      - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
      - name: Setup repo and non-root user
        run: |
          git --version
          git config --global --add safe.directory /__w/spack/spack
          git fetch --unshallow
-          . .github/workflows/bin/setup_git.sh
+          . .github/workflows/setup_git.sh
          useradd spack-test
          chown -R spack-test .
      - name: Run unit tests
@@ -160,10 +160,10 @@ jobs:
  clingo-cffi:
    runs-on: ubuntu-latest
    steps:
-      - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
+      - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
        with:
          fetch-depth: 0
-      - uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
+      - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
        with:
          python-version: '3.11'
      - name: Install System packages
@@ -178,14 +178,14 @@ jobs:
        run: |
          # Need this for the git tests to succeed.
          git --version
-          . .github/workflows/bin/setup_git.sh
+          . .github/workflows/setup_git.sh
      - name: Run unit tests (full suite with coverage)
        env:
          COVERAGE: true
          SPACK_TEST_SOLVER: clingo
        run: |
          share/spack/qa/run-unit-tests
-      - uses: codecov/codecov-action@e28ff129e5465c2c0dcc6f003fc735cb6ae0c673
+      - uses: codecov/codecov-action@84508663e988701840491b86de86b666e8a86bed
        with:
          flags: unittests,linux,clingo
          token: ${{ secrets.CODECOV_TOKEN }}
@@ -195,13 +195,13 @@ jobs:
    runs-on: ${{ matrix.os }}
    strategy:
      matrix:
-        os: [macos-13, macos-14]
+        os: [macos-latest, macos-14]
        python-version: ["3.11"]
    steps:
-      - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
+      - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
        with:
          fetch-depth: 0
-      - uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
+      - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install Python packages
@@ -217,45 +217,14 @@ jobs:
          SPACK_TEST_PARALLEL: 4
        run: |
          git --version
-          . .github/workflows/bin/setup_git.sh
+          . .github/workflows/setup_git.sh
          . share/spack/setup-env.sh
          $(which spack) bootstrap disable spack-install
          $(which spack) solve zlib
          common_args=(--dist loadfile --tx '4*popen//python=./bin/spack-tmpconfig python -u ./bin/spack python' -x)
          $(which spack) unit-test --verbose --cov --cov-config=pyproject.toml --cov-report=xml:coverage.xml "${common_args[@]}"
-      - uses: codecov/codecov-action@e28ff129e5465c2c0dcc6f003fc735cb6ae0c673
+      - uses: codecov/codecov-action@84508663e988701840491b86de86b666e8a86bed
        with:
          flags: unittests,macos
          token: ${{ secrets.CODECOV_TOKEN }}
          verbose: true
-  # Run unit tests on Windows
-  windows:
-    defaults:
-      run:
-        shell:
-          powershell Invoke-Expression -Command "./share/spack/qa/windows_test_setup.ps1"; {0}
-    runs-on: windows-latest
-    steps:
-      - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
-        with:
-          fetch-depth: 0
-      - uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
-        with:
-          python-version: 3.9
-      - name: Install Python packages
-        run: |
-          python -m pip install --upgrade pip pywin32 setuptools pytest-cov clingo
-      - name: Create local develop
-        run: |
-          ./.github/workflows/bin/setup_git.ps1
-      - name: Unit Test
-        run: |
-          spack unit-test -x --verbose --cov --cov-config=pyproject.toml
-          ./share/spack/qa/validate_last_exit.ps1
-          coverage combine -a
-          coverage xml
-      - uses: codecov/codecov-action@e28ff129e5465c2c0dcc6f003fc735cb6ae0c673
-        with:
-          flags: unittests,windows
-          token: ${{ secrets.CODECOV_TOKEN }}
-          verbose: true
```
.github/workflows/valid-style.yml (vendored, 18 changes)

```diff
@@ -18,15 +18,15 @@ jobs:
  validate:
    runs-on: ubuntu-latest
    steps:
-      - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
-      - uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
+      - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
+      - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
        with:
          python-version: '3.11'
          cache: 'pip'
      - name: Install Python Packages
        run: |
          pip install --upgrade pip setuptools
-          pip install -r .github/workflows/requirements/style/requirements.txt
+          pip install -r .github/workflows/style/requirements.txt
      - name: vermin (Spack's Core)
        run: vermin --backport importlib --backport argparse --violations --backport typing -t=3.6- -vvv lib/spack/spack/ lib/spack/llnl/ bin/
      - name: vermin (Repositories)
@@ -35,22 +35,22 @@ jobs:
  style:
    runs-on: ubuntu-latest
    steps:
-      - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
+      - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
        with:
          fetch-depth: 0
-      - uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
+      - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
        with:
          python-version: '3.11'
          cache: 'pip'
      - name: Install Python packages
        run: |
          pip install --upgrade pip setuptools
-          pip install -r .github/workflows/requirements/style/requirements.txt
+          pip install -r .github/workflows/style/requirements.txt
      - name: Setup git configuration
        run: |
          # Need this for the git tests to succeed.
          git --version
-          . .github/workflows/bin/setup_git.sh
+          . .github/workflows/setup_git.sh
      - name: Run style tests
        run: |
          share/spack/qa/run-style-tests
@@ -70,13 +70,13 @@ jobs:
          dnf install -y \
              bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
              make patch tcl unzip which xz
-      - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
+      - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
      - name: Setup repo and non-root user
        run: |
          git --version
          git config --global --add safe.directory /__w/spack/spack
          git fetch --unshallow
-          . .github/workflows/bin/setup_git.sh
+          . .github/workflows/setup_git.sh
          useradd spack-test
          chown -R spack-test .
      - name: Bootstrap Spack development environment
```
.github/workflows/windows_python.yml (vendored, new file, 83 additions)

```diff
@@ -0,0 +1,83 @@
+name: windows
+
+on:
+  workflow_call:
+
+concurrency:
+  group: windows-${{github.ref}}-${{github.event.pull_request.number || github.run_number}}
+  cancel-in-progress: true
+
+defaults:
+  run:
+    shell:
+      powershell Invoke-Expression -Command "./share/spack/qa/windows_test_setup.ps1"; {0}
+jobs:
+  unit-tests:
+    runs-on: windows-latest
+    steps:
+      - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
+        with:
+          fetch-depth: 0
+      - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
+        with:
+          python-version: 3.9
+      - name: Install Python packages
+        run: |
+          python -m pip install --upgrade pip pywin32 setuptools pytest-cov clingo
+      - name: Create local develop
+        run: |
+          ./.github/workflows/setup_git.ps1
+      - name: Unit Test
+        run: |
+          spack unit-test -x --verbose --cov --cov-config=pyproject.toml --ignore=lib/spack/spack/test/cmd
+          ./share/spack/qa/validate_last_exit.ps1
+          coverage combine -a
+          coverage xml
+      - uses: codecov/codecov-action@84508663e988701840491b86de86b666e8a86bed
+        with:
+          flags: unittests,windows
+          token: ${{ secrets.CODECOV_TOKEN }}
+          verbose: true
+  unit-tests-cmd:
+    runs-on: windows-latest
+    steps:
+      - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
+        with:
+          fetch-depth: 0
+      - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
+        with:
+          python-version: 3.9
+      - name: Install Python packages
+        run: |
+          python -m pip install --upgrade pip pywin32 setuptools coverage pytest-cov clingo
+      - name: Create local develop
+        run: |
+          ./.github/workflows/setup_git.ps1
+      - name: Command Unit Test
+        run: |
+          spack unit-test -x --verbose --cov --cov-config=pyproject.toml lib/spack/spack/test/cmd
+          ./share/spack/qa/validate_last_exit.ps1
+          coverage combine -a
+          coverage xml
+      - uses: codecov/codecov-action@84508663e988701840491b86de86b666e8a86bed
+        with:
+          flags: unittests,windows
+          token: ${{ secrets.CODECOV_TOKEN }}
+          verbose: true
+  build-abseil:
+    runs-on: windows-latest
+    steps:
+      - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
+        with:
+          fetch-depth: 0
+      - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
+        with:
+          python-version: 3.9
+      - name: Install Python packages
+        run: |
+          python -m pip install --upgrade pip pywin32 setuptools coverage
+      - name: Build Test
+        run: |
+          spack compiler find
+          spack -d external find cmake ninja
+          spack -d install abseil-cpp
```
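The Windows jobs above append `./share/spack/qa/validate_last_exit.ps1` after each native command because a PowerShell `run:` block does not stop when a native executable exits non-zero; the helper is what turns a failed `spack unit-test` into a failed step. The essential pattern, as used in the steps above (the helper's exact contents are not shown in this diff; the comment states an assumption about its behavior):

```yaml
- name: Unit Test
  run: |
    spack unit-test -x --verbose
    # assumption: validate_last_exit.ps1 inspects $LASTEXITCODE and throws,
    # failing the step, when the preceding native command exited non-zero
    ./share/spack/qa/validate_last_exit.ps1
```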
CHANGELOG.md (366 changes)

The single hunk `@@ -1,369 +1,3 @@` drops the whole `v0.22.0` release-notes entry from the head branch. The removed text is reproduced below as plain Markdown rather than as a wall of `-` prefixed lines.

# v0.22.0 (2024-05-12)

`v0.22.0` is a major feature release.

## Features in this release
1. **Compiler dependencies**

   We are in the process of making compilers proper dependencies in Spack, and a number
   of changes in `v0.22` support that effort. You may notice nodes in your dependency
   graphs for compiler runtime libraries like `gcc-runtime` or `libgfortran`, and you
   may notice that Spack graphs now include `libc`. We've also begun moving compiler
   configuration from `compilers.yaml` to `packages.yaml` to make it consistent with
   other externals. We are trying to do this with the least disruption possible, so
   your existing `compilers.yaml` files should still work. We expect to be done with
   this transition by the `v0.23` release in November.

   * #41104: Packages compiled with `%gcc` on Linux, macOS and FreeBSD now depend on a
     new package `gcc-runtime`, which contains a copy of the shared compiler runtime
     libraries. This enables gcc runtime libraries to be installed and relocated when
     using a build cache. When building minimal Spack-generated container images it is
     no longer necessary to install libgfortran, libgomp etc. using the system package
     manager.

   * #42062: Packages compiled with `%oneapi` now depend on a new package
     `intel-oneapi-runtime`. This is similar to `gcc-runtime`, and the runtimes can
     provide virtuals and compilers can inject dependencies on virtuals into compiled
     packages. This allows us to model library soname compatibility and allows
     compilers like `%oneapi` to provide virtuals like `sycl` (which can also be
     provided by standalone libraries). Note that until we have an agreement in place
     with Intel, Intel packages are marked `redistribute(source=False, binary=False)`
     and must be downloaded outside of Spack.

   * #43272: Changes to the optimization criteria of the solver improve the hit rate of
     buildcaches by a fair amount. The solver now has more relaxed compatibility rules and will
     not try to strictly match compilers or targets of reused specs. Users can still
     enforce the previous strict behavior with `require:` sections in `packages.yaml`.
     Note that to enforce correct linking, Spack will *not* reuse old `%gcc` and
     `%oneapi` specs that do not have the runtime libraries as a dependency.

   * #43539: Spack will reuse specs built with compilers that are *not* explicitly
     configured in `compilers.yaml`. Because we can now keep runtime libraries in the build
     cache, we do not require you to also have a local configured compiler to *use* the
     runtime libraries. This improves reuse in buildcaches and avoids conflicts with OS
     updates that happen underneath Spack.

   * #43190: Binary compatibility on `linux` is now based on the `libc` version
     instead of on the `os` tag. Spack builds now detect the host `libc` (`glibc` or
     `musl`) and add it as an implicit external node in the dependency graph. Binaries
     with a `libc` with the same name and a version less than or equal to that of the
     detected `libc` can be reused. This applies only on `linux`, not `macos` or `Windows`.

   * #43464: Each package that can provide a compiler is now detectable using `spack
     external find`. External packages defining compiler paths are effectively used as
     compilers, and `spack external find -t compiler` can be used as a substitute for
     `spack compiler find`; a sketch follows this list. More details on this transition are in
     [the docs](https://spack.readthedocs.io/en/latest/getting_started.html#manual-compiler-configuration)
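   As a quick illustration of the last point, the two commands below should now be
   interchangeable for registering compilers (a sketch based on the text above):

   ```console
   $ spack compiler find                 # traditional compiler registration
   $ spack external find -t compiler     # new: detect compiler-providing packages
   ```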
2. **Improved `spack find` UI for Environments**

   If you're working in an environment, you likely care about:

   * What the roots are
   * Which ones are installed / not installed
   * What's been added that still needs to be concretized

   We've tweaked `spack find` in environments to show this information much more
   clearly. Installation status is shown next to each root, so you can see what is
   installed. Roots are also shown in bold in the list of installed packages. There is
   also a new option, `spack find -r` / `--only-roots`, that will show only the env
   roots if you don't want to look at all the installed specs; see the sketch below.

   More details in #42334.
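   For instance (illustrative only; `myenv` is a hypothetical environment name):

   ```console
   $ spack -e myenv find               # roots listed with install status
   $ spack -e myenv find --only-roots  # hide everything except the roots
   ```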
3. **Improved command-line string quoting**

   We are making some breaking changes to how Spack parses specs on the CLI in order to
   respect shell quoting instead of trying to fight it. If you (sadly) had to write
   something like this on the command line:

   ```
   spack install zlib cflags=\"-O2 -g\"
   ```

   that will now result in an error, but you can now write what you probably expected
   to work in the first place:

   ```
   spack install zlib cflags="-O2 -g"
   ```

   Quoted strings can also now include special characters, so you can supply flags like:

   ```
   spack install zlib ldflags='-Wl,-rpath=$ORIGIN/_libs'
   ```

   To reduce ambiguity in parsing, we now require that you *not* put spaces around `=`
   and `==` for flags or variants. This would not have broken before but will now
   result in an error:

   ```
   spack install zlib cflags = "-O2 -g"
   ```

   More details and discussion in #30634.
4. **Revert default `spack install` behavior to `--reuse`**

   We changed the default concretizer behavior from `--reuse` to `--reuse-deps` in
   #30990 (in `v0.20`), which meant that *every* `spack install` invocation would
   attempt to build a new version of the requested package / any environment roots.
   While this is a common ask for *upgrading* and for *developer* workflows, we don't
   think it should be the default for a package manager.

   We are going to try to stick to this policy:
   1. Prioritize reuse and build as little as possible by default.
   2. Only upgrade or install duplicates if they are explicitly asked for, or if there
      is a known security issue that necessitates an upgrade.

   With the install command you now have three options (sketched after this list):

   * `--reuse` (default): reuse as many existing installations as possible.
   * `--reuse-deps` / `--fresh-roots`: upgrade (freshen) roots but reuse dependencies if possible.
   * `--fresh`: install fresh versions of requested packages (roots) and their dependencies.

   We've also introduced `--fresh-roots` as an alias for `--reuse-deps` to make it clearer
   that it may give you fresh versions. More details in #41302 and #43988.
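   Concretely (`hdf5` here is just an arbitrary example package):

   ```console
   $ spack install hdf5                # --reuse is implied: reuse wherever possible
   $ spack install --fresh-roots hdf5  # same as --reuse-deps: fresh root, reused deps
   $ spack install --fresh hdf5        # fresh root and fresh dependencies
   ```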
5. **More control over reused specs**

   You can now control which packages to reuse and how. There is a new
   `concretizer:reuse` config option, which accepts the following properties:

   - `roots`: `true` to reuse roots, `false` to reuse just dependencies
   - `exclude`: list of constraints used to select which specs *not* to reuse
   - `include`: list of constraints used to select which specs *to* reuse
   - `from`: list of sources for reused specs (some combination of `local`,
     `buildcache`, or `external`)

   For example, to reuse only specs compiled with GCC, you could write:

   ```yaml
   concretizer:
     reuse:
       roots: true
       include:
       - "%gcc"
   ```

   Or, if `openmpi` must be used from externals, and it must be the only external used:

   ```yaml
   concretizer:
     reuse:
       roots: true
       from:
       - type: local
         exclude: ["openmpi"]
       - type: buildcache
         exclude: ["openmpi"]
       - type: external
         include: ["openmpi"]
   ```
6. **New `redistribute()` directive**

   Some packages can't be redistributed in source or binary form. We need an explicit
   way to say that in a package.

   Now there is a `redistribute()` directive so that package authors can write:

   ```python
   class MyPackage(Package):
       redistribute(source=False, binary=False)
   ```

   Like other directives, this works with `when=`:

   ```python
   class MyPackage(Package):
       # 12.0 and higher are proprietary
       redistribute(source=False, binary=False, when="@12.0:")

       # can't redistribute when we depend on some proprietary dependency
       redistribute(source=False, binary=False, when="^proprietary-dependency")
   ```

   More in #20185.
7. **New `conflict:` and `prefer:` syntax for package preferences**

   Previously, you could express conflicts and preferences in `packages.yaml` through
   some contortions with `require:`:

   ```yaml
   packages:
     zlib-ng:
       require:
       - one_of: ["%clang", "@:"]    # conflict on %clang
       - any_of: ["+shared", "@:"]   # strong preference for +shared
   ```

   You can now use `conflict:` and `prefer:` for a much more readable configuration:

   ```yaml
   packages:
     zlib-ng:
       conflict:
       - "%clang"
       prefer:
       - "+shared"
   ```

   See [the documentation](https://spack.readthedocs.io/en/latest/packages_yaml.html#conflicts-and-strong-preferences)
   and #41832 for more details.

8. **`include_concrete` in environments**

You may want to build on the *concrete* contents of another environment without
changing that environment. You can now include the concrete specs from another
environment's `spack.lock` with `include_concrete`:

```yaml
spack:
  specs: []
  concretizer:
    unify: true
  include_concrete:
  - /path/to/environment1
  - /path/to/environment2
```

Now, when *this* environment is concretized, it will bring in the already concrete
specs from `environment1` and `environment2`, and build on top of them without
changing them. This is useful if you have phased deployments, where old deployments
should not be modified but you want to use as many of them as possible. More details
in #33768.
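
The same can be done when creating an environment on the command line, mirroring the workflow shown later in the environments documentation:

```console
$ spack env create --include-concrete myenv included_env
```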

9. **`python-venv` isolation**

Spack has unique requirements for Python because it:
1. installs every package in its own independent directory, and
2. allows users to register *external* python installations.

External installations may contain their own installed packages that can interfere
with Spack installations, and some distributions (Debian and Ubuntu) even change the
`sysconfig` in ways that alter the installation layout of installed Python packages
(e.g., with the addition of a `/local` prefix on Debian or Ubuntu). To isolate Spack
from these and other issues, we now insert a small `python-venv` package in between
`python` and packages that need to install Python code. This isolates Spack's build
environment, isolates Spack from any issues with an external python, and resolves a
large number of issues we've had with Python installations.

See #40773 for further details.
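
You can see the new node in the DAG of any concretized Python extension. For instance (a sketch; the exact tree layout and versions vary by Spack version and configuration):

```console
$ spack spec py-numpy
...
    ^python-venv
        ^python
...
```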

## New commands, options, and directives

* Allow packages to be pushed to build cache after install from source (#42423)
* `spack develop`: stage build artifacts in same root as non-dev builds (#41373)
* Don't delete `spack develop` build artifacts after install (#43424)
* `spack find`: add options for local/upstream only (#42999)
* `spack logs`: print log files for packages (either partially built or installed) (#42202)
* `patch`: support reversing patches (#43040)
* `develop`: add `-b/--build-directory` option to set the `build_directory` package attribute (#39606)
* `spack list`: add `--namespace` / `--repo` option (#41948)
* directives: add `checked_by` field to `license()`, add some license checks
* `spack gc`: add options for environments and build dependencies (#41731)
* Add `--create` to `spack env activate` (#40896)
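
For example, two of the new options in action (illustrative invocations; `zlib` is an arbitrary package):

```console
$ spack env activate --create ./myenv   # creates ./myenv if it does not exist yet
$ spack logs zlib                       # print the build log of an installed package
```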

## Performance improvements

* environment.py: fix excessive re-reads (#43746)
* ruamel yaml: fix quadratic complexity bug (#43745)
* Refactor to improve `spec format` speed (#43712)
* Do not acquire a write lock on the env post install if no views (#43505)
* asp.py: fewer calls to `spec.copy()` (#43715)
* spec.py: early return in `__str__`
* Avoid `jinja2` import at startup unless needed (#43237)

## Other new features of note

* `archspec`: update to `v0.2.4`: support for Windows, bugfixes for `neoverse-v1` and
  `neoverse-v2` detection.
* `spack config get`/`blame`: with no args, show entire config
* `spack env create <env>`: create an anonymous environment in a directory when `<env>` looks like a path (#44024)
* ASP-based solver: update OS compatibility for macOS (#43862)
* Add handling of custom ssl certs in urllib ops (#42953)
* Add ability to rename environments (#43296)
* Add config option and compiler support to reuse across OS's (#42693)
* Support for prereleases (#43140)
* Only reuse externals when configured (#41707)
* Environments: add support for including views (#42250)

## Binary caches

* Build cache: make signed/unsigned a mirror property (#41507)
* tools stack

## Removals, deprecations, and syntax changes

* Remove `dpcpp` compiler and package (#43418)
* `spack load`: remove `--only` argument (#42120)

## Notable Bugfixes

* repo.py: drop deleted packages from provider cache (#43779)
* Allow `+` in module file names (#41999)
* `cmd/python`: use runpy to allow multiprocessing in scripts (#41789)
* Show extension commands with `spack -h` (#41726)
* Support environment variable expansion inside module projections (#42917)
* Alert user to failed concretizations (#42655)
* shell: fix zsh color formatting for PS1 in environments (#39497)
* `spack mirror create --all`: include patches (#41579)

## Spack community stats

* 7,994 total packages; 525 since `v0.21.0`
* 178 new Python packages, 5 new R packages
* 358 people contributed to this release
  * 344 committers to packages
  * 45 committers to core

# v0.21.2 (2024-03-01)

## Bugfixes

- Containerize: accommodate nested or pre-existing spack-env paths (#41558)
- Fix setup-env script, when going back and forth between instances (#40924)
- Fix using fully-qualified namespaces from root specs (#41957)
- Fix a bug when a required provider is requested for multiple virtuals (#42088)
- OCI buildcaches:
  - only push in parallel when forking (#42143)
  - use pickleable errors (#42160)
- Fix using sticky variants in externals (#42253)
- Fix a rare issue with conditional requirements and multi-valued variants (#42566)

## Package updates

- rust: add v1.75, rework a few variants (#41161, #41903)
- py-transformers: add v4.35.2 (#41266)
- mgard: fix OpenMP on AppleClang (#42933)

# v0.21.1 (2024-01-11)

## New features

- Add support for reading buildcaches created by Spack v0.22 (#41773)

## Bugfixes

- spack graph: fix coloring with environments (#41240)
- spack info: sort variants in --variants-by-name (#41389)
- Spec.format: error on old style format strings (#41934)
- ASP-based solver:
  - fix infinite recursion when computing concretization errors (#41061)
  - don't error for type mismatch on preferences (#41138)
  - don't emit spurious debug output (#41218)
- Improve the error message for deprecated preferences (#41075)
- Fix MSVC preview version breaking clingo build on Windows (#41185)
- Fix multi-word aliases (#41126)
- Add a warning for unconfigured compiler (#41213)
- environment: fix an issue with deconcretization/reconcretization of specs (#41294)
- buildcache: don't error if a patch is missing, when installing from binaries (#41986)
- Multiple improvements to unit-tests (#41215, #41369, #41495, #41359, #41361, #41345, #41342, #41308, #41226)

## Package updates

- root: add a webgui patch to address security issue (#41404)
- BerkeleyGW: update source urls (#38218)

# v0.21.0 (2023-11-11)

`v0.21.0` is a major feature release.
@@ -32,7 +32,7 @@
 Spack is a multi-platform package manager that builds and installs
 multiple versions and configurations of software. It works on Linux,
-macOS, Windows, and many supercomputers. Spack is non-destructive: installing a
+macOS, and many supercomputers. Spack is non-destructive: installing a
 new version of a package does not break existing installations, so many
 configurations of the same package can coexist.
@@ -22,4 +22,4 @@
 #
 # This is compatible across platforms.
 #
-exec spack python "$@"
+exec /usr/bin/env spack python "$@"
@@ -188,27 +188,25 @@ if NOT "%_sp_args%"=="%_sp_args:--help=%" (
 goto :end_switch

 :case_load
-if NOT defined _sp_args (
-  exit /B 0
-)
-
-:: If args contain --bat, or -h/--help: just execute.
-if NOT "%_sp_args%"=="%_sp_args:--help=%" (
-  goto :default_case
-) else if NOT "%_sp_args%"=="%_sp_args:-h=%" (
-  goto :default_case
-) else if NOT "%_sp_args%"=="%_sp_args:--bat=%" (
-  goto :default_case
-) else if NOT "%_sp_args%"=="%_sp_args:--list=%" (
-  goto :default_case
-)
+:: If args contain --sh, --csh, or -h/--help: just execute.
+if defined _sp_args (
+  if NOT "%_sp_args%"=="%_sp_args:--help=%" (
+    goto :default_case
+  ) else if NOT "%_sp_args%"=="%_sp_args:-h=%" (
+    goto :default_case
+  ) else if NOT "%_sp_args%"=="%_sp_args:--bat=%" (
+    goto :default_case
+  )
+)

 for /f "tokens=* USEBACKQ" %%I in (
-  `python "%spack%" %_sp_flags% %_sp_subcommand% --bat %_sp_args%`
-) do %%I
+  `python "%spack%" %_sp_flags% %_sp_subcommand% --bat %_sp_args%`) do %%I

 goto :end_switch

+:case_unload
+goto :case_load

 :default_case
 python "%spack%" %_sp_flags% %_sp_subcommand% %_sp_args%
 goto :end_switch
@@ -144,5 +144,3 @@ switch($SpackSubCommand)
     "unload" {Invoke-SpackLoad}
     default {python "$Env:SPACK_ROOT/bin/spack" $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs}
 }
-
-exit $LASTEXITCODE
16 etc/spack/defaults/cray/modules.yaml Normal file
@@ -0,0 +1,16 @@
+# -------------------------------------------------------------------------
+# This is the default configuration for Spack's module file generation.
+#
+# Settings here are versioned with Spack and are intended to provide
+# sensible defaults out of the box. Spack maintainers should edit this
+# file to keep it current.
+#
+# Users can override these settings by editing the following files.
+#
+# Per-spack-instance settings (overrides defaults):
+#   $SPACK_ROOT/etc/spack/modules.yaml
+#
+# Per-user settings (overrides default and site settings):
+#   ~/.spack/modules.yaml
+# -------------------------------------------------------------------------
+modules: {}
19 etc/spack/defaults/cray/packages.yaml Normal file
@@ -0,0 +1,19 @@
+# -------------------------------------------------------------------------
+# This file controls default concretization preferences for Spack.
+#
+# Settings here are versioned with Spack and are intended to provide
+# sensible defaults out of the box. Spack maintainers should edit this
+# file to keep it current.
+#
+# Users can override these settings by editing the following files.
+#
+# Per-spack-instance settings (overrides defaults):
+#   $SPACK_ROOT/etc/spack/packages.yaml
+#
+# Per-user settings (overrides default and site settings):
+#   ~/.spack/packages.yaml
+# -------------------------------------------------------------------------
+packages:
+  all:
+    providers:
+      iconv: [glibc, musl, libiconv]
19 etc/spack/defaults/linux/packages.yaml Normal file
@@ -0,0 +1,19 @@
+# -------------------------------------------------------------------------
+# This file controls default concretization preferences for Spack.
+#
+# Settings here are versioned with Spack and are intended to provide
+# sensible defaults out of the box. Spack maintainers should edit this
+# file to keep it current.
+#
+# Users can override these settings by editing the following files.
+#
+# Per-spack-instance settings (overrides defaults):
+#   $SPACK_ROOT/etc/spack/packages.yaml
+#
+# Per-user settings (overrides default and site settings):
+#   ~/.spack/packages.yaml
+# -------------------------------------------------------------------------
+packages:
+  all:
+    providers:
+      iconv: [glibc, musl, libiconv]
@@ -38,9 +38,10 @@ packages:
     lapack: [openblas, amdlibflame]
     libc: [glibc, musl]
     libgfortran: [ gcc-runtime ]
-    libglx: [mesa+glx]
+    libglx: [mesa+glx, mesa18+glx]
     libifcore: [ intel-oneapi-runtime ]
     libllvm: [llvm]
+    libosmesa: [mesa+osmesa, mesa18+osmesa]
     lua-lang: [lua, lua-luajit-openresty, lua-luajit]
     luajit: [lua-luajit-openresty, lua-luajit]
     mariadb-client: [mariadb-c-client, mariadb]
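
For context, these are the default provider lists that users can override in their own `packages.yaml`; a minimal sketch reusing values from the hunk above:

```yaml
packages:
  all:
    providers:
      libglx: [mesa+glx]   # e.g., prefer plain mesa and drop the mesa18 fallback
```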
@@ -865,7 +865,7 @@ There are several different ways to use Spack packages once you have
 installed them. As you've seen, spack packages are installed into long
 paths with hashes, and you need a way to get them into your path. The
 easiest way is to use :ref:`spack load <cmd-spack-load>`, which is
-described in this section.
+described in the next section.

 Some more advanced ways to use Spack packages include:
@@ -959,86 +959,7 @@ use ``spack find --loaded``.
 You can also use ``spack load --list`` to get the same output, but it
 does not have the full set of query options that ``spack find`` offers.

-We'll learn more about Spack's spec syntax in :ref:`a later section <sec-specs>`.
-
-.. _extensions:
-
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-Python packages and virtual environments
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-Spack can install a large number of Python packages. Their names are
-typically prefixed with ``py-``. Installing and using them is no
-different from any other package:
-
-.. code-block:: console
-
-   $ spack install py-numpy
-   $ spack load py-numpy
-   $ python3
-   >>> import numpy
-
-The ``spack load`` command sets the ``PATH`` variable so that the right Python
-executable is used, and makes sure that ``numpy`` and its dependencies can be
-located in the ``PYTHONPATH``.
-
-Spack is different from other Python package managers in that it installs
-every package into its *own* prefix. This is in contrast to ``pip``, which
-installs all packages into the same prefix, be it in a virtual environment
-or not.
-
-For many users, **virtual environments** are more convenient than repeated
-``spack load`` commands, particularly when working with multiple Python
-packages. Fortunately Spack supports environments itself, which together
-with a view are no different from Python virtual environments.
-
-The recommended way of working with Python extensions such as ``py-numpy``
-is through :ref:`Environments <environments>`. The following example creates
-a Spack environment with ``numpy`` in the current working directory. It also
-puts a filesystem view in ``./view``, which is a more traditional combined
-prefix for all packages in the environment.
-
-.. code-block:: console
-
-   $ spack env create --with-view view --dir .
-   $ spack -e . add py-numpy
-   $ spack -e . concretize
-   $ spack -e . install
-
-Now you can activate the environment and start using the packages:
-
-.. code-block:: console
-
-   $ spack env activate .
-   $ python3
-   >>> import numpy
-
-The environment view is also a virtual environment, which is useful if you are
-sharing the environment with others who are unfamiliar with Spack. They can
-either use the Python executable directly:
-
-.. code-block:: console
-
-   $ ./view/bin/python3
-   >>> import numpy
-
-or use the activation script:
-
-.. code-block:: console
-
-   $ source ./view/bin/activate
-   $ python3
-   >>> import numpy
-
-In general, there should not be much difference between ``spack env activate``
-and using the virtual environment. The main advantage of ``spack env activate``
-is that it knows about more packages than just Python packages, and it may set
-additional runtime variables that are not covered by the virtual environment
-activation script.
-
-See :ref:`environments` for a more in-depth description of Spack
-environments and customizations to views.
+We'll learn more about Spack's spec syntax in the next section.

 .. _sec-specs:
@@ -1433,12 +1354,22 @@ the reserved keywords ``platform``, ``os`` and ``target``:
    $ spack install libelf os=ubuntu18.04
    $ spack install libelf target=broadwell

+or together by using the reserved keyword ``arch``:
+
+.. code-block:: console
+
+   $ spack install libelf arch=cray-CNL10-haswell
+
 Normally users don't have to bother specifying the architecture if they
 are installing software for their current host, as in that case the
 values will be detected automatically. If you need fine-grained control
 over which packages use which targets (or over *all* packages' default
 target), see :ref:`package-preferences`.

+.. admonition:: Cray machines
+
+   The situation is a little bit different for Cray machines and a detailed
+   explanation on how the architecture can be set on them can be found at :ref:`cray-support`
+
 .. _support-for-microarchitectures:
@@ -1774,6 +1705,165 @@ check only local packages (as opposed to those used transparently from
 ``upstream`` spack instances) and the ``-j,--json`` option to output
 machine-readable json data for any errors.

+.. _extensions:
+
+---------------------------
+Extensions & Python support
+---------------------------
+
+Spack's installation model assumes that each package will live in its
+own install prefix. However, certain packages are typically installed
+*within* the directory hierarchy of other packages. For example,
+`Python <https://www.python.org>`_ packages are typically installed in the
+``$prefix/lib/python-2.7/site-packages`` directory.
+
+In Spack, installation prefixes are immutable, so this type of installation
+is not directly supported. However, it is possible to create views that
+allow you to merge install prefixes of multiple packages into a single new prefix.
+Views are a convenient way to get a more traditional filesystem structure.
+Using *extensions*, you can ensure that Python packages always share the
+same prefix in the view as Python itself. Suppose you have
+Python installed like so:
+
+.. code-block:: console
+
+   $ spack find python
+   ==> 1 installed packages.
+   -- linux-debian7-x86_64 / gcc@4.4.7 --------------------------------
+   python@2.7.8
+
+.. _cmd-spack-extensions:
+
+^^^^^^^^^^^^^^^^^^^^
+``spack extensions``
+^^^^^^^^^^^^^^^^^^^^
+
+You can find extensions for your Python installation like this:
+
+.. code-block:: console
+
+   $ spack extensions python
+   ==> python@2.7.8%gcc@4.4.7 arch=linux-debian7-x86_64-703c7a96
+   ==> 36 extensions:
+   geos          py-ipython     py-pexpect    py-pyside            py-sip
+   py-basemap    py-libxml2     py-pil        py-pytz              py-six
+   py-biopython  py-mako        py-pmw        py-rpy2              py-sympy
+   py-cython     py-matplotlib  py-pychecker  py-scientificpython  py-virtualenv
+   py-dateutil   py-mpi4py      py-pygments   py-scikit-learn
+   py-epydoc     py-mx          py-pylint     py-scipy
+   py-gnuplot    py-nose        py-pyparsing  py-setuptools
+   py-h5py       py-numpy       py-pyqt       py-shiboken
+
+   ==> 12 installed:
+   -- linux-debian7-x86_64 / gcc@4.4.7 --------------------------------
+   py-dateutil@2.4.0    py-nose@1.3.4       py-pyside@1.2.2
+   py-dateutil@2.4.0    py-numpy@1.9.1      py-pytz@2014.10
+   py-ipython@2.3.1     py-pygments@2.0.1   py-setuptools@11.3.1
+   py-matplotlib@1.4.2  py-pyparsing@2.0.3  py-six@1.9.0
+
+The extensions are a subset of what's returned by ``spack list``, and
+they are packages like any other. They are installed into their own
+prefixes, and you can see this with ``spack find --paths``:
+
+.. code-block:: console
+
+   $ spack find --paths py-numpy
+   ==> 1 installed packages.
+   -- linux-debian7-x86_64 / gcc@4.4.7 --------------------------------
+   py-numpy@1.9.1  ~/spack/opt/linux-debian7-x86_64/gcc@4.4.7/py-numpy@1.9.1-66733244
+
+However, even though this package is installed, you cannot use it
+directly when you run ``python``:
+
+.. code-block:: console
+
+   $ spack load python
+   $ python
+   Python 2.7.8 (default, Feb 17 2015, 01:35:25)
+   [GCC 4.4.7 20120313 (Red Hat 4.4.7-11)] on linux2
+   Type "help", "copyright", "credits" or "license" for more information.
+   >>> import numpy
+   Traceback (most recent call last):
+     File "<stdin>", line 1, in <module>
+   ImportError: No module named numpy
+   >>>
+
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Using Extensions in Environments
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+The recommended way of working with extensions such as ``py-numpy``
+above is through :ref:`Environments <environments>`. For example,
+the following creates an environment in the current working directory
+with a filesystem view in the ``./view`` directory:
+
+.. code-block:: console
+
+   $ spack env create --with-view view --dir .
+   $ spack -e . add py-numpy
+   $ spack -e . concretize
+   $ spack -e . install
+
+We recommend environments for two reasons. Firstly, environments
+can be activated (requires :ref:`shell-support`):
+
+.. code-block:: console
+
+   $ spack env activate .
+
+which sets all the right environment variables such as ``PATH`` and
+``PYTHONPATH``. This ensures that
+
+.. code-block:: console
+
+   $ python
+   >>> import numpy
+
+works. Secondly, even without shell support, the view ensures
+that Python can locate its extensions:
+
+.. code-block:: console
+
+   $ ./view/bin/python
+   >>> import numpy
+
+See :ref:`environments` for a more in-depth description of Spack
+environments and customizations to views.
+
+^^^^^^^^^^^^^^^^^^^^
+Using ``spack load``
+^^^^^^^^^^^^^^^^^^^^
+
+A more traditional way of using Spack and extensions is ``spack load``
+(requires :ref:`shell-support`). This will add the extension to ``PYTHONPATH``
+in your current shell, and Python itself will be available in the ``PATH``:
+
+.. code-block:: console
+
+   $ spack load py-numpy
+   $ python
+   >>> import numpy
+
+The loaded packages can be checked using ``spack find --loaded``.
+
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Loading Extensions via Modules
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Apart from ``spack env activate`` and ``spack load``, you can load numpy
+through your environment modules (using ``environment-modules`` or
+``lmod``). This will also add the extension to the ``PYTHONPATH`` in
+your current shell.
+
+.. code-block:: console
+
+   $ module load <name of numpy module>
+
+If you do not know the name of the specific numpy module you wish to
+load, you can use the ``spack module tcl|lmod loads`` command to get
+the name of the module from the Spack spec.
+
 -----------------------
 Filesystem requirements
 -----------------------
@@ -147,15 +147,6 @@ example, the ``bash`` shell is used to run the ``autogen.sh`` script.
        def autoreconf(self, spec, prefix):
            which("bash")("autogen.sh")

-If the ``package.py`` has build instructions in a separate
-:ref:`builder class <multiple_build_systems>`, the signature for a phase changes slightly:
-
-.. code-block:: python
-
-   class AutotoolsBuilder(AutotoolsBuilder):
-       def autoreconf(self, pkg, spec, prefix):
-           which("bash")("autogen.sh")
-
 """""""""""""""""""""""""""""""""""""""
 patching configure or Makefile.in files
 """""""""""""""""""""""""""""""""""""""
@@ -25,7 +25,7 @@ use Spack to build packages with the tools.
 The Spack Python class ``IntelOneapiPackage`` is a base class that is
 used by ``IntelOneapiCompilers``, ``IntelOneapiMkl``,
 ``IntelOneapiTbb`` and other classes to implement the oneAPI
-packages. Search for ``oneAPI`` at `packages.spack.io <https://packages.spack.io>`_ for the full
+packages. Search for ``oneAPI`` at `<packages.spack.io>`_ for the full
 list of available oneAPI packages, or use::

    spack list -d oneAPI
@@ -718,45 +718,23 @@ command-line tool, or C/C++/Fortran program with optional Python
 modules? The former should be prepended with ``py-``, while the
 latter should not.

-""""""""""""""""""""""""""""""
-``extends`` vs. ``depends_on``
-""""""""""""""""""""""""""""""
+""""""""""""""""""""""
+extends vs. depends_on
+""""""""""""""""""""""

+This is very similar to the naming dilemma above, with a slight twist.
 As mentioned in the :ref:`Packaging Guide <packaging_extensions>`,
 ``extends`` and ``depends_on`` are very similar, but ``extends`` ensures
 that the extension and extendee share the same prefix in views.
 This allows the user to import a Python module without
 having to add that module to ``PYTHONPATH``.

-Additionally, ``extends("python")`` adds a dependency on the package
-``python-venv``. This improves isolation from the system, whether
-it's during the build or at runtime: user and system site packages
-cannot accidentally be used by any package that ``extends("python")``.
-
-As a rule of thumb: if a package does not install any Python modules
-of its own, and merely puts a Python script in the ``bin`` directory,
-then there is no need for ``extends``. If the package installs modules
-in the ``site-packages`` directory, it requires ``extends``.
-
-"""""""""""""""""""""""""""""""""""""
-Executing ``python`` during the build
-"""""""""""""""""""""""""""""""""""""
-
-Whenever you need to execute a Python command or pass the path of the
-Python interpreter to the build system, it is best to use the global
-variable ``python`` directly. For example:
-
-.. code-block:: python
-
-   @run_before("install")
-   def recythonize(self):
-       python("setup.py", "clean")  # use the `python` global
-
-As mentioned in the previous section, ``extends("python")`` adds an
-automatic dependency on ``python-venv``, which is a virtual environment
-that guarantees build isolation. The ``python`` global always refers to
-the correct Python interpreter, whether the package uses ``extends("python")``
-or ``depends_on("python")``.
+When deciding between ``extends`` and ``depends_on``, the best rule of
+thumb is to check the installation prefix. If Python libraries are
+installed to ``<prefix>/lib/pythonX.Y/site-packages``, then you
+should use ``extends``. If Python libraries are installed elsewhere
+or the only files that get installed reside in ``<prefix>/bin``, then
+don't use ``extends``.

 ^^^^^^^^^^^^^^^^^^^^^
 Alternatives to Spack
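
To make the rule of thumb concrete, here is a minimal, hypothetical `package.py` sketch (the package name, URL, and checksum are placeholders, not taken from the source):

```python
from spack.package import *


class Example(Package):
    """Hypothetical package illustrating extends() vs. depends_on()."""

    homepage = "https://example.com"                # placeholder
    url = "https://example.com/example-1.0.tar.gz"  # placeholder

    version("1.0", sha256="0" * 64)                 # placeholder checksum

    # Installs modules into <prefix>/lib/pythonX.Y/site-packages,
    # so it should share the view prefix with python:
    extends("python")

    # A package that only drops a script into <prefix>/bin would
    # instead declare a plain dependency:
    # depends_on("python", type="run")
```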
@@ -11,8 +11,7 @@ Chaining Spack Installations

 You can point your Spack installation to another installation to use any
 packages that are installed there. To register the other Spack instance,
-you can add it as an entry to ``upstreams.yaml`` at any of the
-:ref:`configuration-scopes`:
+you can add it as an entry to ``upstreams.yaml``:

 .. code-block:: yaml
@@ -23,8 +22,7 @@ you can add it as an entry to ``upstreams.yaml``
       install_tree: /path/to/another/spack/opt/spack

 ``install_tree`` must point to the ``opt/spack`` directory inside of the
-Spack base directory, or the location of the ``install_tree`` defined
-in :ref:`config.yaml <config-yaml>`.
+Spack base directory.

 Once the upstream Spack instance has been added, ``spack find`` will
 automatically check the upstream instance when querying installed packages,
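
Putting the excerpt together, a complete minimal `upstreams.yaml` entry has this shape (the instance name `spack-instance-1` is illustrative):

```yaml
upstreams:
  spack-instance-1:
    install_tree: /path/to/another/spack/opt/spack
```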
@@ -150,7 +150,7 @@ this can expose you to attacks. Use at your own risk.
 --------------------

 Path to custom certificates for SSL verification. The value can be a
-filesystem path, or an environment variable that expands to an absolute file path.
+filesystem path, or an environment variable that expands to a file path.
 The default value is set to the environment variable ``SSL_CERT_FILE``
 to use the same syntax used by many other applications that automatically
 detect custom certificates.
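
A minimal `config.yaml` matching the description above (assuming the certificate bundle is referenced through `SSL_CERT_FILE`, as the text suggests):

```yaml
config:
  ssl_certs: $SSL_CERT_FILE
```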
@@ -160,9 +160,6 @@ in the subprocess calling ``curl``.
 If ``url_fetch_method:urllib`` then files and directories are supported i.e.
 ``config:ssl_certs:$SSL_CERT_FILE`` or ``config:ssl_certs:$SSL_CERT_DIR``
 will work.
-In all cases the expanded path must be absolute for Spack to use the certificates.
-Certificates relative to an environment can be created by prepending the path variable
-with the Spack configuration variable ``$env``.

 --------------------
 ``checksum``
 --------------------
@@ -194,6 +194,9 @@ The OS that are currently supported are summarized in the table below:
    * - Operating System
      - Base Image
      - Spack Image
+   * - Ubuntu 18.04
+     - ``ubuntu:18.04``
+     - ``spack/ubuntu-bionic``
    * - Ubuntu 20.04
      - ``ubuntu:20.04``
      - ``spack/ubuntu-focal``
@@ -203,9 +206,12 @@ The OS that are currently supported are summarized in the table below:
    * - Ubuntu 24.04
      - ``ubuntu:24.04``
      - ``spack/ubuntu-noble``
-   * - CentOS Stream9
-     - ``quay.io/centos/centos:stream9``
-     - ``spack/centos-stream9``
+   * - CentOS 7
+     - ``centos:7``
+     - ``spack/centos7``
+   * - CentOS Stream
+     - ``quay.io/centos/centos:stream``
+     - ``spack/centos-stream``
    * - openSUSE Leap
      - ``opensuse/leap``
      - ``spack/leap15``
@@ -224,6 +230,12 @@ The OS that are currently supported are summarized in the table below:
    * - Rocky Linux 9
      - ``rockylinux:9``
      - ``spack/rockylinux9``
+   * - Fedora Linux 37
+     - ``fedora:37``
+     - ``spack/fedora37``
+   * - Fedora Linux 38
+     - ``fedora:38``
+     - ``spack/fedora38``
    * - Fedora Linux 39
      - ``fedora:39``
      - ``spack/fedora39``
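
As a quick illustration of how one of these images gets selected, a minimal `spack.yaml` for `spack containerize` might look like this (a sketch; `zlib` and the Ubuntu 20.04 image are arbitrary choices from the table above):

```yaml
spack:
  specs:
  - zlib
  container:
    format: docker
    images:
      os: "ubuntu:20.04"
      spack: develop
```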
@@ -142,8 +142,12 @@ user's prompt to begin with the environment name in brackets.
    $ spack env activate -p myenv
    [myenv] $ ...

-The ``activate`` command can also be used to create a new environment if it does not already
-exist.
+The ``activate`` command can also be used to create a new environment, if it is
+not already defined, by adding the ``--create`` flag. Managed and anonymous
+environments (anonymous environments are explained in the next section)
+can both be created using the same flags that ``spack env create`` accepts.
+If an environment already exists, then Spack will simply activate it and ignore
+the create-specific flags.

 .. code-block:: console
@@ -172,36 +176,21 @@ environment will remove the view from the user environment.
 Anonymous Environments
 ^^^^^^^^^^^^^^^^^^^^^^

-Apart from managed environments, Spack also supports anonymous environments.
-Anonymous environments can be placed in any directory of choice.
-
-.. note::
-
-   When uninstalling packages, Spack asks the user to confirm the removal of packages
-   that are still used in a managed environment. This is not the case for anonymous
-   environments.
-
-To create an anonymous environment, use one of the following commands:
+Any directory can be treated as an environment if it contains a file
+``spack.yaml``. To load an anonymous environment, use:

 .. code-block:: console

-   $ spack env create --dir my_env
-   $ spack env create ./my_env
+   $ spack env activate -d /path/to/directory

-As a shorthand, you can also create an anonymous environment upon activation if it does not
-already exist:
+Anonymous specs can be created in place using the command:

 .. code-block:: console

-   $ spack env activate --create ./my_env
+   $ spack env create -d .

-For convenience, Spack can also place an anonymous environment in a temporary directory for you:
-
-.. code-block:: console
-
-   $ spack env activate --temp
+In this case Spack simply creates a ``spack.yaml`` file in the requested
+directory.

 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 Environment Sensitive Commands
@@ -460,125 +449,6 @@ Sourcing that file in Bash will make the environment available to the
 user; and can be included in ``.bashrc`` files, etc. The ``loads``
 file may also be copied out of the environment, renamed, etc.

-.. _environment_include_concrete:
-
-------------------------------
-Included Concrete Environments
-------------------------------
-
-Spack environments can create an environment based off of information in already
-established environments. You can think of it as a combination of existing
-environments. It will gather information from the existing environment's
-``spack.lock`` and use that during the creation of this included concrete
-environment. When an included concrete environment is created it will generate
-a ``spack.lock`` file for the newly created environment.
-
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-Creating included environments
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-To create a combined concrete environment, you must have at least one existing
-concrete environment. You will use the command ``spack env create`` with the
-argument ``--include-concrete`` followed by the name or path of the environment
-you'd like to include. Here is an example of how to create a combined environment
-from the command line.
-
-.. code-block:: console
-
-   $ spack env create myenv
-   $ spack -e myenv add python
-   $ spack -e myenv concretize
-   $ spack env create --include-concrete myenv included_env
-
-You can also include an environment directly in the ``spack.yaml`` file. It
-involves adding the ``include_concrete`` heading in the yaml followed by the
-absolute path to the independent environments.
-
-.. code-block:: yaml
-
-   spack:
-     specs: []
-     concretizer:
-       unify: true
-     include_concrete:
-     - /absolute/path/to/environment1
-     - /absolute/path/to/environment2
-
-Once the ``spack.yaml`` has been updated you must concretize the environment to
-get the concrete specs from the included environments.
-
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-Updating an included environment
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-If changes were made to the base environment and you want that reflected in the
-included environment you will need to reconcretize both the base environment and the
-included environment for the change to be implemented. For example:
-
-.. code-block:: console
-
-   $ spack env create myenv
-   $ spack -e myenv add python
-   $ spack -e myenv concretize
-   $ spack env create --include-concrete myenv included_env
-
-   $ spack -e myenv find
-   ==> In environment myenv
-   ==> Root specs
-   python
-
-   ==> 0 installed packages
-
-   $ spack -e included_env find
-   ==> In environment included_env
-   ==> No root specs
-   ==> Included specs
-   python
-
-   ==> 0 installed packages
-
-Here we see that ``included_env`` has access to the python package through
-the ``myenv`` environment. But if we were to add another spec to ``myenv``,
-``included_env`` will not be able to access the new information.
-
-.. code-block:: console
-
-   $ spack -e myenv add perl
-   $ spack -e myenv concretize
-   $ spack -e myenv find
-   ==> In environment myenv
-   ==> Root specs
-   perl  python
-
-   ==> 0 installed packages
-
-   $ spack -e included_env find
-   ==> In environment included_env
-   ==> No root specs
-   ==> Included specs
-   python
-
-   ==> 0 installed packages
-
-It isn't until you run the ``spack concretize`` command that the combined
-environment will get the updated information from the reconcretized base environment.
-
-.. code-block:: console
-
-   $ spack -e included_env concretize
-   $ spack -e included_env find
-   ==> In environment included_env
-   ==> No root specs
-   ==> Included specs
-   perl  python
-
-   ==> 0 installed packages
-
 .. _environment-configuration:

 ------------------------
@@ -930,85 +800,32 @@ For example, the following environment has three root packages:
 This allows for a much-needed reduction in redundancy between packages
 and constraints.

------------------
-Environment Views
------------------
-
-Spack Environments can have an associated filesystem view, which is a directory
-with a more traditional structure ``<view>/bin``, ``<view>/lib``, ``<view>/include``
-in which all files of the installed packages are linked.
-
-By default a view is created for each environment, thanks to the ``view: true``
-option in the ``spack.yaml`` manifest file:
-
-.. code-block:: yaml
-
-   spack:
-     specs: [perl, python]
-     view: true
-
-The view is created in a hidden directory ``.spack-env/view`` relative to the environment.
-If you've used ``spack env activate``, you may have already interacted with this view. Spack
-prepends its ``<view>/bin`` dir to ``PATH`` when the environment is activated, so that
-you can directly run executables from all installed packages in the environment.
-
-Views are highly customizable: you can control where they are put, modify their structure,
-include and exclude specs, change how files are linked, and you can even generate multiple
-views for a single environment.
+----------------
+Filesystem Views
+----------------
+
+Spack Environments can define filesystem views, which provide a direct access point
+for software similar to the directory hierarchy that might exist under ``/usr/local``.
+Filesystem views are updated every time the environment is written out to the lock
+file ``spack.lock``, so the concrete environment and the view are always compatible.
+The files of the view's installed packages are brought into the view by symbolic or
+hard links, referencing the original Spack installation, or by copy.

 .. _configuring_environment_views:

-^^^^^^^^^^^^^^^^^^^^^^^^^^
-Minimal view configuration
-^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-The minimal configuration
-
-.. code-block:: yaml
-
-   spack:
-     # ...
-     view: true
-
-lets Spack generate a single view with default settings under the
-``.spack-env/view`` directory of the environment.
-
-Another short way to configure a view is to specify just where to put it:
-
-.. code-block:: yaml
-
-   spack:
-     # ...
-     view: /path/to/view
-
-Views can also be disabled by setting ``view: false``.
-
-^^^^^^^^^^^^^^^^^^^^^^^^^^^
-Advanced view configuration
-^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-One or more **view descriptors** can be defined under ``view``, keyed by a name.
-The example from the previous section with ``view: /path/to/view`` is equivalent
-to defining a view descriptor named ``default`` with a ``root`` attribute:
-
-.. code-block:: yaml
-
-   spack:
-     # ...
-     view:
-       default:               # name of the view
-         root: /path/to/view  # view descriptor attribute
-
-The ``default`` view descriptor name is special: when you ``spack env activate`` your
-environment, this view will be used to update (among other things) your ``PATH``
-variable.
-
-View descriptors must contain the root of the view, and optionally projections,
-``select`` and ``exclude`` lists and link information via ``link`` and
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Configuration in ``spack.yaml``
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+The Spack Environment manifest file has a top-level keyword
+``view``. Each entry under that heading is a **view descriptor**, headed
+by a name. Any number of views may be defined under the ``view`` heading.
+The view descriptor contains the root of the view, and
+optionally the projections for the view, ``select`` and
+``exclude`` lists for the view and link information via ``link`` and
 ``link_type``.

-As a more advanced example, in the following manifest
+For example, in the following manifest
 file snippet we define a view named ``mpis``, rooted at
 ``/path/to/view`` in which all projections use the package name,
 version, and compiler name to determine the path for a given
@@ -1053,10 +870,59 @@ of ``hardlink`` or ``copy``.
 when the environment is not activated, and linked libraries will be located
 *outside* of the view thanks to rpaths.

+There are two shorthands for environments with a single view. If the
+environment at ``/path/to/env`` has a single view, with a root at
+``/path/to/env/.spack-env/view``, with default selection and exclusion
+and the default projection, we can put ``view: True`` in the
+environment manifest. Similarly, if the environment has a view with a
+different root, but default selection, exclusion, and projections, the
+manifest can say ``view: /path/to/view``. These views are
+automatically named ``default``, so that
+
+.. code-block:: yaml
+
+   spack:
+     # ...
+     view: True
+
+is equivalent to
+
+.. code-block:: yaml
+
+   spack:
+     # ...
+     view:
+       default:
+         root: .spack-env/view
+
+and
+
+.. code-block:: yaml
+
+   spack:
+     # ...
+     view: /path/to/view
+
+is equivalent to
+
+.. code-block:: yaml
+
+   spack:
+     # ...
+     view:
+       default:
+         root: /path/to/view
+
+By default, Spack environments are configured with ``view: True`` in
+the manifest. Environments can be configured without views using
+``view: False``. For backwards compatibility reasons, environments
+with no ``view`` key are treated the same as ``view: True``.
+
 From the command line, the ``spack env create`` command takes an
 argument ``--with-view [PATH]`` that sets the path for a single, default
 view. If no path is specified, the default path is used (``view:
-true``). The argument ``--without-view`` can be used to create an
+True``). The argument ``--without-view`` can be used to create an
 environment without any view configured.

 The ``spack env view`` command can be used to change the manage views
@@ -1122,18 +988,11 @@ the projection under ``all`` before reaching those entries.
 Activating environment views
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^

-The ``spack env activate <env>`` has two effects:
-
-1. It activates the environment so that further Spack commands such
-   as ``spack install`` will run in the context of the environment.
-2. It activates the view so that environment variables such as
-   ``PATH`` are updated to include the view.
-
-Without further arguments, the ``default`` view of the environment is
-activated. If a view with a different name has to be activated,
-``spack env activate --with-view <name> <env>`` can be
-used instead. You can also activate the environment without modifying
-further environment variables using ``--without-view``.
+The ``spack env activate`` command will put the default view for the
+environment into the user's path, in addition to activating the
+environment for Spack commands. The arguments ``-v,--with-view`` and
+``-V,--without-view`` can be used to tune this behavior. The default
+behavior is to activate with the environment view if there is one.

 The environment variables affected by the ``spack env activate``
 command and the paths that are used to update them are determined by
@@ -1156,8 +1015,8 @@ relevant variable if the path exists. For this reason, it is not
 recommended to use non-default projections with the default view of an
 environment.

-The ``spack env deactivate`` command will remove the active view of
-the Spack environment from the user's environment variables.
+The ``spack env deactivate`` command will remove the default view of
+the environment from the user's path.

 .. _env-generate-depfile:
@@ -1174,7 +1033,7 @@ other targets to depend on the environment installation.

A typical workflow is as follows:

.. code-block:: console

   spack env create -d .
   spack -e . add perl

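The workflow typically continues (truncated here by the diff context) by
concretizing and generating the ``Makefile``; a minimal sketch:

.. code-block:: console

   spack -e . concretize
   spack -e . env depfile -o Makefile
   make -j8
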
@@ -1267,7 +1126,7 @@ its dependencies. This can be useful when certain flags should only apply to

dependencies. Below we show a use case where a spec is installed with verbose
output (``spack install --verbose``) while its dependencies are installed silently:

.. code-block:: console

   $ spack env depfile -o Makefile

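A hedged sketch of the invocation, assuming the generated ``Makefile``
honors a ``SPACK_INSTALL_FLAGS`` variable for extra ``spack install``
arguments:

.. code-block:: console

   $ make SPACK_INSTALL_FLAGS=--verbose
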
@@ -1289,7 +1148,7 @@ This can be accomplished through the generated ``[<prefix>/]SPACK_PACKAGE_IDS``

variable. Assuming we have an active and concrete environment, we generate the
associated ``Makefile`` with a prefix ``example``:

.. code-block:: console

   $ spack env depfile -o env.mk --make-prefix example

@@ -1316,7 +1175,7 @@ index once every package is pushed. Note how this target uses the generated

   example/push/%: example/install/%
   	@mkdir -p $(dir $@)
   	$(info About to push $(SPEC) to a buildcache)
   	$(SPACK) -e . buildcache push --only=package $(BUILDCACHE_DIR) /$(HASH)
   	@touch $@

   push: $(addprefix example/push/,$(example/SPACK_PACKAGE_IDS))

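Invoking the aggregate target then pushes each installed package in the
environment; a minimal sketch:

.. code-block:: console

   $ make -j4 push
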
@@ -478,13 +478,6 @@ prefix, you can add them to the ``extra_attributes`` field. Similarly,

all other fields from the compilers config can be added to the
``extra_attributes`` field for an external representing a compiler.

Note that the format for the ``paths`` field in the
``extra_attributes`` section is different from the one in the ``compilers``
config. For compilers configured as external packages, the section is
named ``compilers`` and the dictionary maps language names (``c``,
``cxx``, ``fortran``) to paths, rather than using the names ``cc``,
``fc``, and ``f77``.

.. code-block:: yaml

   packages:
@@ -500,10 +493,11 @@ named ``compilers`` and the dictionary maps language names (``c``,

     - spec: llvm+clang@15.0.0 arch=linux-rhel8-skylake
       prefix: /usr
       extra_attributes:
         compilers:
           c: /usr/bin/clang-with-suffix
           cxx: /usr/bin/clang++-with-extra-info
           fortran: /usr/bin/gfortran
         extra_rpaths:
         - /usr/lib/llvm/

@@ -1364,6 +1358,187 @@ This will write the private key to the file `dinosaur.priv`.

or for help on an issue or the Spack slack.


.. _cray-support:

-------------
Spack on Cray
-------------

Spack differs slightly when used on a Cray system. The architecture spec
can differentiate between the front-end and back-end processor and operating system.
For example, on Edison at NERSC, the back-end target processor
is "Ivy Bridge", so you can specify to use the back-end this way:

.. code-block:: console

   $ spack install zlib target=ivybridge

You can also use the operating system to build against the back-end:

.. code-block:: console

   $ spack install zlib os=CNL10

Notice that the name includes both the operating system name and the major
version number concatenated together.

Alternatively, if you want to build something for the front-end,
you can specify the front-end target processor. The processor for a login node
on Edison is "Sandy Bridge", so we specify it on the command line like so:

.. code-block:: console

   $ spack install zlib target=sandybridge

And the front-end operating system is:

.. code-block:: console

   $ spack install zlib os=SuSE11

^^^^^^^^^^^^^^^^^^^^^^^
Cray compiler detection
^^^^^^^^^^^^^^^^^^^^^^^

Spack can detect compilers using two methods. For the front-end, we treat
everything the same. The difference lies in back-end compiler detection.
Back-end compiler detection is made via the Tcl ``module avail`` command.
Once it detects the compiler, it writes the appropriate PrgEnv and compiler
module name to compilers.yaml and sets the paths to each compiler with Cray's
compiler wrapper names (i.e., cc, CC, ftn). During build time, Spack will load
the correct PrgEnv and compiler module and will call the appropriate wrapper.

The compilers.yaml config file will also differ. There is a
modules section that is filled with the compiler's Programming Environment
and module name. On other systems, this field is empty ``[]``:

.. code-block:: yaml

   - compiler:
       modules:
       - PrgEnv-intel
       - intel/15.0.109

As mentioned earlier, the compiler paths will look different on a Cray system.
Since most compilers are invoked using cc, CC and ftn, the paths for each
compiler are replaced with their respective Cray compiler wrapper names:

.. code-block:: yaml

   paths:
     cc: cc
     cxx: CC
     f77: ftn
     fc: ftn

These wrapper names are used in place of an explicit path to the compiler
executable, which allows Spack to call the Cray compiler wrappers during
build time.

For more on compiler configuration, check out :ref:`compiler-config`.

Spack sets the default Cray link type to dynamic, to better match other
platforms. Individual packages can enable static linking (which is the
default outside of Spack on Cray systems) using the ``-static`` flag.

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Setting defaults and using Cray modules
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

If you want to use default compilers for each PrgEnv and also be able
to load Cray external modules, you will need to set up a ``packages.yaml``.

Here's an example of an external configuration for Cray modules:

.. code-block:: yaml

   packages:
     mpich:
       externals:
       - spec: "mpich@7.3.1%gcc@5.2.0 arch=cray_xc-haswell-CNL10"
         modules:
         - cray-mpich
       - spec: "mpich@7.3.1%intel@16.0.0.109 arch=cray_xc-haswell-CNL10"
         modules:
         - cray-mpich
     all:
       providers:
         mpi: [mpich]

This tells Spack that for whatever package depends on mpi, it should load the
cray-mpich module into the environment. You can then use whatever
environment variables, libraries, etc., that are brought into the environment
via module load.

.. note::

   For Cray-provided packages, it is best to use ``modules:`` instead of ``prefix:``
   in ``packages.yaml``, because the Cray Programming Environment heavily relies on
   modules (e.g., loading the ``cray-mpich`` module adds MPI libraries to the
   compiler wrapper link line).

You can set the default compiler that Spack can use for each compiler type.
If you want to use the Cray defaults, then set them under ``all:`` in packages.yaml.
In the compiler field, set the compiler specs in your order of preference.
Whenever you build with that compiler type, Spack will concretize to that version.

Here is an example of a full packages.yaml used at NERSC:

.. code-block:: yaml

   packages:
     mpich:
       externals:
       - spec: "mpich@7.3.1%gcc@5.2.0 arch=cray_xc-CNL10-ivybridge"
         modules:
         - cray-mpich
       - spec: "mpich@7.3.1%intel@16.0.0.109 arch=cray_xc-SuSE11-ivybridge"
         modules:
         - cray-mpich
       buildable: False
     netcdf:
       externals:
       - spec: "netcdf@4.3.3.1%gcc@5.2.0 arch=cray_xc-CNL10-ivybridge"
         modules:
         - cray-netcdf
       - spec: "netcdf@4.3.3.1%intel@16.0.0.109 arch=cray_xc-CNL10-ivybridge"
         modules:
         - cray-netcdf
       buildable: False
     hdf5:
       externals:
       - spec: "hdf5@1.8.14%gcc@5.2.0 arch=cray_xc-CNL10-ivybridge"
         modules:
         - cray-hdf5
       - spec: "hdf5@1.8.14%intel@16.0.0.109 arch=cray_xc-CNL10-ivybridge"
         modules:
         - cray-hdf5
       buildable: False
     all:
       compiler: [gcc@5.2.0, intel@16.0.0.109]
       providers:
         mpi: [mpich]

Here we tell Spack that whenever we want to build with gcc, use version 5.2.0, or
if we want to build with Intel compilers, use version 16.0.0.109. We add a spec
for each compiler type for each of the Cray modules. This ensures that for each
compiler on our system we can use that external module.

For more on external packages check out the section :ref:`sec-external-packages`.

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Using Linux containers on Cray machines
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

Spack uses environment variables particular to the Cray programming
environment to determine which systems are Cray platforms. These
environment variables may be propagated into containers that are not
using the Cray programming environment.

To ensure that Spack does not autodetect the Cray programming
environment, unset the environment variable ``MODULEPATH``. This
will cause Spack to treat a Linux container on a Cray system as a base
Linux distro.
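
A minimal sketch from inside such a container:

.. code-block:: console

   $ unset MODULEPATH
   $ spack spec zlib   # now resolves as a plain Linux host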

.. _windows_support:

----------------
@@ -2344,27 +2344,6 @@ you set ``parallel`` to ``False`` at the package level, then each call

to ``make()`` will be sequential by default, but packagers can call
``make(parallel=True)`` to override it.

Note that the ``--jobs`` option works out of the box for all standard
build systems. If you are using a non-standard build system instead, you
can use the variable ``make_jobs`` to extract the number of jobs specified
by the ``--jobs`` option:

.. code-block:: python
   :emphasize-lines: 7, 11
   :linenos:

   class Xios(Package):
       ...
       def install(self, spec, prefix):
           ...
           options = [
               ...
               '--jobs', str(make_jobs),
           ]
           ...
           make_xios = Executable("./make_xios")
           make_xios(*options)

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Install-level build parallelism
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -5194,6 +5173,12 @@ installed executable. The check is implemented as follows:

      reframe = Executable(self.prefix.bin.reframe)
      reframe("-l")

.. warning::

   The API for adding tests is not yet considered stable and may change
   in future releases.

""""""""""""""""""""""""""""""""
Checking build-time test results
""""""""""""""""""""""""""""""""
@@ -5231,42 +5216,38 @@ be left in the build stage directory as illustrated below:

Stand-alone tests
^^^^^^^^^^^^^^^^^

While build-time tests are integrated with the installation process, stand-alone
tests are expected to run days, weeks, even months after the software is
installed. The goal is to provide a mechanism for gaining confidence that
packages work as installed **and** *continue* to work as the underlying
software evolves. Packages can add and inherit stand-alone tests. The
``spack test`` command is used for stand-alone testing.

.. admonition:: Stand-alone test methods should complete within a few minutes.

   Execution speed is important since these tests are intended to quickly
   assess whether installed specs work on the system. Spack cannot spare
   resources for more extensive testing of packages included in CI stacks.

   Consequently, stand-alone tests should run relatively quickly -- as in
   on the order of at most a few minutes -- while testing at least key aspects
   of the installed software. Save more extensive testing for other tools.

Tests are defined in the package using methods with names beginning ``test_``.
This allows Spack to support multiple independent checks, or parts. Files
needed for testing, such as source, data, and expected outputs, may be saved
from the build and/or stored with the package in the repository. Regardless
of origin, these files are automatically copied to the spec's test stage
directory prior to execution of the test method(s). Spack also provides helper
functions to facilitate common processing.
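
As an illustration, a minimal sketch of running a package's stand-alone
tests and inspecting the results (the package name and alias are
hypothetical):

.. code-block:: console

   $ spack test run --alias mypkg-tests mypackage
   $ spack test results -l mypkg-tests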

.. tip::

   **The status of stand-alone tests can be used to guide follow-up testing efforts.**

   Passing stand-alone tests justify performing more thorough testing, such
   as running extensive unit or regression tests or tests that run at scale,
   when available. These tests are outside of the scope of Spack packaging.

   Failing stand-alone tests indicate problems with the installation and,
   therefore, there is no reason to proceed with more resource-intensive tests
   until the failures have been investigated.

.. _configure-test-stage:
@@ -5274,26 +5255,30 @@ functions to facilitate common processing.

Configuring the test stage directory
""""""""""""""""""""""""""""""""""""

Stand-alone tests utilize a test stage directory to build, run, and track
tests in the same way Spack uses a build stage directory to install software.
The default test stage root directory, ``$HOME/.spack/test``, is defined in
:ref:`config.yaml <config-yaml>`. This location is customizable by adding or
changing the ``test_stage`` path such that:

.. code-block:: yaml

   config:
     test_stage: /path/to/test/stage
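
If you prefer the command line, the same setting can be applied with
``spack config``; a minimal sketch (the path is hypothetical):

.. code-block:: console

   $ spack config add "config:test_stage:/tmp/$USER/test_stage"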

Packages can use the ``self.test_suite.stage`` property to access the path.

.. admonition:: Each spec being tested has its own test stage directory.

   The ``config:test_stage`` option is the path to the root of a
   **test suite**'s stage directories.

   Other package properties that provide paths to spec-specific subdirectories
   and files are described in :ref:`accessing-files`.

.. _adding-standalone-tests:
@@ -5306,144 +5291,61 @@ Test recipes are defined in the package using methods with names beginning

Each method has access to the information Spack tracks on the package, such
as options, compilers, and dependencies, supporting the customization of tests
to the build. Standard python ``assert`` statements and other error reporting
mechanisms can be used. These exceptions are automatically caught and reported
as test failures.

Each test method is an *implicit test part* named by the method. Its purpose
is the method's docstring. Providing a meaningful purpose for the test gives
context that can aid debugging. Spack outputs both the name and purpose at the
start of test execution so it's also important that the docstring/purpose be
brief.

.. tip::

   We recommend naming test methods so it is clear *what* is being tested.
   For example, if a test method is building and/or running an executable
   called ``example``, then call the method ``test_example``. This, together
   with a similarly meaningful test purpose, will aid test comprehension,
   debugging, and maintainability.

Stand-alone tests run in an environment that provides access to information
on the installed software, such as build options, dependencies, and compilers.
Build options and dependencies are accessed using the same spec checks used
by build recipes. Examples of checking :ref:`variant settings <variants>` and
:ref:`spec constraints <testing-specs>` can be found at the provided links.

.. admonition:: Spack automatically sets up the test stage directory and environment.

   Spack automatically creates the test stage directory and copies
   relevant files *prior to* running tests. It can also ensure build
   dependencies are available **if** necessary.

   The path to the test stage is configurable (see :ref:`configure-test-stage`).

   Files that Spack knows to copy are those saved from the build (see
   :ref:`cache_extra_test_sources`) and those added to the package repository
   (see :ref:`cache_custom_files`).

   Spack will use the value of the ``test_requires_compiler`` property to
   determine whether it needs to also set up build dependencies (see
   :ref:`test-build-tests`).

The ``MyPackage`` package below provides two basic test examples:
``test_example`` and ``test_example2``. The first runs the installed
``example`` and ensures its output contains an expected string. The second
runs ``example2`` without checking output so is only concerned with confirming
the executable runs successfully. If the installed spec is not expected to have
``example2``, then the check at the top of the method will raise a special
``SkipTest`` exception, which is captured to facilitate reporting skipped test
parts to tools like CDash.

.. code-block:: python

   class MyPackage(Package):
       ...

       def test_example(self):
           """ensure installed example works"""
           expected = "Done."
           example = which(self.prefix.bin.example)

           # Capture stdout and stderr from running the Executable
           # and check that the expected output was produced.
           out = example(output=str.split, error=str.split)
           assert expected in out, f"Expected '{expected}' in the output"

       def test_example2(self):
           """run installed example2"""
           if self.spec.satisfies("@:1.0"):
               # Raise SkipTest to ensure flagging the test as skipped for
               # test reporting purposes.
               raise SkipTest("Test is only available for v1.1 on")

           example2 = which(self.prefix.bin.example2)
           example2()

Output showing the identification of each test part after running the tests
is illustrated below.
.. code-block:: console

   $ spack test run --alias mypackage mypackage@2.0
   ==> Spack test mypackage
   ...
   $ spack test results -l mypackage
   ==> Results for test suite 'mypackage':
   ...
   ==> [2024-03-10-16:03:56.625439] test: test_example: ensure installed example works
   ...
   PASSED: MyPackage::test_example
   ==> [2024-03-10-16:03:56.625439] test: test_example2: run installed example2
   ...
   PASSED: MyPackage::test_example2

.. admonition:: Do NOT implement tests that must run in the installation prefix.

   Use of the package spec's installation prefix for building and running
   tests is **strongly discouraged**. Doing so causes permission errors for
   shared spack instances *and* facilities that install the software in
   read-only file systems or directories.

   Instead, start these test methods by explicitly copying the needed files
   from the installation prefix to the test stage directory. Note the test
   stage directory is the current directory when the test is executed with
   the ``spack test run`` command.

.. admonition:: Test methods for library packages should build test executables.

   Stand-alone tests for library packages *should* build test executables
   that utilize the *installed* library. Doing so ensures the tests follow
   a similar build process that users of the library would follow.

   For more information on how to do this, see :ref:`test-build-tests`.

.. tip::

   If you want to see more examples from packages with stand-alone tests, run
   ``spack pkg grep "def\stest" | sed "s/\/package.py.*//g" | sort -u``
   from the command line to get a list of the packages.

.. _adding-standalone-test-parts:

"""""""""""""""""""""""""""""
Adding stand-alone test parts
"""""""""""""""""""""""""""""

Sometimes dependencies between steps of a test lend themselves to being
broken into parts. Tracking the pass/fail status of each part may aid
debugging. Spack provides a ``test_part`` context manager for use within
test methods.

Each test part is independently run, tracked, and reported. Test parts are
executed in the order they appear. If one fails, subsequent test parts are
still performed even if they would also fail. This allows tools like CDash
to track and report the status of test parts across runs. The pass/fail status
of the enclosing test is derived from the statuses of the embedded test parts.

.. admonition:: Test method and test part names **must** be unique.

   Test results reporting requires that test methods and embedded test parts
   within a package have unique names.

The signature for ``test_part`` is:
@@ -5465,68 +5367,40 @@ where each argument has the following meaning:

* ``work_dir`` is the path to the directory in which the test will run.

  The default of ``None``, or ``"."``, corresponds to the spec's test
  stage (i.e., ``self.test_suite.test_dir_for_spec(self.spec)``).

.. admonition:: Start test part names with the name of the enclosing test.

   We **highly recommend** starting the names of test parts with the name
   of the enclosing test. Doing so helps with the comprehension, readability
   and debugging of test results.

Suppose ``MyPackage`` installs multiple executables that need to run in a
specific order since the outputs from one are inputs of others. Further suppose
we want to add an integration test that runs the executables in order. We can
accomplish this goal by implementing a stand-alone test method consisting of
test parts for each executable as follows:

.. code-block:: python

   class MyPackage(Package):
       ...

       def test_series(self):
           """run setup, perform, and report"""

           with test_part(self, "test_series_setup", purpose="setup operation"):
               exe = which(self.prefix.bin.setup)
               exe()

           with test_part(self, "test_series_run", purpose="perform operation"):
               exe = which(self.prefix.bin.run)
               exe()

           with test_part(self, "test_series_report", purpose="generate report"):
               exe = which(self.prefix.bin.report)
               exe()

The result is that ``test_series`` runs the following executables in order:
``setup``, ``run``, and ``report``. In this case no options are passed to any
of the executables and no outputs from running them are checked. Consequently,
the implementation could be simplified with a for-loop as follows:

.. code-block:: python

   class MyPackage(Package):
       ...

       def test_series(self):
           """execute series setup, run, and report"""

           for exe, reason in [
               ("setup", "setup operation"),
               ("run", "perform operation"),
               ("report", "generate report")
           ]:
               with test_part(self, f"test_series_{exe}", purpose=reason):
                   exe = which(self.prefix.bin.join(exe))
                   exe()

In both cases, since we're using a context manager, each test part in
``test_series`` will execute regardless of the status of the other test
parts.

Now let's look at the output from running the stand-alone tests where
the second test part, ``test_series_run``, fails.

.. code-block:: console
@@ -5536,68 +5410,50 @@ the second test part, ``test_series_run``, fails.

   $ spack test results -l mypackage
   ==> Results for test suite 'mypackage':
   ...
   ==> [2024-03-10-16:03:56.625204] test: test_series: execute series setup, run, and report
   ==> [2024-03-10-16:03:56.625439] test: test_series_setup: setup operation
   ...
   PASSED: MyPackage::test_series_setup
   ==> [2024-03-10-16:03:56.625555] test: test_series_run: perform operation
   ...
   FAILED: MyPackage::test_series_run
   ==> [2024-03-10-16:03:57.003456] test: test_series_report: generate report
   ...
   FAILED: MyPackage::test_series_report
   FAILED: MyPackage::test_series
   ...

Since test parts depended on the success of previous parts, we see that the
failure of one results in the failure of subsequent checks and the overall
result of the test method, ``test_series``, is failure.

.. tip::

   If you want to see more examples from packages using ``test_part``, run
   ``spack pkg grep "test_part(" | sed "s/\/package.py.*//g" | sort -u``
   from the command line to get a list of the packages.

.. _test-build-tests:

"""""""""""""""""""""""""""""""""""""
Building and running test executables
"""""""""""""""""""""""""""""""""""""

.. admonition:: Re-use build-time sources and (small) input data sets when possible.

   We **highly recommend** re-using build-time test sources and pared down
   input files for testing installed software. These files are easier
   to keep synchronized with software capabilities when they reside
   within the software's repository. More information on saving files from
   the installation process can be found at :ref:`cache_extra_test_sources`.

   If that is not possible, you can add test-related files to the package
   repository (see :ref:`cache_custom_files`). It will be important to
   remember to maintain them so they work across listed or supported versions
   of the package.

Packages that build libraries are good examples of cases where you'll want
to build test executables from the installed software before running them.
Doing so requires you to let Spack know it needs to load the package's
compiler configuration. This is accomplished by setting the package's
``test_requires_compiler`` property to ``True``.

.. admonition:: ``test_requires_compiler = True`` is required to build test executables.

   Setting the property to ``True`` ensures access to the compiler through
   canonical environment variables (e.g., ``CC``, ``CXX``, ``FC``, ``F77``).
   It also gives access to build dependencies like ``cmake`` through their
   ``spec objects`` (e.g., ``self.spec["cmake"].prefix.bin.cmake`` for the
   path or ``self.spec["cmake"].command`` for the ``Executable`` instance).

   Be sure to add the property at the top of the package class under other
   properties like the ``homepage``.
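
A minimal sketch of where the property sits in a (hypothetical) library
package class:

.. code-block:: python

   class MyLibrary(Package):
       """Hypothetical library package with compiler-dependent tests."""

       homepage = "https://example.com/mylibrary"
       url = "https://example.com/mylibrary-1.0.tar.gz"

       test_requires_compiler = True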

The example below, which ignores how ``cxx-example.cpp`` is acquired,
illustrates the basic process of compiling a test executable using the
installed library before running it.

.. code-block:: python
@@ -5621,22 +5477,28 @@ installed library before running it.

       cxx_example = which(exe)
       cxx_example()

Typically the files used to build and/or run test executables are either
cached from the installation (see :ref:`cache_extra_test_sources`) or added
to the package repository (see :ref:`cache_custom_files`). There is nothing
preventing the use of both.

.. _cache_extra_test_sources:

""""""""""""""""""""""""""""""""""""
Saving build- and install-time files
""""""""""""""""""""""""""""""""""""

You can use the ``cache_extra_test_sources`` helper routine to copy
directories and/or files from the source build stage directory to the
package's installation directory. Spack will automatically copy these
files for you when it sets up the test stage directory and before it
begins running the tests.

The signature for ``cache_extra_test_sources`` is:
@@ -5651,69 +5513,46 @@ where each argument has the following meaning:

* ``srcs`` is a string *or* a list of strings corresponding to the
  paths of subdirectories and/or files needed for stand-alone testing.

.. warning::

   Paths provided in the ``srcs`` argument **must be relative** to the
   staged source directory. They will be copied to the equivalent relative
   location under the test stage directory prior to test execution.

Contents of subdirectories and files are copied to a special test cache
subdirectory of the installation prefix. They are automatically copied to
the appropriate relative paths under the test stage directory prior to
executing stand-alone tests.

.. tip::

   *Perform test-related conversions once when copying files.*

   If one or more of the copied files needs to be modified to reference
   the installed software, it is recommended that those changes be made
   to the cached files **once** in the post-``install`` copy method
   **after** the call to ``cache_extra_test_sources``. This will reduce
   the amount of unnecessary work in the test method **and** avoid problems
   running stand-alone tests in shared instances and facility deployments.

   The ``filter_file`` function can be quite useful for such changes
   (see :ref:`file-filtering`).

Below is a basic example of a test that relies on files from the installation.
This package method re-uses the contents of the ``examples`` subdirectory,
which is assumed to have all of the files implemented to allow ``make`` to
compile and link ``foo.c`` and ``bar.c`` against the package's installed
library.

.. code-block:: python

   class MyLibPackage(MakefilePackage):
       ...

       @run_after("install")
       def copy_test_files(self):
           cache_extra_test_sources(self, "examples")

       def test_example(self):
           """build and run the examples"""
           examples_dir = self.test_suite.current_test_cache_dir.examples
           with working_dir(examples_dir):
               make = which("make")
               make()

               for program in ["foo", "bar"]:
                   with test_part(
                       self,
                       f"test_example_{program}",
                       purpose=f"ensure {program} runs"
                   ):
                       exe = Executable(program)
                       exe()

In this case, ``copy_test_files`` copies the associated files from the
build stage to the package's test cache directory under the installation
prefix. Running ``spack test run`` for the package results in Spack copying
the directory and its contents to the test stage directory. The
``working_dir`` context manager ensures the commands within it are executed
from the ``examples_dir``. The test builds the software using ``make`` before
running each executable, ``foo`` and ``bar``, as independent test parts.

.. note::
@@ -5722,18 +5561,43 @@ running each executable, ``foo`` and ``bar``, as independent test parts.

   The key to copying files for stand-alone testing at build time is use
   of the ``run_after`` directive, which ensures the associated files are
   copied **after** the provided build stage (``install``) when the installation
   prefix **and** files are available.

The test method uses the path contained in the package's
``self.test_suite.current_test_cache_dir`` property for the root directory
of the copied files. In this case, that's the ``examples`` subdirectory.

.. tip::

   If you want to see more examples from packages that cache build files, run
   ``spack pkg grep cache_extra_test_sources | sed "s/\/package.py.*//g" | sort -u``
   from the command line to get a list of the packages.

.. _cache_custom_files:
@@ -5741,9 +5605,8 @@ running each executable, ``foo`` and ``bar``, as independent test parts.

Adding custom files
"""""""""""""""""""

Sometimes it is helpful or necessary to include custom files for building
and/or checking the results of tests as part of the package. Examples of the
types of files that might be useful are:

- test source files
- test input files
@@ -5751,15 +5614,17 @@ of files that might be useful are:

- expected test outputs

While obtaining such files from the software repository is preferred (see
:ref:`cache_extra_test_sources`), there are circumstances where doing so is not
feasible, such as when the software is not being actively maintained. When test
files cannot be obtained from the repository or there is a need to supplement
files that can, Spack supports the inclusion of additional files under the
``test`` subdirectory of the package in the Spack repository.

The following example assumes ``custom-example.cpp`` is saved in the
``MyLibrary`` package's ``test`` subdirectory. It also assumes the program
simply needs to be compiled and linked against the installed ``MyLibrary``
software.

.. code-block:: python
@@ -5769,29 +5634,17 @@ be compiled and linked against the installed ``MyLibrary`` software.

       test_requires_compiler = True
       ...

       def test_custom_example(self):
           """build and run custom-example"""
           src_dir = self.test_suite.current_test_data_dir
           exe = "custom-example"

           with working_dir(src_dir):
               cc = which(os.environ["CC"])
               cc(
                   f"-L{self.prefix.lib}",
                   f"-I{self.prefix.include}",
                   f"{exe}.cpp",
                   "-o", exe
               )

               custom_example = Executable(exe)
               custom_example()

In this case, ``spack test run`` for the package results in Spack copying
the contents of the ``test`` subdirectory to the test stage directory path
in ``self.test_suite.current_test_data_dir`` before calling
``test_custom_example``. Use of the ``working_dir`` context manager
ensures the commands to build and run the program are performed from
within the appropriate subdirectory of the test stage.

.. _expected_test_output_from_file:
@@ -5800,8 +5653,9 @@ Reading expected output from a file

"""""""""""""""""""""""""""""""""""

The helper function ``get_escaped_text_output`` is available for packages
to retrieve properly formatted text from a file potentially containing
special characters.

The signature for ``get_escaped_text_output`` is:
@@ -5811,13 +5665,10 @@ The signature for ``get_escaped_text_output`` is:

where ``filename`` is the path to the file containing the expected output.

The path provided to ``filename`` for one of the copied custom files
(:ref:`custom file <cache_custom_files>`) is in the path rooted at
``self.test_suite.current_test_data_dir``.

The example below shows how to reference both the custom database
(``packages.db``) and expected output (``dump.out``) files Spack copies
to the test stage:

.. code-block:: python
@@ -5839,9 +5690,8 @@ to the test stage:

       for exp in expected:
           assert re.search(exp, out), f"Expected '{exp}' in output"

If the files were instead cached from installing the software, the paths to the
two files would be found under the ``self.test_suite.current_test_cache_dir``
directory as shown below:

.. code-block:: python
@@ -5849,24 +5699,17 @@ directory as shown below:

       """check example table dump"""
       test_cache_dir = self.test_suite.current_test_cache_dir
       db_filename = test_cache_dir.join("packages.db")
       ...
       expected = get_escaped_text_output(test_cache_dir.join("dump.out"))
       ...

Alternatively, if both files had been installed by the software into the
``share/tests`` subdirectory of the installation prefix, the paths to the
two files would be referenced as follows:

.. code-block:: python

   def test_example(self):
       """check example table dump"""
       db_filename = self.prefix.share.tests.join("packages.db")
       ...
       expected = get_escaped_text_output(
           self.prefix.share.tests.join("dump.out")
       )
       ...

.. _check_outputs:
@@ -5874,9 +5717,9 @@ two files would be referenced as follows:
|
|||||||
Comparing expected to actual outputs
|
Comparing expected to actual outputs
|
||||||
""""""""""""""""""""""""""""""""""""
|
""""""""""""""""""""""""""""""""""""
|
||||||
|
|
||||||
The ``check_outputs`` helper routine is available for packages to ensure
|
The helper function ``check_outputs`` is available for packages to ensure
|
||||||
multiple expected outputs from running an executable are contained within
|
the expected outputs from running an executable are contained within the
|
||||||
the actual outputs.
|
actual outputs.
|
||||||
|
|
||||||
The signature for ``check_outputs`` is:
|
The signature for ``check_outputs`` is:
|
||||||
|
|
||||||
@@ -5902,17 +5745,11 @@ Invoking the method is the equivalent of:
|
|||||||
if errors:
|
if errors:
|
||||||
raise RuntimeError("\n ".join(errors))
|
raise RuntimeError("\n ".join(errors))
|
||||||
|
|
||||||
.. tip::
|
|
||||||
|
|
||||||
If you want to see more examples from packages that use this helper, run
|
|
||||||
``spack pkg grep check_outputs | sed "s/\/package.py.*//g" | sort -u``
|
|
||||||
from the command line to get a list of the packages.
|
|
||||||
|
|
||||||
|
|
||||||
.. _accessing-files:
|
.. _accessing-files:
|
||||||
|
|
||||||
"""""""""""""""""""""""""""""""""""""""""
|
"""""""""""""""""""""""""""""""""""""""""
|
||||||
Finding package- and test-related files
|
Accessing package- and test-related files
|
||||||
"""""""""""""""""""""""""""""""""""""""""
|
"""""""""""""""""""""""""""""""""""""""""
|
||||||
|
|
||||||
You may need to access files from one or more locations when writing
|
You may need to access files from one or more locations when writing
|
||||||
@@ -5921,7 +5758,8 @@ include test source files or includes them but has no way to build the
|
|||||||
executables using the installed headers and libraries. In these cases
|
executables using the installed headers and libraries. In these cases
|
||||||
you may need to reference the files relative to one or more root directory.
|
you may need to reference the files relative to one or more root directory.
|
||||||
The table below lists relevant path properties and provides additional
|
The table below lists relevant path properties and provides additional
|
||||||
examples of their use. See :ref:`expected_test_output_from_file` for
|
examples of their use.
|
||||||
|
:ref:`Reading expected output <expected_test_output_from_file>` provides
|
||||||
examples of accessing files saved from the software repository, package
|
examples of accessing files saved from the software repository, package
|
||||||
repository, and installation.
|
repository, and installation.
|
||||||
|
|
||||||
@@ -5950,6 +5788,7 @@ repository, and installation.
|
|||||||
- ``self.test_suite.current_test_data_dir``
|
- ``self.test_suite.current_test_data_dir``
|
||||||
- ``join_path(self.test_suite.current_test_data_dir, "hello.f90")``
|
- ``join_path(self.test_suite.current_test_data_dir, "hello.f90")``
|
||||||
|
|
||||||
|
|
||||||
.. _inheriting-tests:
|
.. _inheriting-tests:
|
||||||
|
|
||||||
""""""""""""""""""""""""""""
|
""""""""""""""""""""""""""""
|
||||||
@@ -5992,7 +5831,7 @@ maintainers provide additional stand-alone tests customized to the package.
|
|||||||
.. warning::
|
.. warning::
|
||||||
|
|
||||||
Any package that implements a test method with the same name as an
|
Any package that implements a test method with the same name as an
|
||||||
inherited method will override the inherited method. If that is not the
|
inherited method overrides the inherited method. If that is not the
|
||||||
goal and you are not explicitly calling and adding functionality to
|
goal and you are not explicitly calling and adding functionality to
|
||||||
the inherited method for the test, then make sure that all test methods
|
the inherited method for the test, then make sure that all test methods
|
||||||
and embedded test parts have unique test names.
|
and embedded test parts have unique test names.
|
||||||
@@ -6157,8 +5996,6 @@ running:
|
|||||||
This is already part of the boilerplate for packages created with
|
This is already part of the boilerplate for packages created with
|
||||||
``spack create``.
|
``spack create``.
|
||||||
|
|
||||||
.. _file-filtering:
|
|
||||||
|
|
||||||
^^^^^^^^^^^^^^^^^^^
|
^^^^^^^^^^^^^^^^^^^
|
||||||
Filtering functions
|
Filtering functions
|
||||||
^^^^^^^^^^^^^^^^^^^
|
^^^^^^^^^^^^^^^^^^^
|
||||||
|
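For context on how the two helpers described above fit together, here is a minimal stand-alone test sketch. It is illustrative only: the Example package, its example binary, and the dump.out data file are hypothetical, and it assumes the check_outputs(expected, actual) argument ordering shown earlier in the guide.

    from spack.package import *


    class Example(Package):
        def test_dump(self):
            """check expected strings appear in the table dump"""
            # escaped expected output shipped in the package's test/ subdirectory
            expected = get_escaped_text_output(
                join_path(self.test_suite.current_test_data_dir, "dump.out")
            )
            # run the installed executable and capture both streams
            example = Executable(self.prefix.bin.example)
            actual = example("--dump", output=str.split, error=str.split)
            # raises RuntimeError if any expected line is missing
            check_outputs(expected, actual)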
@@ -253,6 +253,17 @@ can easily happen if it is not updated frequently, this behavior ensures that
 spack has a way to know for certain about the status of any concrete spec on
 the remote mirror, but can slow down pipeline generation significantly.
 
+The ``--optimize`` argument is experimental and runs the generated pipeline
+document through a series of optimization passes designed to reduce the size
+of the generated file.
+
+The ``--dependencies`` is also experimental and disables what in Gitlab is
+referred to as DAG scheduling, internally using the ``dependencies`` keyword
+rather than ``needs`` to list dependency jobs. The drawback of using this option
+is that before any job can begin, all jobs in previous stages must first
+complete. The benefit is that Gitlab allows more dependencies to be listed
+when using ``dependencies`` instead of ``needs``.
+
 The optional ``--output-file`` argument should be an absolute path (including
 file name) to the generated pipeline, and if not given, the default is
 ``./.gitlab-ci.yml``.
@@ -476,3 +476,9 @@ implemented using Python's built-in `sys.path
 :py:mod:`spack.repo` module implements a custom `Python importer
 <https://docs.python.org/2/library/imp.html>`_.
 
+.. warning::
+
+   The mechanism for extending packages is not yet extensively tested,
+   and extending packages across repositories imposes inter-repo
+   dependencies, which may be hard to manage. Use this feature at your
+   own risk, but let us know if you have a use case for it.
@@ -1,13 +1,13 @@
-sphinx==7.4.6
+sphinx==7.2.6
 sphinxcontrib-programoutput==0.17
-sphinx_design==0.6.0
+sphinx_design==0.5.0
 sphinx-rtd-theme==2.0.0
 python-levenshtein==0.25.1
 docutils==0.20.1
-pygments==2.18.0
-urllib3==2.2.2
-pytest==8.2.2
+pygments==2.17.2
+urllib3==2.2.1
+pytest==8.2.0
 isort==5.13.2
-black==24.4.2
-flake8==7.1.0
-mypy==1.11.0
+black==24.4.0
+flake8==7.0.0
+mypy==1.9.0
2 lib/spack/external/__init__.py vendored
@@ -18,7 +18,7 @@
 
 * Homepage: https://pypi.python.org/pypi/archspec
 * Usage: Labeling, comparison and detection of microarchitectures
-* Version: 0.2.4 (commit 48b92512b9ce203ded0ebd1ac41b42593e931f7c)
+* Version: 0.2.3 (commit 7b8fe60b69e2861e7dac104bc1c183decfcd3daf)
 
 astunparse
 ----------------
2 lib/spack/external/archspec/__init__.py vendored
@@ -1,3 +1,3 @@
 """Init file to avoid namespace packages"""
 
-__version__ = "0.2.4"
+__version__ = "0.2.3"
9 lib/spack/external/archspec/cpu/__init__.py vendored
@@ -5,10 +5,9 @@
 """The "cpu" package permits to query and compare different
 CPU microarchitectures.
 """
-from .detect import brand_string, host
+from .detect import host
 from .microarchitecture import (
     TARGETS,
-    InvalidCompilerVersion,
     Microarchitecture,
     UnsupportedMicroarchitecture,
     generic_microarchitecture,
@@ -16,12 +15,10 @@
 )
 
 __all__ = [
-    "brand_string",
-    "host",
-    "TARGETS",
-    "InvalidCompilerVersion",
     "Microarchitecture",
     "UnsupportedMicroarchitecture",
+    "TARGETS",
     "generic_microarchitecture",
+    "host",
     "version_components",
 ]
42 lib/spack/external/archspec/cpu/detect.py vendored
@@ -155,31 +155,6 @@ def _is_bit_set(self, register: int, bit: int) -> bool:
         mask = 1 << bit
         return register & mask > 0
 
-    def brand_string(self) -> Optional[str]:
-        """Returns the brand string, if available."""
-        if self.highest_extension_support < 0x80000004:
-            return None
-
-        r1 = self.cpuid.registers_for(eax=0x80000002, ecx=0)
-        r2 = self.cpuid.registers_for(eax=0x80000003, ecx=0)
-        r3 = self.cpuid.registers_for(eax=0x80000004, ecx=0)
-        result = struct.pack(
-            "IIIIIIIIIIII",
-            r1.eax,
-            r1.ebx,
-            r1.ecx,
-            r1.edx,
-            r2.eax,
-            r2.ebx,
-            r2.ecx,
-            r2.edx,
-            r3.eax,
-            r3.ebx,
-            r3.ecx,
-            r3.edx,
-        ).decode("utf-8")
-        return result.strip("\x00")
-
 
 @detection(operating_system="Windows")
 def cpuid_info():
@@ -199,8 +174,8 @@ def _check_output(args, env):
 
 
 WINDOWS_MAPPING = {
-    "AMD64": X86_64,
-    "ARM64": AARCH64,
+    "AMD64": "x86_64",
+    "ARM64": "aarch64",
 }
 
 
@@ -434,16 +409,3 @@ def compatibility_check_for_riscv64(info, target):
     return (target == arch_root or arch_root in target.ancestors) and (
         target.name == info.name or target.vendor == "generic"
     )
-
-
-def brand_string() -> Optional[str]:
-    """Returns the brand string of the host, if detected, or None."""
-    if platform.system() == "Darwin":
-        return _check_output(
-            ["sysctl", "-n", "machdep.cpu.brand_string"], env=_ensure_bin_usrbin_in_path()
-        ).strip()
-
-    if host().family == X86_64:
-        return CpuidInfoCollector().brand_string()
-
-    return None
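For context, the brand_string API that the right-hand (older) side lacks is exported at the top level of the newer vendored archspec, as the __init__.py hunks above show. A small usage sketch; the printed value is illustrative only:

    import archspec.cpu

    # None when the platform offers no brand string (e.g. non-x86_64, non-Darwin)
    brand = archspec.cpu.brand_string()  # e.g. "Intel(R) Xeon(R) Gold 6338"
    print(brand or archspec.cpu.host().name)  # fall back to the microarchitecture name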
@@ -208,8 +208,6 @@ def optimization_flags(self, compiler, version):
         """Returns a string containing the optimization flags that needs
         to be used to produce code optimized for this micro-architecture.
 
-        The version is expected to be a string of dot separated digits.
-
         If there is no information on the compiler passed as argument the
         function returns an empty string. If it is known that the compiler
         version we want to use does not support this architecture the function
@@ -218,11 +216,6 @@ def optimization_flags(self, compiler, version):
         Args:
             compiler (str): name of the compiler to be used
             version (str): version of the compiler to be used
-
-        Raises:
-            UnsupportedMicroarchitecture: if the requested compiler does not support
-                this micro-architecture.
-            ValueError: if the version doesn't match the expected format
         """
         # If we don't have information on compiler at all return an empty string
         if compiler not in self.family.compilers:
@@ -239,14 +232,6 @@ def optimization_flags(self, compiler, version):
             msg = msg.format(compiler, best_target, best_target.family)
             raise UnsupportedMicroarchitecture(msg)
 
-        # Check that the version matches the expected format
-        if not re.match(r"^(?:\d+\.)*\d+$", version):
-            msg = (
-                "invalid format for the compiler version argument. "
-                "Only dot separated digits are allowed."
-            )
-            raise InvalidCompilerVersion(msg)
-
         # If we have information on this compiler we need to check the
         # version being used
         compiler_info = self.compilers[compiler]
@@ -307,7 +292,7 @@ def generic_microarchitecture(name):
     Args:
         name (str): name of the micro-architecture
     """
-    return Microarchitecture(name, parents=[], vendor="generic", features=set(), compilers={})
+    return Microarchitecture(name, parents=[], vendor="generic", features=[], compilers={})
 
 
 def version_components(version):
@@ -382,15 +367,7 @@ def fill_target_from_dict(name, data, targets):
 TARGETS = LazyDictionary(_known_microarchitectures)
 
 
-class ArchspecError(Exception):
-    """Base class for errors within archspec"""
-
-
-class UnsupportedMicroarchitecture(ArchspecError, ValueError):
+class UnsupportedMicroarchitecture(ValueError):
     """Raised if a compiler version does not support optimization for a given
     micro-architecture.
     """
-
-
-class InvalidCompilerVersion(ArchspecError, ValueError):
-    """Raised when an invalid format is used for compiler versions in archspec."""
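The stricter version handling on the left-hand side is easiest to see from the caller's perspective. A sketch using names from the diff above; the exact flag string is illustrative:

    import archspec.cpu

    target = archspec.cpu.TARGETS["haswell"]
    print(target.optimization_flags("gcc", "12.3.0"))  # e.g. "-march=haswell -mtune=haswell"

    try:
        # not dot-separated digits, so the newer code refuses to guess
        target.optimization_flags("gcc", "12.3.0-distro1")
    except archspec.cpu.InvalidCompilerVersion:
        print("rejected: only dot-separated digits are allowed")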
@@ -2937,6 +2937,8 @@
       "ilrcpc",
       "flagm",
       "ssbs",
+      "paca",
+      "pacg",
       "dcpodp",
       "svei8mm",
       "svebf16",
@@ -3064,6 +3066,8 @@
       "flagm",
       "ssbs",
       "sb",
+      "paca",
+      "pacg",
       "dcpodp",
       "sve2",
       "sveaes",
@@ -3077,7 +3081,8 @@
       "svebf16",
       "i8mm",
       "bf16",
-      "dgh"
+      "dgh",
+      "bti"
     ],
     "compilers" : {
       "gcc": [
@@ -98,10 +98,3 @@ def path_filter_caller(*args, **kwargs):
     if _func:
         return holder_func(_func)
     return holder_func
-
-
-def sanitize_win_longpath(path: str) -> str:
-    """Strip Windows extended path prefix from strings
-    Returns sanitized string.
-    no-op if extended path prefix is not present"""
-    return path.lstrip("\\\\?\\")
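The removed helper is small enough to verify in isolation. Note it relies on str.lstrip treating its argument as a character set, which works here because the extended-length prefix consists only of backslashes and a question mark:

    def sanitize_win_longpath(path: str) -> str:
        # strips the Windows \\?\ extended path prefix; no-op otherwise
        return path.lstrip("\\\\?\\")

    assert sanitize_win_longpath("\\\\?\\C:\\spack\\store") == "C:\\spack\\store"
    assert sanitize_win_longpath("/usr/lib") == "/usr/lib"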
@@ -187,18 +187,12 @@ def polite_filename(filename: str) -> str:
     return _polite_antipattern().sub("_", filename)
 
 
-def getuid() -> Union[str, int]:
-    """Returns os getuid on non Windows
-    On Windows returns 0 for admin users, login string otherwise
-    This is in line with behavior from get_owner_uid which
-    always returns the login string on Windows
-    """
+def getuid():
     if sys.platform == "win32":
         import ctypes
 
-        # If not admin, use the string name of the login as a unique ID
         if ctypes.windll.shell32.IsUserAnAdmin() == 0:
-            return os.getlogin()
+            return 1
         return 0
     else:
         return os.getuid()
@@ -219,15 +213,6 @@ def _win_rename(src, dst):
     os.replace(src, dst)
 
 
-@system_path_filter
-def msdos_escape_parens(path):
-    """MS-DOS interprets parens as grouping parameters even in a quoted string"""
-    if sys.platform == "win32":
-        return path.replace("(", "^(").replace(")", "^)")
-    else:
-        return path
-
-
 @system_path_filter
 def rename(src, dst):
     # On Windows, os.rename will fail if the destination file already exists
@@ -568,13 +553,7 @@ def exploding_archive_handler(tarball_container, stage):
 
 
 @system_path_filter(arg_slice=slice(1))
-def get_owner_uid(path, err_msg=None) -> Union[str, int]:
-    """Returns owner UID of path destination
-    On non Windows this is the value of st_uid
-    On Windows this is the login string associated with the
-    owning user.
-
-    """
+def get_owner_uid(path, err_msg=None):
     if not os.path.exists(path):
         mkdirp(path, mode=stat.S_IRWXU)
 
@@ -766,6 +745,7 @@ def copy_tree(
     src: str,
     dest: str,
     symlinks: bool = True,
+    allow_broken_symlinks: bool = sys.platform != "win32",
     ignore: Optional[Callable[[str], bool]] = None,
     _permissions: bool = False,
 ):
@@ -788,6 +768,8 @@ def copy_tree(
         src (str): the directory to copy
         dest (str): the destination directory
         symlinks (bool): whether or not to preserve symlinks
+        allow_broken_symlinks (bool): whether or not to allow broken (dangling) symlinks,
+            On Windows, setting this to True will raise an exception. Defaults to true on unix.
         ignore (typing.Callable): function indicating which files to ignore
         _permissions (bool): for internal use only
 
@@ -795,6 +777,8 @@ def copy_tree(
         IOError: if *src* does not match any files or directories
         ValueError: if *src* is a parent directory of *dest*
     """
+    if allow_broken_symlinks and sys.platform == "win32":
+        raise llnl.util.symlink.SymlinkError("Cannot allow broken symlinks on Windows!")
     if _permissions:
         tty.debug("Installing {0} to {1}".format(src, dest))
     else:
@@ -838,7 +822,7 @@ def copy_tree(
             if islink(s):
                 link_target = resolve_link_target_relative_to_the_link(s)
                 if symlinks:
-                    target = readlink(s)
+                    target = os.readlink(s)
                     if os.path.isabs(target):
 
                         def escaped_path(path):
@@ -867,14 +851,16 @@ def escaped_path(path):
                 copy_mode(s, d)
 
     for target, d, s in links:
-        symlink(target, d)
+        symlink(target, d, allow_broken_symlinks=allow_broken_symlinks)
         if _permissions:
             set_install_permissions(d)
             copy_mode(s, d)
 
 
 @system_path_filter
-def install_tree(src, dest, symlinks=True, ignore=None):
+def install_tree(
+    src, dest, symlinks=True, ignore=None, allow_broken_symlinks=sys.platform != "win32"
+):
     """Recursively install an entire directory tree rooted at *src*.
 
     Same as :py:func:`copy_tree` with the addition of setting proper
@@ -885,12 +871,21 @@ def install_tree(src, dest, symlinks=True, ignore=None):
         dest (str): the destination directory
         symlinks (bool): whether or not to preserve symlinks
         ignore (typing.Callable): function indicating which files to ignore
+        allow_broken_symlinks (bool): whether or not to allow broken (dangling) symlinks,
+            On Windows, setting this to True will raise an exception.
 
     Raises:
         IOError: if *src* does not match any files or directories
        ValueError: if *src* is a parent directory of *dest*
     """
-    copy_tree(src, dest, symlinks=symlinks, ignore=ignore, _permissions=True)
+    copy_tree(
+        src,
+        dest,
+        symlinks=symlinks,
+        allow_broken_symlinks=allow_broken_symlinks,
+        ignore=ignore,
+        _permissions=True,
+    )
 
 
 @system_path_filter
@@ -2434,10 +2429,9 @@ def add_library_dependent(self, *dest):
         """
         for pth in dest:
             if os.path.isfile(pth):
-                new_pth = pathlib.Path(pth).parent
+                self._additional_library_dependents.add(pathlib.Path(pth).parent)
             else:
-                new_pth = pathlib.Path(pth)
-            self._additional_library_dependents.add(new_pth)
+                self._additional_library_dependents.add(pathlib.Path(pth))
 
     @property
     def rpaths(self):
@@ -2515,14 +2509,8 @@ def establish_link(self):
 
         # for each binary install dir in self.pkg (i.e. pkg.prefix.bin, pkg.prefix.lib)
        # install a symlink to each dependent library
-        # do not rpath for system libraries included in the dag
-        # we should not be modifying libraries managed by the Windows system
-        # as this will negatively impact linker behavior and can result in permission
-        # errors if those system libs are not modifiable by Spack
-        if "windows-system" not in getattr(self.pkg, "tags", []):
-            for library, lib_dir in itertools.product(self.rpaths, self.library_dependents):
-                self._link(library, lib_dir)
+        for library, lib_dir in itertools.product(self.rpaths, self.library_dependents):
+            self._link(library, lib_dir)
 
 
 @system_path_filter
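A usage sketch of the right-hand install_tree signature (both paths are hypothetical): broken symlinks in the source tree are tolerated on POSIX but must stay disallowed on Windows, where passing True raises SymlinkError:

    import sys
    from llnl.util.filesystem import install_tree

    install_tree(
        "stage/example/share",        # hypothetical source tree
        "/opt/spack/example/share",   # hypothetical destination
        symlinks=True,
        allow_broken_symlinks=sys.platform != "win32",
    )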
@@ -8,75 +8,100 @@
 import subprocess
 import sys
 import tempfile
-from typing import Union
 
 from llnl.util import lang, tty
 
-from ..path import sanitize_win_longpath, system_path_filter
+from ..path import system_path_filter
 
 if sys.platform == "win32":
     from win32file import CreateHardLink
 
+is_windows = sys.platform == "win32"
 
-def _windows_symlink(
-    src: str, dst: str, target_is_directory: bool = False, *, dir_fd: Union[int, None] = None
-):
-    """On Windows with System Administrator privileges this will be a normal symbolic link via
-    os.symlink. On Windows without privledges the link will be a junction for a directory and a
-    hardlink for a file. On Windows the various link types are:
-
-    Symbolic Link: A link to a file or directory on the same or different volume (drive letter) or
-    even to a remote file or directory (using UNC in its path). Need System Administrator
-    privileges to make these.
-
-    Hard Link: A link to a file on the same volume (drive letter) only. Every file (file's data)
-    has at least 1 hard link (file's name). But when this method creates a new hard link there will
-    be 2. Deleting all hard links effectively deletes the file. Don't need System Administrator
-    privileges.
-
-    Junction: A link to a directory on the same or different volume (drive letter) but not to a
-    remote directory. Don't need System Administrator privileges."""
-    source_path = os.path.normpath(src)
+
+def symlink(source_path: str, link_path: str, allow_broken_symlinks: bool = not is_windows):
+    """
+    Create a link.
+
+    On non-Windows and Windows with System Administrator
+    privleges this will be a normal symbolic link via
+    os.symlink.
+
+    On Windows without privledges the link will be a
+    junction for a directory and a hardlink for a file.
+    On Windows the various link types are:
+
+    Symbolic Link: A link to a file or directory on the
+    same or different volume (drive letter) or even to
+    a remote file or directory (using UNC in its path).
+    Need System Administrator privileges to make these.
+
+    Hard Link: A link to a file on the same volume (drive
+    letter) only. Every file (file's data) has at least 1
+    hard link (file's name). But when this method creates
+    a new hard link there will be 2. Deleting all hard
+    links effectively deletes the file. Don't need System
+    Administrator privileges.
+
+    Junction: A link to a directory on the same or different
+    volume (drive letter) but not to a remote directory. Don't
+    need System Administrator privileges.
+
+    Parameters:
+        source_path (str): The real file or directory that the link points to.
+            Must be absolute OR relative to the link.
+        link_path (str): The path where the link will exist.
+        allow_broken_symlinks (bool): On Linux or Mac, don't raise an exception if the source_path
+            doesn't exist. This will still raise an exception on Windows.
+    """
+    source_path = os.path.normpath(source_path)
     win_source_path = source_path
-    link_path = os.path.normpath(dst)
+    link_path = os.path.normpath(link_path)
 
-    # Perform basic checks to make sure symlinking will succeed
-    if os.path.lexists(link_path):
-        raise AlreadyExistsError(f"Link path ({link_path}) already exists. Cannot create link.")
-
-    if not os.path.exists(source_path):
-        if os.path.isabs(source_path):
-            # An absolute source path that does not exist will result in a broken link.
-            raise SymlinkError(
-                f"Source path ({source_path}) is absolute but does not exist. Resulting "
-                f"link would be broken so not making link."
-            )
-        else:
-            # os.symlink can create a link when the given source path is relative to
-            # the link path. Emulate this behavior and check to see if the source exists
-            # relative to the link path ahead of link creation to prevent broken
-            # links from being made.
-            link_parent_dir = os.path.dirname(link_path)
-            relative_path = os.path.join(link_parent_dir, source_path)
-            if os.path.exists(relative_path):
-                # In order to work on windows, the source path needs to be modified to be
-                # relative because hardlink/junction dont resolve relative paths the same
-                # way as os.symlink. This is ignored on other operating systems.
-                win_source_path = relative_path
-            else:
-                raise SymlinkError(
-                    f"The source path ({source_path}) is not relative to the link path "
-                    f"({link_path}). Resulting link would be broken so not making link."
-                )
+    # Never allow broken links on Windows.
+    if sys.platform == "win32" and allow_broken_symlinks:
+        raise ValueError("allow_broken_symlinks parameter cannot be True on Windows.")
+
+    if not allow_broken_symlinks:
+        # Perform basic checks to make sure symlinking will succeed
+        if os.path.lexists(link_path):
+            raise AlreadyExistsError(
+                f"Link path ({link_path}) already exists. Cannot create link."
+            )
+
+        if not os.path.exists(source_path):
+            if os.path.isabs(source_path) and not allow_broken_symlinks:
+                # An absolute source path that does not exist will result in a broken link.
+                raise SymlinkError(
+                    f"Source path ({source_path}) is absolute but does not exist. Resulting "
+                    f"link would be broken so not making link."
+                )
+            else:
+                # os.symlink can create a link when the given source path is relative to
+                # the link path. Emulate this behavior and check to see if the source exists
+                # relative to the link path ahead of link creation to prevent broken
+                # links from being made.
+                link_parent_dir = os.path.dirname(link_path)
+                relative_path = os.path.join(link_parent_dir, source_path)
+                if os.path.exists(relative_path):
+                    # In order to work on windows, the source path needs to be modified to be
+                    # relative because hardlink/junction dont resolve relative paths the same
+                    # way as os.symlink. This is ignored on other operating systems.
+                    win_source_path = relative_path
+                elif not allow_broken_symlinks:
+                    raise SymlinkError(
+                        f"The source path ({source_path}) is not relative to the link path "
+                        f"({link_path}). Resulting link would be broken so not making link."
+                    )
 
     # Create the symlink
-    if not _windows_can_symlink():
+    if sys.platform == "win32" and not _windows_can_symlink():
         _windows_create_link(win_source_path, link_path)
     else:
         os.symlink(source_path, link_path, target_is_directory=os.path.isdir(source_path))
 
 
-def _windows_islink(path: str) -> bool:
+def islink(path: str) -> bool:
     """Override os.islink to give correct answer for spack logic.
 
     For Non-Windows: a link can be determined with the os.path.islink method.
@@ -222,9 +247,9 @@ def _windows_create_junction(source: str, link: str):
     out, err = proc.communicate()
     tty.debug(out.decode())
     if proc.returncode != 0:
-        err_str = err.decode()
-        tty.error(err_str)
-        raise SymlinkError("Make junction command returned a non-zero return code.", err_str)
+        err = err.decode()
+        tty.error(err)
+        raise SymlinkError("Make junction command returned a non-zero return code.", err)
 
 
 def _windows_create_hard_link(path: str, link: str):
@@ -244,14 +269,14 @@ def _windows_create_hard_link(path: str, link: str):
         CreateHardLink(link, path)
 
 
-def _windows_readlink(path: str, *, dir_fd=None):
+def readlink(path: str):
     """Spack utility to override of os.readlink method to work cross platform"""
     if _windows_is_hardlink(path):
         return _windows_read_hard_link(path)
     elif _windows_is_junction(path):
         return _windows_read_junction(path)
     else:
-        return sanitize_win_longpath(os.readlink(path, dir_fd=dir_fd))
+        return os.readlink(path)
 
 
 def _windows_read_hard_link(link: str) -> str:
@@ -313,16 +338,6 @@ def resolve_link_target_relative_to_the_link(link):
     return os.path.join(link_dir, target)
 
 
-if sys.platform == "win32":
-    symlink = _windows_symlink
-    readlink = _windows_readlink
-    islink = _windows_islink
-else:
-    symlink = os.symlink
-    readlink = os.readlink
-    islink = os.path.islink
-
-
 class SymlinkError(RuntimeError):
     """Exception class for errors raised while creating symlinks,
     junctions and hard links
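Behavior both versions share, shown from the caller's side on a POSIX host (the temporary paths are illustrative and assumed fresh): a source given relative to the link is resolved against the link's directory, so no broken link is created:

    import os
    from llnl.util.symlink import islink, readlink, symlink

    os.makedirs("/tmp/demo/pkg/bin", exist_ok=True)
    open("/tmp/demo/pkg/bin/tool", "w").close()

    symlink("pkg/bin/tool", "/tmp/demo/tool")  # resolved relative to /tmp/demo
    assert islink("/tmp/demo/tool")
    assert readlink("/tmp/demo/tool") == "pkg/bin/tool"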
@@ -59,7 +59,6 @@
 
 To output an @, use '@@'. To output a } inside braces, use '}}'.
 """
-import os
 import re
 import sys
 from contextlib import contextmanager
@@ -102,29 +101,9 @@ def __init__(self, message):
 # Mapping from color arguments to values for tty.set_color
 color_when_values = {"always": True, "auto": None, "never": False}
 
-
-def _color_when_value(when):
-    """Raise a ValueError for an invalid color setting.
-
-    Valid values are 'always', 'never', and 'auto', or equivalently,
-    True, False, and None.
-    """
-    if when in color_when_values:
-        return color_when_values[when]
-    elif when not in color_when_values.values():
-        raise ValueError("Invalid color setting: %s" % when)
-    return when
-
-
-def _color_from_environ() -> Optional[bool]:
-    try:
-        return _color_when_value(os.environ.get("SPACK_COLOR", "auto"))
-    except ValueError:
-        return None
-
-
-#: When `None` colorize when stdout is tty, when `True` or `False` always or never colorize resp.
-_force_color = _color_from_environ()
+# Force color; None: Only color if stdout is a tty
+# True: Always colorize output, False: Never colorize output
+_force_color = None
 
 
 def try_enable_terminal_color_on_windows():
@@ -185,6 +164,19 @@ def _err_check(result, func, args):
             debug("Unable to support color on Windows terminal")
 
 
+def _color_when_value(when):
+    """Raise a ValueError for an invalid color setting.
+
+    Valid values are 'always', 'never', and 'auto', or equivalently,
+    True, False, and None.
+    """
+    if when in color_when_values:
+        return color_when_values[when]
+    elif when not in color_when_values.values():
+        raise ValueError("Invalid color setting: %s" % when)
+    return when
+
+
 def get_color_when():
     """Return whether commands should print color or not."""
     if _force_color is not None:
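A condensed sketch of the left-hand initialization (the real code routes through _color_when_value, which raises on invalid input; this collapses that to a dictionary lookup):

    import os

    color_when_values = {"always": True, "auto": None, "never": False}

    def _color_from_environ():
        # unset or invalid values fall back to "auto": colorize only for a tty
        return color_when_values.get(os.environ.get("SPACK_COLOR", "auto"))

    os.environ["SPACK_COLOR"] = "never"
    assert _color_from_environ() is False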
@@ -33,23 +33,8 @@
     pass
 
 
-esc, bell, lbracket, bslash, newline = r"\x1b", r"\x07", r"\[", r"\\", r"\n"
-# Ansi Control Sequence Introducers (CSI) are a well-defined format
-# Standard ECMA-48: Control Functions for Character-Imaging I/O Devices, section 5.4
-# https://www.ecma-international.org/wp-content/uploads/ECMA-48_5th_edition_june_1991.pdf
-csi_pre = f"{esc}{lbracket}"
-csi_param, csi_inter, csi_post = r"[0-?]", r"[ -/]", r"[@-~]"
-ansi_csi = f"{csi_pre}{csi_param}*{csi_inter}*{csi_post}"
-# General ansi escape sequences have well-defined prefixes,
-# but content and suffixes are less reliable.
-# Conservatively assume they end with either "<ESC>\" or "<BELL>",
-# with no intervening "<ESC>"/"<BELL>" keys or newlines
-esc_pre = f"{esc}[@-_]"
-esc_content = f"[^{esc}{bell}{newline}]"
-esc_post = f"(?:{esc}{bslash}|{bell})"
-ansi_esc = f"{esc_pre}{esc_content}*{esc_post}"
 # Use this to strip escape sequences
-_escape = re.compile(f"{ansi_csi}|{ansi_esc}")
+_escape = re.compile(r"\x1b[^m]*m|\x1b\[?1034h|\x1b\][0-9]+;[^\x07]*\x07")
 
 # control characters for enabling/disabling echo
 #
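The ECMA-48 pattern on the left can be exercised directly; this reproduces the removed definitions and checks that both a CSI color sequence and an OSC title sequence are stripped:

    import re

    esc, bell, lbracket, bslash, newline = r"\x1b", r"\x07", r"\[", r"\\", r"\n"
    csi_pre = f"{esc}{lbracket}"
    csi_param, csi_inter, csi_post = r"[0-?]", r"[ -/]", r"[@-~]"
    ansi_csi = f"{csi_pre}{csi_param}*{csi_inter}*{csi_post}"
    esc_pre = f"{esc}[@-_]"
    esc_content = f"[^{esc}{bell}{newline}]"
    esc_post = f"(?:{esc}{bslash}|{bell})"
    ansi_esc = f"{esc_pre}{esc_content}*{esc_post}"
    _escape = re.compile(f"{ansi_csi}|{ansi_esc}")

    assert _escape.sub("", "\x1b[1;31merror\x1b[0m") == "error"
    assert _escape.sub("", "\x1b]0;title\x07text") == "text"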
@@ -4,7 +4,7 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 
 #: PEP440 canonical <major>.<minor>.<micro>.<devN> string
-__version__ = "0.23.0.dev0"
+__version__ = "0.22.0.dev0"
 spack_version = __version__
@@ -254,8 +254,8 @@ def _search_duplicate_specs_in_externals(error_cls):
 
 @config_packages
 def _deprecated_preferences(error_cls):
-    """Search package preferences deprecated in v0.21 (and slated for removal in v0.23)"""
-    # TODO (v0.23): remove this audit as the attributes will not be allowed in config
+    """Search package preferences deprecated in v0.21 (and slated for removal in v0.22)"""
+    # TODO (v0.22): remove this audit as the attributes will not be allowed in config
     errors = []
     packages_yaml = spack.config.CONFIG.get_config("packages")
 
@@ -421,10 +421,6 @@ def _check_patch_urls(pkgs, error_cls):
         r"^https?://(?:patch-diff\.)?github(?:usercontent)?\.com/"
         r".+/.+/(?:commit|pull)/[a-fA-F0-9]+\.(?:patch|diff)"
     )
-    github_pull_commits_re = (
-        r"^https?://(?:patch-diff\.)?github(?:usercontent)?\.com/"
-        r".+/.+/pull/\d+/commits/[a-fA-F0-9]+\.(?:patch|diff)"
-    )
     # Only .diff URLs have stable/full hashes:
     # https://forum.gitlab.com/t/patches-with-full-index/29313
     gitlab_patch_url_re = (
@@ -440,24 +436,14 @@ def _check_patch_urls(pkgs, error_cls):
             if not isinstance(patch, spack.patch.UrlPatch):
                 continue
 
-            if re.match(github_pull_commits_re, patch.url):
-                url = re.sub(r"/pull/\d+/commits/", r"/commit/", patch.url)
-                url = re.sub(r"^(.*)(?<!full_index=1)$", r"\1?full_index=1", url)
-                errors.append(
-                    error_cls(
-                        f"patch URL in package {pkg_cls.name} "
-                        + "must not be a pull request commit; "
-                        + f"instead use {url}",
-                        [patch.url],
-                    )
-                )
-            elif re.match(github_patch_url_re, patch.url):
+            if re.match(github_patch_url_re, patch.url):
                 full_index_arg = "?full_index=1"
                 if not patch.url.endswith(full_index_arg):
                     errors.append(
                         error_cls(
-                            f"patch URL in package {pkg_cls.name} "
-                            + f"must end with {full_index_arg}",
+                            "patch URL in package {0} must end with {1}".format(
+                                pkg_cls.name, full_index_arg
+                            ),
                             [patch.url],
                         )
                     )
@@ -465,7 +451,9 @@ def _check_patch_urls(pkgs, error_cls):
                 if not patch.url.endswith(".diff"):
                     errors.append(
                         error_cls(
-                            f"patch URL in package {pkg_cls.name} must end with .diff",
+                            "patch URL in package {0} must end with .diff".format(
+                                pkg_cls.name
+                            ),
                             [patch.url],
                         )
                    )
@@ -791,7 +779,7 @@ def check_virtual_with_variants(spec, msg):
             return
         error = error_cls(
             f"{pkg_name}: {msg}",
-            [f"remove variants from '{spec}' in depends_on directive in {filename}"],
+            f"remove variants from '{spec}' in depends_on directive in {filename}",
         )
         errors.append(error)
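The URL rewrite the new audit suggests can be checked standalone; both substitutions come straight from the removed lines above (the repository and hash are hypothetical):

    import re

    url = "https://github.com/org/repo/pull/1234/commits/abcdef0123456789.patch"
    url = re.sub(r"/pull/\d+/commits/", r"/commit/", url)
    url = re.sub(r"^(.*)(?<!full_index=1)$", r"\1?full_index=1", url)
    print(url)  # https://github.com/org/repo/commit/abcdef0123456789.patch?full_index=1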
@@ -29,7 +29,6 @@
 import llnl.util.lang
 import llnl.util.tty as tty
 from llnl.util.filesystem import BaseDirectoryVisitor, mkdirp, visit_directory_tree
-from llnl.util.symlink import readlink
 
 import spack.caches
 import spack.cmd
@@ -659,7 +658,7 @@ def get_buildfile_manifest(spec):
     # 2. paths are used as strings.
     for rel_path in visitor.symlinks:
         abs_path = os.path.join(root, rel_path)
-        link = readlink(abs_path)
+        link = os.readlink(abs_path)
         if os.path.isabs(link) and link.startswith(spack.store.STORE.layout.root):
             data["link_to_relocate"].append(rel_path)
 
@@ -2002,7 +2001,6 @@ def install_root_node(spec, unsigned=False, force=False, sha256=None):
     with spack.util.path.filter_padding():
         tty.msg('Installing "{0}" from a buildcache'.format(spec.format()))
         extract_tarball(spec, download_result, force)
-        spec.package.windows_establish_runtime_linkage()
         spack.hooks.post_install(spec, False)
         spack.store.STORE.db.add(spec, spack.store.STORE.layout)
@@ -5,13 +5,7 @@
 """Function and classes needed to bootstrap Spack itself."""
 
 from .config import ensure_bootstrap_configuration, is_bootstrapping, store_path
-from .core import (
-    all_core_root_specs,
-    ensure_clingo_importable_or_raise,
-    ensure_core_dependencies,
-    ensure_gpg_in_path_or_raise,
-    ensure_patchelf_in_path_or_raise,
-)
+from .core import all_core_root_specs, ensure_core_dependencies, ensure_patchelf_in_path_or_raise
 from .environment import BootstrapEnvironment, ensure_environment_dependencies
 from .status import status_message
 
@@ -19,8 +13,6 @@
     "is_bootstrapping",
     "ensure_bootstrap_configuration",
     "ensure_core_dependencies",
-    "ensure_gpg_in_path_or_raise",
-    "ensure_clingo_importable_or_raise",
     "ensure_patchelf_in_path_or_raise",
     "all_core_root_specs",
     "ensure_environment_dependencies",
@@ -54,14 +54,10 @@ def _try_import_from_store(
     installed_specs = spack.store.STORE.db.query(query_spec, installed=True)
 
     for candidate_spec in installed_specs:
-        # previously bootstrapped specs may not have a python-venv dependency.
-        if candidate_spec.dependencies("python-venv"):
-            python, *_ = candidate_spec.dependencies("python-venv")
-        else:
-            python, *_ = candidate_spec.dependencies("python")
+        pkg = candidate_spec["python"].package
         module_paths = [
-            os.path.join(candidate_spec.prefix, python.package.purelib),
-            os.path.join(candidate_spec.prefix, python.package.platlib),
+            os.path.join(candidate_spec.prefix, pkg.purelib),
+            os.path.join(candidate_spec.prefix, pkg.platlib),
         ]
         path_before = list(sys.path)
 
@@ -213,18 +209,15 @@ def _root_spec(spec_str: str) -> str:
     Args:
         spec_str: spec to be bootstrapped. Must be without compiler and target.
     """
-    # Add a compiler and platform requirement to the root spec.
+    # Add a compiler requirement to the root spec.
     platform = str(spack.platforms.host())
 
     if platform == "darwin":
         spec_str += " %apple-clang"
-    elif platform == "windows":
-        spec_str += " %msvc"
     elif platform == "linux":
         spec_str += " %gcc"
     elif platform == "freebsd":
         spec_str += " %clang"
-    spec_str += f" platform={platform}"
     target = archspec.cpu.host().family
     spec_str += f" target={target}"
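A condensed sketch mirroring the left-hand _root_spec logic (the printed value is illustrative; on Windows the %msvc and platform= pinning is exactly what the right-hand side lacks):

    import archspec.cpu
    import spack.platforms

    def root_spec_sketch(spec_str: str) -> str:
        platform = str(spack.platforms.host())
        compiler = {"darwin": " %apple-clang", "windows": " %msvc",
                    "linux": " %gcc", "freebsd": " %clang"}.get(platform, "")
        spec_str += compiler
        spec_str += f" platform={platform}"
        spec_str += f" target={archspec.cpu.host().family}"
        return spec_str

    print(root_spec_sketch("gnupg@2.3:"))  # e.g. "gnupg@2.3: %gcc platform=linux target=x86_64"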
@@ -129,10 +129,10 @@ def _bootstrap_config_scopes() -> Sequence["spack.config.ConfigScope"]:
     configuration_paths = (spack.config.CONFIGURATION_DEFAULTS_PATH, ("bootstrap", _config_path()))
     for name, path in configuration_paths:
         platform = spack.platforms.host().name
-        platform_scope = spack.config.DirectoryConfigScope(
-            f"{name}/{platform}", os.path.join(path, platform)
+        platform_scope = spack.config.ConfigScope(
+            "/".join([name, platform]), os.path.join(path, platform)
         )
-        generic_scope = spack.config.DirectoryConfigScope(name, path)
+        generic_scope = spack.config.ConfigScope(name, path)
         config_scopes.extend([generic_scope, platform_scope])
         msg = "[BOOTSTRAP CONFIG SCOPE] name={0}, path={1}"
         tty.debug(msg.format(generic_scope.name, generic_scope.path))
@@ -270,6 +270,10 @@ def try_import(self, module: str, abstract_spec_str: str) -> bool:
         with spack_python_interpreter():
             # Add hint to use frontend operating system on Cray
             concrete_spec = spack.spec.Spec(abstract_spec_str + " ^" + spec_for_current_python())
+            # This is needed to help the old concretizer taking the `setuptools` dependency
+            # only when bootstrapping from sources on Python 3.12
+            if spec_for_current_python() == "python@3.12":
+                concrete_spec.constrain("+force_setuptools")
 
             if module == "clingo":
                 # TODO: remove when the old concretizer is deprecated # pylint: disable=fixme
@@ -3,11 +3,13 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 """Bootstrap non-core Spack dependencies from an environment."""
+import glob
 import hashlib
 import os
 import pathlib
 import sys
-from typing import Iterable, List
+import warnings
+from typing import List
 
 import archspec.cpu
 
@@ -26,16 +28,6 @@
 class BootstrapEnvironment(spack.environment.Environment):
     """Environment to install dependencies of Spack for a given interpreter and architecture"""
 
-    def __init__(self) -> None:
-        if not self.spack_yaml().exists():
-            self._write_spack_yaml_file()
-        super().__init__(self.environment_root())
-
-        # Remove python package roots created before python-venv was introduced
-        for s in self.concrete_roots():
-            if "python" in s.package.extendees and not s.dependencies("python-venv"):
-                self.deconcretize(s)
-
     @classmethod
     def spack_dev_requirements(cls) -> List[str]:
         """Spack development requirements"""
@@ -67,19 +59,31 @@ def view_root(cls) -> pathlib.Path:
         return cls.environment_root().joinpath("view")
 
     @classmethod
-    def bin_dir(cls) -> pathlib.Path:
-        """Paths to be added to PATH"""
-        return cls.view_root().joinpath("bin")
+    def pythonpaths(cls) -> List[str]:
+        """Paths to be added to sys.path or PYTHONPATH"""
+        python_dir_part = f"python{'.'.join(str(x) for x in sys.version_info[:2])}"
+        glob_expr = str(cls.view_root().joinpath("**", python_dir_part, "**"))
+        result = glob.glob(glob_expr)
+        if not result:
+            msg = f"Cannot find any Python path in {cls.view_root()}"
+            warnings.warn(msg)
+        return result
 
-    def python_dirs(self) -> Iterable[pathlib.Path]:
-        python = next(s for s in self.all_specs_generator() if s.name == "python-venv").package
-        return {self.view_root().joinpath(p) for p in (python.platlib, python.purelib)}
+    @classmethod
+    def bin_dirs(cls) -> List[pathlib.Path]:
+        """Paths to be added to PATH"""
+        return [cls.view_root().joinpath("bin")]
 
     @classmethod
     def spack_yaml(cls) -> pathlib.Path:
         """Environment spack.yaml file"""
         return cls.environment_root().joinpath("spack.yaml")
 
+    def __init__(self) -> None:
+        if not self.spack_yaml().exists():
+            self._write_spack_yaml_file()
+        super().__init__(self.environment_root())
+
     def update_installations(self) -> None:
         """Update the installations of this environment."""
         log_enabled = tty.is_debug() or tty.is_verbose()
@@ -96,13 +100,21 @@ def update_installations(self) -> None:
             self.install_all()
             self.write(regenerate=True)
 
-    def load(self) -> None:
-        """Update PATH and sys.path."""
-        # Make executables available (shouldn't need PYTHONPATH)
-        os.environ["PATH"] = f"{self.bin_dir()}{os.pathsep}{os.environ.get('PATH', '')}"
-
-        # Spack itself imports pytest
-        sys.path.extend(str(p) for p in self.python_dirs())
+    def update_syspath_and_environ(self) -> None:
+        """Update ``sys.path`` and the PATH, PYTHONPATH environment variables to point to
+        the environment view.
+        """
+        # Do minimal modifications to sys.path and environment variables. In particular, pay
+        # attention to have the smallest PYTHONPATH / sys.path possible, since that may impact
+        # the performance of the current interpreter
+        sys.path.extend(self.pythonpaths())
+        os.environ["PATH"] = os.pathsep.join(
+            [str(x) for x in self.bin_dirs()] + os.environ.get("PATH", "").split(os.pathsep)
+        )
+        os.environ["PYTHONPATH"] = os.pathsep.join(
+            os.environ.get("PYTHONPATH", "").split(os.pathsep)
+            + [str(x) for x in self.pythonpaths()]
+        )
 
     def _write_spack_yaml_file(self) -> None:
         tty.msg(
@@ -152,4 +164,4 @@ def ensure_environment_dependencies() -> None:
     _add_externals_if_missing()
     with BootstrapEnvironment() as env:
         env.update_installations()
-        env.load()
+        env.update_syspath_and_environ()
|||||||
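For context on the `pythonpaths`/`update_syspath_and_environ` side of the hunk above, here is a minimal runnable sketch of the glob lookup and environment update, using a hypothetical view root (the real one comes from `BootstrapEnvironment.view_root()`):

```python
import glob
import os
import sys

# Hypothetical view root; not taken from this diff.
view_root = os.path.expanduser("~/.spack/bootstrap/view")

# Without recursive=True each "**" matches a single path component, so this
# finds directories like <view>/lib/python3.11/site-packages.
python_dir_part = f"python{'.'.join(str(x) for x in sys.version_info[:2])}"
paths = glob.glob(os.path.join(view_root, "**", python_dir_part, "**"))

# Same extend-sys.path / extend-PYTHONPATH pattern as update_syspath_and_environ().
sys.path.extend(paths)
os.environ["PYTHONPATH"] = os.pathsep.join(
    os.environ.get("PYTHONPATH", "").split(os.pathsep) + paths
)
```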
@@ -43,7 +43,7 @@
 from collections import defaultdict
 from enum import Flag, auto
 from itertools import chain
-from typing import Dict, List, Set, Tuple
+from typing import List, Set, Tuple
 
 import llnl.util.tty as tty
 from llnl.string import plural
@@ -72,7 +72,6 @@
 import spack.store
 import spack.subprocess_context
 import spack.user_environment
-import spack.util.executable
 import spack.util.path
 import spack.util.pattern
 from spack import traverse
@@ -92,7 +91,7 @@
 )
 from spack.util.executable import Executable
 from spack.util.log_parse import make_log_context, parse_log_events
-from spack.util.module_cmd import load_module, path_from_modules
+from spack.util.module_cmd import load_module, module, path_from_modules
 
 #
 # This can be set by the user to globally disable parallel builds.
@@ -191,6 +190,14 @@ def __call__(self, *args, **kwargs):
         return super().__call__(*args, **kwargs)
 
 
+def _on_cray():
+    host_platform = spack.platforms.host()
+    host_os = host_platform.operating_system("default_os")
+    on_cray = str(host_platform) == "cray"
+    using_cnl = re.match(r"cnl\d+", str(host_os))
+    return on_cray, using_cnl
+
+
 def clean_environment():
     # Stuff in here sanitizes the build environment to eliminate
     # anything the user has set that may interfere. We apply it immediately
@@ -234,6 +241,17 @@ def clean_environment():
         if varname.endswith("_ROOT") and varname != "SPACK_ROOT":
             env.unset(varname)
 
+    # On Cray "cluster" systems, unset CRAY_LD_LIBRARY_PATH to avoid
+    # interference with Spack dependencies.
+    # CNL requires these variables to be set (or at least some of them,
+    # depending on the CNL version).
+    on_cray, using_cnl = _on_cray()
+    if on_cray and not using_cnl:
+        env.unset("CRAY_LD_LIBRARY_PATH")
+        for varname in os.environ.keys():
+            if "PKGCONF" in varname:
+                env.unset(varname)
+
     # Unset the following variables because they can affect installation of
     # Autotools and CMake packages.
     build_system_vars = [
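Both Cray hunks above hinge on a single platform probe. A standalone sketch of the same check, with the `spack.platforms` lookups replaced by hypothetical literals:

```python
import re

# Stand-ins for spack.platforms.host() and its operating_system("default_os");
# both values here are hypothetical.
host_platform = "cray"
host_os = "sles15"  # a Cray "cluster" OS, i.e. not Compute Node Linux

on_cray = str(host_platform) == "cray"
using_cnl = re.match(r"cnl\d+", str(host_os))

# Mirrors clean_environment(): only Cray cluster systems (not CNL) drop
# CRAY_LD_LIBRARY_PATH and any *PKGCONF* variables.
if on_cray and not using_cnl:
    print("would unset CRAY_LD_LIBRARY_PATH and PKGCONF-related variables")
```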
@@ -363,7 +381,11 @@ def set_compiler_environment_variables(pkg, env):
         _add_werror_handling(keep_werror, env)
 
     # Set the target parameters that the compiler will add
-    isa_arg = spec.architecture.target.optimization_flags(compiler)
+    # Don't set on cray platform because the targeting module handles this
+    if spec.satisfies("platform=cray"):
+        isa_arg = ""
+    else:
+        isa_arg = spec.architecture.target.optimization_flags(compiler)
     env.set("SPACK_TARGET_ARGS", isa_arg)
 
     # Trap spack-tracked compiler flags as appropriate.
@@ -459,7 +481,10 @@ def set_wrapper_variables(pkg, env):
 
     # Find ccache binary and hand it to build environment
     if spack.config.get("config:ccache"):
-        env.set(SPACK_CCACHE_BINARY, spack.util.executable.which_string("ccache", required=True))
+        ccache = Executable("ccache")
+        if not ccache:
+            raise RuntimeError("No ccache binary found in PATH")
+        env.set(SPACK_CCACHE_BINARY, ccache)
 
     # Gather information about various types of dependencies
     link_deps = set(pkg.spec.traverse(root=False, deptype=("link")))
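Both sides of the ccache hunk above amount to "find ccache or fail". A portable sketch using only the standard library; the develop side's `which_string("ccache", required=True)` raises when the binary is missing, and `shutil.which` is the closest stdlib equivalent:

```python
import shutil

ccache_path = shutil.which("ccache")
if ccache_path is None:
    raise RuntimeError("No ccache binary found in PATH")
print(f"SPACK_CCACHE_BINARY={ccache_path}")
```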
@@ -705,28 +730,12 @@ def _static_to_shared_library(arch, compiler, static_lib, shared_lib=None, **kwa
     return compiler(*compiler_args, output=compiler_output)
 
 
-def _get_rpath_deps_from_spec(
-    spec: spack.spec.Spec, transitive_rpaths: bool
-) -> List[spack.spec.Spec]:
-    if not transitive_rpaths:
-        return spec.dependencies(deptype=dt.LINK)
-
-    by_name: Dict[str, spack.spec.Spec] = {}
-
-    for dep in spec.traverse(root=False, deptype=dt.LINK):
-        lookup = by_name.get(dep.name)
-        if lookup is None:
-            by_name[dep.name] = dep
-        elif lookup.version < dep.version:
-            by_name[dep.name] = dep
-
-    return list(by_name.values())
-
-
-def get_rpath_deps(pkg: spack.package_base.PackageBase) -> List[spack.spec.Spec]:
-    """Return immediate or transitive dependencies (depending on the package) that need to be
-    rpath'ed. If a package occurs multiple times, the newest version is kept."""
-    return _get_rpath_deps_from_spec(pkg.spec, pkg.transitive_rpaths)
+def get_rpath_deps(pkg):
+    """Return immediate or transitive RPATHs depending on the package."""
+    if pkg.transitive_rpaths:
+        return [d for d in pkg.spec.traverse(root=False, deptype=("link"))]
+    else:
+        return pkg.spec.dependencies(deptype="link")
 
 
 def get_rpaths(pkg):
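The develop-side `_get_rpath_deps_from_spec` removed above keeps only the newest version when a package name occurs more than once among link dependencies. A self-contained sketch of that policy, with a stand-in spec type instead of `spack.spec.Spec`:

```python
from typing import Dict, List, NamedTuple


class FakeSpec(NamedTuple):
    # Minimal stand-in for spack.spec.Spec: just the fields the policy reads.
    name: str
    version: tuple


def newest_by_name(deps: List[FakeSpec]) -> List[FakeSpec]:
    by_name: Dict[str, FakeSpec] = {}
    for dep in deps:
        lookup = by_name.get(dep.name)
        if lookup is None or lookup.version < dep.version:
            by_name[dep.name] = dep
    return list(by_name.values())


print(newest_by_name([FakeSpec("zlib", (1, 2)), FakeSpec("zlib", (1, 3))]))
# [FakeSpec(name='zlib', version=(1, 3))]
```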
@@ -738,9 +747,7 @@ def get_rpaths(pkg):
     # Second module is our compiler mod name. We use that to get rpaths from
     # module show output.
     if pkg.compiler.modules and len(pkg.compiler.modules) > 1:
-        mod_rpath = path_from_modules([pkg.compiler.modules[1]])
-        if mod_rpath:
-            rpaths.append(mod_rpath)
+        rpaths.append(path_from_modules([pkg.compiler.modules[1]]))
     return list(dedupe(filter_system_paths(rpaths)))
 
 
@@ -810,6 +817,14 @@ def setup_package(pkg, dirty, context: Context = Context.BUILD):
         for mod in pkg.compiler.modules:
             load_module(mod)
 
+    # kludge to handle cray mpich and libsci being automatically loaded by
+    # PrgEnv modules on cray platform. Module unload does no damage when
+    # unnecessary
+    on_cray, _ = _on_cray()
+    if on_cray and not dirty:
+        for mod in ["cray-mpich", "cray-libsci"]:
+            module("unload", mod)
+
     if target and target.module_name:
         load_module(target.module_name)
 
@@ -1473,7 +1488,7 @@ def long_message(self):
             out.write(" {0}\n".format(self.log_name))
 
         # Also output the test log path IF it exists
-        if self.context != "test" and have_log:
+        if self.context != "test":
             test_log = join_path(os.path.dirname(self.log_name), spack_install_test_log)
             if os.path.isfile(test_log):
                 out.write("\nSee test log for details:\n")
@@ -162,9 +162,7 @@ def initconfig_compiler_entries(self):
             ld_flags = " ".join(flags["ldflags"])
             ld_format_string = "CMAKE_{0}_LINKER_FLAGS"
             # CMake has separate linker arguments for types of builds.
-            # 'ldflags' should not be used with CMAKE_STATIC_LINKER_FLAGS which
-            # is used by the archiver, so don't include "STATIC" in this loop:
-            for ld_type in ["EXE", "MODULE", "SHARED"]:
+            for ld_type in ["EXE", "MODULE", "SHARED", "STATIC"]:
                 ld_string = ld_format_string.format(ld_type)
                 entries.append(cmake_cache_string(ld_string, ld_flags))
 
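What the loop above emits, sketched with `cmake_cache_string` stubbed to its usual `set(... CACHE STRING ...)` shape; the flags are hypothetical. Per the develop-side comment, STATIC is excluded there because `CMAKE_STATIC_LINKER_FLAGS` feeds the archiver, where regular ldflags are invalid:

```python
def cmake_cache_string(name, value):
    # Stub of the helper in spack.build_systems.cached_cmake.
    return f'set({name} "{value}" CACHE STRING "")'


ld_flags = "-L/opt/lib -Wl,-rpath,/opt/lib"  # hypothetical ldflags
for ld_type in ["EXE", "MODULE", "SHARED", "STATIC"]:
    print(cmake_cache_string(f"CMAKE_{ld_type}_LINKER_FLAGS", ld_flags))
```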
@@ -39,11 +39,16 @@ def _maybe_set_python_hints(pkg: spack.package_base.PackageBase, args: List[str]
     """Set the PYTHON_EXECUTABLE, Python_EXECUTABLE, and Python3_EXECUTABLE CMake variables
     if the package has Python as build or link dep and ``find_python_hints`` is set to True. See
     ``find_python_hints`` for context."""
-    if not getattr(pkg, "find_python_hints", False) or not pkg.spec.dependencies(
-        "python", dt.BUILD | dt.LINK
-    ):
+    if not getattr(pkg, "find_python_hints", False):
         return
-    python_executable = pkg.spec["python"].command.path
+    pythons = pkg.spec.dependencies("python", dt.BUILD | dt.LINK)
+    if len(pythons) != 1:
+        return
+    try:
+        python_executable = pythons[0].package.command.path
+    except RuntimeError:
+        return
+
     args.extend(
         [
             CMakeBuilder.define("PYTHON_EXECUTABLE", python_executable),
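The hint list that `args.extend` builds above, sketched with a stub of `CMakeBuilder.define` and a hypothetical interpreter path; the docstring says all three spellings of the variable are set, though only the first appears inside this hunk:

```python
def define(name, value):
    # Simplified stand-in for CMakeBuilder.define, which renders -D arguments.
    return f"-D{name}={value}"


python_executable = "/usr/bin/python3"  # hypothetical
args = []
args.extend(
    [
        define("PYTHON_EXECUTABLE", python_executable),
        define("Python_EXECUTABLE", python_executable),
        define("Python3_EXECUTABLE", python_executable),
    ]
)
print(args)
```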
@@ -1,144 +0,0 @@
-# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
-# Spack Project Developers. See the top-level COPYRIGHT file for details.
-#
-# SPDX-License-Identifier: (Apache-2.0 OR MIT)
-import itertools
-import os
-import pathlib
-import re
-import sys
-from typing import Dict, List, Sequence, Tuple, Union
-
-import llnl.util.tty as tty
-from llnl.util.lang import classproperty
-
-import spack.compiler
-import spack.package_base
-
-# Local "type" for type hints
-Path = Union[str, pathlib.Path]
-
-
-class CompilerPackage(spack.package_base.PackageBase):
-    """A Package mixin for all common logic for packages that implement compilers"""
-
-    # TODO: how do these play nicely with other tags
-    tags: Sequence[str] = ["compiler"]
-
-    #: Optional suffix regexes for searching for this type of compiler.
-    #: Suffixes are used by some frameworks, e.g. macports uses an '-mp-X.Y'
-    #: version suffix for gcc.
-    compiler_suffixes: List[str] = [r"-.*"]
-
-    #: Optional prefix regexes for searching for this compiler
-    compiler_prefixes: List[str] = []
-
-    #: Compiler argument(s) that produces version information
-    #: If multiple arguments, the earlier arguments must produce errors when invalid
-    compiler_version_argument: Union[str, Tuple[str]] = "-dumpversion"
-
-    #: Regex used to extract version from compiler's output
-    compiler_version_regex: str = "(.*)"
-
-    #: Static definition of languages supported by this class
-    compiler_languages: Sequence[str] = ["c", "cxx", "fortran"]
-
-    def __init__(self, spec: "spack.spec.Spec"):
-        super().__init__(spec)
-        msg = f"Supported languages for {spec} are not a subset of possible supported languages"
-        msg += f" supports: {self.supported_languages}, valid values: {self.compiler_languages}"
-        assert set(self.supported_languages) <= set(self.compiler_languages), msg
-
-    @property
-    def supported_languages(self) -> Sequence[str]:
-        """Dynamic definition of languages supported by this package"""
-        return self.compiler_languages
-
-    @classproperty
-    def compiler_names(cls) -> Sequence[str]:
-        """Construct list of compiler names from per-language names"""
-        names = []
-        for language in cls.compiler_languages:
-            names.extend(getattr(cls, f"{language}_names"))
-        return names
-
-    @classproperty
-    def executables(cls) -> Sequence[str]:
-        """Construct executables for external detection from names, prefixes, and suffixes."""
-        regexp_fmt = r"^({0}){1}({2})$"
-        prefixes = [""] + cls.compiler_prefixes
-        suffixes = [""] + cls.compiler_suffixes
-        if sys.platform == "win32":
-            ext = r"\.(?:exe|bat)"
-            suffixes += [suf + ext for suf in suffixes]
-        return [
-            regexp_fmt.format(prefix, re.escape(name), suffix)
-            for prefix, name, suffix in itertools.product(prefixes, cls.compiler_names, suffixes)
-        ]
-
-    @classmethod
-    def determine_version(cls, exe: Path):
-        version_argument = cls.compiler_version_argument
-        if isinstance(version_argument, str):
-            version_argument = (version_argument,)
-
-        for va in version_argument:
-            try:
-                output = spack.compiler.get_compiler_version_output(exe, va)
-                match = re.search(cls.compiler_version_regex, output)
-                if match:
-                    return ".".join(match.groups())
-            except spack.util.executable.ProcessError:
-                pass
-            except Exception as e:
-                tty.debug(
-                    f"[{__file__}] Cannot detect a valid version for the executable "
-                    f"{str(exe)}, for package '{cls.name}': {e}"
-                )
-
-    @classmethod
-    def compiler_bindir(cls, prefix: Path) -> Path:
-        """Overridable method for the location of the compiler bindir within the preifx"""
-        return os.path.join(prefix, "bin")
-
-    @classmethod
-    def determine_compiler_paths(cls, exes: Sequence[Path]) -> Dict[str, Path]:
-        """Compute the paths to compiler executables associated with this package
-
-        This is a helper method for ``determine_variants`` to compute the ``extra_attributes``
-        to include with each spec object."""
-        # There are often at least two copies (not symlinks) of each compiler executable in the
-        # same directory: one with a canonical name, e.g. "gfortran", and another one with the
-        # target prefix, e.g. "x86_64-pc-linux-gnu-gfortran". There also might be a copy of "gcc"
-        # with the version suffix, e.g. "x86_64-pc-linux-gnu-gcc-6.3.0". To ensure the consistency
-        # of values in the "paths" dictionary (i.e. we prefer all of them to reference copies
-        # with canonical names if possible), we iterate over the executables in the reversed sorted
-        # order:
-        # First pass over languages identifies exes that are perfect matches for canonical names
-        # Second pass checks for names with prefix/suffix
-        # Second pass is sorted by language name length because longer named languages
-        # e.g. cxx can often contain the names of shorter named languages
-        # e.g. c (e.g. clang/clang++)
-        paths = {}
-        exes = sorted(exes, reverse=True)
-        languages = {
-            lang: getattr(cls, f"{lang}_names")
-            for lang in sorted(cls.compiler_languages, key=len, reverse=True)
-        }
-        for exe in exes:
-            for lang, names in languages.items():
-                if os.path.basename(exe) in names:
-                    paths[lang] = exe
-                    break
-            else:
-                for lang, names in languages.items():
-                    if any(name in os.path.basename(exe) for name in names):
-                        paths[lang] = exe
-                        break
-
-        return paths
-
-    @classmethod
-    def determine_variants(cls, exes: Sequence[Path], version_str: str) -> Tuple:
-        # path determination is separated so it can be reused in subclasses
-        return "", {"compilers": cls.determine_compiler_paths(exes=exes)}
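To see what the deleted `executables` classproperty produced, the same regex construction run standalone for a hypothetical compiler with names `gcc` and `g++`, no prefixes, and the default `-.*` suffix:

```python
import itertools
import re

regexp_fmt = r"^({0}){1}({2})$"
prefixes = [""]            # no compiler_prefixes
suffixes = ["", r"-.*"]    # default compiler_suffixes
compiler_names = ["gcc", "g++"]

patterns = [
    regexp_fmt.format(prefix, re.escape(name), suffix)
    for prefix, name, suffix in itertools.product(prefixes, compiler_names, suffixes)
]
print(patterns)
assert any(re.match(p, "gcc-12") for p in patterns)  # suffixed variant
assert any(re.match(p, "g++") for p in patterns)     # canonical name
```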
@@ -110,8 +110,9 @@ def cuda_flags(arch_list):
     # From the NVIDIA install guide we know of conflicts for particular
     # platforms (linux, darwin), architectures (x86, powerpc) and compilers
     # (gcc, clang). We don't restrict %gcc and %clang conflicts to
-    # platform=linux, since they may apply to platform=darwin. We currently
-    # do not provide conflicts for platform=darwin with %apple-clang.
+    # platform=linux, since they should also apply to platform=cray, and may
+    # apply to platform=darwin. We currently do not provide conflicts for
+    # platform=darwin with %apple-clang.
 
     # Linux x86_64 compiler conflicts from here:
     # https://gist.github.com/ax3l/9489132
@@ -124,8 +125,6 @@ def cuda_flags(arch_list):
     # minimum supported versions
     conflicts("%gcc@:4", when="+cuda ^cuda@11.0:")
     conflicts("%gcc@:5", when="+cuda ^cuda@11.4:")
-    conflicts("%gcc@:7.2", when="+cuda ^cuda@12.4:")
-    conflicts("%clang@:6", when="+cuda ^cuda@12.2:")
 
     # maximum supported version
     # NOTE:
@@ -138,14 +137,11 @@ def cuda_flags(arch_list):
     conflicts("%gcc@11.2:", when="+cuda ^cuda@:11.5")
     conflicts("%gcc@12:", when="+cuda ^cuda@:11.8")
     conflicts("%gcc@13:", when="+cuda ^cuda@:12.3")
-    conflicts("%gcc@14:", when="+cuda ^cuda@:12.5")
     conflicts("%clang@12:", when="+cuda ^cuda@:11.4.0")
     conflicts("%clang@13:", when="+cuda ^cuda@:11.5")
     conflicts("%clang@14:", when="+cuda ^cuda@:11.7")
     conflicts("%clang@15:", when="+cuda ^cuda@:12.0")
-    conflicts("%clang@16:", when="+cuda ^cuda@:12.1")
-    conflicts("%clang@17:", when="+cuda ^cuda@:12.3")
-    conflicts("%clang@18:", when="+cuda ^cuda@:12.5")
+    conflicts("%clang@16:", when="+cuda ^cuda@:12.3")
 
     # https://gist.github.com/ax3l/9489132#gistcomment-3860114
     conflicts("%gcc@10", when="+cuda ^cuda@:11.4.0")
@@ -213,16 +209,12 @@ def cuda_flags(arch_list):
     conflicts("%intel@19.0:", when="+cuda ^cuda@:10.0")
     conflicts("%intel@19.1:", when="+cuda ^cuda@:10.1")
     conflicts("%intel@19.2:", when="+cuda ^cuda@:11.1.0")
-    conflicts("%intel@2021:", when="+cuda ^cuda@:11.4.0")
 
     # XL is mostly relevant for ppc64le Linux
     conflicts("%xl@:12,14:", when="+cuda ^cuda@:9.1")
     conflicts("%xl@:12,14:15,17:", when="+cuda ^cuda@9.2")
     conflicts("%xl@:12,17:", when="+cuda ^cuda@:11.1.0")
 
-    # PowerPC.
-    conflicts("target=ppc64le", when="+cuda ^cuda@12.5:")
-
     # Darwin.
     # TODO: add missing conflicts for %apple-clang cuda@:10
-    conflicts("platform=darwin", when="+cuda ^cuda@11.0.2:")
+    conflicts("platform=darwin", when="+cuda ^cuda@11.0.2: ")
@@ -846,7 +846,6 @@ def scalapack_libs(self):
             "^mpich@2:" in spec_root
             or "^cray-mpich" in spec_root
             or "^mvapich2" in spec_root
-            or "^mvapich" in spec_root
             or "^intel-mpi" in spec_root
             or "^intel-oneapi-mpi" in spec_root
             or "^intel-parallel-studio" in spec_root
@@ -937,15 +936,32 @@ def mpi_setup_dependent_build_environment(self, env, dependent_spec, compilers_o
             "I_MPI_ROOT": self.normalize_path("mpi"),
         }
 
-        compiler_wrapper_commands = self.mpi_compiler_wrappers
-        wrapper_vars.update(
-            {
-                "MPICC": compiler_wrapper_commands["MPICC"],
-                "MPICXX": compiler_wrapper_commands["MPICXX"],
-                "MPIF77": compiler_wrapper_commands["MPIF77"],
-                "MPIF90": compiler_wrapper_commands["MPIF90"],
-            }
-        )
+        # CAUTION - SIMILAR code in:
+        #     var/spack/repos/builtin/packages/mpich/package.py
+        #     var/spack/repos/builtin/packages/openmpi/package.py
+        #     var/spack/repos/builtin/packages/mvapich2/package.py
+        #
+        # On Cray, the regular compiler wrappers *are* the MPI wrappers.
+        if "platform=cray" in self.spec:
+            # TODO: Confirm
+            wrapper_vars.update(
+                {
+                    "MPICC": compilers_of_client["CC"],
+                    "MPICXX": compilers_of_client["CXX"],
+                    "MPIF77": compilers_of_client["F77"],
+                    "MPIF90": compilers_of_client["F90"],
+                }
+            )
+        else:
+            compiler_wrapper_commands = self.mpi_compiler_wrappers
+            wrapper_vars.update(
+                {
+                    "MPICC": compiler_wrapper_commands["MPICC"],
+                    "MPICXX": compiler_wrapper_commands["MPICXX"],
+                    "MPIF77": compiler_wrapper_commands["MPIF77"],
+                    "MPIF90": compiler_wrapper_commands["MPIF90"],
+                }
+            )
 
         # Ensure that the directory containing the compiler wrappers is in the
         # PATH. Spack packages add `prefix.bin` to their dependents' paths,
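The branching added to `mpi_setup_dependent_build_environment` above reduces to choosing one of two dictionaries. A sketch with hypothetical values:

```python
# On Cray the PrgEnv wrappers (cc/CC/ftn) already speak MPI, so the client's
# own compilers are exported; elsewhere Intel MPI's wrapper scripts are used.
compilers_of_client = {"CC": "cc", "CXX": "CC", "F77": "ftn", "F90": "ftn"}
mpi_compiler_wrappers = {
    "MPICC": "/opt/intel/mpi/bin/mpicc",   # hypothetical wrapper paths
    "MPICXX": "/opt/intel/mpi/bin/mpicxx",
    "MPIF77": "/opt/intel/mpi/bin/mpif77",
    "MPIF90": "/opt/intel/mpi/bin/mpif90",
}

on_cray = True  # hypothetical: the spec satisfies platform=cray
if on_cray:
    wrapper_vars = {
        "MPICC": compilers_of_client["CC"],
        "MPICXX": compilers_of_client["CXX"],
        "MPIF77": compilers_of_client["F77"],
        "MPIF90": compilers_of_client["F90"],
    }
else:
    wrapper_vars = dict(mpi_compiler_wrappers)
print(wrapper_vars)
```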
@@ -24,6 +24,7 @@ class MSBuildPackage(spack.package_base.PackageBase):
     build_system("msbuild")
     conflicts("platform=linux", when="build_system=msbuild")
     conflicts("platform=darwin", when="build_system=msbuild")
+    conflicts("platform=cray", when="build_system=msbuild")
 
 
 @spack.builder.builder("msbuild")
@@ -24,6 +24,7 @@ class NMakePackage(spack.package_base.PackageBase):
     build_system("nmake")
     conflicts("platform=linux", when="build_system=nmake")
     conflicts("platform=darwin", when="build_system=nmake")
+    conflicts("platform=cray", when="build_system=nmake")
 
 
 @spack.builder.builder("nmake")
@@ -144,7 +145,7 @@ def install(self, pkg, spec, prefix):
         opts += self.nmake_install_args()
         if self.makefile_name:
             opts.append("/F{}".format(self.makefile_name))
-        opts.append(self.define("PREFIX", fs.windows_sfn(prefix)))
+        opts.append(self.define("PREFIX", prefix))
         with fs.working_dir(self.build_directory):
             inspect.getmodule(self.pkg).nmake(
                 *opts, *self.install_targets, ignore_quotes=self.ignore_quotes
@@ -36,8 +36,9 @@ class IntelOneApiPackage(Package):
         "target=ppc64:",
         "target=ppc64le:",
         "target=aarch64:",
-        "platform=darwin",
-        "platform=windows",
+        "platform=darwin:",
+        "platform=cray:",
+        "platform=windows:",
     ]:
         conflicts(c, msg="This package in only available for x86_64 and Linux")
 
@@ -138,21 +138,16 @@ def view_file_conflicts(self, view, merge_map):
         return conflicts
 
     def add_files_to_view(self, view, merge_map, skip_if_exists=True):
-        # Patch up shebangs if the package extends Python and we put a Python interpreter in the
-        # view.
-        if not self.extendee_spec:
-            return super().add_files_to_view(view, merge_map, skip_if_exists)
-
-        python, *_ = self.spec.dependencies("python-venv") or self.spec.dependencies("python")
-
-        if python.external:
+        # Patch up shebangs to the python linked in the view only if python is built by Spack.
+        if not self.extendee_spec or self.extendee_spec.external:
             return super().add_files_to_view(view, merge_map, skip_if_exists)
 
         # We only patch shebangs in the bin directory.
         copied_files: Dict[Tuple[int, int], str] = {}  # File identifier -> source
         delayed_links: List[Tuple[str, str]] = []  # List of symlinks from merge map
-        bin_dir = self.spec.prefix.bin
 
+        bin_dir = self.spec.prefix.bin
+        python_prefix = self.extendee_spec.prefix
         for src, dst in merge_map.items():
             if skip_if_exists and os.path.lexists(dst):
                 continue
@@ -173,7 +168,7 @@ def add_files_to_view(self, view, merge_map, skip_if_exists=True):
                 copied_files[(s.st_dev, s.st_ino)] = dst
                 shutil.copy2(src, dst)
                 fs.filter_file(
-                    python.prefix, os.path.abspath(view.get_projection_for_spec(self.spec)), dst
+                    python_prefix, os.path.abspath(view.get_projection_for_spec(self.spec)), dst
                 )
             else:
                 view.link(src, dst)
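The copy-then-filter step above rewrites shebangs from the Python prefix to the view projection. A self-contained sketch of that rewrite, with hypothetical paths and `fs.filter_file` replaced by a plain read/replace/write:

```python
import shutil
import tempfile

python_prefix = "/spack/opt/python-3.11"      # hypothetical extendee prefix
view_projection = "/spack/var/views/default"  # hypothetical view projection

with tempfile.TemporaryDirectory() as tmp:
    src = f"{tmp}/tool.py"
    dst = f"{tmp}/tool.view.py"
    with open(src, "w") as f:
        f.write(f"#!{python_prefix}/bin/python3\nprint('hi')\n")

    shutil.copy2(src, dst)  # same copy the builder performs

    with open(dst) as f:    # equivalent of fs.filter_file(python_prefix, ...)
        text = f.read()
    with open(dst, "w") as f:
        f.write(text.replace(python_prefix, view_projection))

    with open(dst) as f:
        print(f.readline().strip())  # -> #!/spack/var/views/default/bin/python3
```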
@@ -204,13 +199,14 @@ def remove_files_from_view(self, view, merge_map):
             ignore_namespace = True
 
         bin_dir = self.spec.prefix.bin
+        global_view = self.extendee_spec.prefix == view.get_projection_for_spec(self.spec)
 
         to_remove = []
         for src, dst in merge_map.items():
             if ignore_namespace and namespace_init(dst):
                 continue
 
-            if not fs.path_contains_subdirectory(src, bin_dir):
+            if global_view or not fs.path_contains_subdirectory(src, bin_dir):
                 to_remove.append(dst)
             else:
                 os.remove(dst)
@@ -366,12 +362,6 @@ def list_url(cls) -> Optional[str]:  # type: ignore[override]
             return f"https://pypi.org/simple/{name}/"
         return None
 
-    @property
-    def python_spec(self):
-        """Get python-venv if it exists or python otherwise."""
-        python, *_ = self.spec.dependencies("python-venv") or self.spec.dependencies("python")
-        return python
-
     @property
     def headers(self) -> HeaderList:
         """Discover header files in platlib."""
@@ -381,9 +371,8 @@ def headers(self) -> HeaderList:
 
         # Headers should only be in include or platlib, but no harm in checking purelib too
         include = self.prefix.join(self.spec["python"].package.include).join(name)
-        python = self.python_spec
-        platlib = self.prefix.join(python.package.platlib).join(name)
-        purelib = self.prefix.join(python.package.purelib).join(name)
+        platlib = self.prefix.join(self.spec["python"].package.platlib).join(name)
+        purelib = self.prefix.join(self.spec["python"].package.purelib).join(name)
 
         headers_list = map(fs.find_all_headers, [include, platlib, purelib])
         headers = functools.reduce(operator.add, headers_list)
@@ -402,9 +391,8 @@ def libs(self) -> LibraryList:
         name = self.spec.name[3:]
 
         # Libraries should only be in platlib, but no harm in checking purelib too
-        python = self.python_spec
-        platlib = self.prefix.join(python.package.platlib).join(name)
-        purelib = self.prefix.join(python.package.purelib).join(name)
+        platlib = self.prefix.join(self.spec["python"].package.platlib).join(name)
+        purelib = self.prefix.join(self.spec["python"].package.purelib).join(name)
 
         find_all_libraries = functools.partial(fs.find_all_libraries, recursive=True)
         libs_list = map(find_all_libraries, [platlib, purelib])
@@ -516,8 +504,6 @@ def global_options(self, spec: Spec, prefix: Prefix) -> Iterable[str]:
 
     def install(self, pkg: PythonPackage, spec: Spec, prefix: Prefix) -> None:
         """Install everything from build directory."""
-        pip = spec["python"].command
-        pip.add_default_arg("-m", "pip")
-
         args = PythonPipBuilder.std_args(pkg) + [f"--prefix={prefix}"]
 
@@ -533,6 +519,14 @@ def install(self, pkg: PythonPackage, spec: Spec, prefix: Prefix) -> None:
         else:
             args.append(".")
 
+        pip = spec["python"].command
+        # Hide user packages, since we don't have build isolation. This is
+        # necessary because pip / setuptools may run hooks from arbitrary
+        # packages during the build. There is no equivalent variable to hide
+        # system packages, so this is not reliable for external Python.
+        pip.add_default_env("PYTHONNOUSERSITE", "1")
+        pip.add_default_arg("-m")
+        pip.add_default_arg("pip")
         with fs.working_dir(self.build_directory):
             pip(*args)
 
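The added lines configure every later `pip(...)` call. Roughly, they are equivalent to running the interpreter as below; the flags and prefix are an illustrative subset, not the full `std_args(pkg)` list:

```python
import os
import subprocess
import sys

# PYTHONNOUSERSITE=1 hides ~/.local packages from pip/setuptools hooks, and
# "-m pip" is prepended to every invocation via add_default_arg.
env = dict(os.environ, PYTHONNOUSERSITE="1")
cmd = [sys.executable, "-m", "pip", "install", "--prefix=/tmp/example-prefix", "."]
print("would run:", " ".join(cmd))
if False:  # flip to actually install the current directory
    subprocess.run(cmd, env=env, check=True)
```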
@@ -34,8 +34,6 @@ def _misc_cache():
     return spack.util.file_cache.FileCache(path)
 
 
-FileCacheType = Union[spack.util.file_cache.FileCache, llnl.util.lang.Singleton]
-
 #: Spack's cache for small data
 MISC_CACHE: Union[spack.util.file_cache.FileCache, llnl.util.lang.Singleton] = (
     llnl.util.lang.Singleton(_misc_cache)
@@ -22,8 +22,6 @@
 from urllib.parse import urlencode
 from urllib.request import HTTPHandler, Request, build_opener
 
-import ruamel.yaml
-
 import llnl.util.filesystem as fs
 import llnl.util.tty as tty
 from llnl.util.lang import memoized
@@ -46,7 +44,6 @@
 from spack import traverse
 from spack.error import SpackError
 from spack.reporters import CDash, CDashConfiguration
-from spack.reporters.cdash import SPACK_CDASH_TIMEOUT
 from spack.reporters.cdash import build_stamp as cdash_build_stamp
 
 # See https://docs.gitlab.com/ee/ci/yaml/#retry for descriptions of conditions
@@ -553,9 +550,10 @@ def generate_gitlab_ci_yaml(
     env,
     print_summary,
     output_file,
-    *,
     prune_dag=False,
     check_index_only=False,
+    run_optimizer=False,
+    use_dependencies=False,
     artifacts_root=None,
     remote_mirror_override=None,
 ):
@@ -576,6 +574,12 @@ def generate_gitlab_ci_yaml(
             this mode results in faster yaml generation time). Otherwise, also
             check each spec directly by url (useful if there is no index or it
             might be out of date).
+        run_optimizer (bool): If True, post-process the generated yaml to try
+            try to reduce the size (attempts to collect repeated configuration
+            and replace with definitions).)
+        use_dependencies (bool): If true, use "dependencies" rather than "needs"
+            ("needs" allows DAG scheduling). Useful if gitlab instance cannot
+            be configured to handle more than a few "needs" per job.
         artifacts_root (str): Path where artifacts like logs, environment
             files (spack.yaml, spack.lock), etc should be written. GitLab
             requires this to be within the project directory.
@@ -679,22 +683,6 @@ def generate_gitlab_ci_yaml(
             "instead.",
         )
 
-    def ensure_expected_target_path(path):
-        """Returns passed paths with all Windows path separators exchanged
-        for posix separators only if copy_only_pipeline is enabled
-
-        This is required as copy_only_pipelines are a unique scenario where
-        the generate job and child pipelines are run on different platforms.
-        To make this compatible w/ Windows, we cannot write Windows style path separators
-        that will be consumed on by the Posix copy job runner.
-
-        TODO (johnwparent): Refactor config + cli read/write to deal only in posix
-        style paths
-        """
-        if copy_only_pipeline and path:
-            path = path.replace("\\", "/")
-        return path
-
     pipeline_mirrors = spack.mirror.MirrorCollection(binary=True)
     deprecated_mirror_config = False
     buildcache_destination = None
@@ -809,8 +797,7 @@ def ensure_expected_target_path(path):
     cli_scopes = [
         os.path.relpath(s.path, concrete_env_dir)
         for s in cfg.scopes().values()
-        if not s.writable
-        and isinstance(s, (cfg.DirectoryConfigScope))
+        if isinstance(s, cfg.ImmutableConfigScope)
         and s.path not in env_includes
         and os.path.exists(s.path)
     ]
@@ -819,7 +806,7 @@ def ensure_expected_target_path(path):
     if scope not in include_scopes and scope not in env_includes:
         include_scopes.insert(0, scope)
     env_includes.extend(include_scopes)
-    env_yaml_root["spack"]["include"] = [ensure_expected_target_path(i) for i in env_includes]
+    env_yaml_root["spack"]["include"] = env_includes
 
     if "gitlab-ci" in env_yaml_root["spack"] and "ci" not in env_yaml_root["spack"]:
         env_yaml_root["spack"]["ci"] = env_yaml_root["spack"].pop("gitlab-ci")
@@ -1240,9 +1227,6 @@ def main_script_replacements(cmd):
             "SPACK_REBUILD_EVERYTHING": str(rebuild_everything),
             "SPACK_REQUIRE_SIGNING": os.environ.get("SPACK_REQUIRE_SIGNING", "False"),
         }
-        output_vars = output_object["variables"]
-        for item, val in output_vars.items():
-            output_vars[item] = ensure_expected_target_path(val)
 
         # TODO: Remove this block in Spack 0.23
         if deprecated_mirror_config and remote_mirror_override:
@@ -1267,6 +1251,17 @@ def main_script_replacements(cmd):
             with open(copy_specs_file, "w") as fd:
                 fd.write(json.dumps(buildcache_copies))
 
+        # TODO(opadron): remove this or refactor
+        if run_optimizer:
+            import spack.ci_optimization as ci_opt
+
+            output_object = ci_opt.optimizer(output_object)
+
+        # TODO(opadron): remove this or refactor
+        if use_dependencies:
+            import spack.ci_needs_workaround as cinw
+
+            output_object = cinw.needs_to_dependencies(output_object)
     else:
         # No jobs were generated
         noop_job = spack_ci_ir["jobs"]["noop"]["attributes"]
@@ -1288,6 +1283,7 @@ def main_script_replacements(cmd):
     sorted_output = {}
     for output_key, output_value in sorted(output_object.items()):
         sorted_output[output_key] = output_value
+
     if known_broken_specs_encountered:
         tty.error("This pipeline generated hashes known to be broken on develop:")
         display_broken_spec_messages(broken_specs_url, known_broken_specs_encountered)
@@ -1295,11 +1291,8 @@ def main_script_replacements(cmd):
         if not rebuild_everything:
             sys.exit(1)
 
-    # Minimize yaml output size through use of anchors
-    syaml.anchorify(sorted_output)
-
-    with open(output_file, "w") as f:
-        ruamel.yaml.YAML().dump(sorted_output, f)
+    with open(output_file, "w") as outf:
+        outf.write(syaml.dump(sorted_output, default_flow_style=True))
 
 
 def _url_encode_string(input_string):
@@ -1485,12 +1478,6 @@ def copy_test_logs_to_artifacts(test_stage, job_test_dir):
     copy_files_to_artifacts(os.path.join(test_stage, "*", "*.txt"), job_test_dir)
 
 
-def win_quote(quote_str: str) -> str:
-    if IS_WINDOWS:
-        quote_str = f'"{quote_str}"'
-    return quote_str
-
-
 def download_and_extract_artifacts(url, work_dir):
     """Look for gitlab artifacts.zip at the given url, and attempt to download
     and extract the contents into the given work_dir
@@ -1513,7 +1500,7 @@ def download_and_extract_artifacts(url, work_dir):
     request = Request(url, headers=headers)
     request.get_method = lambda: "GET"
 
-    response = opener.open(request, timeout=SPACK_CDASH_TIMEOUT)
+    response = opener.open(request)
     response_code = response.getcode()
 
     if response_code != 200:
@@ -1955,9 +1942,9 @@ def compose_command_err_handling(args):
     # but we need to handle EXEs (git, etc) ourselves
     catch_exe_failure = (
         """
-if ($LASTEXITCODE -ne 0){{
-    throw 'Command {} has failed'
-}}
+if ($LASTEXITCODE -ne 0){
+    throw "Command {} has failed"
+}
         """
         if IS_WINDOWS
         else ""
@@ -2189,13 +2176,13 @@ def __init__(self, ci_cdash):
     def args(self):
         return [
             "--cdash-upload-url",
-            win_quote(self.upload_url),
+            self.upload_url,
             "--cdash-build",
-            win_quote(self.build_name),
+            self.build_name,
             "--cdash-site",
-            win_quote(self.site),
+            self.site,
             "--cdash-buildstamp",
-            win_quote(self.build_stamp),
+            self.build_stamp,
         ]
 
     @property  # type: ignore
@@ -2261,7 +2248,7 @@ def create_buildgroup(self, opener, headers, url, group_name, group_type):
 
         request = Request(url, data=enc_data, headers=headers)
 
-        response = opener.open(request, timeout=SPACK_CDASH_TIMEOUT)
+        response = opener.open(request)
         response_code = response.getcode()
 
         if response_code not in [200, 201]:
@@ -2307,7 +2294,7 @@ def populate_buildgroup(self, job_names):
         request = Request(url, data=enc_data, headers=headers)
         request.get_method = lambda: "PUT"
 
-        response = opener.open(request, timeout=SPACK_CDASH_TIMEOUT)
+        response = opener.open(request)
        response_code = response.getcode()
 
         if response_code != 200:
34  lib/spack/spack/ci_needs_workaround.py  Normal file
@@ -0,0 +1,34 @@
+# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+import collections.abc
+
+get_job_name = lambda needs_entry: (
+    needs_entry.get("job")
+    if (isinstance(needs_entry, collections.abc.Mapping) and needs_entry.get("artifacts", True))
+    else needs_entry if isinstance(needs_entry, str) else None
+)
+
+
+def convert_job(job_entry):
+    if not isinstance(job_entry, collections.abc.Mapping):
+        return job_entry
+
+    needs = job_entry.get("needs")
+    if needs is None:
+        return job_entry
+
+    new_job = {}
+    new_job.update(job_entry)
+    del new_job["needs"]
+
+    new_job["dependencies"] = list(
+        filter((lambda x: x is not None), (get_job_name(needs_entry) for needs_entry in needs))
+    )
+
+    return new_job
+
+
+def needs_to_dependencies(yaml):
+    return dict((k, convert_job(v)) for k, v in yaml.items())
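Example input and output for the new module, assuming it is importable as `spack.ci_needs_workaround` (its path in the file header above):

```python
from spack.ci_needs_workaround import needs_to_dependencies

pipeline = {
    "build-zlib": {
        "script": ["spack ci rebuild"],
        "needs": [{"job": "generate", "artifacts": True}, "setup"],
    }
}
print(needs_to_dependencies(pipeline))
# {'build-zlib': {'script': ['spack ci rebuild'],
#                 'dependencies': ['generate', 'setup']}}
```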
363
lib/spack/spack/ci_optimization.py
Normal file
363
lib/spack/spack/ci_optimization.py
Normal file
@@ -0,0 +1,363 @@
|
|||||||
|
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
|
||||||
|
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||||
|
#
|
||||||
|
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||||
|
import collections
|
||||||
|
import collections.abc
|
||||||
|
import copy
|
||||||
|
import hashlib
|
||||||
|
|
||||||
|
import spack.util.spack_yaml as syaml
|
||||||
|
|
||||||
|
|
||||||
|
def sort_yaml_obj(obj):
|
||||||
|
if isinstance(obj, collections.abc.Mapping):
|
||||||
|
return syaml.syaml_dict(
|
||||||
|
(k, sort_yaml_obj(v)) for k, v in sorted(obj.items(), key=(lambda item: str(item[0])))
|
||||||
|
)
|
||||||
|
|
||||||
|
if isinstance(obj, collections.abc.Sequence) and not isinstance(obj, str):
|
||||||
|
return syaml.syaml_list(sort_yaml_obj(x) for x in obj)
|
||||||
|
|
||||||
|
return obj
|
||||||
|
|
||||||
|
|
||||||
|
def matches(obj, proto):
|
||||||
|
"""Returns True if the test object "obj" matches the prototype object
|
||||||
|
"proto".
|
||||||
|
|
||||||
|
If obj and proto are mappings, obj matches proto if (key in obj) and
|
||||||
|
(obj[key] matches proto[key]) for every key in proto.
|
||||||
|
|
||||||
|
If obj and proto are sequences, obj matches proto if they are of the same
|
||||||
|
length and (a matches b) for every (a,b) in zip(obj, proto).
|
||||||
|
|
||||||
|
Otherwise, obj matches proto if obj == proto.
|
||||||
|
|
||||||
|
Precondition: proto must not have any reference cycles
|
||||||
|
"""
|
||||||
|
if isinstance(obj, collections.abc.Mapping):
|
||||||
|
if not isinstance(proto, collections.abc.Mapping):
|
||||||
|
return False
|
||||||
|
|
||||||
|
return all((key in obj and matches(obj[key], val)) for key, val in proto.items())
|
||||||
|
|
||||||
|
if isinstance(obj, collections.abc.Sequence) and not isinstance(obj, str):
|
||||||
|
if not (isinstance(proto, collections.abc.Sequence) and not isinstance(proto, str)):
|
||||||
|
return False
|
||||||
|
|
||||||
|
if len(obj) != len(proto):
|
||||||
|
return False
|
||||||
|
|
||||||
|
return all(matches(obj[index], val) for index, val in enumerate(proto))
|
||||||
|
|
||||||
|
return obj == proto
|
||||||
|
|
||||||
|
|
||||||
|
def subkeys(obj, proto):
|
||||||
|
"""Returns the test mapping "obj" after factoring out the items it has in
|
||||||
|
common with the prototype mapping "proto".
|
||||||
|
|
||||||
|
Consider a recursive merge operation, merge(a, b) on mappings a and b, that
|
||||||
|
returns a mapping, m, whose keys are the union of the keys of a and b, and
|
||||||
|
for every such key, "k", its corresponding value is:
|
||||||
|
|
||||||
|
- merge(a[key], b[key]) if a[key] and b[key] are mappings, or
|
||||||
|
- b[key] if (key in b) and not matches(a[key], b[key]),
|
||||||
|
or
|
||||||
|
- a[key] otherwise
|
||||||
|
|
||||||
|
|
||||||
|
If obj and proto are mappings, the returned object is the smallest object,
|
||||||
|
"a", such that merge(a, proto) matches obj.
|
||||||
|
|
||||||
|
Otherwise, obj is returned.
|
||||||
|
"""
|
||||||
|
if not (
|
||||||
|
isinstance(obj, collections.abc.Mapping) and isinstance(proto, collections.abc.Mapping)
|
||||||
|
):
|
||||||
|
return obj
|
||||||
|
|
||||||
|
new_obj = {}
|
||||||
|
for key, value in obj.items():
|
||||||
|
if key not in proto:
|
||||||
|
new_obj[key] = value
|
||||||
|
continue
|
||||||
|
|
||||||
|
if matches(value, proto[key]) and matches(proto[key], value):
|
||||||
|
continue
|
||||||
|
|
||||||
|
if isinstance(value, collections.abc.Mapping):
|
||||||
|
new_obj[key] = subkeys(value, proto[key])
|
||||||
|
continue
|
||||||
|
|
||||||
|
new_obj[key] = value
|
||||||
|
|
||||||
|
return new_obj
|
||||||
|
|
||||||
|
|
||||||
|
def add_extends(yaml, key):
|
||||||
|
"""Modifies the given object "yaml" so that it includes an "extends" key
|
||||||
|
whose value features "key".
|
||||||
|
|
||||||
|
If "extends" is not in yaml, then yaml is modified such that
|
||||||
|
yaml["extends"] == key.
|
||||||
|
|
||||||
|
If yaml["extends"] is a str, then yaml is modified such that
|
||||||
|
yaml["extends"] == [yaml["extends"], key]
|
||||||
|
|
||||||
|
If yaml["extends"] is a list that does not include key, then key is
|
||||||
|
appended to the list.
|
||||||
|
|
||||||
|
Otherwise, yaml is left unchanged.
|
||||||
|
"""
|
||||||
|
|
||||||
|
has_key = "extends" in yaml
|
||||||
|
extends = yaml.get("extends")
|
||||||
|
|
||||||
|
if has_key and not isinstance(extends, (str, collections.abc.Sequence)):
|
||||||
|
return
|
||||||
|
|
||||||
|
if extends is None:
|
||||||
|
yaml["extends"] = key
|
||||||
|
return
|
||||||
|
|
||||||
|
if isinstance(extends, str):
|
||||||
|
if extends != key:
|
||||||
|
yaml["extends"] = [extends, key]
|
||||||
|
return
|
||||||
|
|
||||||
|
if key not in extends:
|
||||||
|
extends.append(key)
|
||||||
|
|
||||||
|
|
||||||
|
def common_subobject(yaml, sub):
|
||||||
|
"""Factor prototype object "sub" out of the values of mapping "yaml".
|
||||||
|
|
||||||
|
Consider a modified copy of yaml, "new", where for each key, "key" in yaml:
|
||||||
|
|
||||||
|
- If yaml[key] matches sub, then new[key] = subkeys(yaml[key], sub).
|
||||||
|
- Otherwise, new[key] = yaml[key].
|
||||||
|
|
||||||
|
If the above match criteria is not satisfied for any such key, then (yaml,
|
||||||
|
None) is returned. The yaml object is returned unchanged.
|
||||||
|
|
||||||
|
Otherwise, each matching value in new is modified as in
|
||||||
|
add_extends(new[key], common_key), and then new[common_key] is set to sub.
|
||||||
|
The common_key value is chosen such that it does not match any preexisting
|
||||||
|
key in new. In this case, (new, common_key) is returned.
|
||||||
|
"""
|
||||||
|
match_list = set(k for k, v in yaml.items() if matches(v, sub))
|
||||||
|
|
||||||
|
if not match_list:
|
||||||
|
return yaml, None
|
||||||
|
|
||||||
|
common_prefix = ".c"
|
||||||
|
common_index = 0
|
||||||
|
|
||||||
|
while True:
|
||||||
|
common_key = "".join((common_prefix, str(common_index)))
|
||||||
|
if common_key not in yaml:
|
||||||
|
break
|
||||||
|
common_index += 1
|
||||||
|
|
||||||
|
new_yaml = {}
|
||||||
|
|
||||||
|
for key, val in yaml.items():
|
||||||
|
new_yaml[key] = copy.deepcopy(val)
|
||||||
|
|
||||||
|
if not matches(val, sub):
|
||||||
|
continue
|
||||||
|
|
||||||
|
new_yaml[key] = subkeys(new_yaml[key], sub)
|
||||||
|
add_extends(new_yaml[key], common_key)
|
||||||
|
|
||||||
|
new_yaml[common_key] = sub
|
||||||
|
|
||||||
|
return new_yaml, common_key
|
||||||
|
|
||||||
|
|
||||||
|
def print_delta(name, old, new, applied=None):
    delta = new - old
    reldelta = (1000 * delta) // old
    reldelta = (reldelta // 10, reldelta % 10)

    if applied is None:
        applied = new <= old

    print(
        "\n".join(
            (
                "{0} {1}:",
                "  before: {2: 10d}",
                "  after : {3: 10d}",
                "  delta : {4:+10d} ({5:=+3d}.{6}%)",
            )
        ).format(name, ("+" if applied else "x"), old, new, delta, reldelta[0], reldelta[1])
    )

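The integer arithmetic keeps one decimal of the relative change without floating point; a worked example with assumed sizes:

    old, new = 48000, 36000
    delta = new - old                 # -12000
    reldelta = (1000 * delta) // old  # -250 (per mille)
    reldelta // 10, reldelta % 10     # (-25, 0) -> rendered as -25.0%
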
def try_optimization_pass(name, yaml, optimization_pass, *args, **kwargs):
    """Try applying an optimization pass and return information about the
    result.

    "name" is a string describing the nature of the pass. If it is a non-empty
    string, summary statistics are also printed to stdout.

    "yaml" is the object to apply the pass to.

    "optimization_pass" is the function implementing the pass to be applied.

    "args" and "kwargs" are the additional arguments to pass to the
    optimization pass. The pass is applied as

    >>> (new_yaml, *other_results) = optimization_pass(yaml, *args, **kwargs)

    The pass's results are greedily rejected if it does not modify the original
    yaml document, or if it produces a yaml document that serializes to a
    larger string.

    Returns (new_yaml, yaml, applied, other_results) if applied, or
    (yaml, new_yaml, applied, other_results) otherwise.
    """
    result = optimization_pass(yaml, *args, **kwargs)
    new_yaml, other_results = result[0], result[1:]

    if new_yaml is yaml:
        # pass was not applied
        return (yaml, new_yaml, False, other_results)

    pre_size = len(syaml.dump_config(sort_yaml_obj(yaml), default_flow_style=True))
    post_size = len(syaml.dump_config(sort_yaml_obj(new_yaml), default_flow_style=True))

    # pass makes the size worse: not applying
    applied = post_size <= pre_size
    if applied:
        yaml, new_yaml = new_yaml, yaml

    if name:
        print_delta(name, pre_size, post_size, applied)

    return (yaml, new_yaml, applied, other_results)

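Roughly how a pass is driven (the script value is illustrative); for common_subobject the single extra result is the generated common key:

    yaml, rejected, applied, (common_key,) = try_optimization_pass(
        "script factorization", yaml, common_subobject, {"script": ["spack ci rebuild"]}
    )
    # `applied` reports whether the smaller document was kept; if not,
    # `yaml` is the original object handed back unchanged.
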
def build_histogram(iterator, key):
    """Builds a histogram of values given an iterable of mappings and a key.

    For each mapping "m" with key "key" in iterator, the value m[key] is
    considered.

    Returns a list of tuples (hash, count, proportion, value), where

    - "hash" is a sha1sum hash of the value.
    - "count" is the number of occurrences of values that hash to "hash".
    - "proportion" is the proportion of all values considered above that
      hash to "hash".
    - "value" is one of the values considered above that hash to "hash".
      Which value is chosen when multiple values hash to the same "hash" is
      undefined.

    The list is sorted in descending order by count, yielding the most
    frequently occurring hashes first.
    """
    buckets = collections.defaultdict(int)
    values = {}

    num_objects = 0
    for obj in iterator:
        num_objects += 1

        try:
            val = obj[key]
        except (KeyError, TypeError):
            continue

        value_hash = hashlib.sha1()
        value_hash.update(syaml.dump_config(sort_yaml_obj(val)).encode())
        value_hash = value_hash.hexdigest()

        buckets[value_hash] += 1
        values[value_hash] = val

    return [
        (h, buckets[h], float(buckets[h]) / num_objects, values[h])
        for h in sorted(buckets.keys(), key=lambda k: -buckets[k])
    ]

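For instance, if nine of ten jobs share one "tags" value, the histogram might look like this (hashes abbreviated, values assumed):

    hist = build_histogram(yaml.values(), "tags")
    # [("3f2c...", 9, 0.9, ["x86_64"]), ("a91b...", 1, 0.1, ["aarch64"])]
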
def optimizer(yaml):
    original_size = len(syaml.dump_config(sort_yaml_obj(yaml), default_flow_style=True))

    # try factoring out commonly repeated portions
    common_job = {
        "variables": {"SPACK_COMPILER_ACTION": "NONE"},
        "after_script": ['rm -rf "./spack"'],
        "artifacts": {"paths": ["jobs_scratch_dir", "cdash_report"], "when": "always"},
    }

    # look for a list of tags that appear frequently
    _, count, proportion, tags = next(iter(build_histogram(yaml.values(), "tags")), (None,) * 4)

    # If a list of tags is found, and more than one job uses it, *and* the
    # jobs that do use it represent at least 70% of all jobs, then add the
    # list to the prototype object.
    if tags and count > 1 and proportion >= 0.70:
        common_job["tags"] = tags

    # apply common object factorization
    yaml, other, applied, rest = try_optimization_pass(
        "general common object factorization", yaml, common_subobject, common_job
    )

    # look for a common script, and try factoring that out
    _, count, proportion, script = next(
        iter(build_histogram(yaml.values(), "script")), (None,) * 4
    )

    if script and count > 1 and proportion >= 0.70:
        yaml, other, applied, rest = try_optimization_pass(
            "script factorization", yaml, common_subobject, {"script": script}
        )

    # look for a common before_script, and try factoring that out
    _, count, proportion, script = next(
        iter(build_histogram(yaml.values(), "before_script")), (None,) * 4
    )

    if script and count > 1 and proportion >= 0.70:
        yaml, other, applied, rest = try_optimization_pass(
            "before_script factorization", yaml, common_subobject, {"before_script": script}
        )

    # Look specifically for the SPACK_ROOT_SPEC environment variables.
    # Try to factor them out.
    h = build_histogram(
        (getattr(val, "get", lambda *args: {})("variables") for val in yaml.values()),
        "SPACK_ROOT_SPEC",
    )

    # In this case, we try to factor out *all* instances of the SPACK_ROOT_SPEC
    # environment variable; not just the one that appears with the greatest
    # frequency. We only require that more than 1 job uses a given instance's
    # value, because we expect the value to be very large, and so expect even
    # few-to-one factorizations to yield large space savings.
    counter = 0
    for _, count, proportion, spec in h:
        if count <= 1:
            continue

        counter += 1

        yaml, other, applied, rest = try_optimization_pass(
            "SPACK_ROOT_SPEC factorization ({count})".format(count=counter),
            yaml,
            common_subobject,
            {"variables": {"SPACK_ROOT_SPEC": spec}},
        )

    new_size = len(syaml.dump_config(sort_yaml_obj(yaml), default_flow_style=True))

    print("\n")
    print_delta("overall summary", original_size, new_size)
    print("\n")
    return yaml

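A hypothetical end-to-end run over a generated pipeline mapping; each accepted pass prints a delta block like this (numbers assumed):

    pipeline = syaml.load(open("gitlab-ci.yml"))
    pipeline = optimizer(pipeline)
    # script factorization +:
    #   before:      48000
    #   after :      36000
    #   delta :     -12000 (-25.0%)
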
@@ -336,7 +336,6 @@ def display_specs(specs, args=None, **kwargs):
         groups (bool): display specs grouped by arch/compiler (default True)
         decorator (typing.Callable): function to call to decorate specs
         all_headers (bool): show headers even when arch/compiler aren't defined
-        status_fn (typing.Callable): if provided, prepend install-status info
         output (typing.IO): A file object to write to. Default is ``sys.stdout``

     """
@@ -360,7 +359,6 @@ def get_arg(name, default=None):
     groups = get_arg("groups", True)
     all_headers = get_arg("all_headers", False)
     output = get_arg("output", sys.stdout)
-    status_fn = get_arg("status_fn", None)

     decorator = get_arg("decorator", None)
     if decorator is None:
@@ -388,13 +386,6 @@ def get_arg(name, default=None):
     def fmt(s, depth=0):
         """Formatter function for all output specs"""
         string = ""
-
-        if status_fn:
-            # This was copied from spec.tree's colorization logic
-            # then shortened because it seems like status_fn should
-            # always return an InstallStatus
-            string += colorize(status_fn(s).value)
-
         if hashes:
             string += gray_hash(s, hlen) + " "
         string += depth * "    "
@@ -453,7 +444,7 @@ def format_list(specs):
 def filter_loaded_specs(specs):
     """Filter a list of specs returning only those that are
     currently loaded."""
-    hashes = os.environ.get(uenv.spack_loaded_hashes_var, "").split(os.pathsep)
+    hashes = os.environ.get(uenv.spack_loaded_hashes_var, "").split(":")
     return [x for x in specs if x.dag_hash() in hashes]

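The base-branch side of the last hunk splits SPACK_LOADED_HASHES on os.pathsep rather than a hard-coded colon; a small sketch of why that matters (values illustrative):

    import os
    # POSIX uses ":" as os.pathsep while Windows uses ";", so a Windows-style
    # "abc123;def456" only separates into two hashes when os.pathsep is used.
    hashes = "abc123;def456".split(os.pathsep)
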
@@ -165,7 +165,7 @@ def _reset(args):
     if not ok_to_continue:
         raise RuntimeError("Aborting")

-    for scope in spack.config.CONFIG.writable_scopes:
+    for scope in spack.config.CONFIG.file_scopes:
         # The default scope should stay untouched
         if scope.name == "defaults":
             continue

@@ -13,6 +13,7 @@
 import shutil
 import sys
 import tempfile
+import urllib.request
 from typing import Dict, List, Optional, Tuple, Union

 import llnl.util.tty as tty
@@ -53,7 +54,6 @@
 from spack.oci.oci import (
     copy_missing_layers_with_retry,
     get_manifest_and_config_with_retry,
-    list_tags,
     upload_blob_with_retry,
     upload_manifest_with_retry,
 )
@@ -70,6 +70,12 @@ def setup_parser(subparser: argparse.ArgumentParser):

     push = subparsers.add_parser("push", aliases=["create"], help=push_fn.__doc__)
     push.add_argument("-f", "--force", action="store_true", help="overwrite tarball if it exists")
+    push.add_argument(
+        "--allow-root",
+        "-a",
+        action="store_true",
+        help="allow install root string in binary files after RPATH substitution",
+    )
     push_sign = push.add_mutually_exclusive_group(required=False)
     push_sign.add_argument(
         "--unsigned",
@@ -184,6 +190,10 @@ def setup_parser(subparser: argparse.ArgumentParser):
     keys.add_argument("-f", "--force", action="store_true", help="force new download of keys")
     keys.set_defaults(func=keys_fn)

+    preview = subparsers.add_parser("preview", help=preview_fn.__doc__)
+    arguments.add_common_arguments(preview, ["installed_specs"])
+    preview.set_defaults(func=preview_fn)
+
     # Check if binaries need to be rebuilt on remote mirror
     check = subparsers.add_parser("check", help=check_fn.__doc__)
     check.add_argument(
@@ -394,6 +404,11 @@ def push_fn(args):
     else:
         roots = spack.cmd.require_active_env(cmd_name="buildcache push").concrete_roots()

+    if args.allow_root:
+        tty.warn(
+            "The flag `--allow-root` is the default in Spack 0.21, will be removed in Spack 0.22"
+        )
+
     mirror: spack.mirror.Mirror = args.mirror

     # Check if this is an OCI image.
@@ -841,7 +856,10 @@ def _config_from_tag(image_ref: ImageReference, tag: str) -> Optional[dict]:


 def _update_index_oci(image_ref: ImageReference, tmpdir: str, pool: MaybePool) -> None:
-    tags = list_tags(image_ref)
+    request = urllib.request.Request(url=image_ref.tags_url())
+    response = spack.oci.opener.urlopen(request)
+    spack.oci.opener.ensure_status(request, response, 200)
+    tags = json.load(response)["tags"]

     # Fetch all image config files in parallel
     spec_dicts = pool.starmap(
@@ -945,6 +963,14 @@ def keys_fn(args):
     bindist.get_keys(args.install, args.trust, args.force)


+def preview_fn(args):
+    """analyze an installed spec and reports whether executables and libraries are relocatable"""
+    tty.warn(
+        "`spack buildcache preview` is deprecated since `spack buildcache push --allow-root` is "
+        "now the default. This command will be removed in Spack 0.22"
+    )
+
+
 def check_fn(args: argparse.Namespace):
     """check specs against remote binary mirror(s) to see if any need to be rebuilt

@@ -6,7 +6,6 @@
 import json
 import os
 import shutil
-import warnings
 from urllib.parse import urlparse, urlunparse

 import llnl.util.filesystem as fs
@@ -32,6 +31,7 @@
 level = "long"

 SPACK_COMMAND = "spack"
+MAKE_COMMAND = "make"
 INSTALL_FAIL_CODE = 1
 FAILED_CREATE_BUILDCACHE_CODE = 100

@@ -40,12 +40,6 @@ def deindent(desc):
     return desc.replace("    ", "")


-def unicode_escape(path: str) -> str:
-    """Returns transformed path with any unicode
-    characters replaced with their corresponding escapes"""
-    return path.encode("unicode-escape").decode("utf-8")
-
-
 def setup_parser(subparser):
     setup_parser.parser = subparser
     subparsers = subparser.add_subparsers(help="CI sub-commands")
@@ -74,7 +68,7 @@ def setup_parser(subparser):
         "--optimize",
         action="store_true",
         default=False,
-        help="(DEPRECATED) optimize the gitlab yaml file for size\n\n"
+        help="(experimental) optimize the gitlab yaml file for size\n\n"
         "run the generated document through a series of optimization passes "
         "designed to reduce the size of the generated file",
     )
@@ -82,7 +76,7 @@ def setup_parser(subparser):
         "--dependencies",
         action="store_true",
         default=False,
-        help="(DEPRECATED) disable DAG scheduling (use 'plain' dependencies)",
+        help="(experimental) disable DAG scheduling (use 'plain' dependencies)",
     )
     generate.add_argument(
         "--buildcache-destination",
@@ -201,18 +195,6 @@ def ci_generate(args):
     before invoking this command. the value must be the CDash authorization token needed to create
     a build group and register all generated jobs under it
     """
-    if args.optimize:
-        warnings.warn(
-            "The --optimize option has been deprecated, and currently has no effect. "
-            "It will be removed in Spack v0.24."
-        )
-
-    if args.dependencies:
-        warnings.warn(
-            "The --dependencies option has been deprecated, and currently has no effect. "
-            "It will be removed in Spack v0.24."
-        )
-
     env = spack.cmd.require_active_env(cmd_name="ci generate")

     if args.copy_to:
@@ -225,6 +207,8 @@ def ci_generate(args):

     output_file = args.output_file
     copy_yaml_to = args.copy_to
+    run_optimizer = args.optimize
+    use_dependencies = args.dependencies
     prune_dag = args.prune_dag
     index_only = args.index_only
     artifacts_root = args.artifacts_root
@@ -245,6 +229,8 @@ def ci_generate(args):
         output_file,
         prune_dag=prune_dag,
         check_index_only=index_only,
+        run_optimizer=run_optimizer,
+        use_dependencies=use_dependencies,
         artifacts_root=artifacts_root,
         remote_mirror_override=buildcache_destination,
     )
@@ -565,35 +551,75 @@ def ci_rebuild(args):
     # No hash match anywhere means we need to rebuild spec

     # Start with spack arguments
-    spack_cmd = [SPACK_COMMAND, "--color=always", "--backtrace", "--verbose", "install"]
+    spack_cmd = [SPACK_COMMAND, "--color=always", "--backtrace", "--verbose"]

     config = cfg.get("config")
     if not config["verify_ssl"]:
         spack_cmd.append("-k")

-    install_args = [f'--use-buildcache={spack_ci.win_quote("package:never,dependencies:only")}']
+    install_args = []

     can_verify = spack_ci.can_verify_binaries()
     verify_binaries = can_verify and spack_is_pr_pipeline is False
     if not verify_binaries:
         install_args.append("--no-check-signature")

-    slash_hash = spack_ci.win_quote("/" + job_spec.dag_hash())
+    slash_hash = "/{}".format(job_spec.dag_hash())
+
+    # Arguments when installing dependencies from cache
+    deps_install_args = install_args

     # Arguments when installing the root from sources
-    deps_install_args = install_args + ["--only=dependencies"]
-    root_install_args = install_args + ["--keep-stage", "--only=package"]
+    root_install_args = install_args + [
+        "--keep-stage",
+        "--only=package",
+        "--use-buildcache=package:never,dependencies:only",
+    ]
     if cdash_handler:
         # Add additional arguments to `spack install` for CDash reporting.
         root_install_args.extend(cdash_handler.args())
+    root_install_args.append(slash_hash)
+
+    # ["x", "y"] -> "'x' 'y'"
+    args_to_string = lambda args: " ".join("'{}'".format(arg) for arg in args)

     commands = [
         # apparently there's a race when spack bootstraps? do it up front once
-        [SPACK_COMMAND, "-e", unicode_escape(env.path), "bootstrap", "now"],
-        spack_cmd + deps_install_args + [slash_hash],
-        spack_cmd + root_install_args + [slash_hash],
+        [SPACK_COMMAND, "-e", env.path, "bootstrap", "now"],
+        [
+            SPACK_COMMAND,
+            "-e",
+            env.path,
+            "env",
+            "depfile",
+            "-o",
+            "Makefile",
+            "--use-buildcache=package:never,dependencies:only",
+            slash_hash,  # limit to spec we're building
+        ],
+        [
+            # --output-sync requires GNU make 4.x.
+            # Old make errors when you pass it a flag it doesn't recognize,
+            # but it doesn't error or warn when you set unrecognized flags in
+            # this variable.
+            "export",
+            "GNUMAKEFLAGS=--output-sync=recurse",
+        ],
+        [
+            MAKE_COMMAND,
+            "SPACK={}".format(args_to_string(spack_cmd)),
+            "SPACK_COLOR=always",
+            "SPACK_INSTALL_FLAGS={}".format(args_to_string(deps_install_args)),
+            "-j$(nproc)",
+            "install-deps/{}".format(
+                spack.environment.depfile.MakefileSpec(job_spec).safe_format(
+                    "{name}-{version}-{hash}"
+                )
+            ),
+        ],
+        spack_cmd + ["install"] + root_install_args,
     ]

     tty.debug("Installing {0} from source".format(job_spec.name))
     install_exit_code = spack_ci.process_command("install", commands, repro_dir)

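The head-branch side of the ci_rebuild hunk swaps two direct installs for a depfile-driven build so dependencies can be installed in parallel under make; the assembled commands amount to roughly this sequence (env path and hash abbreviated, illustrative only):

    spack -e <env> bootstrap now
    spack -e <env> env depfile -o Makefile --use-buildcache=package:never,dependencies:only /<hash>
    export GNUMAKEFLAGS=--output-sync=recurse
    make SPACK='...' SPACK_COLOR=always SPACK_INSTALL_FLAGS='...' -j$(nproc) install-deps/<name>-<version>-<hash>
    spack '...' install --keep-stage --only=package --use-buildcache=package:never,dependencies:only /<hash>
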
@@ -106,8 +106,7 @@ def clean(parser, args):

     # Then do the cleaning falling through the cases
     if args.specs:
-        specs = spack.cmd.parse_specs(args.specs, concretize=False)
-        specs = list(spack.cmd.matching_spec_from_env(x) for x in specs)
+        specs = spack.cmd.parse_specs(args.specs, concretize=True)
         for spec in specs:
             msg = "Cleaning build stage [{0}]"
             tty.msg(msg.format(spec.short_spec))

@@ -3,9 +3,6 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

-import llnl.util.tty as tty
-from llnl.string import plural
-
 import spack.cmd
 import spack.cmd.common.arguments
 import spack.environment as ev
@@ -46,9 +43,5 @@ def concretize(parser, args):
     with env.write_transaction():
         concretized_specs = env.concretize(force=args.force, tests=tests)
         if not args.quiet:
-            if concretized_specs:
-                tty.msg(f"Concretized {plural(len(concretized_specs), 'spec')}:")
-                ev.display_specs([concrete for _, concrete in concretized_specs])
-            else:
-                tty.msg("No new specs to concretize.")
+            ev.display_specs(concretized_specs)
         env.write()

@@ -156,7 +156,7 @@ def print_flattened_configuration(*, blame: bool) -> None:
     """
     env = ev.active_environment()
     if env is not None:
-        pristine = env.manifest.yaml_content
+        pristine = env.manifest.pristine_yaml_content
         flattened = pristine.copy()
         flattened[spack.schema.env.TOP_LEVEL_KEY] = pristine[spack.schema.env.TOP_LEVEL_KEY].copy()
     else:
@@ -264,9 +264,7 @@ def config_remove(args):
 def _can_update_config_file(scope: spack.config.ConfigScope, cfg_file):
     if isinstance(scope, spack.config.SingleFileScope):
         return fs.can_access(cfg_file)
-    elif isinstance(scope, spack.config.DirectoryConfigScope):
-        return fs.can_write_to_dir(scope.path) and fs.can_access(cfg_file)
-    return False
+    return fs.can_write_to_dir(scope.path) and fs.can_access(cfg_file)


 def _config_change_requires_scope(path, spec, scope, match_spec=None):
@@ -364,11 +362,14 @@ def config_change(args):
 def config_update(args):
     # Read the configuration files
     spack.config.CONFIG.get_config(args.section, scope=args.scope)
-    updates: List[spack.config.ConfigScope] = [
-        x
-        for x in spack.config.CONFIG.format_updates[args.section]
-        if not isinstance(x, spack.config.InternalConfigScope) and x.writable
-    ]
+    updates: List[spack.config.ConfigScope] = list(
+        filter(
+            lambda s: not isinstance(
+                s, (spack.config.InternalConfigScope, spack.config.ImmutableConfigScope)
+            ),
+            spack.config.CONFIG.format_updates[args.section],
+        )
+    )

     cannot_overwrite, skip_system_scope = [], False
     for scope in updates:
@@ -446,7 +447,7 @@ def _can_revert_update(scope_dir, cfg_file, bkp_file):


 def config_revert(args):
-    scopes = [args.scope] if args.scope else [x.name for x in spack.config.CONFIG.writable_scopes]
+    scopes = [args.scope] if args.scope else [x.name for x in spack.config.CONFIG.file_scopes]

     # Search for backup files in the configuration scopes
     Entry = collections.namedtuple("Entry", ["scope", "cfg", "bkp"])

@@ -2,6 +2,7 @@
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
 import os
 import re
 import sys
@@ -933,7 +934,7 @@ def get_repository(args, name):
     # Figure out where the new package should live
     repo_path = args.repo
     if repo_path is not None:
-        repo = spack.repo.from_path(repo_path)
+        repo = spack.repo.Repo(repo_path)
         if spec.namespace and spec.namespace != repo.namespace:
             tty.die(
                 "Can't create package with namespace {0} in repo with "
@@ -941,7 +942,9 @@ def get_repository(args, name):
             )
     else:
         if spec.namespace:
-            repo = spack.repo.PATH.get_repo(spec.namespace)
+            repo = spack.repo.PATH.get_repo(spec.namespace, None)
+            if not repo:
+                tty.die("Unknown namespace: '{0}'".format(spec.namespace))
         else:
             repo = spack.repo.PATH.first_repo()

@@ -47,6 +47,16 @@ def inverted_dependencies():
     dependents of, e.g., `mpi`, but virtuals are not included as
     actual dependents.
     """
+    dag = {}
+    for pkg_cls in spack.repo.PATH.all_package_classes():
+        dag.setdefault(pkg_cls.name, set())
+        for dep in pkg_cls.dependencies_by_name():
+            deps = [dep]
+
+            # expand virtuals if necessary
+            if spack.repo.PATH.is_virtual(dep):
+                deps += [s.name for s in spack.repo.PATH.providers_for(dep)]
+
     dag = collections.defaultdict(set)
     for pkg_cls in spack.repo.PATH.all_package_classes():
         for _, deps_by_name in pkg_cls.dependencies.items():

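The head-branch block above widens virtual dependencies into their providers before inverting the mapping; schematically (package names hypothetical), if "foo" depends on the virtual "mpi" provided by "openmpi" and "mpich":

    deps = ["mpi", "openmpi", "mpich"]
    # inverting then records "foo" as a dependent of all three names:
    # dag == {"mpi": {"foo"}, "openmpi": {"foo"}, "mpich": {"foo"}, ...}
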
@@ -9,8 +9,6 @@

 import spack.cmd
 import spack.config
-import spack.fetch_strategy
-import spack.repo
 import spack.spec
 import spack.util.path
 import spack.version
@@ -71,15 +69,13 @@ def _retrieve_develop_source(spec, abspath):
     # We construct a package class ourselves, rather than asking for
     # Spec.package, since Spec only allows this when it is concrete
     package = pkg_cls(spec)
-    source_stage = package.stage[0]
-    if isinstance(source_stage.fetcher, spack.fetch_strategy.GitFetchStrategy):
-        source_stage.fetcher.get_full_repo = True
+    if isinstance(package.stage[0].fetcher, spack.fetch_strategy.GitFetchStrategy):
+        package.stage[0].fetcher.get_full_repo = True
     # If we retrieved this version before and cached it, we may have
     # done so without cloning the full git repo; likewise, any
     # mirror might store an instance with truncated history.
-    source_stage.disable_mirrors()
+    package.stage[0].disable_mirrors()

-    source_stage.fetcher.set_package(package)
     package.stage.steal_source(abspath)

@@ -3,7 +3,6 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

-import errno
 import glob
 import os

@@ -12,13 +11,43 @@
 import spack.cmd
 import spack.paths
 import spack.repo
-import spack.util.editor
+from spack.spec import Spec
+from spack.util.editor import editor

 description = "open package files in $EDITOR"
 section = "packaging"
 level = "short"


+def edit_package(name, repo_path, namespace):
+    """Opens the requested package file in your favorite $EDITOR.
+
+    Args:
+        name (str): The name of the package
+        repo_path (str): The path to the repository containing this package
+        namespace (str): A valid namespace registered with Spack
+    """
+    # Find the location of the package
+    if repo_path:
+        repo = spack.repo.Repo(repo_path)
+    elif namespace:
+        repo = spack.repo.PATH.get_repo(namespace)
+    else:
+        repo = spack.repo.PATH
+    path = repo.filename_for_package_name(name)
+
+    spec = Spec(name)
+    if os.path.exists(path):
+        if not os.path.isfile(path):
+            tty.die("Something is wrong. '{0}' is not a file!".format(path))
+        if not os.access(path, os.R_OK):
+            tty.die("Insufficient permissions on '%s'!" % path)
+    else:
+        raise spack.repo.UnknownPackageError(spec.name)
+
+    editor(path)
+
+
 def setup_parser(subparser):
     excl_args = subparser.add_mutually_exclusive_group()

@@ -69,67 +98,41 @@ def setup_parser(subparser):
     excl_args.add_argument("-r", "--repo", default=None, help="path to repo to edit package in")
     excl_args.add_argument("-N", "--namespace", default=None, help="namespace of package to edit")

-    subparser.add_argument("package", nargs="*", default=None, help="package name")
-
-
-def locate_package(name: str, repo: spack.repo.Repo) -> str:
-    path = repo.filename_for_package_name(name)
-
-    try:
-        with open(path, "r"):
-            return path
-    except OSError as e:
-        if e.errno == errno.ENOENT:
-            raise spack.repo.UnknownPackageError(name) from e
-        tty.die(f"Cannot edit package: {e}")
-
-
-def locate_file(name: str, path: str) -> str:
-    # convert command names to python module name
-    if path == spack.paths.command_path:
-        name = spack.cmd.python_name(name)
-
-    file_path = os.path.join(path, name)
-
-    # Try to open direct match.
-    try:
-        with open(file_path, "r"):
-            return file_path
-    except OSError as e:
-        if e.errno != errno.ENOENT:
-            tty.die(f"Cannot edit file: {e}")
-        pass
-
-    # Otherwise try to find a file that starts with the name
-    candidates = glob.glob(file_path + "*")
-    exclude_list = [".pyc", "~"]  # exclude binaries and backups
-    files = [f for f in candidates if not any(f.endswith(ext) for ext in exclude_list)]
-    if len(files) > 1:
-        tty.die(
-            f"Multiple files start with `{name}`:\n"
-            + "\n".join(f"        {os.path.basename(f)}" for f in files)
-        )
-    elif not files:
-        tty.die(f"No file for '{name}' was found in {path}")
-    return files[0]
+    subparser.add_argument("package", nargs="?", default=None, help="package name")


 def edit(parser, args):
-    names = args.package
+    name = args.package
+
+    # By default, edit package files
+    path = spack.paths.packages_path

     # If `--command`, `--test`, or `--module` is chosen, edit those instead
     if args.path:
-        paths = [locate_file(name, args.path) for name in names] if names else [args.path]
-        spack.util.editor.editor(*paths)
-    elif names:
-        if args.repo:
-            repo = spack.repo.from_path(args.repo)
-        elif args.namespace:
-            repo = spack.repo.PATH.get_repo(args.namespace)
-        else:
-            repo = spack.repo.PATH
-        paths = [locate_package(name, repo) for name in names]
-        spack.util.editor.editor(*paths)
+        path = args.path
+        if name:
+            # convert command names to python module name
+            if path == spack.paths.command_path:
+                name = spack.cmd.python_name(name)
+
+            path = os.path.join(path, name)
+            if not os.path.exists(path):
+                files = glob.glob(path + "*")
+                exclude_list = [".pyc", "~"]  # exclude binaries and backups
+                files = list(filter(lambda x: all(s not in x for s in exclude_list), files))
+                if len(files) > 1:
+                    m = "Multiple files exist with the name {0}.".format(name)
+                    m += " Please specify a suffix. Files are:\n\n"
+                    for f in files:
+                        m += "        " + os.path.basename(f) + "\n"
+                    tty.die(m)
+                if not files:
+                    tty.die("No file for '{0}' was found in {1}".format(name, path))
+                path = files[0]  # already confirmed only one entry in files
+
+        editor(path)
+    elif name:
+        edit_package(name, args.repo, args.namespace)
     else:
         # By default open the directory where packages live
-        spack.util.editor.editor(spack.paths.packages_path)
+        editor(path)

@@ -10,13 +10,13 @@
 import sys
 import tempfile
 from pathlib import Path
-from typing import List, Optional
+from typing import Optional

 import llnl.string as string
 import llnl.util.filesystem as fs
 import llnl.util.tty as tty
 from llnl.util.tty.colify import colify
-from llnl.util.tty.color import cescape, colorize
+from llnl.util.tty.color import colorize

 import spack.cmd
 import spack.cmd.common
@@ -61,7 +61,14 @@
 #
 def env_create_setup_parser(subparser):
     """create a new environment"""
-    subparser.add_argument("env_name", metavar="env", help="name or directory of environment")
+    subparser.add_argument(
+        "env_name",
+        metavar="env",
+        help=(
+            "name of managed environment or directory of the anonymous env "
+            "(when using --dir/-d) to activate"
+        ),
+    )
     subparser.add_argument(
         "-d", "--dir", action="store_true", help="create an environment in a specific directory"
     )
@@ -87,9 +94,6 @@ def env_create_setup_parser(subparser):
         default=None,
         help="either a lockfile (must end with '.json' or '.lock') or a manifest file",
     )
-    subparser.add_argument(
-        "--include-concrete", action="append", help="name of old environment to copy specs from"
-    )


 def env_create(args):
@@ -107,32 +111,19 @@ def env_create(args):
         # the environment should not include a view.
         with_view = None

-    include_concrete = None
-    if hasattr(args, "include_concrete"):
-        include_concrete = args.include_concrete
-
     env = _env_create(
         args.env_name,
         init_file=args.envfile,
-        dir=args.dir or os.path.sep in args.env_name or args.env_name in (".", ".."),
+        dir=args.dir,
         with_view=with_view,
         keep_relative=args.keep_relative,
-        include_concrete=include_concrete,
     )

     # Generate views, only really useful for environments created from spack.lock files.
     env.regenerate_views()


-def _env_create(
-    name_or_path: str,
-    *,
-    init_file: Optional[str] = None,
-    dir: bool = False,
-    with_view: Optional[str] = None,
-    keep_relative: bool = False,
-    include_concrete: Optional[List[str]] = None,
-):
+def _env_create(name_or_path, *, init_file=None, dir=False, with_view=None, keep_relative=False):
     """Create a new environment, with an optional yaml description.

     Arguments:
@@ -144,31 +135,22 @@ def _env_create(
         keep_relative (bool): if True, develop paths are copied verbatim into
             the new environment file, otherwise they may be made absolute if the
             new environment is in a different location
-        include_concrete (list): list of the included concrete environments
     """
     if not dir:
         env = ev.create(
-            name_or_path,
-            init_file=init_file,
-            with_view=with_view,
-            keep_relative=keep_relative,
-            include_concrete=include_concrete,
+            name_or_path, init_file=init_file, with_view=with_view, keep_relative=keep_relative
         )
-        tty.msg(
-            colorize(
-                f"Created environment @c{{{cescape(name_or_path)}}} in: @c{{{cescape(env.path)}}}"
-            )
-        )
-    else:
-        env = ev.create_in_dir(
-            name_or_path,
-            init_file=init_file,
-            with_view=with_view,
-            keep_relative=keep_relative,
-            include_concrete=include_concrete,
-        )
-        tty.msg(colorize(f"Created independent environment in: @c{{{cescape(env.path)}}}"))
-        tty.msg(f"Activate with: {colorize(f'@c{{spack env activate {cescape(name_or_path)}}}')}")
+        tty.msg("Created environment '%s' in %s" % (name_or_path, env.path))
+        tty.msg("You can activate this environment with:")
+        tty.msg("    spack env activate %s" % (name_or_path))
+        return env
+
+    env = ev.create_in_dir(
+        name_or_path, init_file=init_file, with_view=with_view, keep_relative=keep_relative
+    )
+    tty.msg("Created environment in %s" % env.path)
+    tty.msg("You can activate this environment with:")
+    tty.msg("    spack env activate %s" % env.path)
     return env
@@ -454,12 +436,6 @@ def env_remove_setup_parser(subparser):
     """remove an existing environment"""
     subparser.add_argument("rm_env", metavar="env", nargs="+", help="environment(s) to remove")
     arguments.add_common_arguments(subparser, ["yes_to_all"])
-    subparser.add_argument(
-        "-f",
-        "--force",
-        action="store_true",
-        help="remove the environment even if it is included in another environment",
-    )


 def env_remove(args):
@@ -469,35 +445,13 @@ def env_remove(args):
     and manifests embedded in repositories should be removed manually.
     """
     read_envs = []
-    valid_envs = []
     bad_envs = []
-    invalid_envs = []
-
-    for env_name in ev.all_environment_names():
+    for env_name in args.rm_env:
         try:
             env = ev.read(env_name)
-            valid_envs.append(env_name)
-
-            if env_name in args.rm_env:
-                read_envs.append(env)
+            read_envs.append(env)
         except (spack.config.ConfigFormatError, ev.SpackEnvironmentConfigError):
-            invalid_envs.append(env_name)
-
-            if env_name in args.rm_env:
-                bad_envs.append(env_name)
-
-    # Check if env is linked to another before trying to remove
-    for name in valid_envs:
-        # don't check if environment is included to itself
-        if name == env_name:
-            continue
-        environ = ev.Environment(ev.root(name))
-        if ev.root(env_name) in environ.included_concrete_envs:
-            msg = f'Environment "{env_name}" is being used by environment "{name}"'
-            if args.force:
-                tty.warn(msg)
-            else:
-                tty.die(msg)
+            bad_envs.append(env_name)

     if not args.yes_to_all:
         environments = string.plural(len(args.rm_env), "environment", show_n=False)

@@ -7,7 +7,7 @@
 import os
 import re
 import sys
-from typing import List, Optional, Set
+from typing import List, Optional

 import llnl.util.tty as tty
 import llnl.util.tty.colify as colify
@@ -19,7 +19,6 @@
 import spack.detection
 import spack.error
 import spack.repo
-import spack.spec
 import spack.util.environment
 from spack.cmd.common import arguments

@@ -139,26 +138,14 @@ def external_find(args):
         candidate_packages, path_hints=args.path, max_workers=args.jobs
     )

-    new_specs = spack.detection.update_configuration(
+    new_entries = spack.detection.update_configuration(
         detected_packages, scope=args.scope, buildable=not args.not_buildable
     )
-
-    # If the user runs `spack external find --not-buildable mpich` we also mark `mpi` non-buildable
-    # to avoid that the concretizer picks a different mpi provider.
-    if new_specs and args.not_buildable:
-        virtuals: Set[str] = {
-            virtual.name
-            for new_spec in new_specs
-            for virtual_specs in spack.repo.PATH.get_pkg_class(new_spec.name).provided.values()
-            for virtual in virtual_specs
-        }
-        new_virtuals = spack.detection.set_virtuals_nonbuildable(virtuals, scope=args.scope)
-        new_specs.extend(spack.spec.Spec(name) for name in new_virtuals)
-
-    if new_specs:
+    if new_entries:
         path = spack.config.CONFIG.get_config_filename(args.scope, "packages")
-        tty.msg(f"The following specs have been detected on this system and added to {path}")
-        spack.cmd.display_specs(new_specs)
+        msg = "The following specs have been detected on this system and added to {0}"
+        tty.msg(msg.format(path))
+        spack.cmd.display_specs(new_entries)
     else:
         tty.msg("No new external packages detected")

@@ -3,7 +3,6 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

-import copy
 import sys

 import llnl.util.lang
@@ -46,10 +45,6 @@ def setup_parser(subparser):
         help="output specs as machine-readable json records",
     )

-    subparser.add_argument(
-        "-I", "--install-status", action="store_true", help="show install status of packages"
-    )
-
     subparser.add_argument(
         "-d", "--deps", action="store_true", help="output dependencies along with found specs"
     )
@@ -276,45 +271,25 @@ def root_decorator(spec, string):

     print()

-    if env.included_concrete_envs:
-        tty.msg("Included specs")
-
-        # Root specs cannot be displayed with prefixes, since those are not
-        # set for abstract specs. Same for hashes
-        root_args = copy.copy(args)
-        root_args.paths = False
-
-        # Roots are displayed with variants, etc. so that we can see
-        # specifically what the user asked for.
-        cmd.display_specs(
-            env.included_user_specs,
-            root_args,
-            decorator=lambda s, f: color.colorize("@*{%s}" % f),
-            namespace=True,
-            show_flags=True,
-            show_full_compiler=True,
-            variants=True,
-        )
+    if args.show_concretized:
+        tty.msg("Concretized roots")
+        cmd.display_specs(env.specs_by_hash.values(), args, decorator=decorator)
         print()

+    # Display a header for the installed packages section IF there are installed
+    # packages. If there aren't any, we'll just end up printing "0 installed packages"
+    # later.
+    if results and not args.only_roots:
+        tty.msg("Installed packages")
+

 def find(parser, args):
-    env = ev.active_environment()
+    q_args = query_arguments(args)
+    results = args.specs(**q_args)
+
+    env = ev.active_environment()
     if not env and args.only_roots:
         tty.die("-r / --only-roots requires an active environment")
-    if not env and args.show_concretized:
-        tty.die("-c / --show-concretized requires an active environment")
-
-    if env:
-        if args.constraint:
-            init_specs = spack.cmd.parse_specs(args.constraint)
-            results = env.all_matching_specs(*init_specs)
-        else:
-            results = env.all_specs()
-    else:
-        q_args = query_arguments(args)
-        results = args.specs(**q_args)

     decorator = make_env_decorator(env) if env else lambda s, f: f

@@ -335,11 +310,6 @@ def find(parser, args):
     if args.loaded:
         results = spack.cmd.filter_loaded_specs(results)

-    if args.install_status or args.show_concretized:
-        status_fn = spack.spec.Spec.install_status
-    else:
-        status_fn = None
-
     # Display the result
     if args.json:
         cmd.display_specs_as_json(results, deps=args.deps)
@@ -348,34 +318,12 @@ def find(parser, args):
         if env:
             display_env(env, args, decorator, results)

+        count_suffix = " (not shown)"
         if not args.only_roots:
-            display_results = results
-            if not args.show_concretized:
-                display_results = list(x for x in results if x.installed)
-            cmd.display_specs(
-                display_results, args, decorator=decorator, all_headers=True, status_fn=status_fn
-            )
+            cmd.display_specs(results, args, decorator=decorator, all_headers=True)
+            count_suffix = ""

         # print number of installed packages last (as the list may be long)
         if sys.stdout.isatty() and args.groups:
-            installed_suffix = ""
-            concretized_suffix = " to be installed"
-
-            if args.only_roots:
-                installed_suffix += " (not shown)"
-                concretized_suffix += " (not shown)"
-            else:
-                if env and not args.show_concretized:
-                    concretized_suffix += " (show with `spack find -c`)"
-
             pkg_type = "loaded" if args.loaded else "installed"
-            spack.cmd.print_how_many_pkgs(
-                list(x for x in results if x.installed), pkg_type, suffix=installed_suffix
-            )
-
-            if env:
-                spack.cmd.print_how_many_pkgs(
-                    list(x for x in results if not x.installed),
-                    "concretized",
-                    suffix=concretized_suffix,
-                )
+            spack.cmd.print_how_many_pkgs(results, pkg_type, suffix=count_suffix)

@@ -56,6 +56,7 @@ def roots_from_environments(args, active_env):

     # -e says "also preserve things needed by this particular env"
     for env_name_or_dir in args.except_environment:
+        print("HMM", env_name_or_dir)
         if ev.exists(env_name_or_dir):
             env = ev.read(env_name_or_dir)
         elif ev.is_env_dir(env_name_or_dir):

@@ -9,7 +9,7 @@
|
|||||||
import spack.environment as ev
|
import spack.environment as ev
|
||||||
import spack.store
|
import spack.store
|
||||||
from spack.cmd.common import arguments
|
from spack.cmd.common import arguments
|
||||||
from spack.graph import DAGWithDependencyTypes, SimpleDAG, graph_ascii, graph_dot, static_graph_dot
|
from spack.graph import DotGraph, MermaidGraph, DAGWithDependencyTypes, SimpleDAG, graph_ascii, graph_dot, static_graph_dot
|
||||||
|
|
||||||
description = "generate graphs of package dependency relationships"
|
description = "generate graphs of package dependency relationships"
|
||||||
section = "basic"
|
section = "basic"
|
||||||
@@ -33,6 +33,9 @@ def setup_parser(subparser):
     method.add_argument(
         "-d", "--dot", action="store_true", help="generate graph in dot format and print to stdout"
     )
+    method.add_argument(
+        "-m", "--mermaid", action="store_true", help="generate graph in mermaid format and print to stdout"
+    )
 
     subparser.add_argument(
         "-s",
@@ -85,10 +88,14 @@ def graph(parser, args):
         static_graph_dot(specs, depflag=args.deptype)
         return
 
-    if args.dot:
-        builder = SimpleDAG()
+    if args.dot or args.mermaid:
+        if args.dot:
+            graph = DotGraph()
+        if args.mermaid:
+            graph = MermaidGraph()
+        builder = SimpleDAG(graph=graph)
         if args.color:
-            builder = DAGWithDependencyTypes()
+            builder = DAGWithDependencyTypes(graph=graph)
         graph_dot(specs, builder=builder, depflag=args.deptype)
         return
 
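The writer classes themselves (`DotGraph` and `MermaidGraph` in `spack.graph`) are among the files not shown in this diff. Judging from the call sites above, a writer only needs to know its format's preamble and edge syntax; a minimal sketch of the Mermaid side under that assumption (the class name comes from the import hunk, but the method names and Mermaid dialect here are hypothetical, not the branch's actual code):

    import sys

    class MermaidGraph:
        """Emit a dependency DAG in Mermaid flowchart syntax."""

        def __init__(self, out=sys.stdout):
            self.out = out

        def header(self):
            self.out.write("flowchart TD\n")

        def node(self, node_id, label):
            self.out.write(f'  {node_id}["{label}"]\n')

        def edge(self, parent_id, child_id):
            self.out.write(f"  {parent_id} --> {child_id}\n")

    g = MermaidGraph()
    g.header()
    g.node("n0", "zlib-ng")
    g.node("n1", "cmake")
    g.edge("n0", "n1")

With the new flag wired into `setup_parser` and `graph`, `spack graph --mermaid <spec>` would print such a block to stdout, ready to paste into any Mermaid renderer, just as `spack graph --dot` does for Graphviz.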
@@ -50,7 +50,7 @@
 @B{++}, @r{--}, @r{~~}, @B{==}  propagate variants to package dependencies
 
 architecture variants:
-    @m{platform=platform}        linux, darwin, freebsd, windows
+    @m{platform=platform}        linux, darwin, cray, etc.
     @m{os=operating_system}      specific <operating_system>
     @m{target=target}            specific <target> processor
     @m{arch=platform-os-target}  shortcut for all three above
@@ -10,7 +10,6 @@
 from typing import List
 
 import llnl.util.filesystem as fs
-from llnl.string import plural
 from llnl.util import lang, tty
 
 import spack.build_environment
@@ -62,6 +61,7 @@ def install_kwargs_from_args(args):
         "dependencies_use_cache": cache_opt(args.use_cache, dep_use_bc),
         "dependencies_cache_only": cache_opt(args.cache_only, dep_use_bc),
         "include_build_deps": args.include_build_deps,
+        "explicit": True,  # Use true as a default for install command
         "stop_at": args.until,
         "unsigned": args.unsigned,
         "install_deps": ("dependencies" in args.things_to_install),
@@ -376,9 +376,7 @@ def _maybe_add_and_concretize(args, env, specs):
         # `spack concretize`
         tests = compute_tests_install_kwargs(env.user_specs, args.test)
         concretized_specs = env.concretize(tests=tests)
-        if concretized_specs:
-            tty.msg(f"Concretized {plural(len(concretized_specs), 'spec')}")
-            ev.display_specs([concrete for _, concrete in concretized_specs])
+        ev.display_specs(concretized_specs)
 
         # save view regeneration for later, so that we only do it
         # once, as it can be slow.
@@ -475,7 +473,6 @@ def install_without_active_env(args, install_kwargs, reporter_factory):
     require_user_confirmation_for_overwrite(concrete_specs, args)
     install_kwargs["overwrite"] = [spec.dag_hash() for spec in concrete_specs]
 
-    installs = [s.package for s in concrete_specs]
-    install_kwargs["explicit"] = [s.dag_hash() for s in concrete_specs]
-    builder = PackageInstaller(installs, install_kwargs)
+    installs = [(s.package, install_kwargs) for s in concrete_specs]
+    builder = PackageInstaller(installs)
     builder.install()
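The hunk captures an API difference in `PackageInstaller`: the branch pairs the kwargs dict with every package, while develop passes a flat package list plus one shared kwargs dict and records explicitness as a list of DAG hashes. Juxtaposed (both calls are lifted from the hunk above; only the comments are added):

    # feature branch: per-package (package, kwargs) tuples
    installs = [(s.package, install_kwargs) for s in concrete_specs]
    builder = PackageInstaller(installs)

    # develop: flat package list, one shared kwargs dict, explicit hashes inside it
    install_kwargs["explicit"] = [s.dag_hash() for s in concrete_specs]
    builder = PackageInstaller([s.package for s in concrete_specs], install_kwargs)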
@@ -91,7 +91,7 @@ def repo_add(args):
         tty.die("Not a Spack repository: %s" % path)
 
     # Make sure it's actually a spack repository by constructing it.
-    repo = spack.repo.from_path(canon_path)
+    repo = spack.repo.Repo(canon_path)
 
     # If that succeeds, finally add it to the configuration.
     repos = spack.config.get("repos", scope=args.scope)
@@ -124,7 +124,7 @@ def repo_remove(args):
     # If it is a namespace, remove corresponding repo
     for path in repos:
         try:
-            repo = spack.repo.from_path(path)
+            repo = spack.repo.Repo(path)
             if repo.namespace == namespace_or_path:
                 repos.remove(path)
                 spack.config.set("repos", repos, args.scope)
@@ -142,7 +142,7 @@ def repo_list(args):
     repos = []
     for r in roots:
         try:
-            repos.append(spack.repo.from_path(r))
+            repos.append(spack.repo.Repo(r))
         except spack.repo.RepoError:
             continue
 
@@ -114,16 +114,15 @@ def _process_result(result, show, required_format, kwargs):
 
     # dump the solutions as concretized specs
     if "solutions" in show:
-        if required_format:
-            for spec in result.specs:
-                # With -y, just print YAML to output.
-                if required_format == "yaml":
-                    # use write because to_yaml already has a newline.
-                    sys.stdout.write(spec.to_yaml(hash=ht.dag_hash))
-                elif required_format == "json":
-                    sys.stdout.write(spec.to_json(hash=ht.dag_hash))
-        else:
-            sys.stdout.write(spack.spec.tree(result.specs, color=sys.stdout.isatty(), **kwargs))
+        for spec in result.specs:
+            # With -y, just print YAML to output.
+            if required_format == "yaml":
+                # use write because to_yaml already has a newline.
+                sys.stdout.write(spec.to_yaml(hash=ht.dag_hash))
+            elif required_format == "json":
+                sys.stdout.write(spec.to_json(hash=ht.dag_hash))
+            else:
+                sys.stdout.write(spec.tree(color=sys.stdout.isatty(), **kwargs))
         print()
 
     if result.unsolved_specs and "solutions" in show:
@@ -105,19 +105,11 @@ def spec(parser, args):
     if env:
         env.concretize()
         specs = env.concretized_specs()
-
-        # environments are printed together in a combined tree() invocation,
-        # except when using --yaml or --json, which we print spec by spec below.
-        if not args.format:
-            tree_kwargs["key"] = spack.traverse.by_dag_hash
-            tree_kwargs["hashes"] = args.long or args.very_long
-            print(spack.spec.tree([concrete for _, concrete in specs], **tree_kwargs))
-            return
     else:
         tty.die("spack spec requires at least one spec or an active environment")
 
     for input, output in specs:
-        # With --yaml or --json, just print the raw specs to output
+        # With -y, just print YAML to output.
         if args.format:
             if args.format == "yaml":
                 # use write because to_yaml already has a newline.
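The develop-only block is what lets `spack spec` print an environment as one merged tree rather than one tree per root: keying the traversal by DAG hash makes a dependency shared by several roots appear only once, which the removed comment states outright. The two relevant lines, lifted from the hunk (the trailing comment is added):

    tree_kwargs["key"] = spack.traverse.by_dag_hash  # merge nodes shared across roots
    print(spack.spec.tree([concrete for _, concrete in specs], **tree_kwargs))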
@@ -23,7 +23,7 @@
 
 
 # tutorial configuration parameters
-tutorial_branch = "releases/v0.22"
+tutorial_branch = "releases/v0.21"
 tutorial_mirror = "file:///mirror"
 tutorial_key = os.path.join(spack.paths.share_path, "keys", "tutorial.pub")
 
@@ -151,8 +151,7 @@ def is_installed(spec):
         key=lambda s: s.dag_hash(),
     )
 
-    with spack.store.STORE.db.read_transaction():
-        return [spec for spec in specs if is_installed(spec)]
+    return [spec for spec in specs if is_installed(spec)]
 
 
 def dependent_environments(
@@ -240,8 +239,6 @@ def get_uninstall_list(args, specs: List[spack.spec.Spec], env: Optional[ev.Envi
         print()
         tty.info("The following environments still reference these specs:")
         colify([e.name for e in other_dependent_envs.keys()], indent=4)
-        if env:
-            msgs.append("use `spack remove` to remove the spec from the current environment")
         msgs.append("use `spack env remove` to remove environments")
         msgs.append("use `spack uninstall --force` to override")
         print()
@@ -71,7 +71,7 @@ def unload(parser, args):
             "Cannot specify specs on command line when unloading all specs with '--all'"
         )
 
-    hashes = os.environ.get(uenv.spack_loaded_hashes_var, "").split(os.pathsep)
+    hashes = os.environ.get(uenv.spack_loaded_hashes_var, "").split(":")
     if args.specs:
         specs = [
             spack.cmd.disambiguate_spec_from_hashes(spec, hashes)
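Splitting on `os.pathsep` instead of a literal `":"` only matters off POSIX: Windows uses `";"`, so the hard-coded colon would leave the loaded-hashes list unsplit there. A quick illustration:

    import os

    # os.pathsep is ":" on POSIX and ";" on Windows
    loaded = os.pathsep.join(["abc123", "def456"])
    print(loaded.split(os.pathsep))  # ['abc123', 'def456'] on every platform
    print(loaded.split(":"))         # leaves 'abc123;def456' intact on Windows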
@@ -38,10 +38,10 @@
 
 import spack.cmd
 import spack.environment as ev
-import spack.filesystem_view as fsv
 import spack.schema.projections
 import spack.store
 from spack.config import validate
+from spack.filesystem_view import YamlFilesystemView, view_func_parser
 from spack.util import spack_yaml as s_yaml
 
 description = "project packages to a compact naming scheme on the filesystem"
@@ -193,13 +193,17 @@ def view(parser, args):
     ordered_projections = {}
 
     # What method are we using for this view
-    link_type = args.action if args.action in actions_link else "symlink"
-    view = fsv.YamlFilesystemView(
+    if args.action in actions_link:
+        link_fn = view_func_parser(args.action)
+    else:
+        link_fn = view_func_parser("symlink")
+
+    view = YamlFilesystemView(
         path,
         spack.store.STORE.layout,
         projections=ordered_projections,
         ignore_conflicts=getattr(args, "ignore_conflicts", False),
-        link_type=link_type,
+        link=link_fn,
         verbose=args.verbose,
     )
 
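Both sides derive the link strategy from `args.action`; the branch resolves the action name to a link callable up front with `view_func_parser`, while develop forwards the name as `link_type` and lets the view resolve it internally. A rough, hypothetical stand-in for what such a parser does (the real one lives in `spack.filesystem_view` and wraps these callables with its own error handling; the mapping below is an assumption):

    import os
    import shutil

    def view_func_parser(action_name):
        # map a view action name to the callable that materializes each file
        if action_name in ("hardlink", "hard"):
            return os.link
        if action_name in ("copy", "relocate"):
            return shutil.copy2
        return os.symlink  # "symlink"/"add" and the default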
@@ -18,6 +18,7 @@
 import llnl.util.tty as tty
 from llnl.util.filesystem import path_contains_subdirectory, paths_containing_libs
 
+import spack.compilers
 import spack.error
 import spack.schema.environment
 import spack.spec
@@ -694,6 +695,10 @@ def compiler_environment(self):
         try:
             # load modules and set env variables
             for module in self.modules:
+                # On cray, mic-knl module cannot be loaded without cce module
+                # See: https://github.com/spack/spack/issues/3153
+                if os.environ.get("CRAY_CPU_TARGET") == "mic-knl":
+                    spack.util.module_cmd.load_module("cce")
                 spack.util.module_cmd.load_module(module)
 
             # apply other compiler environment changes
@@ -164,66 +164,43 @@ def _compiler_config_from_package_config(config):
 
 
 def _compiler_config_from_external(config):
-    extra_attributes_key = "extra_attributes"
-    compilers_key = "compilers"
-    c_key, cxx_key, fortran_key = "c", "cxx", "fortran"
-
-    # Allow `@x.y.z` instead of `@=x.y.z`
     spec = spack.spec.parse_with_version_concrete(config["spec"])
+    # use str(spec.versions) to allow `@x.y.z` instead of `@=x.y.z`
     compiler_spec = spack.spec.CompilerSpec(
         package_name_to_compiler_name.get(spec.name, spec.name), spec.version
     )
 
-    err_header = f"The external spec '{spec}' cannot be used as a compiler"
+    extra_attributes = config.get("extra_attributes", {})
+    prefix = config.get("prefix", None)
 
-    # If extra_attributes is not there I might not want to use this entry as a compiler,
-    # therefore just leave a debug message, but don't be loud with a warning.
-    if extra_attributes_key not in config:
-        tty.debug(f"[{__file__}] {err_header}: missing the '{extra_attributes_key}' key")
+    compiler_class = class_for_compiler_name(compiler_spec.name)
+    paths = extra_attributes.get("paths", {})
+    compiler_langs = ["cc", "cxx", "fc", "f77"]
+    for lang in compiler_langs:
+        if paths.setdefault(lang, None):
+            continue
+
+        if not prefix:
+            continue
+
+        # Check for files that satisfy the naming scheme for this compiler
+        bindir = os.path.join(prefix, "bin")
+        for f, regex in itertools.product(os.listdir(bindir), compiler_class.search_regexps(lang)):
+            if regex.match(f):
+                paths[lang] = os.path.join(bindir, f)
+
+    if all(v is None for v in paths.values()):
         return None
-    extra_attributes = config[extra_attributes_key]
-
-    # If I have 'extra_attributes' warn if 'compilers' is missing, or we don't have a C compiler
-    if compilers_key not in extra_attributes:
-        warnings.warn(
-            f"{err_header}: missing the '{compilers_key}' key under '{extra_attributes_key}'"
-        )
-        return None
-    attribute_compilers = extra_attributes[compilers_key]
-
-    if c_key not in attribute_compilers:
-        warnings.warn(
-            f"{err_header}: missing the C compiler path under "
-            f"'{extra_attributes_key}:{compilers_key}'"
-        )
-        return None
-    c_compiler = attribute_compilers[c_key]
-
-    # C++ and Fortran compilers are not mandatory, so let's just leave a debug trace
-    if cxx_key not in attribute_compilers:
-        tty.debug(f"[{__file__}] The external spec {spec} does not have a C++ compiler")
-
-    if fortran_key not in attribute_compilers:
-        tty.debug(f"[{__file__}] The external spec {spec} does not have a Fortran compiler")
-
-    # compilers format has cc/fc/f77, externals format has "c/fortran"
-    paths = {
-        "cc": c_compiler,
-        "cxx": attribute_compilers.get(cxx_key, None),
-        "fc": attribute_compilers.get(fortran_key, None),
-        "f77": attribute_compilers.get(fortran_key, None),
-    }
 
     if not spec.architecture:
         host_platform = spack.platforms.host()
         operating_system = host_platform.operating_system("default_os")
         target = host_platform.target("default_target").microarchitecture
     else:
-        target = spec.architecture.target
+        target = spec.target
         if not target:
-            target = spack.platforms.host().target("default_target")
-            target = target.microarchitecture
+            host_platform = spack.platforms.host()
+            target = host_platform.target("default_target").microarchitecture
 
         operating_system = spec.os
         if not operating_system:
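The two implementations therefore expect differently shaped external entries. Develop requires the compiler paths to be spelled out under `extra_attributes:compilers`; the branch accepts `extra_attributes:paths` or simply probes `<prefix>/bin` against the compiler class's filename regexes. Expressed as the Python dicts this function receives (key names taken from the hunk; the gcc version and paths are illustrative):

    # develop: explicit per-language paths are mandatory
    external_develop = {
        "spec": "gcc@12.3.0",
        "extra_attributes": {"compilers": {"c": "/usr/bin/gcc", "cxx": "/usr/bin/g++"}},
    }

    # feature branch: a prefix is enough; cc/cxx/fc/f77 are discovered under prefix/bin
    external_branch = {"spec": "gcc@12.3.0", "prefix": "/usr"}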
@@ -260,7 +237,7 @@ def _init_compiler_config(
 def compiler_config_files():
     config_files = list()
     config = spack.config.CONFIG
-    for scope in config.writable_scopes:
+    for scope in config.file_scopes:
         name = scope.name
         compiler_config = config.get("compilers", scope=name)
         if compiler_config:
@@ -488,7 +465,7 @@ def supported_compilers_for_host_platform() -> List[str]:
     return supported_compilers_for_platform(host_plat)
 
 
-def supported_compilers_for_platform(platform: "spack.platforms.Platform") -> List[str]:
+def supported_compilers_for_platform(platform: spack.platforms.Platform) -> List[str]:
     """Return a set of compiler class objects supported by Spack
     that are also supported by the provided platform
 
@@ -96,8 +96,6 @@ def verbose_flag(self):
 
     openmp_flag = "-fopenmp"
 
-    # C++ flags based on CMake Modules/Compiler/Clang.cmake
-
     @property
     def cxx11_flag(self):
         if self.real_version < Version("3.3"):
@@ -122,24 +120,6 @@ def cxx17_flag(self):
 
         return "-std=c++17"
 
-    @property
-    def cxx20_flag(self):
-        if self.real_version < Version("5.0"):
-            raise UnsupportedCompilerFlag(self, "the C++20 standard", "cxx20_flag", "< 5.0")
-        elif self.real_version < Version("11.0"):
-            return "-std=c++2a"
-        else:
-            return "-std=c++20"
-
-    @property
-    def cxx23_flag(self):
-        if self.real_version < Version("12.0"):
-            raise UnsupportedCompilerFlag(self, "the C++23 standard", "cxx23_flag", "< 12.0")
-        elif self.real_version < Version("17.0"):
-            return "-std=c++2b"
-        else:
-            return "-std=c++23"
-
     @property
     def c99_flag(self):
         return "-std=c99"
@@ -162,10 +142,7 @@ def c23_flag(self):
     def c23_flag(self):
         if self.real_version < Version("9.0"):
             raise UnsupportedCompilerFlag(self, "the C23 standard", "c23_flag", "< 9.0")
-        elif self.real_version < Version("18.0"):
-            return "-std=c2x"
-        else:
-            return "-std=c23"
+        return "-std=c2x"
 
     @property
     def cc_pic_flag(self):
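All of these clang properties follow one pattern: raise `UnsupportedCompilerFlag` below the version that introduced the standard, return the draft spelling for the middle range, and the final spelling once the compiler accepts it. A standalone illustration of the develop-side `c23_flag` gating (plain tuples stand in for Spack's `Version` objects):

    def c23_flag(real_version):
        if real_version < (9, 0):
            raise RuntimeError("clang < 9.0 does not support the C23 standard")
        elif real_version < (18, 0):
            return "-std=c2x"  # draft spelling, clang 9-17
        else:
            return "-std=c23"  # final spelling, clang >= 18

    assert c23_flag((17, 0)) == "-std=c2x"
    assert c23_flag((18, 1)) == "-std=c23"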
Some files were not shown because too many files have changed in this diff.