Compare commits

1 commit

Harmen Stoppels · 7ffaa3e454 · simpler spack bootstrap remove args... · 2024-04-30 15:23:04 +02:00

511 changed files with 5793 additions and 11317 deletions

@@ -17,51 +17,33 @@ concurrency:
 jobs:
   # Run audits on all the packages in the built-in repository
   package-audits:
-    runs-on: ${{ matrix.system.os }}
+    runs-on: ${{ matrix.operating_system }}
     strategy:
       matrix:
-        system:
-        - { os: windows-latest, shell: 'powershell Invoke-Expression -Command "./share/spack/qa/windows_test_setup.ps1"; {0}' }
-        - { os: ubuntu-latest, shell: bash }
-        - { os: macos-latest, shell: bash }
-    defaults:
-      run:
-        shell: ${{ matrix.system.shell }}
+        operating_system: ["ubuntu-latest", "macos-latest"]
     steps:
-    - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
+    - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
     - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
       with:
         python-version: ${{inputs.python_version}}
     - name: Install Python packages
       run: |
         pip install --upgrade pip setuptools pytest coverage[toml]
-    - name: Setup for Windows run
-      if: runner.os == 'Windows'
-      run: |
-        python -m pip install --upgrade pywin32
     - name: Package audits (with coverage)
-      if: ${{ inputs.with_coverage == 'true' && runner.os != 'Windows' }}
+      if: ${{ inputs.with_coverage == 'true' }}
      run: |
        . share/spack/setup-env.sh
        coverage run $(which spack) audit packages
        coverage run $(which spack) -d audit externals
        coverage combine
        coverage xml
    - name: Package audits (without coverage)
-      if: ${{ inputs.with_coverage == 'false' && runner.os != 'Windows' }}
+      if: ${{ inputs.with_coverage == 'false' }}
      run: |
        . share/spack/setup-env.sh
-        spack -d audit packages
-        spack -d audit externals
-    - name: Package audits (without coverage)
-      if: ${{ runner.os == 'Windows' }}
-      run: |
-        . share/spack/setup-env.sh
-        spack -d audit packages
-        ./share/spack/qa/validate_last_exit.ps1
-        spack -d audit externals
-        ./share/spack/qa/validate_last_exit.ps1
-    - uses: codecov/codecov-action@5ecb98a3c6b747ed38dc09f787459979aebb39be
+        $(which spack) audit packages
+        $(which spack) audit externals
+    - uses: codecov/codecov-action@84508663e988701840491b86de86b666e8a86bed
      if: ${{ inputs.with_coverage == 'true' }}
      with:
        flags: unittests,audits


@@ -1,8 +1,7 @@
 #!/bin/bash
-set -e
+set -ex
 source share/spack/setup-env.sh
-$PYTHON bin/spack bootstrap disable github-actions-v0.4
 $PYTHON bin/spack bootstrap disable spack-install
-$PYTHON bin/spack $SPACK_FLAGS solve zlib
+$PYTHON bin/spack -d solve zlib
 tree $BOOTSTRAP/store
 exit 0


@@ -13,22 +13,118 @@ concurrency:
   cancel-in-progress: true
 jobs:
-  distros-clingo-sources:
+  fedora-clingo-sources:
     runs-on: ubuntu-latest
-    container: ${{ matrix.image }}
-    strategy:
-      matrix:
-        image: ["fedora:latest", "opensuse/leap:latest"]
+    container: "fedora:latest"
     steps:
-      - name: Setup Fedora
-        if: ${{ matrix.image == 'fedora:latest' }}
+      - name: Install dependencies
         run: |
           dnf install -y \
-            bzip2 curl file gcc-c++ gcc gcc-gfortran git gzip \
+            bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
             make patch unzip which xz python3 python3-devel tree \
             cmake bison bison-devel libstdc++-static
-      - name: Setup OpenSUSE
-        if: ${{ matrix.image == 'opensuse/leap:latest' }}
+      - name: Checkout
+        uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
+        with:
+          fetch-depth: 0
+      - name: Setup non-root user
+        run: |
+          # See [1] below
+          git config --global --add safe.directory /__w/spack/spack
+          useradd spack-test && mkdir -p ~spack-test
+          chown -R spack-test . ~spack-test
+      - name: Setup repo
+        shell: runuser -u spack-test -- bash {0}
+        run: |
+          git --version
+          . .github/workflows/setup_git.sh
+      - name: Bootstrap clingo
+        shell: runuser -u spack-test -- bash {0}
+        run: |
+          source share/spack/setup-env.sh
+          spack bootstrap disable github-actions-v0.5
+          spack bootstrap disable github-actions-v0.4
+          spack external find cmake bison
+          spack -d solve zlib
+          tree ~/.spack/bootstrap/store/
+  ubuntu-clingo-sources:
+    runs-on: ubuntu-latest
+    container: "ubuntu:latest"
+    steps:
+      - name: Install dependencies
+        env:
+          DEBIAN_FRONTEND: noninteractive
+        run: |
+          apt-get update -y && apt-get upgrade -y
+          apt-get install -y \
+            bzip2 curl file g++ gcc gfortran git gnupg2 gzip \
+            make patch unzip xz-utils python3 python3-dev tree \
+            cmake bison
+      - name: Checkout
+        uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
+        with:
+          fetch-depth: 0
+      - name: Setup non-root user
+        run: |
+          # See [1] below
+          git config --global --add safe.directory /__w/spack/spack
+          useradd spack-test && mkdir -p ~spack-test
+          chown -R spack-test . ~spack-test
+      - name: Setup repo
+        shell: runuser -u spack-test -- bash {0}
+        run: |
+          git --version
+          . .github/workflows/setup_git.sh
+      - name: Bootstrap clingo
+        shell: runuser -u spack-test -- bash {0}
+        run: |
+          source share/spack/setup-env.sh
+          spack bootstrap disable github-actions-v0.5
+          spack bootstrap disable github-actions-v0.4
+          spack external find cmake bison
+          spack -d solve zlib
+          tree ~/.spack/bootstrap/store/
+  ubuntu-clingo-binaries-and-patchelf:
+    runs-on: ubuntu-latest
+    container: "ubuntu:latest"
+    steps:
+      - name: Install dependencies
+        env:
+          DEBIAN_FRONTEND: noninteractive
+        run: |
+          apt-get update -y && apt-get upgrade -y
+          apt-get install -y \
+            bzip2 curl file g++ gcc gfortran git gnupg2 gzip \
+            make patch unzip xz-utils python3 python3-dev tree
+      - name: Checkout
+        uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
+        with:
+          fetch-depth: 0
+      - name: Setup non-root user
+        run: |
+          # See [1] below
+          git config --global --add safe.directory /__w/spack/spack
+          useradd spack-test && mkdir -p ~spack-test
+          chown -R spack-test . ~spack-test
+      - name: Setup repo
+        shell: runuser -u spack-test -- bash {0}
+        run: |
+          git --version
+          . .github/workflows/setup_git.sh
+      - name: Bootstrap clingo
+        shell: runuser -u spack-test -- bash {0}
+        run: |
+          source share/spack/setup-env.sh
+          spack -d solve zlib
+          tree ~/.spack/bootstrap/store/
+  opensuse-clingo-sources:
+    runs-on: ubuntu-latest
+    container: "opensuse/leap:latest"
+    steps:
+      - name: Install dependencies
         run: |
           # Harden CI by applying the workaround described here: https://www.suse.com/support/kb/doc/?id=000019505
           zypper update -y || zypper update -y
@@ -37,9 +133,15 @@ jobs:
             make patch unzip which xz python3 python3-devel tree \
             cmake bison
       - name: Checkout
-        uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
+        uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
         with:
           fetch-depth: 0
+      - name: Setup repo
+        run: |
+          # See [1] below
+          git config --global --add safe.directory /__w/spack/spack
+          git --version
+          . .github/workflows/setup_git.sh
       - name: Bootstrap clingo
         run: |
           source share/spack/setup-env.sh
@@ -49,98 +151,77 @@ jobs:
           spack -d solve zlib
           tree ~/.spack/bootstrap/store/
-  clingo-sources:
-    runs-on: ${{ matrix.runner }}
-    strategy:
-      matrix:
-        runner: ['macos-13', 'macos-14', "ubuntu-latest"]
+  macos-clingo-sources:
+    runs-on: macos-latest
     steps:
-      - name: Setup macOS
-        if: ${{ matrix.runner != 'ubuntu-latest' }}
+      - name: Install dependencies
         run: |
-          brew install cmake bison tree
+          brew install cmake bison@2.7 tree
       - name: Checkout
-        uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
-        with:
-          fetch-depth: 0
+        uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
       - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
         with:
           python-version: "3.12"
       - name: Bootstrap clingo
         run: |
           source share/spack/setup-env.sh
+          export PATH=/usr/local/opt/bison@2.7/bin:$PATH
           spack bootstrap disable github-actions-v0.5
           spack bootstrap disable github-actions-v0.4
           spack external find --not-buildable cmake bison
           spack -d solve zlib
           tree ~/.spack/bootstrap/store/
-  gnupg-sources:
-    runs-on: ${{ matrix.runner }}
+  macos-clingo-binaries:
+    runs-on: ${{ matrix.macos-version }}
     strategy:
       matrix:
-        runner: [ 'macos-13', 'macos-14', "ubuntu-latest" ]
+        macos-version: ['macos-11', 'macos-12']
     steps:
-      - name: Setup macOS
-        if: ${{ matrix.runner != 'ubuntu-latest' }}
-        run: brew install tree gawk
-      - name: Remove system executables
+      - name: Install dependencies
         run: |
-          while [ -n "$(command -v gpg gpg2 patchelf)" ]; do
-            sudo rm $(command -v gpg gpg2 patchelf)
-          done
+          brew install tree
       - name: Checkout
-        uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
-        with:
-          fetch-depth: 0
-      - name: Bootstrap GnuPG
-        run: |
-          source share/spack/setup-env.sh
-          spack solve zlib
-          spack bootstrap disable github-actions-v0.5
-          spack bootstrap disable github-actions-v0.4
-          spack -d gpg list
-          tree ~/.spack/bootstrap/store/
-  from-binaries:
-    runs-on: ${{ matrix.runner }}
-    strategy:
-      matrix:
-        runner: ['macos-13', 'macos-14', "ubuntu-latest"]
-    steps:
-      - name: Setup macOS
-        if: ${{ matrix.runner != 'ubuntu-latest' }}
-        run: brew install tree
-      - name: Remove system executables
-        run: |
-          while [ -n "$(command -v gpg gpg2 patchelf)" ]; do
-            sudo rm $(command -v gpg gpg2 patchelf)
-          done
-      - name: Checkout
-        uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
-        with:
-          fetch-depth: 0
-      - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
-        with:
-          python-version: |
-            3.8
-            3.9
-            3.10
-            3.11
-            3.12
-      - name: Set bootstrap sources
-        run: |
-          source share/spack/setup-env.sh
-          spack bootstrap disable github-actions-v0.4
-          spack bootstrap disable spack-install
+        uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
       - name: Bootstrap clingo
         run: |
-          set -e
-          for ver in '3.8' '3.9' '3.10' '3.11' '3.12' ; do
+          set -ex
+          for ver in '3.7' '3.8' '3.9' '3.10' '3.11' ; do
             not_found=1
             ver_dir="$(find $RUNNER_TOOL_CACHE/Python -wholename "*/${ver}.*/*/bin" | grep . || true)"
+            echo "Testing $ver_dir"
+            if [[ -d "$ver_dir" ]] ; then
+              if $ver_dir/python --version ; then
+                export PYTHON="$ver_dir/python"
+                not_found=0
+                old_path="$PATH"
+                export PATH="$ver_dir:$PATH"
+                ./bin/spack-tmpconfig -b ./.github/workflows/bootstrap-test.sh
+                export PATH="$old_path"
+              fi
+            fi
+            # NOTE: test all pythons that exist, not all do on 12
+          done
+  ubuntu-clingo-binaries:
+    runs-on: ubuntu-20.04
+    steps:
+      - name: Checkout
+        uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
+        with:
+          fetch-depth: 0
+      - name: Setup repo
+        run: |
+          git --version
+          . .github/workflows/setup_git.sh
+      - name: Bootstrap clingo
+        run: |
+          set -ex
+          for ver in '3.7' '3.8' '3.9' '3.10' '3.11' ; do
+            not_found=1
+            ver_dir="$(find $RUNNER_TOOL_CACHE/Python -wholename "*/${ver}.*/*/bin" | grep . || true)"
+            echo "Testing $ver_dir"
             if [[ -d "$ver_dir" ]] ; then
-              echo "Testing $ver_dir"
               if $ver_dir/python --version ; then
                 export PYTHON="$ver_dir/python"
                 not_found=0
@@ -155,9 +236,122 @@ jobs:
               exit 1
             fi
           done
+  ubuntu-gnupg-binaries:
+    runs-on: ubuntu-latest
+    container: "ubuntu:latest"
+    steps:
+      - name: Install dependencies
+        env:
+          DEBIAN_FRONTEND: noninteractive
+        run: |
+          apt-get update -y && apt-get upgrade -y
+          apt-get install -y \
+            bzip2 curl file g++ gcc patchelf gfortran git gzip \
+            make patch unzip xz-utils python3 python3-dev tree
+      - name: Checkout
+        uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
+        with:
+          fetch-depth: 0
+      - name: Setup non-root user
+        run: |
+          # See [1] below
+          git config --global --add safe.directory /__w/spack/spack
+          useradd spack-test && mkdir -p ~spack-test
+          chown -R spack-test . ~spack-test
+      - name: Setup repo
+        shell: runuser -u spack-test -- bash {0}
+        run: |
+          git --version
+          . .github/workflows/setup_git.sh
       - name: Bootstrap GnuPG
+        shell: runuser -u spack-test -- bash {0}
         run: |
           source share/spack/setup-env.sh
+          spack bootstrap disable github-actions-v0.4
+          spack bootstrap disable spack-install
           spack -d gpg list
           tree ~/.spack/bootstrap/store/
+  ubuntu-gnupg-sources:
+    runs-on: ubuntu-latest
+    container: "ubuntu:latest"
+    steps:
+      - name: Install dependencies
+        env:
+          DEBIAN_FRONTEND: noninteractive
+        run: |
+          apt-get update -y && apt-get upgrade -y
+          apt-get install -y \
+            bzip2 curl file g++ gcc patchelf gfortran git gzip \
+            make patch unzip xz-utils python3 python3-dev tree \
+            gawk
+      - name: Checkout
+        uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
+        with:
+          fetch-depth: 0
+      - name: Setup non-root user
+        run: |
+          # See [1] below
+          git config --global --add safe.directory /__w/spack/spack
+          useradd spack-test && mkdir -p ~spack-test
+          chown -R spack-test . ~spack-test
+      - name: Setup repo
+        shell: runuser -u spack-test -- bash {0}
+        run: |
+          git --version
+          . .github/workflows/setup_git.sh
+      - name: Bootstrap GnuPG
+        shell: runuser -u spack-test -- bash {0}
+        run: |
+          source share/spack/setup-env.sh
+          spack solve zlib
+          spack bootstrap disable github-actions-v0.5
+          spack bootstrap disable github-actions-v0.4
+          spack -d gpg list
+          tree ~/.spack/bootstrap/store/
+  macos-gnupg-binaries:
+    runs-on: macos-latest
+    steps:
+      - name: Install dependencies
+        run: |
+          brew install tree
+          # Remove GnuPG since we want to bootstrap it
+          sudo rm -rf /usr/local/bin/gpg
+      - name: Checkout
+        uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
+      - name: Bootstrap GnuPG
+        run: |
+          source share/spack/setup-env.sh
+          spack bootstrap disable github-actions-v0.4
+          spack bootstrap disable spack-install
+          spack -d gpg list
+          tree ~/.spack/bootstrap/store/
+  macos-gnupg-sources:
+    runs-on: macos-latest
+    steps:
+      - name: Install dependencies
+        run: |
+          brew install gawk tree
+          # Remove GnuPG since we want to bootstrap it
+          sudo rm -rf /usr/local/bin/gpg
+      - name: Checkout
+        uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
+      - name: Bootstrap GnuPG
+        run: |
+          source share/spack/setup-env.sh
+          spack solve zlib
+          spack bootstrap disable github-actions-v0.5
+          spack bootstrap disable github-actions-v0.4
+          spack -d gpg list
+          tree ~/.spack/bootstrap/store/
+
+# [1] Distros that have patched git to resolve CVE-2022-24765 (e.g. Ubuntu patching v2.25.1)
+#     introduce breaking behavior, so we have to set `safe.directory` in gitconfig ourselves.
+#     See:
+#     - https://github.blog/2022-04-12-git-security-vulnerability-announced/
+#     - https://github.com/actions/checkout/issues/760
+#     - http://changelogs.ubuntu.com/changelogs/pool/main/g/git/git_2.25.1-1ubuntu3.3/changelog


@@ -45,18 +45,19 @@ jobs:
           [leap15, 'linux/amd64,linux/arm64,linux/ppc64le', 'opensuse/leap:15'],
           [ubuntu-focal, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:20.04'],
           [ubuntu-jammy, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:22.04'],
-          [ubuntu-noble, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:24.04'],
           [almalinux8, 'linux/amd64,linux/arm64,linux/ppc64le', 'almalinux:8'],
           [almalinux9, 'linux/amd64,linux/arm64,linux/ppc64le', 'almalinux:9'],
           [rockylinux8, 'linux/amd64,linux/arm64', 'rockylinux:8'],
           [rockylinux9, 'linux/amd64,linux/arm64', 'rockylinux:9'],
+          [fedora37, 'linux/amd64,linux/arm64,linux/ppc64le', 'fedora:37'],
+          [fedora38, 'linux/amd64,linux/arm64,linux/ppc64le', 'fedora:38'],
           [fedora39, 'linux/amd64,linux/arm64,linux/ppc64le', 'fedora:39'],
           [fedora40, 'linux/amd64,linux/arm64,linux/ppc64le', 'fedora:40']]
     name: Build ${{ matrix.dockerfile[0] }}
     if: github.repository == 'spack/spack'
     steps:
       - name: Checkout
-        uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
+        uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
       - uses: docker/metadata-action@8e5442c4ef9f78752691e2d8f8d19755c6f78e81
         id: docker_meta
@@ -88,9 +89,9 @@ jobs:
           fi
       - name: Upload Dockerfile
-        uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808
+        uses: actions/upload-artifact@a8a3f3ad30e3422c9c7b888a15615d19a852ae32
         with:
-          name: dockerfiles_${{ matrix.dockerfile[0] }}
+          name: dockerfiles
           path: dockerfiles
       - name: Set up QEMU
@@ -121,14 +122,3 @@ jobs:
       push: ${{ github.event_name != 'pull_request' }}
       tags: ${{ steps.docker_meta.outputs.tags }}
       labels: ${{ steps.docker_meta.outputs.labels }}
-  merge-dockerfiles:
-    runs-on: ubuntu-latest
-    needs: deploy-images
-    steps:
-      - name: Merge Artifacts
-        uses: actions/upload-artifact/merge@65462800fd760344b1a7b4382951275a0abb4808
-        with:
-          name: dockerfiles
-          pattern: dockerfiles_*
-          delete-merged: true


@@ -36,7 +36,7 @@ jobs:
       core: ${{ steps.filter.outputs.core }}
       packages: ${{ steps.filter.outputs.packages }}
     steps:
-      - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
+      - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
         if: ${{ github.event_name == 'push' }}
         with:
           fetch-depth: 0
@@ -77,8 +77,13 @@ jobs:
     needs: [ prechecks, changes ]
     uses: ./.github/workflows/unit_tests.yaml
     secrets: inherit
+  windows:
+    if: ${{ github.repository == 'spack/spack' && needs.changes.outputs.core == 'true' }}
+    needs: [ prechecks ]
+    uses: ./.github/workflows/windows_python.yml
+    secrets: inherit
   all:
-    needs: [ unit-tests, bootstrap ]
+    needs: [ windows, unit-tests, bootstrap ]
     runs-on: ubuntu-latest
     steps:
       - name: Success


@@ -14,7 +14,7 @@ jobs:
   build-paraview-deps:
     runs-on: windows-latest
     steps:
-    - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
+    - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
       with:
         fetch-depth: 0
     - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d


@@ -1,4 +1,4 @@
-black==24.4.2
+black==24.4.0
 clingo==5.7.1
 flake8==7.0.0
 isort==5.13.2


@@ -14,14 +14,14 @@ jobs:
     runs-on: ${{ matrix.os }}
     strategy:
       matrix:
-        os: [ubuntu-22.04]
+        os: [ubuntu-latest]
         python-version: ['3.7', '3.8', '3.9', '3.10', '3.11', '3.12']
         concretizer: ['clingo']
         on_develop:
         - ${{ github.ref == 'refs/heads/develop' }}
         include:
         - python-version: '3.11'
-          os: ubuntu-20.04
+          os: ubuntu-latest
           concretizer: original
           on_develop: ${{ github.ref == 'refs/heads/develop' }}
         - python-version: '3.6'
@@ -30,28 +30,28 @@ jobs:
           on_develop: ${{ github.ref == 'refs/heads/develop' }}
         exclude:
         - python-version: '3.7'
+          os: ubuntu-latest
           concretizer: 'clingo'
-          os: ubuntu-22.04
           on_develop: false
         - python-version: '3.8'
+          os: ubuntu-latest
           concretizer: 'clingo'
-          os: ubuntu-22.04
           on_develop: false
         - python-version: '3.9'
+          os: ubuntu-latest
           concretizer: 'clingo'
-          os: ubuntu-22.04
           on_develop: false
         - python-version: '3.10'
+          os: ubuntu-latest
           concretizer: 'clingo'
-          os: ubuntu-22.04
           on_develop: false
         - python-version: '3.11'
+          os: ubuntu-latest
           concretizer: 'clingo'
-          os: ubuntu-22.04
           on_develop: false
     steps:
-    - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
+    - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
       with:
         fetch-depth: 0
     - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
@@ -91,16 +91,16 @@ jobs:
         UNIT_TEST_COVERAGE: ${{ matrix.python-version == '3.11' }}
       run: |
         share/spack/qa/run-unit-tests
-    - uses: codecov/codecov-action@5ecb98a3c6b747ed38dc09f787459979aebb39be
+    - uses: codecov/codecov-action@84508663e988701840491b86de86b666e8a86bed
      with:
        flags: unittests,linux,${{ matrix.concretizer }}
        token: ${{ secrets.CODECOV_TOKEN }}
        verbose: true
  # Test shell integration
  shell:
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-latest
    steps:
-    - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
+    - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
      with:
        fetch-depth: 0
    - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
@@ -124,7 +124,7 @@ jobs:
        COVERAGE: true
      run: |
        share/spack/qa/run-shell-tests
-    - uses: codecov/codecov-action@5ecb98a3c6b747ed38dc09f787459979aebb39be
+    - uses: codecov/codecov-action@84508663e988701840491b86de86b666e8a86bed
      with:
        flags: shelltests,linux
        token: ${{ secrets.CODECOV_TOKEN }}
@@ -141,7 +141,7 @@ jobs:
        dnf install -y \
          bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
          make patch tcl unzip which xz
-    - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
+    - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
    - name: Setup repo and non-root user
      run: |
        git --version
@@ -158,9 +158,9 @@ jobs:
        spack unit-test -k 'not cvs and not svn and not hg' -x --verbose
  # Test for the clingo based solver (using clingo-cffi)
  clingo-cffi:
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-latest
    steps:
-    - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
+    - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
      with:
        fetch-depth: 0
    - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
@@ -185,7 +185,7 @@ jobs:
        SPACK_TEST_SOLVER: clingo
      run: |
        share/spack/qa/run-unit-tests
-    - uses: codecov/codecov-action@5ecb98a3c6b747ed38dc09f787459979aebb39be
+    - uses: codecov/codecov-action@84508663e988701840491b86de86b666e8a86bed
      with:
        flags: unittests,linux,clingo
        token: ${{ secrets.CODECOV_TOKEN }}
@@ -195,10 +195,10 @@ jobs:
    runs-on: ${{ matrix.os }}
    strategy:
      matrix:
-        os: [macos-13, macos-14]
+        os: [macos-latest, macos-14]
        python-version: ["3.11"]
    steps:
-    - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
+    - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
      with:
        fetch-depth: 0
    - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
@@ -223,39 +223,8 @@ jobs:
        $(which spack) solve zlib
        common_args=(--dist loadfile --tx '4*popen//python=./bin/spack-tmpconfig python -u ./bin/spack python' -x)
        $(which spack) unit-test --verbose --cov --cov-config=pyproject.toml --cov-report=xml:coverage.xml "${common_args[@]}"
-    - uses: codecov/codecov-action@5ecb98a3c6b747ed38dc09f787459979aebb39be
+    - uses: codecov/codecov-action@84508663e988701840491b86de86b666e8a86bed
      with:
        flags: unittests,macos
        token: ${{ secrets.CODECOV_TOKEN }}
        verbose: true
-  # Run unit tests on Windows
-  windows:
-    defaults:
-      run:
-        shell:
-          powershell Invoke-Expression -Command "./share/spack/qa/windows_test_setup.ps1"; {0}
-    runs-on: windows-latest
-    steps:
-    - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
-      with:
-        fetch-depth: 0
-    - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
-      with:
-        python-version: 3.9
-    - name: Install Python packages
-      run: |
-        python -m pip install --upgrade pip pywin32 setuptools pytest-cov clingo
-    - name: Create local develop
-      run: |
-        ./.github/workflows/setup_git.ps1
-    - name: Unit Test
-      run: |
-        spack unit-test -x --verbose --cov --cov-config=pyproject.toml
-        ./share/spack/qa/validate_last_exit.ps1
-        coverage combine -a
-        coverage xml
-    - uses: codecov/codecov-action@125fc84a9a348dbcf27191600683ec096ec9021c
-      with:
-        flags: unittests,windows
-        token: ${{ secrets.CODECOV_TOKEN }}
-        verbose: true


@@ -18,7 +18,7 @@ jobs:
   validate:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
+      - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
       - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
         with:
           python-version: '3.11'
@@ -35,7 +35,7 @@ jobs:
   style:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
+      - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
        with:
          fetch-depth: 0
      - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
@@ -70,7 +70,7 @@ jobs:
          dnf install -y \
            bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
            make patch tcl unzip which xz
-      - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
+      - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
      - name: Setup repo and non-root user
        run: |
          git --version

.github/workflows/windows_python.yml (new file, 83 lines)

@@ -0,0 +1,83 @@
name: windows
on:
workflow_call:
concurrency:
group: windows-${{github.ref}}-${{github.event.pull_request.number || github.run_number}}
cancel-in-progress: true
defaults:
run:
shell:
powershell Invoke-Expression -Command "./share/spack/qa/windows_test_setup.ps1"; {0}
jobs:
unit-tests:
runs-on: windows-latest
steps:
- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
with:
fetch-depth: 0
- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
with:
python-version: 3.9
- name: Install Python packages
run: |
python -m pip install --upgrade pip pywin32 setuptools pytest-cov clingo
- name: Create local develop
run: |
./.github/workflows/setup_git.ps1
- name: Unit Test
run: |
spack unit-test -x --verbose --cov --cov-config=pyproject.toml --ignore=lib/spack/spack/test/cmd
./share/spack/qa/validate_last_exit.ps1
coverage combine -a
coverage xml
- uses: codecov/codecov-action@84508663e988701840491b86de86b666e8a86bed
with:
flags: unittests,windows
token: ${{ secrets.CODECOV_TOKEN }}
verbose: true
unit-tests-cmd:
runs-on: windows-latest
steps:
- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
with:
fetch-depth: 0
- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
with:
python-version: 3.9
- name: Install Python packages
run: |
python -m pip install --upgrade pip pywin32 setuptools coverage pytest-cov clingo
- name: Create local develop
run: |
./.github/workflows/setup_git.ps1
- name: Command Unit Test
run: |
spack unit-test -x --verbose --cov --cov-config=pyproject.toml lib/spack/spack/test/cmd
./share/spack/qa/validate_last_exit.ps1
coverage combine -a
coverage xml
- uses: codecov/codecov-action@84508663e988701840491b86de86b666e8a86bed
with:
flags: unittests,windows
token: ${{ secrets.CODECOV_TOKEN }}
verbose: true
build-abseil:
runs-on: windows-latest
steps:
- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
with:
fetch-depth: 0
- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
with:
python-version: 3.9
- name: Install Python packages
run: |
python -m pip install --upgrade pip pywin32 setuptools coverage
- name: Build Test
run: |
spack compiler find
spack -d external find cmake ninja
spack -d install abseil-cpp


@@ -14,26 +14,3 @@ sphinx:
 python:
   install:
     - requirements: lib/spack/docs/requirements.txt
-search:
-  ranking:
-    spack.html: -10
-    spack.*.html: -10
-    llnl.html: -10
-    llnl.*.html: -10
-    _modules/*: -10
-    command_index.html: -9
-    basic_usage.html: 5
-    configuration.html: 5
-    config_yaml.html: 5
-    packages_yaml.html: 5
-    build_settings.html: 5
-    environments.html: 5
-    containers.html: 5
-    mirrors.html: 5
-    module_file_support.html: 5
-    repositories.html: 5
-    binary_caches.html: 5
-    chain.html: 5
-    pipelines.html: 5
-    packaging_guide.html: 5


@@ -1,466 +1,3 @@
# v0.22.4 (2025-02-18)
## Bugfixes
- Continue to mark non-roots as implicitly installed on partial env installs (#47183)
# v0.22.3 (2024-11-18)
## Bugfixes
- Forward compatibility with Python 3.13 (#46775, #46983, #47035, #47175)
- `archspec` was updated to v0.2.5 (#46503, #46958)
- Fix path to Spack in `spack env depfile` makefile (#46966)
- Fix `glibc` detection in Chinese locales (#47434)
- Fix pickle round-trip of specs propagating variants (#47351)
- Fix a bug where concurrent spack install commands would not always update explicits correctly
(#47358)
- Fix a bug where autopush would run before all post install hooks modifying the install prefix
had run (#47329)
- Fix `spack find -u` (#47102)
- Fix a bug where sometimes the wrong Python interpreter was used for build dependencies such as
`py-setuptools` (#46980)
- Fix default config errors found by `spack audit externals` (#47308)
- Fix duplicate printing of external roots in installer (#44917)
- Fix modules schema in `compilers.yaml` (#47197)
- Reduce the size of generated YAML for Gitlab CI (#44995)
- Handle missing metadata file gracefully in bootstrap (#47278)
- Show underlying errors on fetch failure (#45714)
- Recognize `.` and `..` as paths instead of names in buildcache commands (#47105)
- Documentation and style (#46991, #47107, #47110, #47111, #47346, #47307, #47309, #47328, #47160,
#47402, #47557, #46709, #47080)
- Tests and CI fixes (#47165, #46711)
## Package updates
- ffmpeg: fix hash of patch (#45574)
# v0.22.2 (2024-09-21)
## Bugfixes
- Forward compatibility with Spack 0.23 packages with language dependencies (#45205, #45191)
- Forward compatibility with `urllib` from Python 3.12.6+ (#46453, #46483)
- Bump vendored `archspec` for better aarch64 support (#45721, #46445)
- Support macOS Sequoia (#45018, #45127)
- Fix regression in `{variants.X}` and `{variants.X.value}` format strings (#46206)
- Ensure shell escaping of environment variable values in load and activate commands (#42780)
- Fix an issue where `spec[pkg]` considers specs outside the current DAG (#45090)
- Do not halt concretization on unknown variants in externals (#45326)
- Improve validation of `develop` config section (#46485)
- Explicitly disable `ccache` if turned off in config, to avoid cache pollution (#45275)
- Improve backwards compatibility in `include_concrete` (#45766)
- Fix issue where package tags were sometimes repeated (#45160)
- Make `setup-env.sh` "sourced only" by dropping execution bits (#45641)
- Make certain source/binary fetch errors recoverable instead of a hard error (#45683)
- Remove debug statements in package hash computation (#45235)
- Remove redundant clingo warnings (#45269)
- Remove hard-coded layout version (#45645)
- Do not initialize previous store state in `use_store` (#45268)
- Docs improvements (#46475)
## Package updates
- `chapel` major update (#42197, #44931, #45304)
# v0.22.1 (2024-07-04)
## Bugfixes
- Fix reuse of externals on Linux (#44316)
- Ensure parent gcc-runtime version >= child (#44834, #44870)
- Ensure the latest gcc-runtime is rpath'ed when multiple exist among link deps (#44219)
- Improve version detection of glibc (#44154)
- Improve heuristics for solver (#44893, #44976, #45023)
- Make strong preferences override reuse (#44373)
- Reduce verbosity when C compiler is missing (#44182)
- Make missing ccache executable an error when required (#44740)
- Make every environment view containing `python` a `venv` (#44382)
- Fix external detection for compilers with os but no target (#44156)
- Fix version optimization for roots (#44272)
- Handle common implementations of pagination of tags in OCI build caches (#43136)
- Apply fetched patches to develop specs (#44950)
- Avoid Windows wrappers for filesystem utilities on non-Windows (#44126)
- Fix issue with long filenames in build caches on Windows (#43851)
- Fix formatting issue in `spack audit` (#45045)
- CI fixes (#44582, #43965, #43967, #44279, #44213)
## Package updates
- protobuf: fix 3.4:3.21 patch checksum (#44443)
- protobuf: update hash for patch needed when="@3.4:3.21" (#44210)
- git: bump v2.39 to 2.45; deprecate unsafe versions (#44248)
- gcc: use -rpath {rpath_dir} not -rpath={rpath dir} (#44315)
- Remove mesa18 and libosmesa (#44264)
- Enforce consistency of `gl` providers (#44307)
- Require libiconv for iconv (#44335, #45026).
Notice that glibc/musl also provide iconv, but are not guaranteed to be
complete. Set `packages:iconv:require:[glibc]` to restore the old behavior.
- py-matplotlib: qualify when to do a post install (#44191)
- rust: fix v1.78.0 instructions (#44127)
- suite-sparse: improve setting of the `libs` property (#44214)
- netlib-lapack: provide blas and lapack together (#44981)
# v0.22.0 (2024-05-12)
`v0.22.0` is a major feature release.
## Features in this release
1. **Compiler dependencies**
We are in the process of making compilers proper dependencies in Spack, and a number
of changes in `v0.22` support that effort. You may notice nodes in your dependency
graphs for compiler runtime libraries like `gcc-runtime` or `libgfortran`, and you
may notice that Spack graphs now include `libc`. We've also begun moving compiler
configuration from `compilers.yaml` to `packages.yaml` to make it consistent with
other externals. We are trying to do this with the least disruption possible, so
your existing `compilers.yaml` files should still work. We expect to be done with
this transition by the `v0.23` release in November.
* #41104: Packages compiled with `%gcc` on Linux, macOS and FreeBSD now depend on a
new package `gcc-runtime`, which contains a copy of the shared compiler runtime
libraries. This enables gcc runtime libraries to be installed and relocated when
using a build cache. When building minimal Spack-generated container images it is
no longer necessary to install libgfortran, libgomp etc. using the system package
manager.
* #42062: Packages compiled with `%oneapi` now depend on a new package
`intel-oneapi-runtime`. This is similar to `gcc-runtime`, and the runtimes can
provide virtuals and compilers can inject dependencies on virtuals into compiled
packages. This allows us to model library soname compatibility and allows
compilers like `%oneapi` to provide virtuals like `sycl` (which can also be
provided by standalone libraries). Note that until we have an agreement in place
with intel, Intel packages are marked `redistribute(source=False, binary=False)`
and must be downloaded outside of Spack.
* #43272: changes to the optimization criteria of the solver improve the hit-rate of
buildcaches by a fair amount. The solver uses more relaxed compatibility rules and will
not try to strictly match compilers or targets of reused specs. Users can still
enforce the previous strict behavior with `require:` sections in `packages.yaml`
(see the sketch after this list).
Note that to enforce correct linking, Spack will *not* reuse old `%gcc` and
`%oneapi` specs that do not have the runtime libraries as a dependency.
* #43539: Spack will reuse specs built with compilers that are *not* explicitly
configured in `compilers.yaml`. Because we can now keep runtime libraries in build
cache, we do not require you to also have a local configured compiler to *use* the
runtime libraries. This improves reuse in buildcaches and avoids conflicts with OS
updates that happen underneath Spack.
* #43190: binary compatibility on `linux` is now based on the `libc` version,
instead of on the `os` tag. Spack builds now detect the host `libc` (`glibc` or
`musl`) and add it as an implicit external node in the dependency graph. Binaries
with a `libc` with the same name and a version less than or equal to that of the
detected `libc` can be reused. This is only on `linux`, not `macos` or `Windows`.
* #43464: each package that can provide a compiler is now detectable using `spack
external find`. External packages defining compiler paths are effectively used as
compilers, and `spack external find -t compiler` can be used as a substitute for
`spack compiler find`. More details on this transition are in
[the docs](https://spack.readthedocs.io/en/latest/getting_started.html#manual-compiler-configuration)
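For example, a minimal `packages.yaml` sketch of such a `require:` section (illustrative only, not taken from the release notes; the compiler version is hypothetical):
```yaml
packages:
  all:
    require:
    - "%gcc@12.3.0"  # hypothetical pin: only build or reuse specs compiled with this gcc
```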
2. **Improved `spack find` UI for Environments**
If you're working in an environment, you likely care about:
* What are the roots
* Which ones are installed / not installed
* What's been added that still needs to be concretized
We've tweaked `spack find` in environments to show this information much more
clearly. Installation status is shown next to each root, so you can see what is
installed. Roots are also shown in bold in the list of installed packages. There is
also a new option for `spack find -r` / `--only-roots` that will only show env
roots, if you don't want to look at all the installed specs.
More details in #42334.
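A quick illustration of the new option (environment and package contents are hypothetical):
```
$ spack -e myenv find -r     # or --only-roots: list just the environment roots
$ spack -e myenv find        # full listing; roots are bold and show install status
```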
3. **Improved command-line string quoting**
We are making some breaking changes to how Spack parses specs on the CLI in order to
respect shell quoting instead of trying to fight it. If you (sadly) had to write
something like this on the command line:
```
spack install zlib cflags=\"-O2 -g\"
```
That will now result in an error, but you can now write what you probably expected
to work in the first place:
```
spack install zlib cflags="-O2 -g"
```
Quoted values can also now include special characters, so you can supply flags like:
```
spack install zlib ldflags='-Wl,-rpath=$ORIGIN/_libs'
```
To reduce ambiguity in parsing, we now require that you *not* put spaces around `=`
and `==` when specifying flags or variants. This would not have broken before but will now
result in an error:
```
spack install zlib cflags = "-O2 -g"
```
More details and discussion in #30634.
4. **Revert default `spack install` behavior to `--reuse`**
We changed the default concretizer behavior from `--reuse` to `--reuse-deps` in
#30990 (in `v0.20`), which meant that *every* `spack install` invocation would
attempt to build a new version of the requested package / any environment roots.
While this is a common ask for *upgrading* and for *developer* workflows, we don't
think it should be the default for a package manager.
We are going to try to stick to this policy:
1. Prioritize reuse and build as little as possible by default.
2. Only upgrade or install duplicates if they are explicitly asked for, or if there
is a known security issue that necessitates an upgrade.
With the install command you now have three options:
* `--reuse` (default): reuse as many existing installations as possible.
* `--reuse-deps` / `--fresh-roots`: upgrade (freshen) roots but reuse dependencies if possible.
* `--fresh`: install fresh versions of requested packages (roots) and their dependencies.
We've also introduced `--fresh-roots` as an alias for `--reuse-deps` to make it more clear
that it may give you fresh versions. More details in #41302 and #43988.
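Concretely, the three modes look like this on the command line (`hdf5` is an arbitrary example package):
```
$ spack install hdf5                 # default --reuse: reuse roots and dependencies where possible
$ spack install --fresh-roots hdf5   # alias of --reuse-deps: fresh roots, reused dependencies
$ spack install --fresh hdf5         # fresh roots and fresh dependencies
```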
5. **More control over reused specs**
You can now control which packages to reuse and how. There is a new
`concretizer:reuse` config option, which accepts the following properties:
- `roots`: `true` to reuse roots, `false` to reuse just dependencies
- `exclude`: list of constraints used to select which specs *not* to reuse
- `include`: list of constraints used to select which specs *to* reuse
- `from`: list of sources for reused specs (some combination of `local`,
`buildcache`, or `external`)
For example, to reuse only specs compiled with GCC, you could write:
```yaml
concretizer:
reuse:
roots: true
include:
- "%gcc"
```
Or, if `openmpi` must be used from externals, and it must be the only external used:
```yaml
concretizer:
reuse:
roots: true
from:
- type: local
exclude: ["openmpi"]
- type: buildcache
exclude: ["openmpi"]
- type: external
include: ["openmpi"]
```
6. **New `redistribute()` directive**
Some packages can't be redistributed in source or binary form. We need an explicit
way to say that in a package.
Now there is a `redistribute()` directive so that package authors can write:
```python
class MyPackage(Package):
redistribute(source=False, binary=False)
```
Like other directives, this works with `when=`:
```python
class MyPackage(Package):
# 12.0 and higher are proprietary
redistribute(source=False, binary=False, when="@12.0:")
# can't redistribute when we depend on some proprietary dependency
redistribute(source=False, binary=False, when="^proprietary-dependency")
```
More in #20185.
7. **New `conflict:` and `prefer:` syntax for package preferences**
Previously, you could express conflicts and preferences in `packages.yaml` through
some contortions with `require:`:
```yaml
packages:
zlib-ng:
require:
- one_of: ["%clang", "@:"] # conflict on %clang
- any_of: ["+shared", "@:"] # strong preference for +shared
```
You can now use `require:` and `prefer:` for a much more readable configuration:
```yaml
packages:
zlib-ng:
conflict:
- "%clang"
prefer:
- "+shared"
```
See [the documentation](https://spack.readthedocs.io/en/latest/packages_yaml.html#conflicts-and-strong-preferences)
and #41832 for more details.
8. **`include_concrete` in environments**
You may want to build on the *concrete* contents of another environment without
changing that environment. You can now include the concrete specs from another
environment's `spack.lock` with `include_concrete`:
```yaml
spack:
specs: []
concretizer:
unify: true
include_concrete:
- /path/to/environment1
- /path/to/environment2
```
Now, when *this* environment is concretized, it will bring in the already concrete
specs from `environment1` and `environment2`, and build on top of them without
changing them. This is useful if you have phased deployments, where old deployments
should not be modified but you want to use as many of them as possible. More details
in #33768.
9. **`python-venv` isolation**
Spack has unique requirements for Python because it:
1. installs every package in its own independent directory, and
2. allows users to register *external* python installations.
External installations may contain their own installed packages that can interfere
with Spack installations, and some distributions (Debian and Ubuntu) even change the
`sysconfig` in ways that alter the installation layout of installed Python packages
(e.g., with the addition of a `/local` prefix on Debian or Ubuntu). To isolate Spack
from these and other issues, we now insert a small `python-venv` package in between
`python` and packages that need to install Python code. This isolates Spack's build
environment, isolates Spack from any issues with an external python, and resolves a
large number of issues we've had with Python installations.
See #40773 for further details.
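One way to see the inserted node (a rough sketch; exact spec output varies by setup and package):
```
$ spack spec py-numpy | grep python-venv   # the python-venv node sits between python and the extension
```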
## New commands, options, and directives
* Allow packages to be pushed to build cache after install from source (#42423)
* `spack develop`: stage build artifacts in same root as non-dev builds #41373
* Don't delete `spack develop` build artifacts after install (#43424)
* `spack find`: add options for local/upstream only (#42999)
* `spack logs`: print log files for packages (either partially built or installed) (#42202)
* `patch`: support reversing patches (#43040)
* `develop`: Add -b/--build-directory option to set build_directory package attribute (#39606)
* `spack list`: add `--namespace` / `--repo` option (#41948)
* directives: add `checked_by` field to `license()`, add some license checks
* `spack gc`: add options for environments and build dependencies (#41731)
* Add `--create` to `spack env activate` (#40896)
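A few of the options above in action (package names and paths are hypothetical):
```
$ spack env activate --create demo        # create the environment on first activation (#40896)
$ spack logs zlib                         # print log files for a partially built or installed package (#42202)
$ spack develop -b /tmp/build mylib@main  # set the package's build directory (#39606)
```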
## Performance improvements
* environment.py: fix excessive re-reads (#43746)
* ruamel yaml: fix quadratic complexity bug (#43745)
* Refactor to improve `spec format` speed (#43712)
* Do not acquire a write lock on the env post install if no views (#43505)
* asp.py: fewer calls to `spec.copy()` (#43715)
* spec.py: early return in `__str__`
* avoid `jinja2` import at startup unless needed (#43237)
## Other new features of note
* `archspec`: update to `v0.2.4`: support for Windows, bugfixes for `neoverse-v1` and
`neoverse-v2` detection.
* `spack config get`/`blame`: with no args, show entire config
* `spack env create <env>`: dir if dir-like (#44024)
* ASP-based solver: update os compatibility for macOS (#43862)
* Add handling of custom ssl certs in urllib ops (#42953)
* Add ability to rename environments (#43296)
* Add config option and compiler support to reuse across OS's (#42693)
* Support for prereleases (#43140)
* Only reuse externals when configured (#41707)
* Environments: Add support for including views (#42250)
## Binary caches
* Build cache: make signed/unsigned a mirror property (#41507)
* tools stack
## Removals, deprecations, and syntax changes
* remove `dpcpp` compiler and package (#43418)
* spack load: remove --only argument (#42120)
## Notable Bugfixes
* repo.py: drop deleted packages from provider cache (#43779)
* Allow `+` in module file names (#41999)
* `cmd/python`: use runpy to allow multiprocessing in scripts (#41789)
* Show extension commands with spack -h (#41726)
* Support environment variable expansion inside module projections (#42917)
* Alert user to failed concretizations (#42655)
* shell: fix zsh color formatting for PS1 in environments (#39497)
* spack mirror create --all: include patches (#41579)
## Spack community stats
* 7,994 total packages; 525 since `v0.21.0`
* 178 new Python packages, 5 new R packages
* 358 people contributed to this release
* 344 committers to packages
* 45 committers to core
# v0.21.2 (2024-03-01)
## Bugfixes
- Containerize: accommodate nested or pre-existing spack-env paths (#41558)
- Fix setup-env script, when going back and forth between instances (#40924)
- Fix using fully-qualified namespaces from root specs (#41957)
- Fix a bug when a required provider is requested for multiple virtuals (#42088)
- OCI buildcaches:
- only push in parallel when forking (#42143)
- use pickleable errors (#42160)
- Fix using sticky variants in externals (#42253)
- Fix a rare issue with conditional requirements and multi-valued variants (#42566)
## Package updates
- rust: add v1.75, rework a few variants (#41161,#41903)
- py-transformers: add v4.35.2 (#41266)
- mgard: fix OpenMP on AppleClang (#42933)
# v0.21.1 (2024-01-11)
## New features
- Add support for reading buildcaches created by Spack v0.22 (#41773)
## Bugfixes
- spack graph: fix coloring with environments (#41240)
- spack info: sort variants in --variants-by-name (#41389)
- Spec.format: error on old style format strings (#41934)
- ASP-based solver:
- fix infinite recursion when computing concretization errors (#41061)
- don't error for type mismatch on preferences (#41138)
- don't emit spurious debug output (#41218)
- Improve the error message for deprecated preferences (#41075)
- Fix MSVC preview version breaking clingo build on Windows (#41185)
- Fix multi-word aliases (#41126)
- Add a warning for unconfigured compiler (#41213)
- environment: fix an issue with deconcretization/reconcretization of specs (#41294)
- buildcache: don't error if a patch is missing, when installing from binaries (#41986)
- Multiple improvements to unit-tests (#41215,#41369,#41495,#41359,#41361,#41345,#41342,#41308,#41226)
## Package updates
- root: add a webgui patch to address security issue (#41404)
- BerkeleyGW: update source urls (#38218)
# v0.21.0 (2023-11-11)
`v0.21.0` is a major feature release.


@@ -144,5 +144,3 @@ switch($SpackSubCommand)
     "unload" {Invoke-SpackLoad}
     default {python "$Env:SPACK_ROOT/bin/spack" $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs}
 }
-exit $LASTEXITCODE


@@ -42,8 +42,8 @@ concretizer:
   # "minimal": allows the duplication of 'build-tools' nodes only (e.g. py-setuptools, cmake etc.)
   # "full" (experimental): allows separation of the entire build-tool stack (e.g. the entire "cmake" subDAG)
   strategy: minimal
-  # Option to specify compatibility between operating systems for reuse of compilers and packages
+  # Option to specify compatiblity between operating systems for reuse of compilers and packages
   # Specified as a key: [list] where the key is the os that is being targeted, and the list contains the OS's
   # it can reuse. Note this is a directional compatibility so mutual compatibility between two OS's
   # requires two entries i.e. os_compatible: {sonoma: [monterey], monterey: [sonoma]}
   os_compatible: {}


@@ -1,3 +0,0 @@
packages:
iconv:
require: [libiconv]


@@ -18,7 +18,6 @@ packages:
     compiler: [gcc, clang, oneapi, xl, nag, fj, aocc]
     providers:
       awk: [gawk]
-      armci: [armcimpi]
       blas: [openblas, amdblis]
       D: [ldc]
       daal: [intel-oneapi-daal]
@@ -37,10 +36,11 @@ packages:
       jpeg: [libjpeg-turbo, libjpeg]
       lapack: [openblas, amdlibflame]
       libc: [glibc, musl]
-      libgfortran: [gcc-runtime]
-      libglx: [mesa+glx]
-      libifcore: [intel-oneapi-runtime]
+      libgfortran: [ gcc-runtime ]
+      libglx: [mesa+glx, mesa18+glx]
+      libifcore: [ intel-oneapi-runtime ]
       libllvm: [llvm]
+      libosmesa: [mesa+osmesa, mesa18+osmesa]
       lua-lang: [lua, lua-luajit-openresty, lua-luajit]
       luajit: [lua-luajit-openresty, lua-luajit]
       mariadb-client: [mariadb-c-client, mariadb]


@@ -1,12 +0,0 @@
{% extends "!layout.html" %}
{%- block extrahead %}
<!-- Google tag (gtag.js) -->
<script async src="https://www.googletagmanager.com/gtag/js?id=G-S0PQ7WV75K"></script>
<script>
window.dataLayer = window.dataLayer || [];
function gtag(){dataLayer.push(arguments);}
gtag('js', new Date());
gtag('config', 'G-S0PQ7WV75K');
</script>
{% endblock %}


@@ -865,7 +865,7 @@ There are several different ways to use Spack packages once you have
 installed them. As you've seen, spack packages are installed into long
 paths with hashes, and you need a way to get them into your path. The
 easiest way is to use :ref:`spack load <cmd-spack-load>`, which is
-described in this section.
+described in the next section.

 Some more advanced ways to use Spack packages include:
@@ -959,86 +959,7 @@ use ``spack find --loaded``.
 You can also use ``spack load --list`` to get the same output, but it
 does not have the full set of query options that ``spack find`` offers.

-We'll learn more about Spack's spec syntax in :ref:`a later section <sec-specs>`.
+We'll learn more about Spack's spec syntax in the next section.

-.. _extensions:
-
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-Python packages and virtual environments
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-Spack can install a large number of Python packages. Their names are
-typically prefixed with ``py-``. Installing and using them is no
-different from any other package:
-
-.. code-block:: console
-
-   $ spack install py-numpy
-   $ spack load py-numpy
-   $ python3
-   >>> import numpy
-
-The ``spack load`` command sets the ``PATH`` variable so that the right Python
-executable is used, and makes sure that ``numpy`` and its dependencies can be
-located in the ``PYTHONPATH``.
-
-Spack is different from other Python package managers in that it installs
-every package into its *own* prefix. This is in contrast to ``pip``, which
-installs all packages into the same prefix, be it in a virtual environment
-or not.
-
-For many users, **virtual environments** are more convenient than repeated
-``spack load`` commands, particularly when working with multiple Python
-packages. Fortunately Spack supports environments itself, which together
-with a view are no different from Python virtual environments.
-
-The recommended way of working with Python extensions such as ``py-numpy``
-is through :ref:`Environments <environments>`. The following example creates
-a Spack environment with ``numpy`` in the current working directory. It also
-puts a filesystem view in ``./view``, which is a more traditional combined
-prefix for all packages in the environment.
-
-.. code-block:: console
-
-   $ spack env create --with-view view --dir .
-   $ spack -e . add py-numpy
-   $ spack -e . concretize
-   $ spack -e . install
-
-Now you can activate the environment and start using the packages:
-
-.. code-block:: console
-
-   $ spack env activate .
-   $ python3
-   >>> import numpy
-
-The environment view is also a virtual environment, which is useful if you are
-sharing the environment with others who are unfamiliar with Spack. They can
-either use the Python executable directly:
-
-.. code-block:: console
-
-   $ ./view/bin/python3
-   >>> import numpy
-
-or use the activation script:
-
-.. code-block:: console
-
-   $ source ./view/bin/activate
-   $ python3
-   >>> import numpy
-
-In general, there should not be much difference between ``spack env activate``
-and using the virtual environment. The main advantage of ``spack env activate``
-is that it knows about more packages than just Python packages, and it may set
-additional runtime variables that are not covered by the virtual environment
-activation script.
-
-See :ref:`environments` for a more in-depth description of Spack
-environments and customizations to views.
-
 .. _sec-specs:
@@ -1784,6 +1705,165 @@ check only local packages (as opposed to those used transparently from
``upstream`` spack instances) and the ``-j,--json`` option to output ``upstream`` spack instances) and the ``-j,--json`` option to output
machine-readable json data for any errors. machine-readable json data for any errors.
.. _extensions:
---------------------------
Extensions & Python support
---------------------------
Spack's installation model assumes that each package will live in its
own install prefix. However, certain packages are typically installed
*within* the directory hierarchy of other packages. For example,
`Python <https://www.python.org>`_ packages are typically installed in the
``$prefix/lib/python-2.7/site-packages`` directory.
In Spack, installation prefixes are immutable, so this type of installation
is not directly supported. However, it is possible to create views that
allow you to merge install prefixes of multiple packages into a single new prefix.
Views are a convenient way to get a more traditional filesystem structure.
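As a sketch, one way to build such a view by hand is the ``spack view``
command (the path and spec here are illustrative; dependencies, including
``python`` itself, are linked into the view by default):

.. code-block:: console

   $ spack view symlink ./view py-numpy
   $ ./view/bin/python -c "import numpy"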
Using *extensions*, you can ensure that Python packages always share the
same prefix in the view as Python itself. Suppose you have
Python installed like so:
.. code-block:: console
$ spack find python
==> 1 installed packages.
-- linux-debian7-x86_64 / gcc@4.4.7 --------------------------------
python@2.7.8
.. _cmd-spack-extensions:
^^^^^^^^^^^^^^^^^^^^
``spack extensions``
^^^^^^^^^^^^^^^^^^^^
You can find extensions for your Python installation like this:
.. code-block:: console
$ spack extensions python
==> python@2.7.8%gcc@4.4.7 arch=linux-debian7-x86_64-703c7a96
==> 36 extensions:
geos py-ipython py-pexpect py-pyside py-sip
py-basemap py-libxml2 py-pil py-pytz py-six
py-biopython py-mako py-pmw py-rpy2 py-sympy
py-cython py-matplotlib py-pychecker py-scientificpython py-virtualenv
py-dateutil py-mpi4py py-pygments py-scikit-learn
py-epydoc py-mx py-pylint py-scipy
py-gnuplot py-nose py-pyparsing py-setuptools
py-h5py py-numpy py-pyqt py-shiboken
==> 12 installed:
-- linux-debian7-x86_64 / gcc@4.4.7 --------------------------------
py-dateutil@2.4.0 py-nose@1.3.4 py-pyside@1.2.2
py-dateutil@2.4.0 py-numpy@1.9.1 py-pytz@2014.10
py-ipython@2.3.1 py-pygments@2.0.1 py-setuptools@11.3.1
py-matplotlib@1.4.2 py-pyparsing@2.0.3 py-six@1.9.0
The extensions are a subset of what's returned by ``spack list``, and
they are packages like any other. They are installed into their own
prefixes, and you can see this with ``spack find --paths``:
.. code-block:: console
$ spack find --paths py-numpy
==> 1 installed packages.
-- linux-debian7-x86_64 / gcc@4.4.7 --------------------------------
py-numpy@1.9.1 ~/spack/opt/linux-debian7-x86_64/gcc@4.4.7/py-numpy@1.9.1-66733244
However, even though this package is installed, you cannot use it
directly when you run ``python``:
.. code-block:: console
$ spack load python
$ python
Python 2.7.8 (default, Feb 17 2015, 01:35:25)
[GCC 4.4.7 20120313 (Red Hat 4.4.7-11)] on linux2
Type "help", "copyright", "credits" or "license" for more information.
>>> import numpy
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
ImportError: No module named numpy
>>>
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Using Extensions in Environments
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
The recommended way of working with extensions such as ``py-numpy``
above is through :ref:`Environments <environments>`. For example,
the following creates an environment in the current working directory
with a filesystem view in the ``./view`` directory:
.. code-block:: console
$ spack env create --with-view view --dir .
$ spack -e . add py-numpy
$ spack -e . concretize
$ spack -e . install
We recommend environments for two reasons. Firstly, environments
can be activated (requires :ref:`shell-support`):
.. code-block:: console
$ spack env activate .
which sets all the right environment variables such as ``PATH`` and
``PYTHONPATH``. This ensures that
.. code-block:: console
$ python
>>> import numpy
works. Secondly, even without shell support, the view ensures
that Python can locate its extensions:
.. code-block:: console
$ ./view/bin/python
>>> import numpy
See :ref:`environments` for a more in-depth description of Spack
environments and customizations to views.
^^^^^^^^^^^^^^^^^^^^
Using ``spack load``
^^^^^^^^^^^^^^^^^^^^
A more traditional way of using Spack and extensions is ``spack load``
(requires :ref:`shell-support`). This will add the extension to ``PYTHONPATH``
in your current shell, and Python itself will be available in the ``PATH``:
.. code-block:: console
$ spack load py-numpy
$ python
>>> import numpy
The loaded packages can be checked using ``spack find --loaded``.
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Loading Extensions via Modules
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Apart from ``spack env activate`` and ``spack load``, you can load numpy
through your environment modules (using ``environment-modules`` or
``lmod``). This will also add the extension to the ``PYTHONPATH`` in
your current shell.
.. code-block:: console
$ module load <name of numpy module>
If you do not know the name of the specific numpy module you wish to
load, you can use the ``spack module tcl|lmod loads`` command to get
the name of the module from the Spack spec.
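For example (a sketch; the exact module name printed will differ on your
system):

.. code-block:: console

   $ spack module tcl loads py-numpy
   module load py-numpy-1.9.1-gcc-4.4.7-abcdefg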
----------------------- -----------------------
Filesystem requirements Filesystem requirements
----------------------- -----------------------


@@ -21,86 +21,23 @@ is the following:
Reuse already installed packages Reuse already installed packages
-------------------------------- --------------------------------
The ``reuse`` attribute controls how aggressively Spack reuses binary packages during concretization. The The ``reuse`` attribute controls whether Spack will prefer to use installed packages (``true``), or
attribute can either be a single value, or an object for more complex configurations. whether it will do a "fresh" installation and prefer the latest settings from
``package.py`` files and ``packages.yaml`` (``false``).
In the former case ("single value") it allows Spack to: You can use:
1. Reuse installed packages and buildcaches for all the specs to be concretized, when ``true``
2. Reuse installed packages and buildcaches only for the dependencies of the root specs, when ``dependencies``
3. Disregard reusing installed packages and buildcaches, when ``false``
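For instance, the single-value form can also be set permanently from the
command line (a sketch using the usual ``spack config add`` path syntax):

.. code-block:: console

   $ spack config add concretizer:reuse:dependencies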
If finer control over which specs are reused is needed, then the value of this attribute can be
an object, with the following keys:
1. ``roots``: if ``true`` root specs are reused, if ``false`` only dependencies of root specs are reused
2. ``from``: list of sources from which reused specs are taken
Each source in ``from`` is itself an object:
.. list-table:: Attributes for a source or reusable specs
:header-rows: 1
* - Attribute name
- Description
* - type (mandatory, string)
- Can be ``local``, ``buildcache``, or ``external``
* - include (optional, list of specs)
- If present, reusable specs must match at least one of the constraints in the list.
* - exclude (optional, list of specs)
- If present, reusable specs must not match any of the constraints in the list.
For instance, the following configuration:
.. code-block:: yaml
concretizer:
reuse:
roots: true
from:
- type: local
include:
- "%gcc"
- "%clang"
tells the concretizer to reuse all specs compiled with either ``gcc`` or ``clang`` that are installed
in the local store. Any spec from remote buildcaches is disregarded.
To reduce the boilerplate in configuration files, default values for the ``include`` and
``exclude`` options can be pushed up one level:
.. code-block:: yaml
concretizer:
reuse:
roots: true
include:
- "%gcc"
from:
- type: local
- type: buildcache
- type: local
include:
- "foo %oneapi"
In the example above we reuse all specs compiled with ``gcc`` from the local store
and remote buildcaches, and we also reuse ``foo %oneapi``. Note that the last source of
specs overrides the default ``include`` attribute.
For one-off concretizations, there are command-line arguments for each of the simple "single value"
configurations. This means a user can:
.. code-block:: console .. code-block:: console
% spack install --reuse <spec> % spack install --reuse <spec>
to enable reuse for a single installation, or: to enable reuse for a single installation, and you can use:
.. code-block:: console .. code-block:: console
spack install --fresh <spec> spack install --fresh <spec>
to do a fresh install if ``reuse`` is enabled by default. to do a fresh install if ``reuse`` is enabled by default.
``reuse: dependencies`` is the default.
.. seealso:: .. seealso::


@@ -718,45 +718,23 @@ command-line tool, or C/C++/Fortran program with optional Python
modules? The former should be prepended with ``py-``, while the modules? The former should be prepended with ``py-``, while the
latter should not. latter should not.
"""""""""""""""""""""""""""""" """"""""""""""""""""""
``extends`` vs. ``depends_on`` extends vs. depends_on
"""""""""""""""""""""""""""""" """"""""""""""""""""""
This is very similar to the naming dilemma above, with a slight twist.
As mentioned in the :ref:`Packaging Guide <packaging_extensions>`, As mentioned in the :ref:`Packaging Guide <packaging_extensions>`,
``extends`` and ``depends_on`` are very similar, but ``extends`` ensures ``extends`` and ``depends_on`` are very similar, but ``extends`` ensures
that the extension and extendee share the same prefix in views. that the extension and extendee share the same prefix in views.
This allows the user to import a Python module without This allows the user to import a Python module without
having to add that module to ``PYTHONPATH``. having to add that module to ``PYTHONPATH``.
Additionally, ``extends("python")`` adds a dependency on the package When deciding between ``extends`` and ``depends_on``, the best rule of
``python-venv``. This improves isolation from the system, whether thumb is to check the installation prefix. If Python libraries are
it's during the build or at runtime: user and system site packages installed to ``<prefix>/lib/pythonX.Y/site-packages``, then you
cannot accidentally be used by any package that ``extends("python")``. should use ``extends``. If Python libraries are installed elsewhere
or the only files that get installed reside in ``<prefix>/bin``, then
As a rule of thumb: if a package does not install any Python modules don't use ``extends``.
of its own, and merely puts a Python script in the ``bin`` directory,
then there is no need for ``extends``. If the package installs modules
in the ``site-packages`` directory, it requires ``extends``.
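As a sketch, a hypothetical recipe that installs modules into
``site-packages`` would therefore declare (``PyExample`` is illustrative):

.. code-block:: python

   class PyExample(PythonPackage):
       """A hypothetical package that installs modules into site-packages."""

       extends("python")  # share the same view prefix as python itself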
"""""""""""""""""""""""""""""""""""""
Executing ``python`` during the build
"""""""""""""""""""""""""""""""""""""
Whenever you need to execute a Python command or pass the path of the
Python interpreter to the build system, it is best to use the global
variable ``python`` directly. For example:
.. code-block:: python
@run_before("install")
def recythonize(self):
python("setup.py", "clean") # use the `python` global
As mentioned in the previous section, ``extends("python")`` adds an
automatic dependency on ``python-venv``, which is a virtual environment
that guarantees build isolation. The ``python`` global always refers to
the correct Python interpreter, whether the package uses ``extends("python")``
or ``depends_on("python")``.
^^^^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^^^^
Alternatives to Spack Alternatives to Spack


@@ -5,9 +5,9 @@
.. chain: .. chain:
============================================= ============================
Chaining Spack Installations (upstreams.yaml) Chaining Spack Installations
============================================= ============================
You can point your Spack installation to another installation to use any You can point your Spack installation to another installation to use any
packages that are installed there. To register the other Spack instance, packages that are installed there. To register the other Spack instance,


@@ -150,7 +150,7 @@ this can expose you to attacks. Use at your own risk.
-------------------- --------------------
Path to custom certificates for SSL verification. The value can be a Path to custom certificates for SSL verification. The value can be a
filesystem path, or an environment variable that expands to an absolute file path. filesystem path, or an environment variable that expands to a file path.
The default value is set to the environment variable ``SSL_CERT_FILE`` The default value is set to the environment variable ``SSL_CERT_FILE``
to use the same syntax used by many other applications that automatically to use the same syntax used by many other applications that automatically
detect custom certificates. detect custom certificates.
@@ -160,9 +160,6 @@ in the subprocess calling ``curl``.
If ``url_fetch_method:urllib`` then files and directories are supported i.e. If ``url_fetch_method:urllib`` then files and directories are supported i.e.
``config:ssl_certs:$SSL_CERT_FILE`` or ``config:ssl_certs:$SSL_CERT_DIR`` ``config:ssl_certs:$SSL_CERT_FILE`` or ``config:ssl_certs:$SSL_CERT_DIR``
will work. will work.
In all cases the expanded path must be absolute for Spack to use the certificates.
A certificate path relative to an environment can be specified by prefixing
the path with the Spack configuration variable ``$env``.
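A sketch of such an environment-relative setting (the certificate file name
is illustrative):

.. code-block:: console

   $ spack config add 'config:ssl_certs:$env/cacert.pem'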
-------------------- --------------------
``checksum`` ``checksum``


@@ -194,15 +194,15 @@ The OS that are currently supported are summarized in the table below:
* - Operating System * - Operating System
- Base Image - Base Image
- Spack Image - Spack Image
* - Ubuntu 18.04
- ``ubuntu:18.04``
- ``spack/ubuntu-bionic``
* - Ubuntu 20.04 * - Ubuntu 20.04
- ``ubuntu:20.04`` - ``ubuntu:20.04``
- ``spack/ubuntu-focal`` - ``spack/ubuntu-focal``
* - Ubuntu 22.04 * - Ubuntu 22.04
- ``ubuntu:22.04`` - ``ubuntu:22.04``
- ``spack/ubuntu-jammy`` - ``spack/ubuntu-jammy``
* - Ubuntu 24.04
- ``ubuntu:24.04``
- ``spack/ubuntu-noble``
* - CentOS 7 * - CentOS 7
- ``centos:7`` - ``centos:7``
- ``spack/centos7`` - ``spack/centos7``
@@ -227,6 +227,12 @@ The OS that are currently supported are summarized in the table below:
* - Rocky Linux 9 * - Rocky Linux 9
- ``rockylinux:9`` - ``rockylinux:9``
- ``spack/rockylinux9`` - ``spack/rockylinux9``
* - Fedora Linux 37
- ``fedora:37``
- ``spack/fedora37``
* - Fedora Linux 38
- ``fedora:38``
- ``spack/fedora38``
* - Fedora Linux 39 * - Fedora Linux 39
- ``fedora:39`` - ``fedora:39``
- ``spack/fedora39`` - ``spack/fedora39``
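For example, the prebuilt Spack images in the table can be pulled and run
directly (tag illustrative):

.. code-block:: console

   $ docker pull spack/ubuntu-jammy:latest
   $ docker run -it spack/ubuntu-jammy:latest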


@@ -184,7 +184,7 @@ Style Tests
Spack uses `Flake8 <http://flake8.pycqa.org/en/latest/>`_ to test for Spack uses `Flake8 <http://flake8.pycqa.org/en/latest/>`_ to test for
`PEP 8 <https://www.python.org/dev/peps/pep-0008/>`_ conformance and `PEP 8 <https://www.python.org/dev/peps/pep-0008/>`_ conformance and
`mypy <https://mypy.readthedocs.io/en/stable/>`_ for type checking. PEP 8 is `mypy <https://mypy.readthedocs.io/en/stable/>` for type checking. PEP 8 is
a series of style guides for Python that provide suggestions for everything a series of style guides for Python that provide suggestions for everything
from variable naming to indentation. In order to limit the number of PRs that from variable naming to indentation. In order to limit the number of PRs that
were mostly style changes, we decided to enforce PEP 8 conformance. Your PR were mostly style changes, we decided to enforce PEP 8 conformance. Your PR


@@ -716,27 +716,27 @@ Release branches
^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^
There are currently two types of Spack releases: :ref:`major releases There are currently two types of Spack releases: :ref:`major releases
<major-releases>` (``0.21.0``, ``0.22.0``, etc.) and :ref:`patch releases <major-releases>` (``0.17.0``, ``0.18.0``, etc.) and :ref:`point releases
<patch-releases>` (``0.22.1``, ``0.22.2``, ``0.22.3``, etc.). Here is a <point-releases>` (``0.17.1``, ``0.17.2``, ``0.17.3``, etc.). Here is a
diagram of how Spack release branches work:: diagram of how Spack release branches work::
o branch: develop (latest version, v0.23.0.dev0) o branch: develop (latest version, v0.19.0.dev0)
| |
o o
| o branch: releases/v0.22, tag: v0.22.1 | o branch: releases/v0.18, tag: v0.18.1
o | o |
| o tag: v0.22.0 | o tag: v0.18.0
o | o |
| o | o
|/ |/
o o
| |
o o
| o branch: releases/v0.21, tag: v0.21.2 | o branch: releases/v0.17, tag: v0.17.2
o | o |
| o tag: v0.21.1 | o tag: v0.17.1
o | o |
| o tag: v0.21.0 | o tag: v0.17.0
o | o |
| o | o
|/ |/
@@ -747,8 +747,8 @@ requests target ``develop``. The ``develop`` branch will report that its
version is that of the next **major** release with a ``.dev0`` suffix. version is that of the next **major** release with a ``.dev0`` suffix.
Each Spack release series also has a corresponding branch, e.g. Each Spack release series also has a corresponding branch, e.g.
``releases/v0.22`` has ``v0.22.x`` versions of Spack, and ``releases/v0.18`` has ``0.18.x`` versions of Spack, and
``releases/v0.21`` has ``v0.21.x`` versions. A major release is the first ``releases/v0.17`` has ``0.17.x`` versions. A major release is the first
tagged version on a release branch. Minor releases are back-ported from tagged version on a release branch. Minor releases are back-ported from
develop onto release branches. This is typically done by cherry-picking develop onto release branches. This is typically done by cherry-picking
bugfix commits off of ``develop``. bugfix commits off of ``develop``.
@@ -778,40 +778,27 @@ for more details.
Scheduling work for releases Scheduling work for releases
^^^^^^^^^^^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
We schedule work for **major releases** through `milestones We schedule work for releases by creating `GitHub projects
<https://github.com/spack/spack/milestones>`_ and `GitHub Projects <https://github.com/spack/spack/projects>`_. At any time, there may be
<https://github.com/spack/spack/projects>`_, while **patch releases** use `labels several open release projects. For example, below are two releases (from
<https://github.com/spack/spack/labels>`_. some past version of the page linked above):
There is only one milestone open at a time. Its name corresponds to the next major version, for .. image:: images/projects.png
example ``v0.23``. Important issues and pull requests should be assigned to this milestone by
core developers, so that they are not forgotten at the time of release. The milestone is closed
when the release is made, and a new milestone is created for the next major release.
Bug reports in GitHub issues are automatically labelled ``bug`` and ``triage``. Spack developers This image shows one release in progress for ``0.15.1`` and another for
assign one of the labels ``impact-low``, ``impact-medium`` or ``impact-high``. This will make the ``0.16.0``. Each of these releases has a project board containing issues
issue appear in the `Triaged bugs <https://github.com/orgs/spack/projects/6>`_ project board. and pull requests. GitHub shows a status bar with completed work in
Important issues should be assigned to the next milestone as well, so they appear at the top of green, work in progress in purple, and work not started yet in gray, so
the project board. it's fairly easy to see progress.
Spack's milestones are not firm commitments so we move work between releases frequently. If we Spack's project boards are not firm commitments so we move work between
need to make a release and some tasks are not yet done, we will simply move them to the next major releases frequently. If we need to make a release and some tasks are not
release milestone, rather than delaying the release to complete them. yet done, we will simply move them to the next minor or major release, rather
than delaying the release to complete them.
^^^^^^^^^^^^^^^^^^^^^ For more on using GitHub project boards, see `GitHub's documentation
Backporting bug fixes <https://docs.github.com/en/github/managing-your-work-on-github/about-project-boards>`_.
^^^^^^^^^^^^^^^^^^^^^
When a bug is fixed in the ``develop`` branch, it is often necessary to backport the fix to one
(or more) of the ``release/vX.Y`` branches. Only the release manager is responsible for doing
backports, but Spack maintainers are responsible for labelling pull requests (and issues if no bug
fix is available yet) with ``vX.Y.Z`` labels. The label should correspond to the next patch version
that the bug fix should be backported to.
Backports are done publicly by the release manager using a pull request named ``Backports vX.Y.Z``.
This pull request is opened from the ``backports/vX.Y.Z`` branch, targets the ``releases/vX.Y``
branch and contains a (growing) list of cherry-picked commits from the ``develop`` branch.
Typically there are one or two backport pull requests open at any given time.
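A sketch of the cherry-picking mechanics (placeholders as above; the hash is
the squash-merge commit on ``develop``):

.. code-block:: console

   $ git checkout -b backports/vX.Y.Z releases/vX.Y
   $ git cherry-pick <sha-of-squashed-commit>
   $ git push origin backports/vX.Y.Z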
.. _major-releases: .. _major-releases:
@@ -819,21 +806,25 @@ Typically there are one or two backport pull requests open at any given time.
Making major releases Making major releases
^^^^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^^^^
Assuming all required work from the milestone is completed, the steps to make the major release Assuming a project board has already been created and all required work
are: completed, the steps to make the major release are:
#. `Create a new milestone <https://github.com/spack/spack/milestones>`_ for the next major #. Create two new project boards:
release.
#. `Create a new label <https://github.com/spack/spack/labels>`_ for the next patch release. * One for the next major release
* One for the next point release
#. Move any optional tasks that are not done to the next milestone. #. Move any optional tasks that are not done to one of the new project boards.
In general, small bugfixes should go to the next point release. Major
features, refactors, and changes that could affect concretization should
go in the next major release.
#. Create a branch for the release, based on ``develop``: #. Create a branch for the release, based on ``develop``:
.. code-block:: console .. code-block:: console
$ git checkout -b releases/v0.23 develop $ git checkout -b releases/v0.15 develop
For a version ``vX.Y.Z``, the branch's name should be For a version ``vX.Y.Z``, the branch's name should be
``releases/vX.Y``. That is, you should create a ``releases/vX.Y`` ``releases/vX.Y``. That is, you should create a ``releases/vX.Y``
@@ -869,8 +860,8 @@ are:
Create a pull request targeting the ``develop`` branch, bumping the major Create a pull request targeting the ``develop`` branch, bumping the major
version in ``lib/spack/spack/__init__.py`` with a ``dev0`` release segment. version in ``lib/spack/spack/__init__.py`` with a ``dev0`` release segment.
For instance when you have just released ``v0.23.0``, set the version For instance when you have just released ``v0.15.0``, set the version
to ``(0, 24, 0, 'dev0')`` on ``develop``. to ``(0, 16, 0, 'dev0')`` on ``develop``.
#. Follow the steps in :ref:`publishing-releases`. #. Follow the steps in :ref:`publishing-releases`.
@@ -879,52 +870,82 @@ are:
#. Follow the steps in :ref:`announcing-releases`. #. Follow the steps in :ref:`announcing-releases`.
.. _patch-releases: .. _point-releases:
^^^^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^^^^
Making patch releases Making point releases
^^^^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^^^^
To make the patch release process both efficient and transparent, we use a *backports pull request* Assuming a project board has already been created and all required work
which contains cherry-picked commits from the ``develop`` branch. The majority of the work is to completed, the steps to make the point release are:
cherry-pick the bug fixes, which ideally should be done as soon as they land on ``develop``:
this ensures cherry-picking happens in order, and makes conflicts easier to resolve since the
changes are fresh in the mind of the developer.
The backports pull request is always titled ``Backports vX.Y.Z`` and is labelled ``backports``. It #. Create a new project board for the next point release.
is opened from a branch named ``backports/vX.Y.Z`` and targets the ``releases/vX.Y`` branch.
Whenever a pull request labelled ``vX.Y.Z`` is merged, cherry-pick the associated squashed commit #. Move any optional tasks that are not done to the next project board.
on ``develop`` to the ``backports/vX.Y.Z`` branch. For pull requests that were rebased (or not
squashed), cherry-pick each associated commit individually. Never force push to the
``backports/vX.Y.Z`` branch.
.. warning:: #. Check out the release branch (it should already exist).
Sometimes you may **still** get merge conflicts even if you have For the ``X.Y.Z`` release, the release branch is called ``releases/vX.Y``.
cherry-picked all the commits in order. This generally means there For ``v0.15.1``, you would check out ``releases/v0.15``:
is some other intervening pull request that the one you're trying
to pick depends on. In these cases, you'll need to make a judgment
call regarding those pull requests. Consider the number of affected
files and/or the resulting differences.
1. If the changes are small, you might just cherry-pick it. .. code-block:: console
2. If the changes are large, then you may decide that this fix is not $ git checkout releases/v0.15
worth including in a patch release, in which case you should remove
the label from the pull request. Remember that large, manual backports
are seldom the right choice for a patch release.
When all commits are cherry-picked in the ``backports/vX.Y.Z`` branch, make the patch #. If a pull request to the release branch named ``Backports vX.Y.Z`` is not already
release as follows: in the project, create it. This pull request ought to be created as early as
possible when working on a release project, so that we can build the release
commits incrementally, and identify potential conflicts at an early stage.
#. `Create a new label <https://github.com/spack/spack/labels>`_ ``vX.Y.{Z+1}`` for the next patch #. Cherry-pick each pull request in the ``Done`` column of the release
release. project board onto the ``Backports vX.Y.Z`` pull request.
#. Replace the label ``vX.Y.Z`` with ``vX.Y.{Z+1}`` for all PRs and issues that are not done. This is **usually** fairly simple since we squash the commits from the
vast majority of pull requests. That means there is only one commit
per pull request to cherry-pick. For example, `this pull request
<https://github.com/spack/spack/pull/15777>`_ has three commits, but
they were squashed into a single commit on merge. You can see the
commit that was created here:
#. Manually push a single commit with commit message ``Set version to vX.Y.Z`` to the .. image:: images/pr-commit.png
``backports/vX.Y.Z`` branch, that both bumps the Spack version number and updates the changelog:
You can easily cherry pick it like this (assuming you already have the
release branch checked out):
.. code-block:: console
$ git cherry-pick 7e46da7
For pull requests that were rebased (or not squashed), you'll need to
cherry-pick each associated commit individually.
.. warning::
It is important to cherry-pick commits in the order they happened,
otherwise you can get conflicts while cherry-picking. When
cherry-picking look at the merge date,
**not** the number of the pull request or the date it was opened.
Sometimes you may **still** get merge conflicts even if you have
cherry-picked all the commits in order. This generally means there
is some other intervening pull request that the one you're trying
to pick depends on. In these cases, you'll need to make a judgment
call regarding those pull requests. Consider the number of affected
files and or the resulting differences.
1. If the dependency changes are small, you might just cherry-pick it,
too. If you do this, add the task to the release board.
2. If the changes are large, then you may decide that this fix is not
worth including in a point release, in which case you should remove
the task from the release project.
3. You can always decide to manually back-port the fix to the release
branch if neither of the above options makes sense, but this can
require a lot of work. It's seldom the right choice.
#. When all the commits from the project board are cherry-picked into
the ``Backports vX.Y.Z`` pull request, you can push a commit to:
1. Bump the version in ``lib/spack/spack/__init__.py``. 1. Bump the version in ``lib/spack/spack/__init__.py``.
2. Update ``CHANGELOG.md`` with a list of the changes. 2. Update ``CHANGELOG.md`` with a list of the changes.
@@ -933,22 +954,20 @@ release as follows:
release branch. See `the changelog from 0.14.1 release branch. See `the changelog from 0.14.1
<https://github.com/spack/spack/commit/ff0abb9838121522321df2a054d18e54b566b44a>`_. <https://github.com/spack/spack/commit/ff0abb9838121522321df2a054d18e54b566b44a>`_.
#. Make sure CI passes on the **backports pull request**, including: #. Merge the ``Backports vX.Y.Z`` PR with the **Rebase and merge** strategy. This
is needed to keep track in the release branch of all the commits that were
cherry-picked.
#. Make sure CI passes on the release branch, including:
* Regular unit tests * Regular unit tests
* Build tests * Build tests
* The E4S pipeline at `gitlab.spack.io <https://gitlab.spack.io>`_ * The E4S pipeline at `gitlab.spack.io <https://gitlab.spack.io>`_
#. Merge the ``Backports vX.Y.Z`` PR with the **Rebase and merge** strategy. This If CI does not pass, you'll need to figure out why, and make changes
is needed to keep track in the release branch of all the commits that were to the release branch until it does. You can make more commits, modify
cherry-picked. or remove cherry-picked commits, or cherry-pick **more** from
``develop`` to make this happen.
#. Make sure CI passes on the last commit of the **release branch**.
#. In the rare case you need to include additional commits in the patch release after the backports
PR is merged, it is best to delete the last commit ``Set version to vX.Y.Z`` from the release
branch with a single force push, open a new backports PR named ``Backports vX.Y.Z (2)``, and
repeat the process. Avoid repeated force pushes to the release branch.
#. Follow the steps in :ref:`publishing-releases`. #. Follow the steps in :ref:`publishing-releases`.
@@ -1023,31 +1042,25 @@ Updating `releases/latest`
If the new release is the **highest** Spack release yet, you should If the new release is the **highest** Spack release yet, you should
also tag it as ``releases/latest``. For example, suppose the highest also tag it as ``releases/latest``. For example, suppose the highest
release is currently ``0.22.3``: release is currently ``0.15.3``:
* If you are releasing ``0.22.4`` or ``0.23.0``, then you should tag * If you are releasing ``0.15.4`` or ``0.16.0``, then you should tag
it with ``releases/latest``, as these are higher than ``0.22.3``. it with ``releases/latest``, as these are higher than ``0.15.3``.
* If you are making a new release of an **older** major version of * If you are making a new release of an **older** major version of
Spack, e.g. ``0.21.4``, then you should not tag it as Spack, e.g. ``0.14.4``, then you should not tag it as
``releases/latest`` (as there are newer major versions). ``releases/latest`` (as there are newer major versions).
To do so, first fetch the latest tag created on GitHub, since you may not have it locally: To tag ``releases/latest``, do this:
.. code-block:: console .. code-block:: console
$ git fetch --force git@github.com:spack/spack vX.Y.Z $ git checkout releases/vX.Y # vX.Y is the new release's branch
$ git tag --force releases/latest
$ git push --force --tags
Then tag ``vX.Y.Z`` as ``releases/latest`` and push the individual tag to GitHub. The ``--force`` argument to ``git tag`` makes ``git`` overwrite the existing
``releases/latest`` tag with the new one.
.. code-block:: console
$ git tag --force releases/latest vX.Y.Z
$ git push --force git@github.com:spack/spack releases/latest
The ``--force`` argument to ``git tag`` makes ``git`` overwrite the existing ``releases/latest``
tag with the new one. Do **not** use the ``--tags`` flag when pushing, since this will push *all*
local tags.
.. _announcing-releases: .. _announcing-releases:


@@ -142,8 +142,12 @@ user's prompt to begin with the environment name in brackets.
$ spack env activate -p myenv $ spack env activate -p myenv
[myenv] $ ... [myenv] $ ...
The ``activate`` command can also be used to create a new environment if it does not already The ``activate`` command can also be used to create a new environment, if it is
exist. not already defined, by adding the ``--create`` flag. Managed and anonymous
environments (anonymous environments are explained in the next section)
can both be created using the same flags that ``spack env create`` accepts.
If an environment already exists, then Spack will simply activate it and ignore the
create-specific flags.
.. code-block:: console .. code-block:: console
@@ -172,36 +176,21 @@ environment will remove the view from the user environment.
Anonymous Environments Anonymous Environments
^^^^^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^^^^^
Apart from managed environments, Spack also supports anonymous environments. Any directory can be treated as an environment if it contains a file
``spack.yaml``. To load an anonymous environment, use:
Anonymous environments can be placed in any directory of choice.
.. note::
When uninstalling packages, Spack asks the user to confirm the removal of packages
that are still used in a managed environment. This is not the case for anonymous
environments.
To create an anonymous environment, use one of the following commands:
.. code-block:: console .. code-block:: console
$ spack env create --dir my_env $ spack env activate -d /path/to/directory
$ spack env create ./my_env
As a shorthand, you can also create an anonymous environment upon activation if it does not Anonymous specs can be created in place using the command:
already exist:
.. code-block:: console .. code-block:: console
$ spack env activate --create ./my_env $ spack env create -d .
For convenience, Spack can also place an anonymous environment in a temporary directory for you:
.. code-block:: console
$ spack env activate --temp
In this case Spack simply creates a ``spack.yaml`` file in the requested
directory.
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Environment Sensitive Commands Environment Sensitive Commands
@@ -460,125 +449,6 @@ Sourcing that file in Bash will make the environment available to the
user; and can be included in ``.bashrc`` files, etc. The ``loads`` user; and can be included in ``.bashrc`` files, etc. The ``loads``
file may also be copied out of the environment, renamed, etc. file may also be copied out of the environment, renamed, etc.
.. _environment_include_concrete:
------------------------------
Included Concrete Environments
------------------------------
Spack can create an environment based on information from already
established environments. You can think of it as a combination of existing
environments: it gathers information from the existing environment's
``spack.lock`` and uses that during the creation of the included concrete
environment. When an included concrete environment is created, it generates
a ``spack.lock`` file for the newly created environment.
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Creating included environments
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
To create a combined concrete environment, you must have at least one existing
concrete environment. You will use the command ``spack env create`` with the
argument ``--include-concrete`` followed by the name or path of the environment
you'd like to include. Here is an example of how to create a combined environment
from the command line.
.. code-block:: console
$ spack env create myenv
$ spack -e myenv add python
$ spack -e myenv concretize
$ spack env create --include-concrete myenv included_env
You can also include an environment directly in the ``spack.yaml`` file. It
involves adding the ``include_concrete`` heading in the yaml followed by the
absolute paths to the independent environments.
.. code-block:: yaml
spack:
specs: []
concretizer:
unify: true
include_concrete:
- /absolute/path/to/environment1
- /absolute/path/to/environment2
Once the ``spack.yaml`` has been updated you must concretize the environment to
get the concrete specs from the included environments.
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Updating an included environment
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
If changes were made to the base environment and you want them reflected in the
included environment, you will need to reconcretize both the base environment and the
included environment for the change to take effect. For example:
.. code-block:: console
$ spack env create myenv
$ spack -e myenv add python
$ spack -e myenv concretize
$ spack env create --include-concrete myenv included_env
$ spack -e myenv find
==> In environment myenv
==> Root specs
python
==> 0 installed packages
$ spack -e included_env find
==> In environment included_env
==> No root specs
==> Included specs
python
==> 0 installed packages
Here we see that ``included_env`` has access to the python package through
the ``myenv`` environment. But if we were to add another spec to ``myenv``,
``included_env`` would not be able to access the new information.
.. code-block:: console
$ spack -e myenv add perl
$ spack -e myenv concretize
$ spack -e myenv find
==> In environment myenv
==> Root specs
perl python
==> 0 installed packages
$ spack -e included_env find
==> In environment included_env
==> No root specs
==> Included specs
python
==> 0 installed packages
It isn't until you run the ``spack concretize`` command that the combined
environment will get the updated information from the reconcretized base environment.
.. code-block:: console
$ spack -e included_env concretize
$ spack -e included_env find
==> In environment included_env
==> No root specs
==> Included specs
perl python
==> 0 installed packages
.. _environment-configuration: .. _environment-configuration:
------------------------ ------------------------
@@ -930,7 +800,6 @@ For example, the following environment has three root packages:
This allows for a much-needed reduction in redundancy between packages This allows for a much-needed reduction in redundancy between packages
and constraints. and constraints.
---------------- ----------------
Filesystem Views Filesystem Views
---------------- ----------------
@@ -1164,7 +1033,7 @@ other targets to depend on the environment installation.
A typical workflow is as follows: A typical workflow is as follows:
.. code-block:: console .. code:: console
spack env create -d . spack env create -d .
spack -e . add perl spack -e . add perl
@@ -1257,7 +1126,7 @@ its dependencies. This can be useful when certain flags should only apply to
dependencies. Below we show a use case where a spec is installed with verbose dependencies. Below we show a use case where a spec is installed with verbose
output (``spack install --verbose``) while its dependencies are installed silently: output (``spack install --verbose``) while its dependencies are installed silently:
.. code-block:: console .. code:: console
$ spack env depfile -o Makefile $ spack env depfile -o Makefile
@@ -1279,7 +1148,7 @@ This can be accomplished through the generated ``[<prefix>/]SPACK_PACKAGE_IDS``
variable. Assuming we have an active and concrete environment, we generate the variable. Assuming we have an active and concrete environment, we generate the
associated ``Makefile`` with a prefix ``example``: associated ``Makefile`` with a prefix ``example``:
.. code-block:: console .. code:: console
$ spack env depfile -o env.mk --make-prefix example $ spack env depfile -o env.mk --make-prefix example


@@ -35,7 +35,7 @@ A build matrix showing which packages are working on which systems is shown belo
.. code-block:: console .. code-block:: console
apt update apt update
apt install bzip2 ca-certificates file g++ gcc gfortran git gzip lsb-release patch python3 tar unzip xz-utils zstd apt install build-essential ca-certificates coreutils curl environment-modules gfortran git gpg lsb-release python3 python3-distutils python3-venv unzip zip
.. tab-item:: RHEL .. tab-item:: RHEL
@@ -43,14 +43,14 @@ A build matrix showing which packages are working on which systems is shown belo
dnf install epel-release dnf install epel-release
dnf group install "Development Tools" dnf group install "Development Tools"
dnf install gcc-gfortran redhat-lsb-core python3 unzip dnf install curl findutils gcc-gfortran gnupg2 hostname iproute redhat-lsb-core python3 python3-pip python3-setuptools unzip python3-boto3
.. tab-item:: macOS Brew .. tab-item:: macOS Brew
.. code-block:: console .. code-block:: console
brew update brew update
brew install gcc git zip brew install curl gcc git gnupg zip
------------ ------------
Installation Installation
@@ -478,13 +478,6 @@ prefix, you can add them to the ``extra_attributes`` field. Similarly,
all other fields from the compilers config can be added to the all other fields from the compilers config can be added to the
``extra_attributes`` field for an external representing a compiler. ``extra_attributes`` field for an external representing a compiler.
Note that the format for the ``paths`` field in the
``extra_attributes`` section is different from that in the ``compilers``
config. For compilers configured as external packages, the section is
named ``compilers`` and the dictionary maps language names (``c``,
``cxx``, ``fortran``) to paths, rather than using the names ``cc``,
``fc``, and ``f77``.
.. code-block:: yaml .. code-block:: yaml
packages: packages:
@@ -500,10 +493,11 @@ named ``compilers`` and the dictionary maps language names (``c``,
- spec: llvm+clang@15.0.0 arch=linux-rhel8-skylake - spec: llvm+clang@15.0.0 arch=linux-rhel8-skylake
prefix: /usr prefix: /usr
extra_attributes: extra_attributes:
compilers: paths:
c: /usr/bin/clang-with-suffix cc: /usr/bin/clang-with-suffix
cxx: /usr/bin/clang++-with-extra-info cxx: /usr/bin/clang++-with-extra-info
fortran: /usr/bin/gfortran fc: /usr/bin/gfortran
f77: /usr/bin/gfortran
extra_rpaths: extra_rpaths:
- /usr/lib/llvm/ - /usr/lib/llvm/
@@ -1578,8 +1572,6 @@ Microsoft Visual Studio
""""""""""""""""""""""" """""""""""""""""""""""
Microsoft Visual Studio provides the only Windows C/C++ compiler that is currently supported by Spack. Microsoft Visual Studio provides the only Windows C/C++ compiler that is currently supported by Spack.
Spack additionally requires the Windows SDK (including WGL) to be installed as part of your
Visual Studio installation, as it is required to build many packages from source.
We require several specific components to be included in the Visual Studio installation. We require several specific components to be included in the Visual Studio installation.
One is the C/C++ toolset, which can be selected as "Desktop development with C++" or "C++ build tools," One is the C/C++ toolset, which can be selected as "Desktop development with C++" or "C++ build tools,"
@@ -1587,7 +1579,6 @@ depending on installation type (Professional, Build Tools, etc.) The other requ
"C++ CMake tools for Windows," which can be selected from among the optional packages. "C++ CMake tools for Windows," which can be selected from among the optional packages.
This provides CMake and Ninja for use during Spack configuration. This provides CMake and Ninja for use during Spack configuration.
If you already have Visual Studio installed, you can make sure these components are installed by If you already have Visual Studio installed, you can make sure these components are installed by
rerunning the installer. Next to your installation, select "Modify" and look at the rerunning the installer. Next to your installation, select "Modify" and look at the
"Installation details" pane on the right. "Installation details" pane on the right.

(Two binary image files, 44 KiB and 68 KiB, not shown.)

@@ -12,6 +12,10 @@
Spack Spack
=================== ===================
.. epigraph::
`These are docs for the Spack package manager. For sphere packing, see` `pyspack <https://pyspack.readthedocs.io>`_.
Spack is a package management tool designed to support multiple Spack is a package management tool designed to support multiple
versions and configurations of software on a wide variety of platforms versions and configurations of software on a wide variety of platforms
and environments. It was designed for large supercomputing centers, and environments. It was designed for large supercomputing centers,

View File

@@ -2442,14 +2442,15 @@ with. For example, suppose that in the ``libdwarf`` package you write:
depends_on("libelf@0.8") depends_on("libelf@0.8")
Now ``libdwarf`` will require ``libelf`` in the range ``0.8``, which Now ``libdwarf`` will require ``libelf`` at *exactly* version ``0.8``.
includes patch versions ``0.8.1``, ``0.8.2``, etc. Apart from version You can also specify a requirement for a particular variant or for
restrictions, you can also specify variants if this package requires specific compiler flags:
optional features of the dependency.
.. code-block:: python .. code-block:: python
depends_on("libelf@0.8 +parser +pic") depends_on("libelf@0.8+debug")
depends_on("libelf debug=True")
depends_on("libelf cppflags='-fPIC'")
Both users *and* package authors can use the same spec syntax to refer Both users *and* package authors can use the same spec syntax to refer
to different package configurations. Users use the spec syntax on the to different package configurations. Users use the spec syntax on the
@@ -2457,82 +2458,46 @@ command line to find installed packages or to install packages with
particular constraints, and package authors can use specs to describe particular constraints, and package authors can use specs to describe
relationships between packages. relationships between packages.
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^
Specifying backward and forward compatibility Version ranges
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^
Packages are often compatible with a range of versions of their Although some packages require a specific version for their dependencies,
dependencies. This is typically referred to as backward and forward most can be built with a range of versions. For example, if you are
compatibility. Spack allows you to specify this in the ``depends_on`` writing a package for a legacy Python module that only works with Python
directive using version ranges. 2.4 through 2.6, this would look like:
**Backwards compatibility** means that the package requires at least a
certain version of its dependency:
.. code-block:: python .. code-block:: python
depends_on("python@3.10:") depends_on("python@2.4:2.6")
In this case, the package requires Python 3.10 or newer. Version ranges in Spack are *inclusive*, so ``2.4:2.6`` means any version
greater than or equal to ``2.4`` and up to and including any ``2.6.x``. If
Commonly, packages drop support for older versions of a dependency as you want to specify that a package works with any version of Python 3 (or
they release new versions. In Spack you can conveniently add every higher), this would look like:
backward compatibility rule as a separate line:
.. code-block:: python .. code-block:: python
# backward compatibility with Python depends_on("python@3:")
depends_on("python@3.8:")
depends_on("python@3.9:", when="@1.2:")
depends_on("python@3.10:", when="@1.4:")
This means that in general we need Python 3.8 or newer; from version Here we leave out the upper bound. If you want to say that a package
1.2 onwards we need Python 3.9 or newer; from version 1.4 onwards we requires Python 2, you can similarly leave out the lower bound:
need Python 3.10 or newer. Notice that it's fine to have overlapping
ranges in the ``when`` clauses.
**Forward compatibility** means that the package requires at most a
certain version of its dependency. Forward compatibility rules are
necessary when there are breaking changes in the dependency that the
package cannot handle. In Spack we often add forward compatibility
bounds only at the time a new, breaking version of a dependency is
released. As with backward compatibility, it is typical to see a list
of forward compatibility bounds in a package file as separate lines:
.. code-block:: python .. code-block:: python
# forward compatibility with Python depends_on("python@:2")
depends_on("python@:3.12", when="@:1.10")
depends_on("python@:3.13", when="@:1.12")
Notice how the ``:`` now appears before the version number both in the Notice that we didn't use ``@:3``. Version ranges are *inclusive*, so
dependency and in the ``when`` clause. This tells Spack that in general ``@:3`` means "up to and including any 3.x version".
we need Python 3.13 or older up to version ``1.12.x``, and up to version
``1.10.x`` we need Python 3.12 or older. Said differently, forward compatibility
with Python 3.13 was added in version 1.11, while version 1.13 added forward
compatibility with Python 3.14.
Notice that a version range ``@:3.12`` includes *any* patch version You can also simply write
number ``3.12.x``, which is often useful when specifying forward compatibility
bounds.
So far we have seen open-ended version ranges, which is by far the most
common use case. It is also possible to specify both a lower and an upper bound
on the version of a dependency, like this:
.. code-block:: python .. code-block:: python
depends_on("python@3.10:3.12") depends_on("python@2.7")
There is a short syntax to specify that a package is compatible with, say, any to tell Spack that the package needs Python 2.7.x. This is equivalent to
``3.x`` version: ``@2.7:2.7``.
.. code-block:: python
depends_on("python@3")
The above is equivalent to ``depends_on("python@3:3")``, which means at least
Python version 3 and at most any version ``3.x.y``.
In very rare cases, you may need to specify an exact version, for example In very rare cases, you may need to specify an exact version, for example
if you need to distinguish between ``3.2`` and ``3.2.1``: if you need to distinguish between ``3.2`` and ``3.2.1``:
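A minimal sketch of such a pin, using the ``@=`` exact-version syntax:

.. code-block:: python

   depends_on("python@=3.2")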


@@ -6,8 +6,8 @@ python-levenshtein==0.25.1
docutils==0.20.1 docutils==0.20.1
pygments==2.17.2 pygments==2.17.2
urllib3==2.2.1 urllib3==2.2.1
pytest==8.2.0 pytest==8.1.1
isort==5.13.2 isort==5.13.2
black==24.4.2 black==24.4.0
flake8==7.0.0 flake8==7.0.0
mypy==1.10.0 mypy==1.9.0


@@ -18,7 +18,7 @@
* Homepage: https://pypi.python.org/pypi/archspec * Homepage: https://pypi.python.org/pypi/archspec
* Usage: Labeling, comparison and detection of microarchitectures * Usage: Labeling, comparison and detection of microarchitectures
* Version: 0.2.5 (commit 38ce485258ffc4fc6dd6688f8dc90cb269478c47) * Version: 0.2.3 (commit 7b8fe60b69e2861e7dac104bc1c183decfcd3daf)
astunparse astunparse
---------------- ----------------


@@ -1,3 +1,3 @@
"""Init file to avoid namespace packages""" """Init file to avoid namespace packages"""
__version__ = "0.2.4" __version__ = "0.2.3"


@@ -5,10 +5,9 @@
"""The "cpu" package permits to query and compare different """The "cpu" package permits to query and compare different
CPU microarchitectures. CPU microarchitectures.
""" """
from .detect import brand_string, host from .detect import host
from .microarchitecture import ( from .microarchitecture import (
TARGETS, TARGETS,
InvalidCompilerVersion,
Microarchitecture, Microarchitecture,
UnsupportedMicroarchitecture, UnsupportedMicroarchitecture,
generic_microarchitecture, generic_microarchitecture,
@@ -16,12 +15,10 @@
) )
__all__ = [ __all__ = [
"brand_string",
"host",
"TARGETS",
"InvalidCompilerVersion",
"Microarchitecture", "Microarchitecture",
"UnsupportedMicroarchitecture", "UnsupportedMicroarchitecture",
"TARGETS",
"generic_microarchitecture", "generic_microarchitecture",
"host",
"version_components", "version_components",
] ]
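# Usage sketch for the exports above (brand_string is the new addition and
# may return None on platforms where detection is unsupported):
import archspec.cpu

uarch = archspec.cpu.host()          # detect the host microarchitecture
print(uarch.name, uarch.family)      # name plus its architecture family
print(archspec.cpu.brand_string())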


@@ -47,11 +47,7 @@ def decorator(factory):
def partial_uarch( def partial_uarch(
name: str = "", name: str = "", vendor: str = "", features: Optional[Set[str]] = None, generation: int = 0
vendor: str = "",
features: Optional[Set[str]] = None,
generation: int = 0,
cpu_part: str = "",
) -> Microarchitecture: ) -> Microarchitecture:
"""Construct a partial microarchitecture, from information gathered during system scan.""" """Construct a partial microarchitecture, from information gathered during system scan."""
return Microarchitecture( return Microarchitecture(
@@ -61,7 +57,6 @@ def partial_uarch(
features=features or set(), features=features or set(),
compilers={}, compilers={},
generation=generation, generation=generation,
cpu_part=cpu_part,
) )
@@ -95,7 +90,6 @@ def proc_cpuinfo() -> Microarchitecture:
return partial_uarch( return partial_uarch(
vendor=_canonicalize_aarch64_vendor(data), vendor=_canonicalize_aarch64_vendor(data),
features=_feature_set(data, key="Features"), features=_feature_set(data, key="Features"),
cpu_part=data.get("CPU part", ""),
) )
if architecture in (PPC64LE, PPC64): if architecture in (PPC64LE, PPC64):
@@ -161,31 +155,6 @@ def _is_bit_set(self, register: int, bit: int) -> bool:
mask = 1 << bit mask = 1 << bit
return register & mask > 0 return register & mask > 0
def brand_string(self) -> Optional[str]:
"""Returns the brand string, if available."""
if self.highest_extension_support < 0x80000004:
return None
r1 = self.cpuid.registers_for(eax=0x80000002, ecx=0)
r2 = self.cpuid.registers_for(eax=0x80000003, ecx=0)
r3 = self.cpuid.registers_for(eax=0x80000004, ecx=0)
result = struct.pack(
"IIIIIIIIIIII",
r1.eax,
r1.ebx,
r1.ecx,
r1.edx,
r2.eax,
r2.ebx,
r2.ecx,
r2.edx,
r3.eax,
r3.ebx,
r3.ecx,
r3.edx,
).decode("utf-8")
return result.strip("\x00")
@detection(operating_system="Windows") @detection(operating_system="Windows")
def cpuid_info(): def cpuid_info():
@@ -205,8 +174,8 @@ def _check_output(args, env):
WINDOWS_MAPPING = { WINDOWS_MAPPING = {
"AMD64": X86_64, "AMD64": "x86_64",
"ARM64": AARCH64, "ARM64": "aarch64",
} }
@@ -351,10 +320,6 @@ def sorting_fn(item):
generic_candidates = [c for c in candidates if c.vendor == "generic"] generic_candidates = [c for c in candidates if c.vendor == "generic"]
best_generic = max(generic_candidates, key=sorting_fn) best_generic = max(generic_candidates, key=sorting_fn)
# Relevant for AArch64. Filter on "cpu_part" if we have any match
if info.cpu_part != "" and any(c for c in candidates if info.cpu_part == c.cpu_part):
candidates = [c for c in candidates if info.cpu_part == c.cpu_part]
# Filter the candidates to be descendant of the best generic candidate. # Filter the candidates to be descendant of the best generic candidate.
# This is to avoid that the lack of a niche feature that can be disabled # This is to avoid that the lack of a niche feature that can be disabled
# from e.g. BIOS prevents detection of a reasonably performant architecture # from e.g. BIOS prevents detection of a reasonably performant architecture
@@ -444,16 +409,3 @@ def compatibility_check_for_riscv64(info, target):
return (target == arch_root or arch_root in target.ancestors) and ( return (target == arch_root or arch_root in target.ancestors) and (
target.name == info.name or target.vendor == "generic" target.name == info.name or target.vendor == "generic"
) )
def brand_string() -> Optional[str]:
"""Returns the brand string of the host, if detected, or None."""
if platform.system() == "Darwin":
return _check_output(
["sysctl", "-n", "machdep.cpu.brand_string"], env=_ensure_bin_usrbin_in_path()
).strip()
if host().family == X86_64:
return CpuidInfoCollector().brand_string()
return None


@@ -2,7 +2,9 @@
# Archspec Project Developers. See the top-level COPYRIGHT file for details. # Archspec Project Developers. See the top-level COPYRIGHT file for details.
# #
# SPDX-License-Identifier: (Apache-2.0 OR MIT) # SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Types and functions to manage information on CPU microarchitectures.""" """Types and functions to manage information
on CPU microarchitectures.
"""
import functools import functools
import platform import platform
import re import re
@@ -63,31 +65,23 @@ class Microarchitecture:
passed in as argument above. passed in as argument above.
* versions: versions that support this micro-architecture. * versions: versions that support this micro-architecture.
generation (int): generation of the micro-architecture, if relevant. generation (int): generation of the micro-architecture, if
cpu_part (str): cpu part of the architecture, if relevant. relevant.
""" """
# pylint: disable=too-many-arguments,too-many-instance-attributes # pylint: disable=too-many-arguments
#: Aliases for micro-architecture's features #: Aliases for micro-architecture's features
feature_aliases = FEATURE_ALIASES feature_aliases = FEATURE_ALIASES
def __init__(self, name, parents, vendor, features, compilers, generation=0, cpu_part=""): def __init__(self, name, parents, vendor, features, compilers, generation=0):
self.name = name self.name = name
self.parents = parents self.parents = parents
self.vendor = vendor self.vendor = vendor
self.features = features self.features = features
self.compilers = compilers self.compilers = compilers
# Only relevant for PowerPC
self.generation = generation self.generation = generation
# Only relevant for AArch64 # Cache the ancestor computation
self.cpu_part = cpu_part
# Cache the "ancestor" computation
self._ancestors = None self._ancestors = None
# Cache the "generic" computation
self._generic = None
# Cache the "family" computation
self._family = None
@property @property
def ancestors(self): def ancestors(self):
@@ -117,12 +111,8 @@ def __eq__(self, other):
and self.parents == other.parents # avoid ancestors here and self.parents == other.parents # avoid ancestors here
and self.compilers == other.compilers and self.compilers == other.compilers
and self.generation == other.generation and self.generation == other.generation
and self.cpu_part == other.cpu_part
) )
def __hash__(self):
return hash(self.name)
@coerce_target_names @coerce_target_names
def __ne__(self, other): def __ne__(self, other):
return not self == other return not self == other
@@ -153,8 +143,7 @@ def __repr__(self):
cls_name = self.__class__.__name__ cls_name = self.__class__.__name__
fmt = ( fmt = (
cls_name + "({0.name!r}, {0.parents!r}, {0.vendor!r}, " cls_name + "({0.name!r}, {0.parents!r}, {0.vendor!r}, "
"{0.features!r}, {0.compilers!r}, generation={0.generation!r}, " "{0.features!r}, {0.compilers!r}, {0.generation!r})"
"cpu_part={0.cpu_part!r})"
) )
return fmt.format(self) return fmt.format(self)
@@ -179,22 +168,18 @@ def __contains__(self, feature):
@property @property
def family(self): def family(self):
"""Returns the architecture family a given target belongs to""" """Returns the architecture family a given target belongs to"""
if self._family is None: roots = [x for x in [self] + self.ancestors if not x.ancestors]
roots = [x for x in [self] + self.ancestors if not x.ancestors] msg = "a target is expected to belong to just one architecture family"
msg = "a target is expected to belong to just one architecture family" msg += f"[found {', '.join(str(x) for x in roots)}]"
msg += f"[found {', '.join(str(x) for x in roots)}]" assert len(roots) == 1, msg
assert len(roots) == 1, msg
self._family = roots.pop()
return self._family return roots.pop()
@property @property
def generic(self): def generic(self):
"""Returns the best generic architecture that is compatible with self""" """Returns the best generic architecture that is compatible with self"""
if self._generic is None: generics = [x for x in [self] + self.ancestors if x.vendor == "generic"]
generics = [x for x in [self] + self.ancestors if x.vendor == "generic"] return max(generics, key=lambda x: len(x.ancestors))
self._generic = max(generics, key=lambda x: len(x.ancestors))
return self._generic
def to_dict(self): def to_dict(self):
"""Returns a dictionary representation of this object.""" """Returns a dictionary representation of this object."""
@@ -205,7 +190,6 @@ def to_dict(self):
"generation": self.generation, "generation": self.generation,
"parents": [str(x) for x in self.parents], "parents": [str(x) for x in self.parents],
"compilers": self.compilers, "compilers": self.compilers,
"cpupart": self.cpu_part,
} }
@staticmethod @staticmethod
@@ -218,15 +202,12 @@ def from_dict(data) -> "Microarchitecture":
features=set(data["features"]), features=set(data["features"]),
compilers=data.get("compilers", {}), compilers=data.get("compilers", {}),
generation=data.get("generation", 0), generation=data.get("generation", 0),
cpu_part=data.get("cpupart", ""),
) )
def optimization_flags(self, compiler, version): def optimization_flags(self, compiler, version):
"""Returns a string containing the optimization flags that needs """Returns a string containing the optimization flags that needs
to be used to produce code optimized for this micro-architecture. to be used to produce code optimized for this micro-architecture.
The version is expected to be a string of dot separated digits.
If there is no information on the compiler passed as argument the If there is no information on the compiler passed as argument the
function returns an empty string. If it is known that the compiler function returns an empty string. If it is known that the compiler
version we want to use does not support this architecture the function version we want to use does not support this architecture the function
@@ -235,11 +216,6 @@ def optimization_flags(self, compiler, version):
Args: Args:
compiler (str): name of the compiler to be used compiler (str): name of the compiler to be used
version (str): version of the compiler to be used version (str): version of the compiler to be used
Raises:
UnsupportedMicroarchitecture: if the requested compiler does not support
this micro-architecture.
ValueError: if the version doesn't match the expected format
""" """
# If we don't have information on compiler at all return an empty string # If we don't have information on compiler at all return an empty string
if compiler not in self.family.compilers: if compiler not in self.family.compilers:
@@ -256,14 +232,6 @@ def optimization_flags(self, compiler, version):
msg = msg.format(compiler, best_target, best_target.family) msg = msg.format(compiler, best_target, best_target.family)
raise UnsupportedMicroarchitecture(msg) raise UnsupportedMicroarchitecture(msg)
# Check that the version matches the expected format
if not re.match(r"^(?:\d+\.)*\d+$", version):
msg = (
"invalid format for the compiler version argument. "
"Only dot separated digits are allowed."
)
raise InvalidCompilerVersion(msg)
# If we have information on this compiler we need to check the # If we have information on this compiler we need to check the
# version being used # version being used
compiler_info = self.compilers[compiler] compiler_info = self.compilers[compiler]
@@ -324,7 +292,7 @@ def generic_microarchitecture(name):
Args: Args:
name (str): name of the micro-architecture name (str): name of the micro-architecture
""" """
return Microarchitecture(name, parents=[], vendor="generic", features=set(), compilers={}) return Microarchitecture(name, parents=[], vendor="generic", features=[], compilers={})
def version_components(version): def version_components(version):
@@ -377,11 +345,8 @@ def fill_target_from_dict(name, data, targets):
features = set(values["features"]) features = set(values["features"])
compilers = values.get("compilers", {}) compilers = values.get("compilers", {})
generation = values.get("generation", 0) generation = values.get("generation", 0)
cpu_part = values.get("cpupart", "")
targets[name] = Microarchitecture( targets[name] = Microarchitecture(name, parents, vendor, features, compilers, generation)
name, parents, vendor, features, compilers, generation=generation, cpu_part=cpu_part
)
known_targets = {} known_targets = {}
data = archspec.cpu.schema.TARGETS_JSON["microarchitectures"] data = archspec.cpu.schema.TARGETS_JSON["microarchitectures"]
@@ -402,15 +367,7 @@ def fill_target_from_dict(name, data, targets):
TARGETS = LazyDictionary(_known_microarchitectures) TARGETS = LazyDictionary(_known_microarchitectures)
class ArchspecError(Exception): class UnsupportedMicroarchitecture(ValueError):
"""Base class for errors within archspec"""
class UnsupportedMicroarchitecture(ArchspecError, ValueError):
"""Raised if a compiler version does not support optimization for a given """Raised if a compiler version does not support optimization for a given
micro-architecture. micro-architecture.
""" """
class InvalidCompilerVersion(ArchspecError, ValueError):
"""Raised when an invalid format is used for compiler versions in archspec."""

View File

@@ -1482,6 +1482,7 @@
"cldemote", "cldemote",
"movdir64b", "movdir64b",
"movdiri", "movdiri",
"pdcm",
"serialize", "serialize",
"waitpkg" "waitpkg"
], ],
@@ -2224,96 +2225,14 @@
], ],
"nvhpc": [ "nvhpc": [
{ {
"versions": "21.11:23.8", "versions": "21.11:",
"name": "zen3", "name": "zen3",
"flags": "-tp {name}", "flags": "-tp {name}",
"warnings": "zen4 is not fully supported by nvhpc versions < 23.9, falling back to zen3" "warnings": "zen4 is not fully supported by nvhpc yet, falling back to zen3"
},
{
"versions": "23.9:",
"flags": "-tp {name}"
} }
] ]
} }
}, },
"zen5": {
"from": ["zen4"],
"vendor": "AuthenticAMD",
"features": [
"abm",
"aes",
"avx",
"avx2",
"avx512_bf16",
"avx512_bitalg",
"avx512bw",
"avx512cd",
"avx512dq",
"avx512f",
"avx512ifma",
"avx512vbmi",
"avx512_vbmi2",
"avx512vl",
"avx512_vnni",
"avx512_vp2intersect",
"avx512_vpopcntdq",
"avx_vnni",
"bmi1",
"bmi2",
"clflushopt",
"clwb",
"clzero",
"cppc",
"cx16",
"f16c",
"flush_l1d",
"fma",
"fsgsbase",
"gfni",
"ibrs_enhanced",
"mmx",
"movbe",
"movdir64b",
"movdiri",
"pclmulqdq",
"popcnt",
"rdseed",
"sse",
"sse2",
"sse4_1",
"sse4_2",
"sse4a",
"ssse3",
"tsc_adjust",
"vaes",
"vpclmulqdq",
"xsavec",
"xsaveopt"
],
"compilers": {
"gcc": [
{
"versions": "14.1:",
"name": "znver5",
"flags": "-march={name} -mtune={name}"
}
],
"aocc": [
{
"versions": "5.0:",
"name": "znver5",
"flags": "-march={name} -mtune={name}"
}
],
"clang": [
{
"versions": "19.1:",
"name": "znver5",
"flags": "-march={name} -mtune={name}"
}
]
}
},
"ppc64": { "ppc64": {
"from": [], "from": [],
"vendor": "generic", "vendor": "generic",
@@ -2792,8 +2711,7 @@
"flags": "-mcpu=thunderx2t99" "flags": "-mcpu=thunderx2t99"
} }
] ]
}, }
"cpupart": "0x0af"
}, },
"a64fx": { "a64fx": {
"from": ["armv8.2a"], "from": ["armv8.2a"],
@@ -2861,8 +2779,7 @@
"flags": "-march=armv8.2-a+crc+crypto+fp16+sve" "flags": "-march=armv8.2-a+crc+crypto+fp16+sve"
} }
] ]
}, }
"cpupart": "0x001"
}, },
"cortex_a72": { "cortex_a72": {
"from": ["aarch64"], "from": ["aarch64"],
@@ -2899,8 +2816,7 @@
"flags" : "-mcpu=cortex-a72" "flags" : "-mcpu=cortex-a72"
} }
] ]
}, }
"cpupart": "0xd08"
}, },
"neoverse_n1": { "neoverse_n1": {
"from": ["cortex_a72", "armv8.2a"], "from": ["cortex_a72", "armv8.2a"],
@@ -2921,7 +2837,8 @@
"asimdrdm", "asimdrdm",
"lrcpc", "lrcpc",
"dcpop", "dcpop",
"asimddp" "asimddp",
"ssbs"
], ],
"compilers" : { "compilers" : {
"gcc": [ "gcc": [
@@ -2985,8 +2902,7 @@
"flags": "-tp {name}" "flags": "-tp {name}"
} }
] ]
}, }
"cpupart": "0xd0c"
}, },
"neoverse_v1": { "neoverse_v1": {
"from": ["neoverse_n1", "armv8.4a"], "from": ["neoverse_n1", "armv8.4a"],
@@ -3010,6 +2926,8 @@
"lrcpc", "lrcpc",
"dcpop", "dcpop",
"sha3", "sha3",
"sm3",
"sm4",
"asimddp", "asimddp",
"sha512", "sha512",
"sve", "sve",
@@ -3018,6 +2936,9 @@
"uscat", "uscat",
"ilrcpc", "ilrcpc",
"flagm", "flagm",
"ssbs",
"paca",
"pacg",
"dcpodp", "dcpodp",
"svei8mm", "svei8mm",
"svebf16", "svebf16",
@@ -3085,7 +3006,7 @@
}, },
{ {
"versions": "11:", "versions": "11:",
"flags" : "-march=armv8.4-a+sve+fp16+bf16+crypto+i8mm+rng" "flags" : "-march=armv8.4-a+sve+ssbs+fp16+bf16+crypto+i8mm+rng"
}, },
{ {
"versions": "12:", "versions": "12:",
@@ -3109,8 +3030,7 @@
"flags": "-tp {name}" "flags": "-tp {name}"
} }
] ]
}, }
"cpupart": "0xd40"
}, },
"neoverse_v2": { "neoverse_v2": {
"from": ["neoverse_n1", "armv9.0a"], "from": ["neoverse_n1", "armv9.0a"],
@@ -3134,22 +3054,35 @@
"lrcpc", "lrcpc",
"dcpop", "dcpop",
"sha3", "sha3",
"sm3",
"sm4",
"asimddp", "asimddp",
"sha512", "sha512",
"sve", "sve",
"asimdfhm", "asimdfhm",
"dit",
"uscat", "uscat",
"ilrcpc", "ilrcpc",
"flagm", "flagm",
"ssbs",
"sb", "sb",
"paca",
"pacg",
"dcpodp", "dcpodp",
"sve2", "sve2",
"sveaes",
"svepmull",
"svebitperm",
"svesha3",
"svesm4",
"flagm2", "flagm2",
"frint", "frint",
"svei8mm", "svei8mm",
"svebf16", "svebf16",
"i8mm", "i8mm",
"bf16" "bf16",
"dgh",
"bti"
], ],
"compilers" : { "compilers" : {
"gcc": [ "gcc": [
@@ -3174,19 +3107,15 @@
"flags" : "-march=armv8.5-a+sve -mtune=cortex-a76" "flags" : "-march=armv8.5-a+sve -mtune=cortex-a76"
}, },
{ {
"versions": "10.0:11.3.99", "versions": "10.0:11.99",
"flags" : "-march=armv8.5-a+sve+sve2+i8mm+bf16 -mtune=cortex-a77" "flags" : "-march=armv8.5-a+sve+sve2+i8mm+bf16 -mtune=cortex-a77"
}, },
{
"versions": "11.4:11.99",
"flags" : "-mcpu=neoverse-v2"
},
{ {
"versions": "12.0:12.2.99", "versions": "12.0:12.99",
"flags" : "-march=armv9-a+i8mm+bf16 -mtune=cortex-a710" "flags" : "-march=armv9-a+i8mm+bf16 -mtune=cortex-a710"
}, },
{ {
"versions": "12.3:", "versions": "13.0:",
"flags" : "-mcpu=neoverse-v2" "flags" : "-mcpu=neoverse-v2"
} }
], ],
@@ -3221,112 +3150,7 @@
"flags": "-tp {name}" "flags": "-tp {name}"
} }
] ]
}, }
"cpupart": "0xd4f"
},
"neoverse_n2": {
"from": ["neoverse_n1", "armv9.0a"],
"vendor": "ARM",
"features": [
"fp",
"asimd",
"evtstrm",
"aes",
"pmull",
"sha1",
"sha2",
"crc32",
"atomics",
"fphp",
"asimdhp",
"cpuid",
"asimdrdm",
"jscvt",
"fcma",
"lrcpc",
"dcpop",
"sha3",
"asimddp",
"sha512",
"sve",
"asimdfhm",
"uscat",
"ilrcpc",
"flagm",
"sb",
"dcpodp",
"sve2",
"flagm2",
"frint",
"svei8mm",
"svebf16",
"i8mm",
"bf16"
],
"compilers" : {
"gcc": [
{
"versions": "4.8:5.99",
"flags": "-march=armv8-a"
},
{
"versions": "6:6.99",
"flags" : "-march=armv8.1-a"
},
{
"versions": "7.0:7.99",
"flags" : "-march=armv8.2-a -mtune=cortex-a72"
},
{
"versions": "8.0:8.99",
"flags" : "-march=armv8.4-a+sve -mtune=cortex-a72"
},
{
"versions": "9.0:9.99",
"flags" : "-march=armv8.5-a+sve -mtune=cortex-a76"
},
{
"versions": "10.0:10.99",
"flags" : "-march=armv8.5-a+sve+sve2+i8mm+bf16 -mtune=cortex-a77"
},
{
"versions": "11.0:",
"flags" : "-mcpu=neoverse-n2"
}
],
"clang" : [
{
"versions": "9.0:10.99",
"flags" : "-march=armv8.5-a+sve"
},
{
"versions": "11.0:13.99",
"flags" : "-march=armv8.5-a+sve+sve2+i8mm+bf16"
},
{
"versions": "14.0:15.99",
"flags" : "-march=armv9-a+i8mm+bf16"
},
{
"versions": "16.0:",
"flags" : "-mcpu=neoverse-n2"
}
],
"arm" : [
{
"versions": "23.04.0:",
"flags" : "-mcpu=neoverse-n2"
}
],
"nvhpc" : [
{
"versions": "23.3:",
"name": "neoverse-n1",
"flags": "-tp {name}"
}
]
},
"cpupart": "0xd49"
}, },
"m1": { "m1": {
"from": ["armv8.4a"], "from": ["armv8.4a"],
@@ -3392,8 +3216,7 @@
"flags" : "-mcpu=apple-m1" "flags" : "-mcpu=apple-m1"
} }
] ]
}, }
"cpupart": "0x022"
}, },
"m2": { "m2": {
"from": ["m1", "armv8.5a"], "from": ["m1", "armv8.5a"],
@@ -3471,8 +3294,7 @@
"flags" : "-mcpu=apple-m2" "flags" : "-mcpu=apple-m2"
} }
] ]
}, }
"cpupart": "0x032"
}, },
"arm": { "arm": {
"from": [], "from": [],

View File

@@ -52,9 +52,6 @@
} }
} }
} }
},
"cpupart": {
"type": "string"
} }
}, },
"required": [ "required": [
@@ -110,4 +107,4 @@
"additionalProperties": false "additionalProperties": false
} }
} }
} }
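
To illustrate the `cpupart` property dropped from the schema above, here is a hand-built target using the `Microarchitecture` constructor from the revision that still carries `cpu_part`; the name and field values are invented for illustration:

from archspec.cpu.microarchitecture import Microarchitecture

example = Microarchitecture(
    name="example_core",   # invented name, not a real target
    parents=[],
    vendor="ARM",
    features={"fp", "asimd"},
    compilers={},
    generation=0,
    cpu_part="0xd40",      # keyword only present in revisions that track it
)
print(example.to_dict()["cpupart"])  # "0xd40"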

View File

@@ -98,10 +98,3 @@ def path_filter_caller(*args, **kwargs):
if _func: if _func:
return holder_func(_func) return holder_func(_func)
return holder_func return holder_func
def sanitize_win_longpath(path: str) -> str:
"""Strip Windows extended path prefix from strings
Returns sanitized string.
no-op if extended path prefix is not present"""
return path.lstrip("\\\\?\\")
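
A side note on the removed `sanitize_win_longpath` helper: `str.lstrip` treats its argument as a set of characters rather than a literal prefix, so an explicit `startswith` check is the safer way to drop the `\\?\` marker. A standalone sketch (not Spack code):

def strip_win_longpath_prefix(path: str) -> str:
    # Remove the Windows extended-path prefix \\?\ if present; no-op otherwise.
    prefix = "\\\\?\\"
    return path[len(prefix):] if path.startswith(prefix) else path

assert strip_win_longpath_prefix("\\\\?\\C:\\Temp") == "C:\\Temp"
assert strip_win_longpath_prefix("C:\\Temp") == "C:\\Temp"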

View File

@@ -187,18 +187,12 @@ def polite_filename(filename: str) -> str:
return _polite_antipattern().sub("_", filename) return _polite_antipattern().sub("_", filename)
def getuid() -> Union[str, int]: def getuid():
"""Returns os getuid on non Windows
On Windows returns 0 for admin users, login string otherwise
This is in line with behavior from get_owner_uid which
always returns the login string on Windows
"""
if sys.platform == "win32": if sys.platform == "win32":
import ctypes import ctypes
# If not admin, use the string name of the login as a unique ID
if ctypes.windll.shell32.IsUserAnAdmin() == 0: if ctypes.windll.shell32.IsUserAnAdmin() == 0:
return os.getlogin() return 1
return 0 return 0
else: else:
return os.getuid() return os.getuid()
@@ -219,15 +213,6 @@ def _win_rename(src, dst):
os.replace(src, dst) os.replace(src, dst)
@system_path_filter
def msdos_escape_parens(path):
"""MS-DOS interprets parens as grouping parameters even in a quoted string"""
if sys.platform == "win32":
return path.replace("(", "^(").replace(")", "^)")
else:
return path
@system_path_filter @system_path_filter
def rename(src, dst): def rename(src, dst):
# On Windows, os.rename will fail if the destination file already exists # On Windows, os.rename will fail if the destination file already exists
@@ -568,13 +553,7 @@ def exploding_archive_handler(tarball_container, stage):
@system_path_filter(arg_slice=slice(1)) @system_path_filter(arg_slice=slice(1))
def get_owner_uid(path, err_msg=None) -> Union[str, int]: def get_owner_uid(path, err_msg=None):
"""Returns owner UID of path destination
On non Windows this is the value of st_uid
On Windows this is the login string associated with the
owning user.
"""
if not os.path.exists(path): if not os.path.exists(path):
mkdirp(path, mode=stat.S_IRWXU) mkdirp(path, mode=stat.S_IRWXU)
@@ -843,7 +822,7 @@ def copy_tree(
if islink(s): if islink(s):
link_target = resolve_link_target_relative_to_the_link(s) link_target = resolve_link_target_relative_to_the_link(s)
if symlinks: if symlinks:
target = readlink(s) target = os.readlink(s)
if os.path.isabs(target): if os.path.isabs(target):
def escaped_path(path): def escaped_path(path):
@@ -2450,10 +2429,9 @@ def add_library_dependent(self, *dest):
""" """
for pth in dest: for pth in dest:
if os.path.isfile(pth): if os.path.isfile(pth):
new_pth = pathlib.Path(pth).parent self._additional_library_dependents.add(pathlib.Path(pth).parent)
else: else:
new_pth = pathlib.Path(pth) self._additional_library_dependents.add(pathlib.Path(pth))
self._additional_library_dependents.add(new_pth)
@property @property
def rpaths(self): def rpaths(self):
@@ -2531,14 +2509,8 @@ def establish_link(self):
# for each binary install dir in self.pkg (i.e. pkg.prefix.bin, pkg.prefix.lib) # for each binary install dir in self.pkg (i.e. pkg.prefix.bin, pkg.prefix.lib)
# install a symlink to each dependent library # install a symlink to each dependent library
for library, lib_dir in itertools.product(self.rpaths, self.library_dependents):
# do not rpath for system libraries included in the dag self._link(library, lib_dir)
# we should not be modifying libraries managed by the Windows system
# as this will negatively impact linker behavior and can result in permission
# errors if those system libs are not modifiable by Spack
if "windows-system" not in getattr(self.pkg, "tags", []):
for library, lib_dir in itertools.product(self.rpaths, self.library_dependents):
self._link(library, lib_dir)
@system_path_filter @system_path_filter
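
The guard added to `establish_link` above leans on `getattr(self.pkg, "tags", [])` so that packages declaring no tags at all are treated as linkable. A toy illustration of that pattern (classes invented for the example):

class SystemLib:
    tags = ["windows-system"]

class RegularPkg:  # declares no "tags" attribute at all
    pass

def should_establish_link(pkg) -> bool:
    # getattr with a default treats tag-less packages as untagged, so only
    # explicitly tagged Windows system libraries are skipped.
    return "windows-system" not in getattr(pkg, "tags", [])

print(should_establish_link(SystemLib()))   # False
print(should_establish_link(RegularPkg()))  # True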

View File

@@ -11,7 +11,7 @@
from llnl.util import lang, tty from llnl.util import lang, tty
from ..path import sanitize_win_longpath, system_path_filter from ..path import system_path_filter
if sys.platform == "win32": if sys.platform == "win32":
from win32file import CreateHardLink from win32file import CreateHardLink
@@ -247,9 +247,9 @@ def _windows_create_junction(source: str, link: str):
out, err = proc.communicate() out, err = proc.communicate()
tty.debug(out.decode()) tty.debug(out.decode())
if proc.returncode != 0: if proc.returncode != 0:
err_str = err.decode() err = err.decode()
tty.error(err_str) tty.error(err)
raise SymlinkError("Make junction command returned a non-zero return code.", err_str) raise SymlinkError("Make junction command returned a non-zero return code.", err)
def _windows_create_hard_link(path: str, link: str): def _windows_create_hard_link(path: str, link: str):
@@ -269,14 +269,14 @@ def _windows_create_hard_link(path: str, link: str):
CreateHardLink(link, path) CreateHardLink(link, path)
def readlink(path: str, *, dir_fd=None): def readlink(path: str):
"""Spack utility to override of os.readlink method to work cross platform""" """Spack utility to override of os.readlink method to work cross platform"""
if _windows_is_hardlink(path): if _windows_is_hardlink(path):
return _windows_read_hard_link(path) return _windows_read_hard_link(path)
elif _windows_is_junction(path): elif _windows_is_junction(path):
return _windows_read_junction(path) return _windows_read_junction(path)
else: else:
return sanitize_win_longpath(os.readlink(path, dir_fd=dir_fd)) return os.readlink(path)
def _windows_read_hard_link(link: str) -> str: def _windows_read_hard_link(link: str) -> str:

View File

@@ -59,7 +59,6 @@
To output an @, use '@@'. To output a } inside braces, use '}}'. To output an @, use '@@'. To output a } inside braces, use '}}'.
""" """
import os
import re import re
import sys import sys
from contextlib import contextmanager from contextlib import contextmanager
@@ -102,29 +101,9 @@ def __init__(self, message):
# Mapping from color arguments to values for tty.set_color # Mapping from color arguments to values for tty.set_color
color_when_values = {"always": True, "auto": None, "never": False} color_when_values = {"always": True, "auto": None, "never": False}
# Force color; None: Only color if stdout is a tty
def _color_when_value(when): # True: Always colorize output, False: Never colorize output
"""Raise a ValueError for an invalid color setting. _force_color = None
Valid values are 'always', 'never', and 'auto', or equivalently,
True, False, and None.
"""
if when in color_when_values:
return color_when_values[when]
elif when not in color_when_values.values():
raise ValueError("Invalid color setting: %s" % when)
return when
def _color_from_environ() -> Optional[bool]:
try:
return _color_when_value(os.environ.get("SPACK_COLOR", "auto"))
except ValueError:
return None
#: When `None` colorize when stdout is tty, when `True` or `False` always or never colorize resp.
_force_color = _color_from_environ()
def try_enable_terminal_color_on_windows(): def try_enable_terminal_color_on_windows():
@@ -185,6 +164,19 @@ def _err_check(result, func, args):
debug("Unable to support color on Windows terminal") debug("Unable to support color on Windows terminal")
def _color_when_value(when):
"""Raise a ValueError for an invalid color setting.
Valid values are 'always', 'never', and 'auto', or equivalently,
True, False, and None.
"""
if when in color_when_values:
return color_when_values[when]
elif when not in color_when_values.values():
raise ValueError("Invalid color setting: %s" % when)
return when
def get_color_when(): def get_color_when():
"""Return whether commands should print color or not.""" """Return whether commands should print color or not."""
if _force_color is not None: if _force_color is not None:
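
A small mirror of the `SPACK_COLOR` resolution introduced above, showing how missing or unknown values fall back to "auto" (None); standalone sketch, not the Spack implementation:

import os

COLOR_WHEN_VALUES = {"always": True, "auto": None, "never": False}

def color_from_environ(environ=os.environ):
    # Unknown values map to None, the same tri-state "auto" meaning as above.
    return COLOR_WHEN_VALUES.get(environ.get("SPACK_COLOR", "auto"))

print(color_from_environ({"SPACK_COLOR": "never"}))  # False
print(color_from_environ({}))                        # None (auto)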

View File

@@ -18,10 +18,9 @@
import threading import threading
import traceback import traceback
from contextlib import contextmanager from contextlib import contextmanager
from multiprocessing.connection import Connection
from threading import Thread from threading import Thread
from types import ModuleType from types import ModuleType
from typing import Callable, Optional from typing import Optional
import llnl.util.tty as tty import llnl.util.tty as tty
@@ -330,6 +329,49 @@ def close(self):
self.file.close() self.file.close()
class MultiProcessFd:
"""Return an object which stores a file descriptor and can be passed as an
argument to a function run with ``multiprocessing.Process``, such that
the file descriptor is available in the subprocess."""
def __init__(self, fd):
self._connection = None
self._fd = None
if sys.version_info >= (3, 8):
self._connection = multiprocessing.connection.Connection(fd)
else:
self._fd = fd
@property
def fd(self):
if self._connection:
return self._connection._handle
else:
return self._fd
def close(self):
if self._connection:
self._connection.close()
else:
os.close(self._fd)
def close_connection_and_file(multiprocess_fd, file):
# MultiprocessFd is intended to transmit a FD
# to a child process, this FD is then opened to a Python File object
# (using fdopen). In >= 3.8, MultiprocessFd encapsulates a
# multiprocessing.connection.Connection; Connection closes the FD
# when it is deleted, and prints a warning about duplicate closure if
# it is not explicitly closed. In < 3.8, MultiprocessFd encapsulates a
# simple FD; closing the FD here appears to conflict with
# closure of the File object (in < 3.8 that is). Therefore this needs
# to choose whether to close the File or the Connection.
if sys.version_info >= (3, 8):
multiprocess_fd.close()
else:
file.close()
@contextmanager @contextmanager
def replace_environment(env): def replace_environment(env):
"""Replace the current environment (`os.environ`) with `env`. """Replace the current environment (`os.environ`) with `env`.
@@ -487,20 +529,22 @@ def __enter__(self):
# forcing debug output. # forcing debug output.
self._saved_debug = tty._debug self._saved_debug = tty._debug
# Pipe for redirecting output to logger # OS-level pipe for redirecting output to logger
read_fd, self.write_fd = multiprocessing.Pipe(duplex=False) read_fd, write_fd = os.pipe()
# Pipe for communication back from the daemon read_multiprocess_fd = MultiProcessFd(read_fd)
# Multiprocessing pipe for communication back from the daemon
# Currently only used to save echo value between uses # Currently only used to save echo value between uses
self.parent_pipe, child_pipe = multiprocessing.Pipe(duplex=False) self.parent_pipe, child_pipe = multiprocessing.Pipe()
# Sets a daemon that writes to file what it reads from a pipe # Sets a daemon that writes to file what it reads from a pipe
try: try:
# need to pass this b/c multiprocessing closes stdin in child. # need to pass this b/c multiprocessing closes stdin in child.
input_fd = None input_multiprocess_fd = None
try: try:
if sys.stdin.isatty(): if sys.stdin.isatty():
input_fd = Connection(os.dup(sys.stdin.fileno())) input_multiprocess_fd = MultiProcessFd(os.dup(sys.stdin.fileno()))
except BaseException: except BaseException:
# just don't forward input if this fails # just don't forward input if this fails
pass pass
@@ -509,9 +553,9 @@ def __enter__(self):
self.process = multiprocessing.Process( self.process = multiprocessing.Process(
target=_writer_daemon, target=_writer_daemon,
args=( args=(
input_fd, input_multiprocess_fd,
read_fd, read_multiprocess_fd,
self.write_fd, write_fd,
self.echo, self.echo,
self.log_file, self.log_file,
child_pipe, child_pipe,
@@ -522,9 +566,9 @@ def __enter__(self):
self.process.start() self.process.start()
finally: finally:
if input_fd: if input_multiprocess_fd:
input_fd.close() input_multiprocess_fd.close()
read_fd.close() read_multiprocess_fd.close()
# Flush immediately before redirecting so that anything buffered # Flush immediately before redirecting so that anything buffered
# goes to the original stream # goes to the original stream
@@ -542,9 +586,9 @@ def __enter__(self):
self._saved_stderr = os.dup(sys.stderr.fileno()) self._saved_stderr = os.dup(sys.stderr.fileno())
# redirect to the pipe we created above # redirect to the pipe we created above
os.dup2(self.write_fd.fileno(), sys.stdout.fileno()) os.dup2(write_fd, sys.stdout.fileno())
os.dup2(self.write_fd.fileno(), sys.stderr.fileno()) os.dup2(write_fd, sys.stderr.fileno())
self.write_fd.close() os.close(write_fd)
else: else:
# Handle I/O the Python way. This won't redirect lower-level # Handle I/O the Python way. This won't redirect lower-level
@@ -557,7 +601,7 @@ def __enter__(self):
self._saved_stderr = sys.stderr self._saved_stderr = sys.stderr
# create a file object for the pipe; redirect to it. # create a file object for the pipe; redirect to it.
pipe_fd_out = os.fdopen(self.write_fd.fileno(), "w", closefd=False) pipe_fd_out = os.fdopen(write_fd, "w")
sys.stdout = pipe_fd_out sys.stdout = pipe_fd_out
sys.stderr = pipe_fd_out sys.stderr = pipe_fd_out
@@ -593,7 +637,6 @@ def __exit__(self, exc_type, exc_val, exc_tb):
else: else:
sys.stdout = self._saved_stdout sys.stdout = self._saved_stdout
sys.stderr = self._saved_stderr sys.stderr = self._saved_stderr
self.write_fd.close()
# print log contents in parent if needed. # print log contents in parent if needed.
if self.log_file.write_in_parent: if self.log_file.write_in_parent:
@@ -807,14 +850,14 @@ def force_echo(self):
def _writer_daemon( def _writer_daemon(
stdin_fd: Optional[Connection], stdin_multiprocess_fd,
read_fd: Connection, read_multiprocess_fd,
write_fd: Connection, write_fd,
echo: bool, echo,
log_file_wrapper: FileWrapper, log_file_wrapper,
control_fd: Connection, control_pipe,
filter_fn: Optional[Callable[[str], str]], filter_fn,
) -> None: ):
"""Daemon used by ``log_output`` to write to a log file and to ``stdout``. """Daemon used by ``log_output`` to write to a log file and to ``stdout``.
The daemon receives output from the parent process and writes it both The daemon receives output from the parent process and writes it both
@@ -851,37 +894,43 @@ def _writer_daemon(
``StringIO`` in the parent. This is mainly for testing. ``StringIO`` in the parent. This is mainly for testing.
Arguments: Arguments:
stdin_fd: optional input from the terminal stdin_multiprocess_fd (int): input from the terminal
read_fd: pipe for reading from parent's redirected stdout read_multiprocess_fd (int): pipe for reading from parent's redirected
echo: initial echo setting -- controlled by user and preserved across multiple writer stdout
daemons echo (bool): initial echo setting -- controlled by user and
log_file_wrapper: file to log all output preserved across multiple writer daemons
control_pipe: multiprocessing pipe on which to send control information to the parent log_file_wrapper (FileWrapper): file to log all output
filter_fn: optional function to filter each line of output control_pipe (Pipe): multiprocessing pipe on which to send control
information to the parent
filter_fn (callable, optional): function to filter each line of output
""" """
# This process depends on closing all instances of write_pipe to terminate the reading loop # If this process was forked, then it will inherit file descriptors from
write_fd.close() # the parent process. This process depends on closing all instances of
# write_fd to terminate the reading loop, so we close the file descriptor
# here. Forking is the process spawning method everywhere except Mac OS
# for Python >= 3.8 and on Windows
if sys.version_info < (3, 8) or sys.platform != "darwin":
os.close(write_fd)
# 1. Use line buffering (3rd param = 1) since Python 3 has a bug # 1. Use line buffering (3rd param = 1) since Python 3 has a bug
# that prevents unbuffered text I/O. # that prevents unbuffered text I/O.
# 2. Python 3.x before 3.7 does not open with UTF-8 encoding by default # 2. Python 3.x before 3.7 does not open with UTF-8 encoding by default
# 3. closefd=False because Connection has "ownership" in_pipe = os.fdopen(read_multiprocess_fd.fd, "r", 1, encoding="utf-8")
read_file = os.fdopen(read_fd.fileno(), "r", 1, encoding="utf-8", closefd=False)
if stdin_fd: if stdin_multiprocess_fd:
stdin_file = os.fdopen(stdin_fd.fileno(), closefd=False) stdin = os.fdopen(stdin_multiprocess_fd.fd)
else: else:
stdin_file = None stdin = None
# list of streams to select from # list of streams to select from
istreams = [read_file, stdin_file] if stdin_file else [read_file] istreams = [in_pipe, stdin] if stdin else [in_pipe]
force_echo = False # parent can force echo for certain output force_echo = False # parent can force echo for certain output
log_file = log_file_wrapper.unwrap() log_file = log_file_wrapper.unwrap()
try: try:
with keyboard_input(stdin_file) as kb: with keyboard_input(stdin) as kb:
while True: while True:
# fix the terminal settings if we recently came to # fix the terminal settings if we recently came to
# the foreground # the foreground
@@ -894,12 +943,12 @@ def _writer_daemon(
# Allow user to toggle echo with 'v' key. # Allow user to toggle echo with 'v' key.
# Currently ignores other chars. # Currently ignores other chars.
# only read stdin if we're in the foreground # only read stdin if we're in the foreground
if stdin_file and stdin_file in rlist and not _is_background_tty(stdin_file): if stdin in rlist and not _is_background_tty(stdin):
# it's possible to be backgrounded between the above # it's possible to be backgrounded between the above
# check and the read, so we ignore SIGTTIN here. # check and the read, so we ignore SIGTTIN here.
with ignore_signal(signal.SIGTTIN): with ignore_signal(signal.SIGTTIN):
try: try:
if stdin_file.read(1) == "v": if stdin.read(1) == "v":
echo = not echo echo = not echo
except IOError as e: except IOError as e:
# If SIGTTIN is ignored, the system gives EIO # If SIGTTIN is ignored, the system gives EIO
@@ -908,13 +957,13 @@ def _writer_daemon(
if e.errno != errno.EIO: if e.errno != errno.EIO:
raise raise
if read_file in rlist: if in_pipe in rlist:
line_count = 0 line_count = 0
try: try:
while line_count < 100: while line_count < 100:
# Handle output from the calling process. # Handle output from the calling process.
try: try:
line = _retry(read_file.readline)() line = _retry(in_pipe.readline)()
except UnicodeDecodeError: except UnicodeDecodeError:
# installs like --test=root gpgme produce non-UTF8 logs # installs like --test=root gpgme produce non-UTF8 logs
line = "<line lost: output was not encoded as UTF-8>\n" line = "<line lost: output was not encoded as UTF-8>\n"
@@ -943,7 +992,7 @@ def _writer_daemon(
if xoff in controls: if xoff in controls:
force_echo = False force_echo = False
if not _input_available(read_file): if not _input_available(in_pipe):
break break
finally: finally:
if line_count > 0: if line_count > 0:
@@ -958,14 +1007,14 @@ def _writer_daemon(
finally: finally:
# send written data back to parent if we used a StringIO # send written data back to parent if we used a StringIO
if isinstance(log_file, io.StringIO): if isinstance(log_file, io.StringIO):
control_fd.send(log_file.getvalue()) control_pipe.send(log_file.getvalue())
log_file_wrapper.close() log_file_wrapper.close()
read_fd.close() close_connection_and_file(read_multiprocess_fd, in_pipe)
if stdin_fd: if stdin_multiprocess_fd:
stdin_fd.close() close_connection_and_file(stdin_multiprocess_fd, stdin)
# send echo value back to the parent so it can be preserved. # send echo value back to the parent so it can be preserved.
control_fd.send(echo) control_pipe.send(echo)
def _retry(function): def _retry(function):
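
As context for the pipe handling above: a forked writer daemon inherits the parent's descriptors, so it only sees EOF once every copy of the write end is closed; that is why the daemon receives the write end just to close it. A minimal POSIX sketch of the pattern with `multiprocessing.Pipe` (a toy example, not Spack's `_writer_daemon`):

import multiprocessing
import os

def daemon(read_conn, write_conn):
    # Close our inherited copy of the write end first; otherwise the read
    # loop below would never reach EOF.
    write_conn.close()
    # closefd=False: the Connection retains ownership of the descriptor.
    with os.fdopen(read_conn.fileno(), "r", closefd=False) as f:
        for line in f:
            print("daemon:", line.rstrip())
    read_conn.close()

if __name__ == "__main__":
    read_conn, write_conn = multiprocessing.Pipe(duplex=False)
    proc = multiprocessing.Process(target=daemon, args=(read_conn, write_conn))
    proc.start()
    read_conn.close()                          # parent keeps only the write end
    os.write(write_conn.fileno(), b"hello\nworld\n")
    write_conn.close()                         # last write end closed -> EOF
    proc.join()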

View File

@@ -4,7 +4,7 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT) # SPDX-License-Identifier: (Apache-2.0 OR MIT)
#: PEP440 canonical <major>.<minor>.<micro>.<devN> string #: PEP440 canonical <major>.<minor>.<micro>.<devN> string
__version__ = "0.22.4" __version__ = "0.22.0.dev0"
spack_version = __version__ spack_version = __version__

View File

@@ -254,8 +254,8 @@ def _search_duplicate_specs_in_externals(error_cls):
@config_packages @config_packages
def _deprecated_preferences(error_cls): def _deprecated_preferences(error_cls):
"""Search package preferences deprecated in v0.21 (and slated for removal in v0.23)""" """Search package preferences deprecated in v0.21 (and slated for removal in v0.22)"""
# TODO (v0.23): remove this audit as the attributes will not be allowed in config # TODO (v0.22): remove this audit as the attributes will not be allowed in config
errors = [] errors = []
packages_yaml = spack.config.CONFIG.get_config("packages") packages_yaml = spack.config.CONFIG.get_config("packages")
@@ -779,7 +779,7 @@ def check_virtual_with_variants(spec, msg):
return return
error = error_cls( error = error_cls(
f"{pkg_name}: {msg}", f"{pkg_name}: {msg}",
[f"remove variants from '{spec}' in depends_on directive in {filename}"], f"remove variants from '{spec}' in depends_on directive in {filename}",
) )
errors.append(error) errors.append(error)
@@ -1046,7 +1046,7 @@ def _extracts_errors(triggers, summary):
group="externals", group="externals",
tag="PKG-EXTERNALS", tag="PKG-EXTERNALS",
description="Sanity checks for external software detection", description="Sanity checks for external software detection",
kwargs=("pkgs", "debug_log"), kwargs=("pkgs",),
) )
@@ -1069,7 +1069,7 @@ def packages_with_detection_tests():
@external_detection @external_detection
def _test_detection_by_executable(pkgs, debug_log, error_cls): def _test_detection_by_executable(pkgs, error_cls):
"""Test drive external detection for packages""" """Test drive external detection for packages"""
import spack.detection import spack.detection
@@ -1095,7 +1095,6 @@ def _test_detection_by_executable(pkgs, debug_log, error_cls):
for idx, test_runner in enumerate( for idx, test_runner in enumerate(
spack.detection.detection_tests(pkg_name, spack.repo.PATH) spack.detection.detection_tests(pkg_name, spack.repo.PATH)
): ):
debug_log(f"[{__file__}]: running test {idx} for package {pkg_name}")
specs = test_runner.execute() specs = test_runner.execute()
expected_specs = test_runner.expected_specs expected_specs = test_runner.expected_specs
@@ -1116,10 +1115,11 @@ def _test_detection_by_executable(pkgs, debug_log, error_cls):
for candidate in expected_specs: for candidate in expected_specs:
try: try:
idx = specs.index(candidate) idx = specs.index(candidate)
matched_detection.append((candidate, specs[idx]))
except (AttributeError, ValueError): except (AttributeError, ValueError):
pass pass
matched_detection.append((candidate, specs[idx]))
def _compare_extra_attribute(_expected, _detected, *, _spec): def _compare_extra_attribute(_expected, _detected, *, _spec):
result = [] result = []
# Check items are of the same type # Check items are of the same type

View File

@@ -23,12 +23,12 @@
import warnings import warnings
from contextlib import closing from contextlib import closing
from typing import Dict, Iterable, List, NamedTuple, Optional, Set, Tuple from typing import Dict, Iterable, List, NamedTuple, Optional, Set, Tuple
from urllib.error import HTTPError, URLError
import llnl.util.filesystem as fsys import llnl.util.filesystem as fsys
import llnl.util.lang import llnl.util.lang
import llnl.util.tty as tty import llnl.util.tty as tty
from llnl.util.filesystem import BaseDirectoryVisitor, mkdirp, visit_directory_tree from llnl.util.filesystem import BaseDirectoryVisitor, mkdirp, visit_directory_tree
from llnl.util.symlink import readlink
import spack.caches import spack.caches
import spack.cmd import spack.cmd
@@ -658,7 +658,7 @@ def get_buildfile_manifest(spec):
# 2. paths are used as strings. # 2. paths are used as strings.
for rel_path in visitor.symlinks: for rel_path in visitor.symlinks:
abs_path = os.path.join(root, rel_path) abs_path = os.path.join(root, rel_path)
link = readlink(abs_path) link = os.readlink(abs_path)
if os.path.isabs(link) and link.startswith(spack.store.STORE.layout.root): if os.path.isabs(link) and link.startswith(spack.store.STORE.layout.root):
data["link_to_relocate"].append(rel_path) data["link_to_relocate"].append(rel_path)
@@ -898,8 +898,9 @@ def url_read_method(url):
try: try:
_, _, spec_file = web_util.read_from_url(url) _, _, spec_file = web_util.read_from_url(url)
contents = codecs.getreader("utf-8")(spec_file).read() contents = codecs.getreader("utf-8")(spec_file).read()
except web_util.SpackWebError as e: except (URLError, web_util.SpackWebError) as url_err:
tty.error(f"Error reading specfile: {url}: {e}") tty.error("Error reading specfile: {0}".format(url))
tty.error(url_err)
return contents return contents
try: try:
@@ -2000,7 +2001,6 @@ def install_root_node(spec, unsigned=False, force=False, sha256=None):
with spack.util.path.filter_padding(): with spack.util.path.filter_padding():
tty.msg('Installing "{0}" from a buildcache'.format(spec.format())) tty.msg('Installing "{0}" from a buildcache'.format(spec.format()))
extract_tarball(spec, download_result, force) extract_tarball(spec, download_result, force)
spec.package.windows_establish_runtime_linkage()
spack.hooks.post_install(spec, False) spack.hooks.post_install(spec, False)
spack.store.STORE.db.add(spec, spack.store.STORE.layout) spack.store.STORE.db.add(spec, spack.store.STORE.layout)
@@ -2039,17 +2039,21 @@ def try_direct_fetch(spec, mirrors=None):
try: try:
_, _, fs = web_util.read_from_url(buildcache_fetch_url_signed_json) _, _, fs = web_util.read_from_url(buildcache_fetch_url_signed_json)
specfile_is_signed = True specfile_is_signed = True
except web_util.SpackWebError as e1: except (URLError, web_util.SpackWebError, HTTPError) as url_err:
try: try:
_, _, fs = web_util.read_from_url(buildcache_fetch_url_json) _, _, fs = web_util.read_from_url(buildcache_fetch_url_json)
except web_util.SpackWebError as e2: except (URLError, web_util.SpackWebError, HTTPError) as url_err_x:
tty.debug( tty.debug(
f"Did not find {specfile_name} on {buildcache_fetch_url_signed_json}", "Did not find {0} on {1}".format(
e1, specfile_name, buildcache_fetch_url_signed_json
),
url_err,
level=2, level=2,
) )
tty.debug( tty.debug(
f"Did not find {specfile_name} on {buildcache_fetch_url_json}", e2, level=2 "Did not find {0} on {1}".format(specfile_name, buildcache_fetch_url_json),
url_err_x,
level=2,
) )
continue continue
specfile_contents = codecs.getreader("utf-8")(fs).read() specfile_contents = codecs.getreader("utf-8")(fs).read()
@@ -2134,9 +2138,6 @@ def get_keys(install=False, trust=False, force=False, mirrors=None):
for mirror in mirror_collection.values(): for mirror in mirror_collection.values():
fetch_url = mirror.fetch_url fetch_url = mirror.fetch_url
# TODO: oci:// does not support signing.
if fetch_url.startswith("oci://"):
continue
keys_url = url_util.join( keys_url = url_util.join(
fetch_url, BUILD_CACHE_RELATIVE_PATH, BUILD_CACHE_KEYS_RELATIVE_PATH fetch_url, BUILD_CACHE_RELATIVE_PATH, BUILD_CACHE_KEYS_RELATIVE_PATH
) )
@@ -2147,12 +2148,19 @@ def get_keys(install=False, trust=False, force=False, mirrors=None):
try: try:
_, _, json_file = web_util.read_from_url(keys_index) _, _, json_file = web_util.read_from_url(keys_index)
json_index = sjson.load(codecs.getreader("utf-8")(json_file)) json_index = sjson.load(codecs.getreader("utf-8")(json_file))
except web_util.SpackWebError as url_err: except (URLError, web_util.SpackWebError) as url_err:
if web_util.url_exists(keys_index): if web_util.url_exists(keys_index):
err_msg = [
"Unable to find public keys in {0},",
" caught exception attempting to read from {1}.",
]
tty.error( tty.error(
f"Unable to find public keys in {url_util.format(fetch_url)}," "".join(err_msg).format(
f" caught exception attempting to read from {url_util.format(keys_index)}." url_util.format(fetch_url), url_util.format(keys_index)
)
) )
tty.debug(url_err) tty.debug(url_err)
continue continue
@@ -2432,7 +2440,7 @@ def get_remote_hash(self):
url_index_hash = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, "index.json.hash") url_index_hash = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, "index.json.hash")
try: try:
response = self.urlopen(urllib.request.Request(url_index_hash, headers=self.headers)) response = self.urlopen(urllib.request.Request(url_index_hash, headers=self.headers))
except (TimeoutError, urllib.error.URLError): except urllib.error.URLError:
return None return None
# Validate the hash # Validate the hash
@@ -2454,7 +2462,7 @@ def conditional_fetch(self) -> FetchIndexResult:
try: try:
response = self.urlopen(urllib.request.Request(url_index, headers=self.headers)) response = self.urlopen(urllib.request.Request(url_index, headers=self.headers))
except (TimeoutError, urllib.error.URLError) as e: except urllib.error.URLError as e:
raise FetchIndexError("Could not fetch index from {}".format(url_index), e) from e raise FetchIndexError("Could not fetch index from {}".format(url_index), e) from e
try: try:
@@ -2495,7 +2503,10 @@ def __init__(self, url, etag, urlopen=web_util.urlopen):
def conditional_fetch(self) -> FetchIndexResult: def conditional_fetch(self) -> FetchIndexResult:
# Just do a conditional fetch immediately # Just do a conditional fetch immediately
url = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, "index.json") url = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, "index.json")
headers = {"User-Agent": web_util.SPACK_USER_AGENT, "If-None-Match": f'"{self.etag}"'} headers = {
"User-Agent": web_util.SPACK_USER_AGENT,
"If-None-Match": '"{}"'.format(self.etag),
}
try: try:
response = self.urlopen(urllib.request.Request(url, headers=headers)) response = self.urlopen(urllib.request.Request(url, headers=headers))
@@ -2503,14 +2514,14 @@ def conditional_fetch(self) -> FetchIndexResult:
if e.getcode() == 304: if e.getcode() == 304:
# Not modified; that means fresh. # Not modified; that means fresh.
return FetchIndexResult(etag=None, hash=None, data=None, fresh=True) return FetchIndexResult(etag=None, hash=None, data=None, fresh=True)
raise FetchIndexError(f"Could not fetch index {url}", e) from e raise FetchIndexError("Could not fetch index {}".format(url), e) from e
except (TimeoutError, urllib.error.URLError) as e: except urllib.error.URLError as e:
raise FetchIndexError(f"Could not fetch index {url}", e) from e raise FetchIndexError("Could not fetch index {}".format(url), e) from e
try: try:
result = codecs.getreader("utf-8")(response).read() result = codecs.getreader("utf-8")(response).read()
except ValueError as e: except ValueError as e:
raise FetchIndexError(f"Remote index {url} is invalid", e) from e raise FetchIndexError("Remote index {} is invalid".format(url), e) from e
headers = response.headers headers = response.headers
etag_header_value = headers.get("Etag", None) or headers.get("etag", None) etag_header_value = headers.get("Etag", None) or headers.get("etag", None)
@@ -2541,19 +2552,21 @@ def conditional_fetch(self) -> FetchIndexResult:
headers={"Accept": "application/vnd.oci.image.manifest.v1+json"}, headers={"Accept": "application/vnd.oci.image.manifest.v1+json"},
) )
) )
except (TimeoutError, urllib.error.URLError) as e: except urllib.error.URLError as e:
raise FetchIndexError(f"Could not fetch manifest from {url_manifest}", e) from e raise FetchIndexError(
"Could not fetch manifest from {}".format(url_manifest), e
) from e
try: try:
manifest = json.loads(response.read()) manifest = json.loads(response.read())
except Exception as e: except Exception as e:
raise FetchIndexError(f"Remote index {url_manifest} is invalid", e) from e raise FetchIndexError("Remote index {} is invalid".format(url_manifest), e) from e
# Get first blob hash, which should be the index.json # Get first blob hash, which should be the index.json
try: try:
index_digest = spack.oci.image.Digest.from_string(manifest["layers"][0]["digest"]) index_digest = spack.oci.image.Digest.from_string(manifest["layers"][0]["digest"])
except Exception as e: except Exception as e:
raise FetchIndexError(f"Remote index {url_manifest} is invalid", e) from e raise FetchIndexError("Remote index {} is invalid".format(url_manifest), e) from e
# Fresh? # Fresh?
if index_digest.digest == self.local_hash: if index_digest.digest == self.local_hash:
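
For context on the ETag handling above: a conditional GET sends `If-None-Match`, and a 304 reply means the cached index is still fresh and no body is transferred. A minimal sketch with plain `urllib` (function and parameter names are invented; this is not Spack's fetcher):

import urllib.error
import urllib.request

def fetch_if_changed(url: str, etag: str, timeout: float = 10.0):
    # Returns (body, new_etag), or (None, etag) when the cache is fresh.
    request = urllib.request.Request(url, headers={"If-None-Match": f'"{etag}"'})
    try:
        with urllib.request.urlopen(request, timeout=timeout) as response:
            return response.read(), response.headers.get("Etag")
    except urllib.error.HTTPError as err:
        if err.getcode() == 304:
            return None, etag  # not modified: keep the cached data
        raise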

View File

@@ -5,13 +5,7 @@
"""Function and classes needed to bootstrap Spack itself.""" """Function and classes needed to bootstrap Spack itself."""
from .config import ensure_bootstrap_configuration, is_bootstrapping, store_path from .config import ensure_bootstrap_configuration, is_bootstrapping, store_path
from .core import ( from .core import all_core_root_specs, ensure_core_dependencies, ensure_patchelf_in_path_or_raise
all_core_root_specs,
ensure_clingo_importable_or_raise,
ensure_core_dependencies,
ensure_gpg_in_path_or_raise,
ensure_patchelf_in_path_or_raise,
)
from .environment import BootstrapEnvironment, ensure_environment_dependencies from .environment import BootstrapEnvironment, ensure_environment_dependencies
from .status import status_message from .status import status_message
@@ -19,8 +13,6 @@
"is_bootstrapping", "is_bootstrapping",
"ensure_bootstrap_configuration", "ensure_bootstrap_configuration",
"ensure_core_dependencies", "ensure_core_dependencies",
"ensure_gpg_in_path_or_raise",
"ensure_clingo_importable_or_raise",
"ensure_patchelf_in_path_or_raise", "ensure_patchelf_in_path_or_raise",
"all_core_root_specs", "all_core_root_specs",
"ensure_environment_dependencies", "ensure_environment_dependencies",

View File

@@ -54,14 +54,10 @@ def _try_import_from_store(
installed_specs = spack.store.STORE.db.query(query_spec, installed=True) installed_specs = spack.store.STORE.db.query(query_spec, installed=True)
for candidate_spec in installed_specs: for candidate_spec in installed_specs:
# previously bootstrapped specs may not have a python-venv dependency. pkg = candidate_spec["python"].package
if candidate_spec.dependencies("python-venv"):
python, *_ = candidate_spec.dependencies("python-venv")
else:
python, *_ = candidate_spec.dependencies("python")
module_paths = [ module_paths = [
os.path.join(candidate_spec.prefix, python.package.purelib), os.path.join(candidate_spec.prefix, pkg.purelib),
os.path.join(candidate_spec.prefix, python.package.platlib), os.path.join(candidate_spec.prefix, pkg.platlib),
] ]
path_before = list(sys.path) path_before = list(sys.path)
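
The hunk above prefers a `python-venv` dependency and falls back to `python` for specs bootstrapped before `python-venv` existed. A generic sketch of that first-match fallback, assuming only an object with a `dependencies(name) -> list` method:

def preferred_dependency(spec, names=("python-venv", "python")):
    # Try each dependency name in order and return the first match; "spec"
    # stands in for any object exposing dependencies(name) -> list.
    for name in names:
        deps = spec.dependencies(name)
        if deps:
            first, *_ = deps
            return first
    raise ValueError(f"spec has none of {names}")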

View File

@@ -270,6 +270,10 @@ def try_import(self, module: str, abstract_spec_str: str) -> bool:
with spack_python_interpreter(): with spack_python_interpreter():
# Add hint to use frontend operating system on Cray # Add hint to use frontend operating system on Cray
concrete_spec = spack.spec.Spec(abstract_spec_str + " ^" + spec_for_current_python()) concrete_spec = spack.spec.Spec(abstract_spec_str + " ^" + spec_for_current_python())
# This is needed to help the old concretizer taking the `setuptools` dependency
# only when bootstrapping from sources on Python 3.12
if spec_for_current_python() == "python@3.12":
concrete_spec.constrain("+force_setuptools")
if module == "clingo": if module == "clingo":
# TODO: remove when the old concretizer is deprecated # pylint: disable=fixme # TODO: remove when the old concretizer is deprecated # pylint: disable=fixme
@@ -534,41 +538,6 @@ def ensure_patchelf_in_path_or_raise() -> spack.util.executable.Executable:
) )
def ensure_winsdk_external_or_raise() -> None:
"""Ensure the Windows SDK + WGL are available on system
If both of these package are found, the Spack user or bootstrap
configuration (depending on where Spack is running)
will be updated to include all versions and variants detected.
If either the WDK or WSDK are not found, this method will raise
a RuntimeError.
**NOTE:** This modifies the Spack config in the current scope,
either user or environment depending on the calling context.
This is different from all other current bootstrap dependency
checks.
"""
if set(["win-sdk", "wgl"]).issubset(spack.config.get("packages").keys()):
return
externals = spack.detection.by_path(["win-sdk", "wgl"])
if not set(["win-sdk", "wgl"]) == externals.keys():
missing_packages_lst = []
if "wgl" not in externals:
missing_packages_lst.append("wgl")
if "win-sdk" not in externals:
missing_packages_lst.append("win-sdk")
missing_packages = " & ".join(missing_packages_lst)
raise RuntimeError(
f"Unable to find the {missing_packages}, please install these packages \
via the Visual Studio installer \
before proceeding with Spack or provide the path to a non standard install with \
'spack external find --path'"
)
# wgl/sdk are not required for bootstrapping Spack, but
# are required for building anything non trivial
# add to user config so they can be used by subsequent Spack ops
spack.detection.update_configuration(externals, buildable=False)
def ensure_core_dependencies() -> None: def ensure_core_dependencies() -> None:
"""Ensure the presence of all the core dependencies.""" """Ensure the presence of all the core dependencies."""
if sys.platform.lower() == "linux": if sys.platform.lower() == "linux":
@@ -597,10 +566,7 @@ def bootstrapping_sources(scope: Optional[str] = None):
current = copy.copy(entry) current = copy.copy(entry)
metadata_dir = spack.util.path.canonicalize_path(entry["metadata"]) metadata_dir = spack.util.path.canonicalize_path(entry["metadata"])
metadata_yaml = os.path.join(metadata_dir, METADATA_YAML_FILENAME) metadata_yaml = os.path.join(metadata_dir, METADATA_YAML_FILENAME)
try: with open(metadata_yaml, encoding="utf-8") as stream:
with open(metadata_yaml, encoding="utf-8") as stream: current.update(spack.util.spack_yaml.load(stream))
current.update(spack.util.spack_yaml.load(stream)) list_of_sources.append(current)
list_of_sources.append(current)
except OSError:
pass
return list_of_sources return list_of_sources
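
The `bootstrapping_sources` change above wraps the metadata read in `try/except OSError`, so one unreadable source no longer aborts the whole listing. A standalone sketch of that tolerant-read pattern (`load_yaml` is a stand-in for `spack.util.spack_yaml.load`):

def read_sources(entries, load_yaml):
    # Skip entries whose metadata file is missing or unreadable instead of
    # letting a single bad source raise out of the listing.
    sources = []
    for entry in entries:
        try:
            with open(entry["metadata"], encoding="utf-8") as stream:
                sources.append({**entry, **load_yaml(stream)})
        except OSError:
            pass
    return sources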

View File

@@ -3,11 +3,13 @@
# #
# SPDX-License-Identifier: (Apache-2.0 OR MIT) # SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Bootstrap non-core Spack dependencies from an environment.""" """Bootstrap non-core Spack dependencies from an environment."""
import glob
import hashlib import hashlib
import os import os
import pathlib import pathlib
import sys import sys
from typing import Iterable, List import warnings
from typing import List
import archspec.cpu import archspec.cpu
@@ -26,16 +28,6 @@
class BootstrapEnvironment(spack.environment.Environment): class BootstrapEnvironment(spack.environment.Environment):
"""Environment to install dependencies of Spack for a given interpreter and architecture""" """Environment to install dependencies of Spack for a given interpreter and architecture"""
def __init__(self) -> None:
if not self.spack_yaml().exists():
self._write_spack_yaml_file()
super().__init__(self.environment_root())
# Remove python package roots created before python-venv was introduced
for s in self.concrete_roots():
if "python" in s.package.extendees and not s.dependencies("python-venv"):
self.deconcretize(s)
@classmethod @classmethod
def spack_dev_requirements(cls) -> List[str]: def spack_dev_requirements(cls) -> List[str]:
"""Spack development requirements""" """Spack development requirements"""
@@ -67,19 +59,31 @@ def view_root(cls) -> pathlib.Path:
return cls.environment_root().joinpath("view") return cls.environment_root().joinpath("view")
@classmethod @classmethod
def bin_dir(cls) -> pathlib.Path: def pythonpaths(cls) -> List[str]:
"""Paths to be added to PATH""" """Paths to be added to sys.path or PYTHONPATH"""
return cls.view_root().joinpath("bin") python_dir_part = f"python{'.'.join(str(x) for x in sys.version_info[:2])}"
glob_expr = str(cls.view_root().joinpath("**", python_dir_part, "**"))
result = glob.glob(glob_expr)
if not result:
msg = f"Cannot find any Python path in {cls.view_root()}"
warnings.warn(msg)
return result
def python_dirs(self) -> Iterable[pathlib.Path]: @classmethod
python = next(s for s in self.all_specs_generator() if s.name == "python-venv").package def bin_dirs(cls) -> List[pathlib.Path]:
return {self.view_root().joinpath(p) for p in (python.platlib, python.purelib)} """Paths to be added to PATH"""
return [cls.view_root().joinpath("bin")]
@classmethod @classmethod
def spack_yaml(cls) -> pathlib.Path: def spack_yaml(cls) -> pathlib.Path:
"""Environment spack.yaml file""" """Environment spack.yaml file"""
return cls.environment_root().joinpath("spack.yaml") return cls.environment_root().joinpath("spack.yaml")
def __init__(self) -> None:
if not self.spack_yaml().exists():
self._write_spack_yaml_file()
super().__init__(self.environment_root())
def update_installations(self) -> None: def update_installations(self) -> None:
"""Update the installations of this environment.""" """Update the installations of this environment."""
log_enabled = tty.is_debug() or tty.is_verbose() log_enabled = tty.is_debug() or tty.is_verbose()
@@ -96,13 +100,21 @@ def update_installations(self) -> None:
self.install_all() self.install_all()
self.write(regenerate=True) self.write(regenerate=True)
def load(self) -> None: def update_syspath_and_environ(self) -> None:
"""Update PATH and sys.path.""" """Update ``sys.path`` and the PATH, PYTHONPATH environment variables to point to
# Make executables available (shouldn't need PYTHONPATH) the environment view.
os.environ["PATH"] = f"{self.bin_dir()}{os.pathsep}{os.environ.get('PATH', '')}" """
# Do minimal modifications to sys.path and environment variables. In particular, pay
# Spack itself imports pytest # attention to have the smallest PYTHONPATH / sys.path possible, since that may impact
sys.path.extend(str(p) for p in self.python_dirs()) # the performance of the current interpreter
sys.path.extend(self.pythonpaths())
os.environ["PATH"] = os.pathsep.join(
[str(x) for x in self.bin_dirs()] + os.environ.get("PATH", "").split(os.pathsep)
)
os.environ["PYTHONPATH"] = os.pathsep.join(
os.environ.get("PYTHONPATH", "").split(os.pathsep)
+ [str(x) for x in self.pythonpaths()]
)
def _write_spack_yaml_file(self) -> None: def _write_spack_yaml_file(self) -> None:
tty.msg( tty.msg(
@@ -152,4 +164,4 @@ def ensure_environment_dependencies() -> None:
_add_externals_if_missing() _add_externals_if_missing()
with BootstrapEnvironment() as env: with BootstrapEnvironment() as env:
env.update_installations() env.update_installations()
env.load() env.update_syspath_and_environ()
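The new load() deliberately keeps the interpreter state small: one bin directory prepended to PATH, and only the interpreter's platlib/purelib appended to sys.path, instead of the old glob over the whole view. A minimal stdlib-only sketch of that pattern (the paths and the python_dirs argument below are hypothetical):

import os
import sys
import pathlib

def load_view(view_root: pathlib.Path, python_dirs) -> None:
    # Make executables available; they should not need PYTHONPATH.
    bin_dir = view_root / "bin"
    os.environ["PATH"] = f"{bin_dir}{os.pathsep}{os.environ.get('PATH', '')}"
    # Extend sys.path only with the interpreter's site-packages dirs,
    # keeping the import path as small as possible.
    sys.path.extend(str(p) for p in python_dirs)

# Hypothetical usage:
# load_view(pathlib.Path("/opt/bootstrap/view"),
#           [pathlib.Path("/opt/bootstrap/view/lib/python3.11/site-packages")])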
View File
@@ -43,8 +43,7 @@
from collections import defaultdict from collections import defaultdict
from enum import Flag, auto from enum import Flag, auto
from itertools import chain from itertools import chain
from multiprocessing.connection import Connection from typing import List, Set, Tuple
from typing import Callable, Dict, List, Optional, Set, Tuple
import llnl.util.tty as tty import llnl.util.tty as tty
from llnl.string import plural from llnl.string import plural
@@ -52,6 +51,7 @@
from llnl.util.lang import dedupe, stable_partition from llnl.util.lang import dedupe, stable_partition
from llnl.util.symlink import symlink from llnl.util.symlink import symlink
from llnl.util.tty.color import cescape, colorize from llnl.util.tty.color import cescape, colorize
from llnl.util.tty.log import MultiProcessFd
import spack.build_systems.cmake import spack.build_systems.cmake
import spack.build_systems.meson import spack.build_systems.meson
@@ -72,7 +72,6 @@
import spack.store import spack.store
import spack.subprocess_context import spack.subprocess_context
import spack.user_environment import spack.user_environment
import spack.util.executable
import spack.util.path import spack.util.path
import spack.util.pattern import spack.util.pattern
from spack import traverse from spack import traverse
@@ -480,12 +479,12 @@ def set_wrapper_variables(pkg, env):
env.set(SPACK_DEBUG_LOG_ID, pkg.spec.format("{name}-{hash:7}")) env.set(SPACK_DEBUG_LOG_ID, pkg.spec.format("{name}-{hash:7}"))
env.set(SPACK_DEBUG_LOG_DIR, spack.main.spack_working_dir) env.set(SPACK_DEBUG_LOG_DIR, spack.main.spack_working_dir)
# Find ccache binary and hand it to build environment
if spack.config.get("config:ccache"): if spack.config.get("config:ccache"):
# Enable ccache in the compiler wrapper ccache = Executable("ccache")
env.set(SPACK_CCACHE_BINARY, spack.util.executable.which_string("ccache", required=True)) if not ccache:
else: raise RuntimeError("No ccache binary found in PATH")
# Avoid cache pollution if a build system forces `ccache <compiler wrapper invocation>`. env.set(SPACK_CCACHE_BINARY, ccache)
env.set("CCACHE_DISABLE", "1")
# Gather information about various types of dependencies # Gather information about various types of dependencies
link_deps = set(pkg.spec.traverse(root=False, deptype=("link"))) link_deps = set(pkg.spec.traverse(root=False, deptype=("link")))
@@ -731,28 +730,12 @@ def _static_to_shared_library(arch, compiler, static_lib, shared_lib=None, **kwa
return compiler(*compiler_args, output=compiler_output) return compiler(*compiler_args, output=compiler_output)
def _get_rpath_deps_from_spec( def get_rpath_deps(pkg):
spec: spack.spec.Spec, transitive_rpaths: bool """Return immediate or transitive RPATHs depending on the package."""
) -> List[spack.spec.Spec]: if pkg.transitive_rpaths:
if not transitive_rpaths: return [d for d in pkg.spec.traverse(root=False, deptype=("link"))]
return spec.dependencies(deptype=dt.LINK) else:
return pkg.spec.dependencies(deptype="link")
by_name: Dict[str, spack.spec.Spec] = {}
for dep in spec.traverse(root=False, deptype=dt.LINK):
lookup = by_name.get(dep.name)
if lookup is None:
by_name[dep.name] = dep
elif lookup.version < dep.version:
by_name[dep.name] = dep
return list(by_name.values())
def get_rpath_deps(pkg: spack.package_base.PackageBase) -> List[spack.spec.Spec]:
"""Return immediate or transitive dependencies (depending on the package) that need to be
rpath'ed. If a package occurs multiple times, the newest version is kept."""
return _get_rpath_deps_from_spec(pkg.spec, pkg.transitive_rpaths)
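The deduplication in _get_rpath_deps_from_spec is a generic keep-the-newest-entry-per-name reduction. A self-contained sketch with plain tuples standing in for specs:

from typing import Dict, List, Tuple

Dep = Tuple[str, Tuple[int, ...]]  # (name, version) stand-in for a spec

def newest_by_name(deps: List[Dep]) -> List[Dep]:
    by_name: Dict[str, Dep] = {}
    for dep in deps:
        name, version = dep
        current = by_name.get(name)
        # Keep the first occurrence, or replace it with a newer version.
        if current is None or current[1] < version:
            by_name[name] = dep
    return list(by_name.values())

print(newest_by_name([("zlib", (1, 2)), ("zlib", (1, 3)), ("mpi", (4, 0))]))
# [('zlib', (1, 3)), ('mpi', (4, 0))]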
def get_rpaths(pkg): def get_rpaths(pkg):
@@ -1145,60 +1128,18 @@ def get_cmake_prefix_path(pkg):
def _setup_pkg_and_run( def _setup_pkg_and_run(
serialized_pkg: "spack.subprocess_context.PackageInstallContext", serialized_pkg, function, kwargs, write_pipe, input_multiprocess_fd, jsfd1, jsfd2
function: Callable,
kwargs: Dict,
write_pipe: Connection,
input_pipe: Optional[Connection],
jsfd1: Optional[Connection],
jsfd2: Optional[Connection],
): ):
"""Main entry point in the child process for Spack builds.
``_setup_pkg_and_run`` is called by the child process created in
``start_build_process()``, and its main job is to run ``function()`` on behalf of
some Spack installation (see :ref:`spack.installer.PackageInstaller._install_task`).
The child process is passed a ``write_pipe``, on which it's expected to send one of
the following:
* ``StopPhase``: error raised by a build process indicating it's stopping at a
particular build phase.
* ``BaseException``: any exception raised by a child build process, which will be
wrapped in ``ChildError`` (which adds a bunch of debug info and log context) and
raised in the parent.
* The return value of ``function()``, which can be anything (except an exception).
This is returned to the caller.
Note: ``jsfd1`` and ``jsfd2`` are passed solely to ensure that the child process
does not close these file descriptors. Some ``multiprocessing`` backends will close
them automatically in the child if they are not passed at process creation time.
Arguments:
serialized_pkg: Spack package install context object (serialized form of the
package that we'll build in the child process).
function: function to call in the child process; serialized_pkg is passed to
this as the first argument.
kwargs: additional keyword arguments to pass to ``function()``.
write_pipe: multiprocessing ``Connection`` to the parent process, to which the
child *must* send a result (or an error) back to parent on.
input_pipe: stdin from the parent (not passed currently on Windows)
jsfd1: gmake Jobserver file descriptor 1.
jsfd2: gmake Jobserver file descriptor 2.
"""
context: str = kwargs.get("context", "build") context: str = kwargs.get("context", "build")
try: try:
# We are in the child process. Python sets sys.stdin to open(os.devnull) to prevent our # We are in the child process. Python sets sys.stdin to
# process and its parent from simultaneously reading from the original stdin. But, we # open(os.devnull) to prevent our process and its parent from
# assume that the parent process is not going to read from it till we are done with the # simultaneously reading from the original stdin. But, we assume
# child, so we undo Python's precaution. closefd=False since Connection has ownership. # that the parent process is not going to read from it till we
if input_pipe is not None: # are done with the child, so we undo Python's precaution.
sys.stdin = os.fdopen(input_pipe.fileno(), closefd=False) if input_multiprocess_fd is not None:
sys.stdin = os.fdopen(input_multiprocess_fd.fd)
pkg = serialized_pkg.restore() pkg = serialized_pkg.restore()
@@ -1214,14 +1155,13 @@ def _setup_pkg_and_run(
# Do not create a full ChildError from this, it's not an error # Do not create a full ChildError from this, it's not an error
# it's a control statement. # it's a control statement.
write_pipe.send(e) write_pipe.send(e)
except BaseException as e: except BaseException:
# catch ANYTHING that goes wrong in the child process # catch ANYTHING that goes wrong in the child process
exc_type, exc, tb = sys.exc_info()
# Need to unwind the traceback in the child because traceback # Need to unwind the traceback in the child because traceback
# objects can't be sent to the parent. # objects can't be sent to the parent.
exc_type = type(e) tb_string = traceback.format_exc()
tb = e.__traceback__
tb_string = "".join(traceback.format_exception(exc_type, e, tb))
# build up some context from the offending package so we can # build up some context from the offending package so we can
# show that, too. # show that, too.
@@ -1238,8 +1178,8 @@ def _setup_pkg_and_run(
elif context == "test": elif context == "test":
logfile = os.path.join(pkg.test_suite.stage, pkg.test_suite.test_log_name(pkg.spec)) logfile = os.path.join(pkg.test_suite.stage, pkg.test_suite.test_log_name(pkg.spec))
error_msg = str(e) error_msg = str(exc)
if isinstance(e, (spack.multimethod.NoSuchMethodError, AttributeError)): if isinstance(exc, (spack.multimethod.NoSuchMethodError, AttributeError)):
process = "test the installation" if context == "test" else "build from sources" process = "test the installation" if context == "test" else "build from sources"
error_msg = ( error_msg = (
"The '{}' package cannot find an attribute while trying to {}. " "The '{}' package cannot find an attribute while trying to {}. "
@@ -1249,7 +1189,7 @@ def _setup_pkg_and_run(
"More information at https://spack.readthedocs.io/en/latest/packaging_guide.html#installation-procedure" "More information at https://spack.readthedocs.io/en/latest/packaging_guide.html#installation-procedure"
).format(pkg.name, process, context) ).format(pkg.name, process, context)
error_msg = colorize("@*R{{{}}}".format(error_msg)) error_msg = colorize("@*R{{{}}}".format(error_msg))
error_msg = "{}\n\n{}".format(str(e), error_msg) error_msg = "{}\n\n{}".format(str(exc), error_msg)
# make a pickleable exception to send to parent. # make a pickleable exception to send to parent.
msg = "%s: %s" % (exc_type.__name__, error_msg) msg = "%s: %s" % (exc_type.__name__, error_msg)
@@ -1267,8 +1207,8 @@ def _setup_pkg_and_run(
finally: finally:
write_pipe.close() write_pipe.close()
if input_pipe is not None: if input_multiprocess_fd is not None:
input_pipe.close() input_multiprocess_fd.close()
def start_build_process(pkg, function, kwargs): def start_build_process(pkg, function, kwargs):
@@ -1295,9 +1235,23 @@ def child_fun():
If something goes wrong, the child process catches the error and If something goes wrong, the child process catches the error and
passes it to the parent wrapped in a ChildError. The parent is passes it to the parent wrapped in a ChildError. The parent is
expected to handle (or re-raise) the ChildError. expected to handle (or re-raise) the ChildError.
This uses `multiprocessing.Process` to create the child process. The
mechanism used to create the process differs on different operating
systems and for different versions of Python. In some cases "fork"
is used (i.e. the "fork" system call) and some cases it starts an
entirely new Python interpreter process (in the docs this is referred
to as the "spawn" start method). Breaking it down by OS:
- Linux always uses fork.
- Mac OS uses fork before Python 3.8 and "spawn" for 3.8 and after.
- Windows always uses the "spawn" start method.
For more information on `multiprocessing` child process creation
mechanisms, see https://docs.python.org/3/library/multiprocessing.html#contexts-and-start-methods
""" """
read_pipe, write_pipe = multiprocessing.Pipe(duplex=False) read_pipe, write_pipe = multiprocessing.Pipe(duplex=False)
input_fd = None input_multiprocess_fd = None
jobserver_fd1 = None jobserver_fd1 = None
jobserver_fd2 = None jobserver_fd2 = None
@@ -1306,13 +1260,14 @@ def child_fun():
try: try:
# Forward sys.stdin when appropriate, to allow toggling verbosity # Forward sys.stdin when appropriate, to allow toggling verbosity
if sys.platform != "win32" and sys.stdin.isatty() and hasattr(sys.stdin, "fileno"): if sys.platform != "win32" and sys.stdin.isatty() and hasattr(sys.stdin, "fileno"):
input_fd = Connection(os.dup(sys.stdin.fileno())) input_fd = os.dup(sys.stdin.fileno())
input_multiprocess_fd = MultiProcessFd(input_fd)
mflags = os.environ.get("MAKEFLAGS", False) mflags = os.environ.get("MAKEFLAGS", False)
if mflags: if mflags:
m = re.search(r"--jobserver-[^=]*=(\d),(\d)", mflags) m = re.search(r"--jobserver-[^=]*=(\d),(\d)", mflags)
if m: if m:
jobserver_fd1 = Connection(int(m.group(1))) jobserver_fd1 = MultiProcessFd(int(m.group(1)))
jobserver_fd2 = Connection(int(m.group(2))) jobserver_fd2 = MultiProcessFd(int(m.group(2)))
p = multiprocessing.Process( p = multiprocessing.Process(
target=_setup_pkg_and_run, target=_setup_pkg_and_run,
@@ -1321,7 +1276,7 @@ def child_fun():
function, function,
kwargs, kwargs,
write_pipe, write_pipe,
input_fd, input_multiprocess_fd,
jobserver_fd1, jobserver_fd1,
jobserver_fd2, jobserver_fd2,
), ),
@@ -1341,8 +1296,8 @@ def child_fun():
finally: finally:
# Close the input stream in the parent process # Close the input stream in the parent process
if input_fd is not None: if input_multiprocess_fd is not None:
input_fd.close() input_multiprocess_fd.close()
def exitcode_msg(p): def exitcode_msg(p):
typ = "exit" if p.exitcode >= 0 else "signal" typ = "exit" if p.exitcode >= 0 else "signal"
View File
@@ -39,11 +39,16 @@ def _maybe_set_python_hints(pkg: spack.package_base.PackageBase, args: List[str]
"""Set the PYTHON_EXECUTABLE, Python_EXECUTABLE, and Python3_EXECUTABLE CMake variables """Set the PYTHON_EXECUTABLE, Python_EXECUTABLE, and Python3_EXECUTABLE CMake variables
if the package has Python as build or link dep and ``find_python_hints`` is set to True. See if the package has Python as build or link dep and ``find_python_hints`` is set to True. See
``find_python_hints`` for context.""" ``find_python_hints`` for context."""
if not getattr(pkg, "find_python_hints", False) or not pkg.spec.dependencies( if not getattr(pkg, "find_python_hints", False):
"python", dt.BUILD | dt.LINK
):
return return
python_executable = pkg.spec["python"].command.path pythons = pkg.spec.dependencies("python", dt.BUILD | dt.LINK)
if len(pythons) != 1:
return
try:
python_executable = pythons[0].package.command.path
except RuntimeError:
return
args.extend( args.extend(
[ [
CMakeBuilder.define("PYTHON_EXECUTABLE", python_executable), CMakeBuilder.define("PYTHON_EXECUTABLE", python_executable),
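The left version guards on exactly one Python dependency and tolerates a missing interpreter command, keeping the hints best-effort. The flag construction itself is easy to exercise stand-alone (this define helper is a simplified stand-in for CMakeBuilder.define):

from typing import List, Optional

def define(variable: str, value: str) -> str:
    # Simplified: the real helper also infers the CMake cache type.
    return f"-D{variable}:STRING={value}"

def python_hint_args(python_executable: Optional[str]) -> List[str]:
    if python_executable is None:
        return []  # no usable interpreter, set no hints
    return [
        define("PYTHON_EXECUTABLE", python_executable),
        define("Python_EXECUTABLE", python_executable),
        define("Python3_EXECUTABLE", python_executable),
    ]

print(python_hint_args("/usr/bin/python3"))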
View File
@@ -1,144 +0,0 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import itertools
import os
import pathlib
import re
import sys
from typing import Dict, List, Sequence, Tuple, Union
import llnl.util.tty as tty
from llnl.util.lang import classproperty
import spack.compiler
import spack.package_base
# Local "type" for type hints
Path = Union[str, pathlib.Path]
class CompilerPackage(spack.package_base.PackageBase):
"""A Package mixin for all common logic for packages that implement compilers"""
# TODO: how do these play nicely with other tags
tags: Sequence[str] = ["compiler"]
#: Optional suffix regexes for searching for this type of compiler.
#: Suffixes are used by some frameworks, e.g. macports uses an '-mp-X.Y'
#: version suffix for gcc.
compiler_suffixes: List[str] = [r"-.*"]
#: Optional prefix regexes for searching for this compiler
compiler_prefixes: List[str] = []
#: Compiler argument(s) that produces version information
#: If multiple arguments, the earlier arguments must produce errors when invalid
compiler_version_argument: Union[str, Tuple[str]] = "-dumpversion"
#: Regex used to extract version from compiler's output
compiler_version_regex: str = "(.*)"
#: Static definition of languages supported by this class
compiler_languages: Sequence[str] = ["c", "cxx", "fortran"]
def __init__(self, spec: "spack.spec.Spec"):
super().__init__(spec)
msg = f"Supported languages for {spec} are not a subset of possible supported languages"
msg += f" supports: {self.supported_languages}, valid values: {self.compiler_languages}"
assert set(self.supported_languages) <= set(self.compiler_languages), msg
@property
def supported_languages(self) -> Sequence[str]:
"""Dynamic definition of languages supported by this package"""
return self.compiler_languages
@classproperty
def compiler_names(cls) -> Sequence[str]:
"""Construct list of compiler names from per-language names"""
names = []
for language in cls.compiler_languages:
names.extend(getattr(cls, f"{language}_names"))
return names
@classproperty
def executables(cls) -> Sequence[str]:
"""Construct executables for external detection from names, prefixes, and suffixes."""
regexp_fmt = r"^({0}){1}({2})$"
prefixes = [""] + cls.compiler_prefixes
suffixes = [""] + cls.compiler_suffixes
if sys.platform == "win32":
ext = r"\.(?:exe|bat)"
suffixes += [suf + ext for suf in suffixes]
return [
regexp_fmt.format(prefix, re.escape(name), suffix)
for prefix, name, suffix in itertools.product(prefixes, cls.compiler_names, suffixes)
]
@classmethod
def determine_version(cls, exe: Path):
version_argument = cls.compiler_version_argument
if isinstance(version_argument, str):
version_argument = (version_argument,)
for va in version_argument:
try:
output = spack.compiler.get_compiler_version_output(exe, va)
match = re.search(cls.compiler_version_regex, output)
if match:
return ".".join(match.groups())
except spack.util.executable.ProcessError:
pass
except Exception as e:
tty.debug(
f"[{__file__}] Cannot detect a valid version for the executable "
f"{str(exe)}, for package '{cls.name}': {e}"
)
@classmethod
def compiler_bindir(cls, prefix: Path) -> Path:
"""Overridable method for the location of the compiler bindir within the preifx"""
return os.path.join(prefix, "bin")
@classmethod
def determine_compiler_paths(cls, exes: Sequence[Path]) -> Dict[str, Path]:
"""Compute the paths to compiler executables associated with this package
This is a helper method for ``determine_variants`` to compute the ``extra_attributes``
to include with each spec object."""
# There are often at least two copies (not symlinks) of each compiler executable in the
# same directory: one with a canonical name, e.g. "gfortran", and another one with the
# target prefix, e.g. "x86_64-pc-linux-gnu-gfortran". There also might be a copy of "gcc"
# with the version suffix, e.g. "x86_64-pc-linux-gnu-gcc-6.3.0". To ensure the consistency
# of values in the "paths" dictionary (i.e. we prefer all of them to reference copies
# with canonical names if possible), we iterate over the executables in the reversed sorted
# order:
# First pass over languages identifies exes that are perfect matches for canonical names
# Second pass checks for names with prefix/suffix
# Second pass is sorted by language name length because longer named languages
# e.g. cxx can often contain the names of shorter named languages
# e.g. c (e.g. clang/clang++)
paths = {}
exes = sorted(exes, reverse=True)
languages = {
lang: getattr(cls, f"{lang}_names")
for lang in sorted(cls.compiler_languages, key=len, reverse=True)
}
for exe in exes:
for lang, names in languages.items():
if os.path.basename(exe) in names:
paths[lang] = exe
break
else:
for lang, names in languages.items():
if any(name in os.path.basename(exe) for name in names):
paths[lang] = exe
break
return paths
@classmethod
def determine_variants(cls, exes: Sequence[Path], version_str: str) -> Tuple:
# path determination is separated so it can be reused in subclasses
return "", {"compilers": cls.determine_compiler_paths(exes=exes)}
View File
@@ -145,7 +145,7 @@ def install(self, pkg, spec, prefix):
opts += self.nmake_install_args() opts += self.nmake_install_args()
if self.makefile_name: if self.makefile_name:
opts.append("/F{}".format(self.makefile_name)) opts.append("/F{}".format(self.makefile_name))
opts.append(self.define("PREFIX", fs.windows_sfn(prefix))) opts.append(self.define("PREFIX", prefix))
with fs.working_dir(self.build_directory): with fs.working_dir(self.build_directory):
inspect.getmodule(self.pkg).nmake( inspect.getmodule(self.pkg).nmake(
*opts, *self.install_targets, ignore_quotes=self.ignore_quotes *opts, *self.install_targets, ignore_quotes=self.ignore_quotes
View File
@@ -138,21 +138,16 @@ def view_file_conflicts(self, view, merge_map):
return conflicts return conflicts
def add_files_to_view(self, view, merge_map, skip_if_exists=True): def add_files_to_view(self, view, merge_map, skip_if_exists=True):
# Patch up shebangs if the package extends Python and we put a Python interpreter in the # Patch up shebangs to the python linked in the view only if python is built by Spack.
# view. if not self.extendee_spec or self.extendee_spec.external:
if not self.extendee_spec:
return super().add_files_to_view(view, merge_map, skip_if_exists)
python, *_ = self.spec.dependencies("python-venv") or self.spec.dependencies("python")
if python.external:
return super().add_files_to_view(view, merge_map, skip_if_exists) return super().add_files_to_view(view, merge_map, skip_if_exists)
# We only patch shebangs in the bin directory. # We only patch shebangs in the bin directory.
copied_files: Dict[Tuple[int, int], str] = {} # File identifier -> source copied_files: Dict[Tuple[int, int], str] = {} # File identifier -> source
delayed_links: List[Tuple[str, str]] = [] # List of symlinks from merge map delayed_links: List[Tuple[str, str]] = [] # List of symlinks from merge map
bin_dir = self.spec.prefix.bin
bin_dir = self.spec.prefix.bin
python_prefix = self.extendee_spec.prefix
for src, dst in merge_map.items(): for src, dst in merge_map.items():
if skip_if_exists and os.path.lexists(dst): if skip_if_exists and os.path.lexists(dst):
continue continue
@@ -173,7 +168,7 @@ def add_files_to_view(self, view, merge_map, skip_if_exists=True):
copied_files[(s.st_dev, s.st_ino)] = dst copied_files[(s.st_dev, s.st_ino)] = dst
shutil.copy2(src, dst) shutil.copy2(src, dst)
fs.filter_file( fs.filter_file(
python.prefix, os.path.abspath(view.get_projection_for_spec(self.spec)), dst python_prefix, os.path.abspath(view.get_projection_for_spec(self.spec)), dst
) )
else: else:
view.link(src, dst) view.link(src, dst)
@@ -204,13 +199,14 @@ def remove_files_from_view(self, view, merge_map):
ignore_namespace = True ignore_namespace = True
bin_dir = self.spec.prefix.bin bin_dir = self.spec.prefix.bin
global_view = self.extendee_spec.prefix == view.get_projection_for_spec(self.spec)
to_remove = [] to_remove = []
for src, dst in merge_map.items(): for src, dst in merge_map.items():
if ignore_namespace and namespace_init(dst): if ignore_namespace and namespace_init(dst):
continue continue
if not fs.path_contains_subdirectory(src, bin_dir): if global_view or not fs.path_contains_subdirectory(src, bin_dir):
to_remove.append(dst) to_remove.append(dst)
else: else:
os.remove(dst) os.remove(dst)
@@ -366,12 +362,6 @@ def list_url(cls) -> Optional[str]: # type: ignore[override]
return f"https://pypi.org/simple/{name}/" return f"https://pypi.org/simple/{name}/"
return None return None
@property
def python_spec(self):
"""Get python-venv if it exists or python otherwise."""
python, *_ = self.spec.dependencies("python-venv") or self.spec.dependencies("python")
return python
@property @property
def headers(self) -> HeaderList: def headers(self) -> HeaderList:
"""Discover header files in platlib.""" """Discover header files in platlib."""
@@ -381,9 +371,8 @@ def headers(self) -> HeaderList:
# Headers should only be in include or platlib, but no harm in checking purelib too # Headers should only be in include or platlib, but no harm in checking purelib too
include = self.prefix.join(self.spec["python"].package.include).join(name) include = self.prefix.join(self.spec["python"].package.include).join(name)
python = self.python_spec platlib = self.prefix.join(self.spec["python"].package.platlib).join(name)
platlib = self.prefix.join(python.package.platlib).join(name) purelib = self.prefix.join(self.spec["python"].package.purelib).join(name)
purelib = self.prefix.join(python.package.purelib).join(name)
headers_list = map(fs.find_all_headers, [include, platlib, purelib]) headers_list = map(fs.find_all_headers, [include, platlib, purelib])
headers = functools.reduce(operator.add, headers_list) headers = functools.reduce(operator.add, headers_list)
@@ -402,9 +391,8 @@ def libs(self) -> LibraryList:
name = self.spec.name[3:] name = self.spec.name[3:]
# Libraries should only be in platlib, but no harm in checking purelib too # Libraries should only be in platlib, but no harm in checking purelib too
python = self.python_spec platlib = self.prefix.join(self.spec["python"].package.platlib).join(name)
platlib = self.prefix.join(python.package.platlib).join(name) purelib = self.prefix.join(self.spec["python"].package.purelib).join(name)
purelib = self.prefix.join(python.package.purelib).join(name)
find_all_libraries = functools.partial(fs.find_all_libraries, recursive=True) find_all_libraries = functools.partial(fs.find_all_libraries, recursive=True)
libs_list = map(find_all_libraries, [platlib, purelib]) libs_list = map(find_all_libraries, [platlib, purelib])
@@ -516,8 +504,6 @@ def global_options(self, spec: Spec, prefix: Prefix) -> Iterable[str]:
def install(self, pkg: PythonPackage, spec: Spec, prefix: Prefix) -> None: def install(self, pkg: PythonPackage, spec: Spec, prefix: Prefix) -> None:
"""Install everything from build directory.""" """Install everything from build directory."""
pip = spec["python"].command
pip.add_default_arg("-m", "pip")
args = PythonPipBuilder.std_args(pkg) + [f"--prefix={prefix}"] args = PythonPipBuilder.std_args(pkg) + [f"--prefix={prefix}"]
@@ -533,6 +519,14 @@ def install(self, pkg: PythonPackage, spec: Spec, prefix: Prefix) -> None:
else: else:
args.append(".") args.append(".")
pip = spec["python"].command
# Hide user packages, since we don't have build isolation. This is
# necessary because pip / setuptools may run hooks from arbitrary
# packages during the build. There is no equivalent variable to hide
# system packages, so this is not reliable for external Python.
pip.add_default_env("PYTHONNOUSERSITE", "1")
pip.add_default_arg("-m")
pip.add_default_arg("pip")
with fs.working_dir(self.build_directory): with fs.working_dir(self.build_directory):
pip(*args) pip(*args)
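Either way the builder ends up running `python -m pip install --prefix=... .` from the build directory; the sides differ in whether PYTHONNOUSERSITE=1 is still needed to hide user site-packages during the build. A subprocess-based sketch of the defensive variant (paths are illustrative):

import os
import subprocess
import sys

def pip_install(prefix: str, build_dir: str, hide_user_site: bool = True) -> None:
    env = dict(os.environ)
    if hide_user_site:
        # pip / setuptools may run hooks from arbitrary packages during
        # the build; hide user packages since there is no build isolation.
        env["PYTHONNOUSERSITE"] = "1"
    subprocess.run(
        [sys.executable, "-m", "pip", "install", f"--prefix={prefix}", "."],
        cwd=build_dir,
        env=env,
        check=True,
    )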
View File
@@ -22,8 +22,6 @@
from urllib.parse import urlencode from urllib.parse import urlencode
from urllib.request import HTTPHandler, Request, build_opener from urllib.request import HTTPHandler, Request, build_opener
import ruamel.yaml
import llnl.util.filesystem as fs import llnl.util.filesystem as fs
import llnl.util.tty as tty import llnl.util.tty as tty
from llnl.util.lang import memoized from llnl.util.lang import memoized
@@ -46,7 +44,6 @@
from spack import traverse from spack import traverse
from spack.error import SpackError from spack.error import SpackError
from spack.reporters import CDash, CDashConfiguration from spack.reporters import CDash, CDashConfiguration
from spack.reporters.cdash import SPACK_CDASH_TIMEOUT
from spack.reporters.cdash import build_stamp as cdash_build_stamp from spack.reporters.cdash import build_stamp as cdash_build_stamp
# See https://docs.gitlab.com/ee/ci/yaml/#retry for descriptions of conditions # See https://docs.gitlab.com/ee/ci/yaml/#retry for descriptions of conditions
@@ -686,22 +683,6 @@ def generate_gitlab_ci_yaml(
"instead.", "instead.",
) )
def ensure_expected_target_path(path):
"""Returns passed paths with all Windows path separators exchanged
for posix separators only if copy_only_pipeline is enabled
This is required as copy_only_pipelines are a unique scenario where
the generate job and child pipelines are run on different platforms.
To make this compatible w/ Windows, we cannot write Windows style path separators
that will be consumed by the Posix copy job runner.
TODO (johnwparent): Refactor config + cli read/write to deal only in posix
style paths
"""
if copy_only_pipeline and path:
path = path.replace("\\", "/")
return path
pipeline_mirrors = spack.mirror.MirrorCollection(binary=True) pipeline_mirrors = spack.mirror.MirrorCollection(binary=True)
deprecated_mirror_config = False deprecated_mirror_config = False
buildcache_destination = None buildcache_destination = None
@@ -825,7 +806,7 @@ def ensure_expected_target_path(path):
if scope not in include_scopes and scope not in env_includes: if scope not in include_scopes and scope not in env_includes:
include_scopes.insert(0, scope) include_scopes.insert(0, scope)
env_includes.extend(include_scopes) env_includes.extend(include_scopes)
env_yaml_root["spack"]["include"] = [ensure_expected_target_path(i) for i in env_includes] env_yaml_root["spack"]["include"] = env_includes
if "gitlab-ci" in env_yaml_root["spack"] and "ci" not in env_yaml_root["spack"]: if "gitlab-ci" in env_yaml_root["spack"] and "ci" not in env_yaml_root["spack"]:
env_yaml_root["spack"]["ci"] = env_yaml_root["spack"].pop("gitlab-ci") env_yaml_root["spack"]["ci"] = env_yaml_root["spack"].pop("gitlab-ci")
@@ -1113,7 +1094,7 @@ def main_script_replacements(cmd):
if cdash_handler and cdash_handler.auth_token: if cdash_handler and cdash_handler.auth_token:
try: try:
cdash_handler.populate_buildgroup(all_job_names) cdash_handler.populate_buildgroup(all_job_names)
except (SpackError, HTTPError, URLError, TimeoutError) as err: except (SpackError, HTTPError, URLError) as err:
tty.warn(f"Problem populating buildgroup: {err}") tty.warn(f"Problem populating buildgroup: {err}")
else: else:
tty.warn("Unable to populate buildgroup without CDash credentials") tty.warn("Unable to populate buildgroup without CDash credentials")
@@ -1246,9 +1227,6 @@ def main_script_replacements(cmd):
"SPACK_REBUILD_EVERYTHING": str(rebuild_everything), "SPACK_REBUILD_EVERYTHING": str(rebuild_everything),
"SPACK_REQUIRE_SIGNING": os.environ.get("SPACK_REQUIRE_SIGNING", "False"), "SPACK_REQUIRE_SIGNING": os.environ.get("SPACK_REQUIRE_SIGNING", "False"),
} }
output_vars = output_object["variables"]
for item, val in output_vars.items():
output_vars[item] = ensure_expected_target_path(val)
# TODO: Remove this block in Spack 0.23 # TODO: Remove this block in Spack 0.23
if deprecated_mirror_config and remote_mirror_override: if deprecated_mirror_config and remote_mirror_override:
@@ -1305,6 +1283,7 @@ def main_script_replacements(cmd):
sorted_output = {} sorted_output = {}
for output_key, output_value in sorted(output_object.items()): for output_key, output_value in sorted(output_object.items()):
sorted_output[output_key] = output_value sorted_output[output_key] = output_value
if known_broken_specs_encountered: if known_broken_specs_encountered:
tty.error("This pipeline generated hashes known to be broken on develop:") tty.error("This pipeline generated hashes known to be broken on develop:")
display_broken_spec_messages(broken_specs_url, known_broken_specs_encountered) display_broken_spec_messages(broken_specs_url, known_broken_specs_encountered)
@@ -1312,11 +1291,8 @@ def main_script_replacements(cmd):
if not rebuild_everything: if not rebuild_everything:
sys.exit(1) sys.exit(1)
# Minimize yaml output size through use of anchors with open(output_file, "w") as outf:
syaml.anchorify(sorted_output) outf.write(syaml.dump(sorted_output, default_flow_style=True))
with open(output_file, "w") as f:
ruamel.yaml.YAML().dump(sorted_output, f)
def _url_encode_string(input_string): def _url_encode_string(input_string):
@@ -1502,12 +1478,6 @@ def copy_test_logs_to_artifacts(test_stage, job_test_dir):
copy_files_to_artifacts(os.path.join(test_stage, "*", "*.txt"), job_test_dir) copy_files_to_artifacts(os.path.join(test_stage, "*", "*.txt"), job_test_dir)
def win_quote(quote_str: str) -> str:
if IS_WINDOWS:
quote_str = f'"{quote_str}"'
return quote_str
def download_and_extract_artifacts(url, work_dir): def download_and_extract_artifacts(url, work_dir):
"""Look for gitlab artifacts.zip at the given url, and attempt to download """Look for gitlab artifacts.zip at the given url, and attempt to download
and extract the contents into the given work_dir and extract the contents into the given work_dir
@@ -1530,7 +1500,7 @@ def download_and_extract_artifacts(url, work_dir):
request = Request(url, headers=headers) request = Request(url, headers=headers)
request.get_method = lambda: "GET" request.get_method = lambda: "GET"
response = opener.open(request, timeout=SPACK_CDASH_TIMEOUT) response = opener.open(request)
response_code = response.getcode() response_code = response.getcode()
if response_code != 200: if response_code != 200:
@@ -1972,9 +1942,9 @@ def compose_command_err_handling(args):
# but we need to handle EXEs (git, etc) ourselves # but we need to handle EXEs (git, etc) ourselves
catch_exe_failure = ( catch_exe_failure = (
""" """
if ($LASTEXITCODE -ne 0){{ if ($LASTEXITCODE -ne 0){
throw 'Command {} has failed' throw "Command {} has failed"
}} }
""" """
if IS_WINDOWS if IS_WINDOWS
else "" else ""
@@ -2100,7 +2070,7 @@ def read_broken_spec(broken_spec_url):
""" """
try: try:
_, _, fs = web_util.read_from_url(broken_spec_url) _, _, fs = web_util.read_from_url(broken_spec_url)
except web_util.SpackWebError: except (URLError, web_util.SpackWebError, HTTPError):
tty.warn(f"Unable to read broken spec from {broken_spec_url}") tty.warn(f"Unable to read broken spec from {broken_spec_url}")
return None return None
@@ -2206,13 +2176,13 @@ def __init__(self, ci_cdash):
def args(self): def args(self):
return [ return [
"--cdash-upload-url", "--cdash-upload-url",
win_quote(self.upload_url), self.upload_url,
"--cdash-build", "--cdash-build",
win_quote(self.build_name), self.build_name,
"--cdash-site", "--cdash-site",
win_quote(self.site), self.site,
"--cdash-buildstamp", "--cdash-buildstamp",
win_quote(self.build_stamp), self.build_stamp,
] ]
@property # type: ignore @property # type: ignore
@@ -2278,7 +2248,7 @@ def create_buildgroup(self, opener, headers, url, group_name, group_type):
request = Request(url, data=enc_data, headers=headers) request = Request(url, data=enc_data, headers=headers)
response = opener.open(request, timeout=SPACK_CDASH_TIMEOUT) response = opener.open(request)
response_code = response.getcode() response_code = response.getcode()
if response_code not in [200, 201]: if response_code not in [200, 201]:
@@ -2324,7 +2294,7 @@ def populate_buildgroup(self, job_names):
request = Request(url, data=enc_data, headers=headers) request = Request(url, data=enc_data, headers=headers)
request.get_method = lambda: "PUT" request.get_method = lambda: "PUT"
response = opener.open(request, timeout=SPACK_CDASH_TIMEOUT) response = opener.open(request)
response_code = response.getcode() response_code = response.getcode()
if response_code != 200: if response_code != 200:
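Several hunks in this file thread one timeout constant (SPACK_CDASH_TIMEOUT) through every opener.open(...) call, so a stalled CDash server cannot hang pipeline generation indefinitely. Reduced to the stdlib (the constant's value below is illustrative):

from urllib.request import HTTPHandler, Request, build_opener

SPACK_CDASH_TIMEOUT = 10  # seconds; illustrative value

def put_json(url: str, data: bytes, headers: dict) -> int:
    opener = build_opener(HTTPHandler)
    request = Request(url, data=data, headers=headers)
    request.get_method = lambda: "PUT"
    # The timeout bounds connect and read; without it a dead server
    # blocks the whole `spack ci generate` run.
    response = opener.open(request, timeout=SPACK_CDASH_TIMEOUT)
    return response.getcode()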
View File
@@ -84,7 +84,7 @@ def externals(parser, args):
return return
pkgs = args.name or spack.repo.PATH.all_package_names() pkgs = args.name or spack.repo.PATH.all_package_names()
reports = spack.audit.run_group(args.subcommand, pkgs=pkgs, debug_log=tty.debug) reports = spack.audit.run_group(args.subcommand, pkgs=pkgs)
_process_reports(reports) _process_reports(reports)
View File
@@ -2,6 +2,7 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details. # Spack Project Developers. See the top-level COPYRIGHT file for details.
# #
# SPDX-License-Identifier: (Apache-2.0 OR MIT) # SPDX-License-Identifier: (Apache-2.0 OR MIT)
import argparse
import os.path import os.path
import shutil import shutil
import sys import sys
@@ -67,13 +68,21 @@
} }
class AllSources:
"""Iterator for bootstrapping sources. Every __iter__ call uses current config, which is robust
in tests."""
def __iter__(self):
yield from (s["name"] for s in spack.bootstrap.core.bootstrapping_sources())
def _add_scope_option(parser): def _add_scope_option(parser):
parser.add_argument( parser.add_argument(
"--scope", action=arguments.ConfigScope, help="configuration scope to read/modify" "--scope", action=arguments.ConfigScope, help="configuration scope to read/modify"
) )
def setup_parser(subparser): def setup_parser(subparser: argparse.ArgumentParser):
sp = subparser.add_subparsers(dest="subcommand") sp = subparser.add_subparsers(dest="subcommand")
now = sp.add_parser("now", help="Spack ready, right now!") now = sp.add_parser("now", help="Spack ready, right now!")
@@ -122,7 +131,9 @@ def setup_parser(subparser):
add.add_argument("metadata_dir", help="directory where to find metadata files") add.add_argument("metadata_dir", help="directory where to find metadata files")
remove = sp.add_parser("remove", help="remove a bootstrapping source") remove = sp.add_parser("remove", help="remove a bootstrapping source")
remove.add_argument("name", help="name of the source to be removed") remove.add_argument(
"name", nargs="+", choices=AllSources(), help=argparse.SUPPRESS, metavar="name"
)
mirror = sp.add_parser("mirror", help="create a local mirror to bootstrap Spack") mirror = sp.add_parser("mirror", help="create a local mirror to bootstrap Spack")
mirror.add_argument( mirror.add_argument(
@@ -197,6 +208,11 @@ def _root(args):
def _list(args): def _list(args):
sources = spack.bootstrap.core.bootstrapping_sources(scope=args.scope) sources = spack.bootstrap.core.bootstrapping_sources(scope=args.scope)
if not sys.stdout.isatty():
for source in sources:
print(source["name"])
return
if not sources: if not sources:
llnl.util.tty.msg("No method available for bootstrapping Spack's dependencies") llnl.util.tty.msg("No method available for bootstrapping Spack's dependencies")
return return
@@ -356,31 +372,35 @@ def _add(args):
def _remove(args): def _remove(args):
initial_sources = spack.bootstrap.core.bootstrapping_sources() configured = set(AllSources())
names = [s["name"] for s in initial_sources] removable = set(args.name)
if args.name not in names: unknown = removable - configured
msg = (
'cannot find any bootstrapping source named "{0}". ' if unknown:
"Run `spack bootstrap list` to see available sources." if not configured:
raise RuntimeError("no bootstrapping sources are configured")
unknown_str = ", ".join(f'"{name}"' for name in removable)
known_str = ", ".join(f'"{name}"' for name in configured)
raise RuntimeError(
f"cannot find any bootstrapping source named {unknown_str}. "
f"Choose from {known_str}."
) )
raise RuntimeError(msg.format(args.name))
for current_scope in spack.config.scopes(): for current_scope in spack.config.scopes():
sources = spack.config.get("bootstrap:sources", scope=current_scope) or [] sources = spack.config.get("bootstrap:sources", scope=current_scope) or []
if args.name in [s["name"] for s in sources]:
sources = [s for s in sources if s["name"] != args.name]
spack.config.set("bootstrap:sources", sources, scope=current_scope)
msg = (
'Removed the bootstrapping source named "{0}" from the '
'"{1}" configuration scope.'
)
llnl.util.tty.msg(msg.format(args.name, current_scope))
trusted = spack.config.get("bootstrap:trusted", scope=current_scope) or [] trusted = spack.config.get("bootstrap:trusted", scope=current_scope) or []
if args.name in trusted: for name in removable:
trusted.pop(args.name) if any(name == s["name"] for s in sources):
spack.config.set("bootstrap:trusted", trusted, scope=current_scope) sources = [s for s in sources if s["name"] != name]
msg = 'Deleting information on "{0}" from list of trusted sources' spack.config.set("bootstrap:sources", sources, scope=current_scope)
llnl.util.tty.msg(msg.format(args.name)) llnl.util.tty.msg(
f'Removed the bootstrapping source named "{name}" from the '
f'"{current_scope}" configuration scope.'
)
if name in trusted:
trusted.pop(name)
spack.config.set("bootstrap:trusted", trusted, scope=current_scope)
llnl.util.tty.msg(f'Deleting information on "{name}" from list of trusted sources')
def _mirror(args): def _mirror(args):
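Accepting several names at once makes validation a set-difference problem, and checking the whole batch up front yields one complete error message instead of failing on the first unknown name. A distilled, self-contained version:

from typing import Iterable, List

def validate_removals(requested: Iterable[str], configured: Iterable[str]) -> List[str]:
    configured_set = set(configured)
    requested_set = set(requested)
    unknown = requested_set - configured_set
    if unknown:
        if not configured_set:
            raise RuntimeError("no bootstrapping sources are configured")
        unknown_str = ", ".join(f'"{n}"' for n in sorted(unknown))
        known_str = ", ".join(f'"{n}"' for n in sorted(configured_set))
        raise RuntimeError(
            f"cannot find any bootstrapping source named {unknown_str}. "
            f"Choose from {known_str}."
        )
    return sorted(requested_set)

print(validate_removals(["github-actions-v0.4"],
                        ["github-actions-v0.4", "spack-install"]))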
View File
@@ -13,6 +13,7 @@
import shutil import shutil
import sys import sys
import tempfile import tempfile
import urllib.request
from typing import Dict, List, Optional, Tuple, Union from typing import Dict, List, Optional, Tuple, Union
import llnl.util.tty as tty import llnl.util.tty as tty
@@ -53,7 +54,6 @@
from spack.oci.oci import ( from spack.oci.oci import (
copy_missing_layers_with_retry, copy_missing_layers_with_retry,
get_manifest_and_config_with_retry, get_manifest_and_config_with_retry,
list_tags,
upload_blob_with_retry, upload_blob_with_retry,
upload_manifest_with_retry, upload_manifest_with_retry,
) )
@@ -813,7 +813,7 @@ def _push_oci(
def extra_config(spec: Spec): def extra_config(spec: Spec):
spec_dict = spec.to_dict(hash=ht.dag_hash) spec_dict = spec.to_dict(hash=ht.dag_hash)
spec_dict["buildcache_layout_version"] = bindist.CURRENT_BUILD_CACHE_LAYOUT_VERSION spec_dict["buildcache_layout_version"] = 1
spec_dict["binary_cache_checksum"] = { spec_dict["binary_cache_checksum"] = {
"hash_algorithm": "sha256", "hash_algorithm": "sha256",
"hash": checksums[spec.dag_hash()].compressed_digest.digest, "hash": checksums[spec.dag_hash()].compressed_digest.digest,
@@ -856,7 +856,10 @@ def _config_from_tag(image_ref: ImageReference, tag: str) -> Optional[dict]:
def _update_index_oci(image_ref: ImageReference, tmpdir: str, pool: MaybePool) -> None: def _update_index_oci(image_ref: ImageReference, tmpdir: str, pool: MaybePool) -> None:
tags = list_tags(image_ref) request = urllib.request.Request(url=image_ref.tags_url())
response = spack.oci.opener.urlopen(request)
spack.oci.opener.ensure_status(request, response, 200)
tags = json.load(response)["tags"]
# Fetch all image config files in parallel # Fetch all image config files in parallel
spec_dicts = pool.starmap( spec_dicts = pool.starmap(
View File
@@ -31,6 +31,7 @@
level = "long" level = "long"
SPACK_COMMAND = "spack" SPACK_COMMAND = "spack"
MAKE_COMMAND = "make"
INSTALL_FAIL_CODE = 1 INSTALL_FAIL_CODE = 1
FAILED_CREATE_BUILDCACHE_CODE = 100 FAILED_CREATE_BUILDCACHE_CODE = 100
@@ -39,12 +40,6 @@ def deindent(desc):
return desc.replace(" ", "") return desc.replace(" ", "")
def unicode_escape(path: str) -> str:
"""Returns transformed path with any unicode
characters replaced with their corresponding escapes"""
return path.encode("unicode-escape").decode("utf-8")
def setup_parser(subparser): def setup_parser(subparser):
setup_parser.parser = subparser setup_parser.parser = subparser
subparsers = subparser.add_subparsers(help="CI sub-commands") subparsers = subparser.add_subparsers(help="CI sub-commands")
@@ -556,35 +551,75 @@ def ci_rebuild(args):
# No hash match anywhere means we need to rebuild spec # No hash match anywhere means we need to rebuild spec
# Start with spack arguments # Start with spack arguments
spack_cmd = [SPACK_COMMAND, "--color=always", "--backtrace", "--verbose", "install"] spack_cmd = [SPACK_COMMAND, "--color=always", "--backtrace", "--verbose"]
config = cfg.get("config") config = cfg.get("config")
if not config["verify_ssl"]: if not config["verify_ssl"]:
spack_cmd.append("-k") spack_cmd.append("-k")
install_args = [f'--use-buildcache={spack_ci.win_quote("package:never,dependencies:only")}'] install_args = []
can_verify = spack_ci.can_verify_binaries() can_verify = spack_ci.can_verify_binaries()
verify_binaries = can_verify and spack_is_pr_pipeline is False verify_binaries = can_verify and spack_is_pr_pipeline is False
if not verify_binaries: if not verify_binaries:
install_args.append("--no-check-signature") install_args.append("--no-check-signature")
slash_hash = spack_ci.win_quote("/" + job_spec.dag_hash()) slash_hash = "/{}".format(job_spec.dag_hash())
# Arguments when installing dependencies from cache
deps_install_args = install_args
# Arguments when installing the root from sources # Arguments when installing the root from sources
deps_install_args = install_args + ["--only=dependencies"] root_install_args = install_args + [
root_install_args = install_args + ["--keep-stage", "--only=package"] "--keep-stage",
"--only=package",
"--use-buildcache=package:never,dependencies:only",
]
if cdash_handler: if cdash_handler:
# Add additional arguments to `spack install` for CDash reporting. # Add additional arguments to `spack install` for CDash reporting.
root_install_args.extend(cdash_handler.args()) root_install_args.extend(cdash_handler.args())
root_install_args.append(slash_hash)
# ["x", "y"] -> "'x' 'y'"
args_to_string = lambda args: " ".join("'{}'".format(arg) for arg in args)
commands = [ commands = [
# apparently there's a race when spack bootstraps? do it up front once # apparently there's a race when spack bootstraps? do it up front once
[SPACK_COMMAND, "-e", unicode_escape(env.path), "bootstrap", "now"], [SPACK_COMMAND, "-e", env.path, "bootstrap", "now"],
spack_cmd + deps_install_args + [slash_hash], [
spack_cmd + root_install_args + [slash_hash], SPACK_COMMAND,
"-e",
env.path,
"env",
"depfile",
"-o",
"Makefile",
"--use-buildcache=package:never,dependencies:only",
slash_hash, # limit to spec we're building
],
[
# --output-sync requires GNU make 4.x.
# Old make errors when you pass it a flag it doesn't recognize,
# but it doesn't error or warn when you set unrecognized flags in
# this variable.
"export",
"GNUMAKEFLAGS=--output-sync=recurse",
],
[
MAKE_COMMAND,
"SPACK={}".format(args_to_string(spack_cmd)),
"SPACK_COLOR=always",
"SPACK_INSTALL_FLAGS={}".format(args_to_string(deps_install_args)),
"-j$(nproc)",
"install-deps/{}".format(
spack.environment.depfile.MakefileSpec(job_spec).safe_format(
"{name}-{version}-{hash}"
)
),
],
spack_cmd + ["install"] + root_install_args,
] ]
tty.debug("Installing {0} from source".format(job_spec.name)) tty.debug("Installing {0} from source".format(job_spec.name))
install_exit_code = spack_ci.process_command("install", commands, repro_dir) install_exit_code = spack_ci.process_command("install", commands, repro_dir)
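On the right-hand side the heavy lifting moves into a generated Makefile, so argument vectors must be re-quoted into strings for make variables (SPACK=..., SPACK_INSTALL_FLAGS=...). The quoting helper is tiny but easy to get wrong; a sketch showing its round-trip behavior:

from typing import List

def args_to_string(args: List[str]) -> str:
    # ["x", "y"] -> "'x' 'y'"; assumes no single quotes inside the args,
    # which holds for the flag strings assembled in this command.
    return " ".join(f"'{arg}'" for arg in args)

spack_cmd = ["spack", "--color=always", "--backtrace", "--verbose"]
make_args = [
    "make",
    f"SPACK={args_to_string(spack_cmd)}",
    "SPACK_COLOR=always",
    "-j4",
]
print(make_args[1])
# SPACK='spack' '--color=always' '--backtrace' '--verbose'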
View File
@@ -11,6 +11,7 @@
from argparse import ArgumentParser, Namespace from argparse import ArgumentParser, Namespace
from typing import IO, Any, Callable, Dict, Iterable, List, Optional, Sequence, Set, Tuple, Union from typing import IO, Any, Callable, Dict, Iterable, List, Optional, Sequence, Set, Tuple, Union
import llnl.util.filesystem as fs
import llnl.util.tty as tty import llnl.util.tty as tty
from llnl.util.argparsewriter import ArgparseRstWriter, ArgparseWriter, Command from llnl.util.argparsewriter import ArgparseRstWriter, ArgparseWriter, Command
from llnl.util.tty.colify import colify from llnl.util.tty.colify import colify
@@ -580,6 +581,8 @@ def positionals(
for idx, (args, choices, nargs, help) in enumerate(positionals): for idx, (args, choices, nargs, help) in enumerate(positionals):
# Make sure we always get same order of output # Make sure we always get same order of output
if not help: # this means SUPPRESS was used.
choices = None
if isinstance(choices, dict): if isinstance(choices, dict):
choices = sorted(choices.keys()) choices = sorted(choices.keys())
elif isinstance(choices, (set, frozenset)): elif isinstance(choices, (set, frozenset)):
@@ -866,6 +869,9 @@ def _commands(parser: ArgumentParser, args: Namespace) -> None:
prepend_header(args, f) prepend_header(args, f)
formatter(args, f) formatter(args, f)
if args.update_completion:
fs.set_executable(args.update)
else: else:
prepend_header(args, sys.stdout) prepend_header(args, sys.stdout)
formatter(args, sys.stdout) formatter(args, sys.stdout)
View File
@@ -563,13 +563,12 @@ def add_concretizer_args(subparser):
help="reuse installed packages/buildcaches when possible", help="reuse installed packages/buildcaches when possible",
) )
subgroup.add_argument( subgroup.add_argument(
"--fresh-roots",
"--reuse-deps", "--reuse-deps",
action=ConfigSetAction, action=ConfigSetAction,
dest="concretizer:reuse", dest="concretizer:reuse",
const="dependencies", const="dependencies",
default=None, default=None,
help="concretize with fresh roots and reused dependencies", help="reuse installed dependencies only",
) )
subgroup.add_argument( subgroup.add_argument(
"--deprecated", "--deprecated",
@@ -661,32 +660,34 @@ def mirror_name_or_url(m):
# accidentally to a dir in the current working directory. # accidentally to a dir in the current working directory.
# If there's a \ or / in the name, it's interpreted as a path or url. # If there's a \ or / in the name, it's interpreted as a path or url.
if "/" in m or "\\" in m or m in (".", ".."): if "/" in m or "\\" in m:
return spack.mirror.Mirror(m) return spack.mirror.Mirror(m)
# Otherwise, the named mirror is required to exist. # Otherwise, the named mirror is required to exist.
try: try:
return spack.mirror.require_mirror_name(m) return spack.mirror.require_mirror_name(m)
except ValueError as e: except ValueError as e:
raise argparse.ArgumentTypeError(f"{e}. Did you mean {os.path.join('.', m)}?") from e raise argparse.ArgumentTypeError(
str(e) + ". Did you mean {}?".format(os.path.join(".", m))
)
def mirror_url(url): def mirror_url(url):
try: try:
return spack.mirror.Mirror.from_url(url) return spack.mirror.Mirror.from_url(url)
except ValueError as e: except ValueError as e:
raise argparse.ArgumentTypeError(str(e)) from e raise argparse.ArgumentTypeError(str(e))
def mirror_directory(path): def mirror_directory(path):
try: try:
return spack.mirror.Mirror.from_local_path(path) return spack.mirror.Mirror.from_local_path(path)
except ValueError as e: except ValueError as e:
raise argparse.ArgumentTypeError(str(e)) from e raise argparse.ArgumentTypeError(str(e))
def mirror_name(name): def mirror_name(name):
try: try:
return spack.mirror.require_mirror_name(name) return spack.mirror.require_mirror_name(name)
except ValueError as e: except ValueError as e:
raise argparse.ArgumentTypeError(str(e)) from e raise argparse.ArgumentTypeError(str(e))
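All four converters here follow the same argparse idiom: catch the domain-level ValueError and re-raise argparse.ArgumentTypeError, with `from e` preserving the original cause. In isolation (the URL check below is a stand-in for Mirror.from_url):

import argparse

def mirror_url(url: str) -> str:
    try:
        if "://" not in url:
            raise ValueError(f"invalid mirror url: {url}")
        return url
    except ValueError as e:
        # argparse turns ArgumentTypeError into a clean usage error.
        raise argparse.ArgumentTypeError(str(e)) from e

parser = argparse.ArgumentParser()
parser.add_argument("mirror", type=mirror_url)
print(parser.parse_args(["https://example.com/mirror"]).mirror)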
View File
@@ -10,13 +10,13 @@
import sys import sys
import tempfile import tempfile
from pathlib import Path from pathlib import Path
from typing import List, Optional from typing import Optional
import llnl.string as string import llnl.string as string
import llnl.util.filesystem as fs import llnl.util.filesystem as fs
import llnl.util.tty as tty import llnl.util.tty as tty
from llnl.util.tty.colify import colify from llnl.util.tty.colify import colify
from llnl.util.tty.color import cescape, colorize from llnl.util.tty.color import colorize
import spack.cmd import spack.cmd
import spack.cmd.common import spack.cmd.common
@@ -61,7 +61,14 @@
# #
def env_create_setup_parser(subparser): def env_create_setup_parser(subparser):
"""create a new environment""" """create a new environment"""
subparser.add_argument("env_name", metavar="env", help="name or directory of environment") subparser.add_argument(
"env_name",
metavar="env",
help=(
"name of managed environment or directory of the anonymous env "
"(when using --dir/-d) to activate"
),
)
subparser.add_argument( subparser.add_argument(
"-d", "--dir", action="store_true", help="create an environment in a specific directory" "-d", "--dir", action="store_true", help="create an environment in a specific directory"
) )
@@ -87,9 +94,6 @@ def env_create_setup_parser(subparser):
default=None, default=None,
help="either a lockfile (must end with '.json' or '.lock') or a manifest file", help="either a lockfile (must end with '.json' or '.lock') or a manifest file",
) )
subparser.add_argument(
"--include-concrete", action="append", help="name of old environment to copy specs from"
)
def env_create(args): def env_create(args):
@@ -107,32 +111,19 @@ def env_create(args):
# the environment should not include a view. # the environment should not include a view.
with_view = None with_view = None
include_concrete = None
if hasattr(args, "include_concrete"):
include_concrete = args.include_concrete
env = _env_create( env = _env_create(
args.env_name, args.env_name,
init_file=args.envfile, init_file=args.envfile,
dir=args.dir or os.path.sep in args.env_name or args.env_name in (".", ".."), dir=args.dir,
with_view=with_view, with_view=with_view,
keep_relative=args.keep_relative, keep_relative=args.keep_relative,
include_concrete=include_concrete,
) )
# Generate views, only really useful for environments created from spack.lock files. # Generate views, only really useful for environments created from spack.lock files.
env.regenerate_views() env.regenerate_views()
def _env_create( def _env_create(name_or_path, *, init_file=None, dir=False, with_view=None, keep_relative=False):
name_or_path: str,
*,
init_file: Optional[str] = None,
dir: bool = False,
with_view: Optional[str] = None,
keep_relative: bool = False,
include_concrete: Optional[List[str]] = None,
):
"""Create a new environment, with an optional yaml description. """Create a new environment, with an optional yaml description.
Arguments: Arguments:
@@ -144,31 +135,22 @@ def _env_create(
keep_relative (bool): if True, develop paths are copied verbatim into keep_relative (bool): if True, develop paths are copied verbatim into
the new environment file, otherwise they may be made absolute if the the new environment file, otherwise they may be made absolute if the
new environment is in a different location new environment is in a different location
include_concrete (list): list of the included concrete environments
""" """
if not dir: if not dir:
env = ev.create( env = ev.create(
name_or_path, name_or_path, init_file=init_file, with_view=with_view, keep_relative=keep_relative
init_file=init_file,
with_view=with_view,
keep_relative=keep_relative,
include_concrete=include_concrete,
) )
tty.msg( tty.msg("Created environment '%s' in %s" % (name_or_path, env.path))
colorize( tty.msg("You can activate this environment with:")
f"Created environment @c{{{cescape(name_or_path)}}} in: @c{{{cescape(env.path)}}}" tty.msg(" spack env activate %s" % (name_or_path))
) return env
)
else: env = ev.create_in_dir(
env = ev.create_in_dir( name_or_path, init_file=init_file, with_view=with_view, keep_relative=keep_relative
name_or_path, )
init_file=init_file, tty.msg("Created environment in %s" % env.path)
with_view=with_view, tty.msg("You can activate this environment with:")
keep_relative=keep_relative, tty.msg(" spack env activate %s" % env.path)
include_concrete=include_concrete,
)
tty.msg(colorize(f"Created independent environment in: @c{{{cescape(env.path)}}}"))
tty.msg(f"Activate with: {colorize(f'@c{{spack env activate {cescape(name_or_path)}}}')}")
return env return env
@@ -454,12 +436,6 @@ def env_remove_setup_parser(subparser):
"""remove an existing environment""" """remove an existing environment"""
subparser.add_argument("rm_env", metavar="env", nargs="+", help="environment(s) to remove") subparser.add_argument("rm_env", metavar="env", nargs="+", help="environment(s) to remove")
arguments.add_common_arguments(subparser, ["yes_to_all"]) arguments.add_common_arguments(subparser, ["yes_to_all"])
subparser.add_argument(
"-f",
"--force",
action="store_true",
help="remove the environment even if it is included in another environment",
)
def env_remove(args): def env_remove(args):
@@ -468,34 +444,14 @@ def env_remove(args):
This removes an environment managed by Spack. Directory environments This removes an environment managed by Spack. Directory environments
and manifests embedded in repositories should be removed manually. and manifests embedded in repositories should be removed manually.
""" """
remove_envs = [] read_envs = []
valid_envs = []
bad_envs = [] bad_envs = []
for env_name in args.rm_env:
for env_name in ev.all_environment_names():
try: try:
env = ev.read(env_name) env = ev.read(env_name)
valid_envs.append(env) read_envs.append(env)
if env_name in args.rm_env:
remove_envs.append(env)
except (spack.config.ConfigFormatError, ev.SpackEnvironmentConfigError): except (spack.config.ConfigFormatError, ev.SpackEnvironmentConfigError):
if env_name in args.rm_env: bad_envs.append(env_name)
bad_envs.append(env_name)
# Check if remove_env is included from another env before trying to remove
for env in valid_envs:
for remove_env in remove_envs:
# don't check if environment is included to itself
if env.name == remove_env.name:
continue
if remove_env.path in env.included_concrete_envs:
msg = f'Environment "{remove_env.name}" is being used by environment "{env.name}"'
if args.force:
tty.warn(msg)
else:
tty.die(msg)
if not args.yes_to_all: if not args.yes_to_all:
environments = string.plural(len(args.rm_env), "environment", show_n=False) environments = string.plural(len(args.rm_env), "environment", show_n=False)
@@ -504,7 +460,7 @@ def env_remove(args):
if not answer: if not answer:
tty.die("Will not remove any environments") tty.die("Will not remove any environments")
for env in remove_envs: for env in read_envs:
name = env.name name = env.name
if env.active: if env.active:
tty.die(f"Environment {name} can't be removed while activated.") tty.die(f"Environment {name} can't be removed while activated.")
View File
@@ -3,7 +3,6 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
-import copy
 import sys

 import llnl.util.lang

@@ -169,9 +168,9 @@ def query_arguments(args):
     if (args.missing or args.only_missing) and not args.only_deprecated:
         installed.append(InstallStatuses.MISSING)

-    predicate_fn = None
+    known = any
     if args.unknown:
-        predicate_fn = lambda x: not spack.repo.PATH.exists(x.spec.name)
+        known = False

     explicit = any
     if args.explicit:

@@ -179,7 +178,7 @@ def query_arguments(args):
     if args.implicit:
         explicit = False

-    q_args = {"installed": installed, "predicate_fn": predicate_fn, "explicit": explicit}
+    q_args = {"installed": installed, "known": known, "explicit": explicit}

     install_tree = args.install_tree
     upstreams = spack.config.get("upstreams", {})

@@ -272,27 +271,6 @@ def root_decorator(spec, string):
             print()

-        if env.included_concrete_envs:
-            tty.msg("Included specs")
-
-            # Root specs cannot be displayed with prefixes, since those are not
-            # set for abstract specs. Same for hashes
-            root_args = copy.copy(args)
-            root_args.paths = False
-
-            # Roots are displayed with variants, etc. so that we can see
-            # specifically what the user asked for.
-            cmd.display_specs(
-                env.included_user_specs,
-                root_args,
-                decorator=lambda s, f: color.colorize("@*{%s}" % f),
-                namespace=True,
-                show_flags=True,
-                show_full_compiler=True,
-                variants=True,
-            )
-            print()
-
     if args.show_concretized:
         tty.msg("Concretized roots")
         cmd.display_specs(env.specs_by_hash.values(), args, decorator=decorator)
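The `spack find --unknown` path above is where the query predicate changes shape: the older `known` flag filtered by package name inside the database, while `predicate_fn` receives the whole install record. A rough, self-contained sketch of the newer style (the record objects below are stand-ins, not Spack's real InstallRecord):

    from types import SimpleNamespace

    # stand-in install records; the real ones come from spack.database
    records = [
        SimpleNamespace(spec=SimpleNamespace(name="zlib")),
        SimpleNamespace(spec=SimpleNamespace(name="renamed-or-removed-package")),
    ]
    package_repo = {"zlib"}  # stand-in for spack.repo.PATH

    # roughly what query_arguments() builds for `spack find --unknown`
    predicate_fn = lambda rec: rec.spec.name not in package_repo

    unknown = [rec for rec in records if predicate_fn(rec)]
    print([rec.spec.name for rec in unknown])  # ['renamed-or-removed-package']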
View File
@@ -61,6 +61,7 @@ def install_kwargs_from_args(args):
         "dependencies_use_cache": cache_opt(args.use_cache, dep_use_bc),
         "dependencies_cache_only": cache_opt(args.cache_only, dep_use_bc),
         "include_build_deps": args.include_build_deps,
+        "explicit": True,  # Use true as a default for install command
         "stop_at": args.until,
         "unsigned": args.unsigned,
         "install_deps": ("dependencies" in args.things_to_install),

@@ -472,7 +473,6 @@ def install_without_active_env(args, install_kwargs, reporter_factory):
         require_user_confirmation_for_overwrite(concrete_specs, args)
         install_kwargs["overwrite"] = [spec.dag_hash() for spec in concrete_specs]

-    installs = [s.package for s in concrete_specs]
-    install_kwargs["explicit"] = [s.dag_hash() for s in concrete_specs]
-    builder = PackageInstaller(installs, install_kwargs)
+    installs = [(s.package, install_kwargs) for s in concrete_specs]
+    builder = PackageInstaller(installs)
     builder.install()
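The hunk above tracks an API change in PackageInstaller: per-package (package, kwargs) tuples on one side, a flat package list plus shared kwargs (with explicit installs named by DAG hash) on the other. A toy model of the second shape, only to make the calling convention concrete (ToyInstaller is invented, not Spack's class):

    class ToyInstaller:
        """Toy stand-in illustrating the list-plus-kwargs convention."""

        def __init__(self, packages, install_kwargs=None):
            self.packages = packages
            self.kwargs = install_kwargs or {}

        def install(self):
            explicit = set(self.kwargs.get("explicit", ()))
            for pkg in self.packages:
                mark = "explicit" if pkg["hash"] in explicit else "implicit"
                print(f"installing {pkg['name']} ({mark})")

    pkgs = [{"name": "zlib", "hash": "aaa"}, {"name": "cmake", "hash": "bbb"}]
    ToyInstaller(pkgs, {"explicit": {"aaa"}}).install()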
View File
@@ -3,6 +3,7 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
+import datetime
 import os
 import re
 from collections import defaultdict

@@ -96,7 +97,7 @@ def list_files(args):
 OLD_LICENSE, SPDX_MISMATCH, GENERAL_MISMATCH = range(1, 4)

 #: Latest year that copyright applies. UPDATE THIS when bumping copyright.
-latest_year = 2024  # year of 0.22 release
+latest_year = datetime.date.today().year
 strict_date = r"Copyright 2013-%s" % latest_year

 #: regexes for valid license lines at tops of files
View File
@@ -101,9 +101,8 @@ def do_mark(specs, explicit):
         specs (list): list of specs to be marked
         explicit (bool): whether to mark specs as explicitly installed
     """
-    with spack.store.STORE.db.write_transaction():
-        for spec in specs:
-            spack.store.STORE.db.mark(spec, "explicit", explicit)
+    for spec in specs:
+        spack.store.STORE.db.update_explicit(spec, explicit)


 def mark_specs(args, specs):
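The left-hand version wraps the whole loop in one write_transaction() so the database lock is taken and the file re-written once per command rather than once per spec. The pattern, reduced to a self-contained toy (the real transaction also acquires a file lock):

    import contextlib

    class ToyDB:
        def __init__(self):
            self.data = {}
            self.flushes = 0

        @contextlib.contextmanager
        def write_transaction(self):
            yield               # the real database acquires a lock here
            self.flushes += 1   # ...and re-writes the index here

    db = ToyDB()
    with db.write_transaction():    # one flush for the whole batch
        for spec in ("zlib", "cmake"):
            db.data.setdefault(spec, {})["explicit"] = True
    print(db.data, db.flushes)      # both specs marked, flushes == 1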
View File
@@ -377,10 +377,7 @@ def refresh(module_type, specs, args):
 def modules_cmd(parser, args, module_type, callbacks=callbacks):
     # Qualifiers to be used when querying the db for specs
     constraint_qualifiers = {
-        "refresh": {
-            "installed": True,
-            "predicate_fn": lambda x: spack.repo.PATH.exists(x.spec.name),
-        }
+        "refresh": {"installed": True, "known": lambda x: not spack.repo.PATH.exists(x)}
     }
     query_args = constraint_qualifiers.get(args.subparser_name, {})
View File
@@ -23,7 +23,7 @@

 # tutorial configuration parameters
-tutorial_branch = "releases/v0.22"
+tutorial_branch = "releases/v0.21"
 tutorial_mirror = "file:///mirror"
 tutorial_key = os.path.join(spack.paths.share_path, "keys", "tutorial.pub")
View File
@@ -214,6 +214,8 @@ def unit_test(parser, args, unknown_args):
     # Ensure clingo is available before switching to the
     # mock configuration used by unit tests
+    # Note: skip on windows here because for the moment,
+    # clingo is wholly unsupported from bootstrap
     with spack.bootstrap.ensure_bootstrap_configuration():
         spack.bootstrap.ensure_core_dependencies()
     if pytest is None:
View File
@@ -38,10 +38,10 @@
 import spack.cmd
 import spack.environment as ev
-import spack.filesystem_view as fsv
 import spack.schema.projections
 import spack.store
 from spack.config import validate
+from spack.filesystem_view import YamlFilesystemView, view_func_parser
 from spack.util import spack_yaml as s_yaml

 description = "project packages to a compact naming scheme on the filesystem"

@@ -193,13 +193,17 @@ def view(parser, args):
     ordered_projections = {}

     # What method are we using for this view
-    link_type = args.action if args.action in actions_link else "symlink"
-    view = fsv.YamlFilesystemView(
+    if args.action in actions_link:
+        link_fn = view_func_parser(args.action)
+    else:
+        link_fn = view_func_parser("symlink")
+
+    view = YamlFilesystemView(
         path,
         spack.store.STORE.layout,
         projections=ordered_projections,
         ignore_conflicts=getattr(args, "ignore_conflicts", False),
-        link_type=link_type,
+        link=link_fn,
         verbose=args.verbose,
     )
View File
@@ -20,7 +20,6 @@
 import spack.compilers
 import spack.error
-import spack.schema.environment
 import spack.spec
 import spack.util.executable
 import spack.util.libc

@@ -290,7 +289,7 @@ def __init__(
         operating_system,
         target,
         paths,
-        modules: Optional[List[str]] = None,
+        modules=None,
         alias=None,
         environment=None,
         extra_rpaths=None,

@@ -684,8 +683,8 @@ def __str__(self):
     @contextlib.contextmanager
     def compiler_environment(self):
-        # Avoid modifying os.environ if possible.
-        if not self.modules and not self.environment:
+        # yield immediately if no modules
+        if not self.modules:
             yield
             return

@@ -702,9 +701,13 @@ def compiler_environment(self):
                 spack.util.module_cmd.load_module(module)

             # apply other compiler environment changes
-            spack.schema.environment.parse(self.environment).apply_modifications()
+            env = spack.util.environment.EnvironmentModifications()
+            env.extend(spack.schema.environment.parse(self.environment))
+            env.apply_modifications()

             yield
+        except BaseException:
+            raise
         finally:
             # Restore environment regardless of whether inner code succeeded
             os.environ.clear()
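Both variants of compiler_environment snapshot os.environ and restore it in the finally block; the newer one additionally skips the whole dance when there are neither modules nor environment modifications to apply. The snapshot/restore idiom on its own, independent of Spack:

    import contextlib
    import os

    @contextlib.contextmanager
    def scratch_environment(changes):
        """Apply env-var changes, restoring os.environ no matter what."""
        if not changes:          # nothing to apply: don't touch os.environ
            yield
            return
        backup = dict(os.environ)
        try:
            os.environ.update(changes)
            yield
        finally:
            os.environ.clear()
            os.environ.update(backup)

    with scratch_environment({"CC": "/opt/gcc/bin/gcc"}):
        print(os.environ["CC"])   # modified inside the block
    print(os.environ.get("CC"))   # original value (or None) outside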
View File
@@ -164,66 +164,43 @@ def _compiler_config_from_package_config(config):

 def _compiler_config_from_external(config):
-    extra_attributes_key = "extra_attributes"
-    compilers_key = "compilers"
-    c_key, cxx_key, fortran_key = "c", "cxx", "fortran"
-
-    # Allow `@x.y.z` instead of `@=x.y.z`
     spec = spack.spec.parse_with_version_concrete(config["spec"])
+    # use str(spec.versions) to allow `@x.y.z` instead of `@=x.y.z`
     compiler_spec = spack.spec.CompilerSpec(
         package_name_to_compiler_name.get(spec.name, spec.name), spec.version
     )

-    err_header = f"The external spec '{spec}' cannot be used as a compiler"
-
-    # If extra_attributes is not there I might not want to use this entry as a compiler,
-    # therefore just leave a debug message, but don't be loud with a warning.
-    if extra_attributes_key not in config:
-        tty.debug(f"[{__file__}] {err_header}: missing the '{extra_attributes_key}' key")
+    extra_attributes = config.get("extra_attributes", {})
+    prefix = config.get("prefix", None)
+    compiler_class = class_for_compiler_name(compiler_spec.name)
+    paths = extra_attributes.get("paths", {})
+    compiler_langs = ["cc", "cxx", "fc", "f77"]
+    for lang in compiler_langs:
+        if paths.setdefault(lang, None):
+            continue
+
+        if not prefix:
+            continue
+
+        # Check for files that satisfy the naming scheme for this compiler
+        bindir = os.path.join(prefix, "bin")
+        for f, regex in itertools.product(os.listdir(bindir), compiler_class.search_regexps(lang)):
+            if regex.match(f):
+                paths[lang] = os.path.join(bindir, f)
+
+    if all(v is None for v in paths.values()):
         return None
-    extra_attributes = config[extra_attributes_key]
-
-    # If I have 'extra_attributes' warn if 'compilers' is missing, or we don't have a C compiler
-    if compilers_key not in extra_attributes:
-        warnings.warn(
-            f"{err_header}: missing the '{compilers_key}' key under '{extra_attributes_key}'"
-        )
-        return None
-    attribute_compilers = extra_attributes[compilers_key]
-
-    if c_key not in attribute_compilers:
-        warnings.warn(
-            f"{err_header}: missing the C compiler path under "
-            f"'{extra_attributes_key}:{compilers_key}'"
-        )
-        return None
-    c_compiler = attribute_compilers[c_key]
-
-    # C++ and Fortran compilers are not mandatory, so let's just leave a debug trace
-    if cxx_key not in attribute_compilers:
-        tty.debug(f"[{__file__}] The external spec {spec} does not have a C++ compiler")
-
-    if fortran_key not in attribute_compilers:
-        tty.debug(f"[{__file__}] The external spec {spec} does not have a Fortran compiler")
-
-    # compilers format has cc/fc/f77, externals format has "c/fortran"
-    paths = {
-        "cc": c_compiler,
-        "cxx": attribute_compilers.get(cxx_key, None),
-        "fc": attribute_compilers.get(fortran_key, None),
-        "f77": attribute_compilers.get(fortran_key, None),
-    }

     if not spec.architecture:
         host_platform = spack.platforms.host()
         operating_system = host_platform.operating_system("default_os")
         target = host_platform.target("default_target").microarchitecture
     else:
-        target = spec.architecture.target
+        target = spec.target
         if not target:
-            target = spack.platforms.host().target("default_target")
-        target = target.microarchitecture
+            host_platform = spack.platforms.host()
+            target = host_platform.target("default_target").microarchitecture

         operating_system = spec.os
         if not operating_system:
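For orientation, the stricter left-hand version validates a packages.yaml external entry whose compiler paths are spelled out under extra_attributes:compilers. Sketching that expected shape as a plain dict (the spec string and paths are illustrative):

    config = {
        "spec": "gcc@12.3.0",
        "prefix": "/usr",
        "extra_attributes": {
            "compilers": {
                "c": "/usr/bin/gcc",
                "cxx": "/usr/bin/g++",
                "fortran": "/usr/bin/gfortran",
            }
        },
    }

    compilers = config.get("extra_attributes", {}).get("compilers", {})
    if "c" not in compilers:
        raise ValueError("cannot be used as a compiler: no C compiler path")

    # compilers format has cc/fc/f77, externals format has c/fortran
    paths = {
        "cc": compilers["c"],
        "cxx": compilers.get("cxx"),
        "fc": compilers.get("fortran"),
        "f77": compilers.get("fortran"),
    }
    print(paths)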
View File
@@ -74,10 +74,6 @@ class Concretizer:
     #: during concretization. Used for testing and for mirror creation
     check_for_compiler_existence = None

-    #: Packages that the old concretizer cannot deal with correctly, and cannot build anyway.
-    #: Those will not be considered as providers for virtuals.
-    non_buildable_packages = {"glibc", "musl"}
-
     def __init__(self, abstract_spec=None):
         if Concretizer.check_for_compiler_existence is None:
             Concretizer.check_for_compiler_existence = not spack.config.get(

@@ -117,11 +113,7 @@ def _valid_virtuals_and_externals(self, spec):
         pref_key = lambda spec: 0  # no-op pref key

         if spec.virtual:
-            candidates = [
-                s
-                for s in spack.repo.PATH.providers_for(spec)
-                if s.name not in self.non_buildable_packages
-            ]
+            candidates = spack.repo.PATH.providers_for(spec)
             if not candidates:
                 raise spack.error.UnsatisfiableProviderSpecError(candidates[0], spec)
View File
@@ -34,6 +34,28 @@
         "image": "docker.io/fedora:39"
       }
     },
+    "fedora:38": {
+      "bootstrap": {
+        "template": "container/fedora.dockerfile",
+        "image": "docker.io/fedora:38"
+      },
+      "os_package_manager": "dnf",
+      "build": "spack/fedora38",
+      "final": {
+        "image": "docker.io/fedora:38"
+      }
+    },
+    "fedora:37": {
+      "bootstrap": {
+        "template": "container/fedora.dockerfile",
+        "image": "docker.io/fedora:37"
+      },
+      "os_package_manager": "dnf",
+      "build": "spack/fedora37",
+      "final": {
+        "image": "docker.io/fedora:37"
+      }
+    },
     "rockylinux:9": {
       "bootstrap": {
         "template": "container/rockylinux_9.dockerfile",

@@ -116,13 +138,6 @@
       },
       "os_package_manager": "apt"
     },
-    "ubuntu:24.04": {
-      "bootstrap": {
-        "template": "container/ubuntu_2404.dockerfile"
-      },
-      "os_package_manager": "apt",
-      "build": "spack/ubuntu-noble"
-    },
     "ubuntu:22.04": {
       "bootstrap": {
         "template": "container/ubuntu_2204.dockerfile"

@@ -136,6 +151,13 @@
       },
       "build": "spack/ubuntu-focal",
       "os_package_manager": "apt"
+    },
+    "ubuntu:18.04": {
+      "bootstrap": {
+        "template": "container/ubuntu_1804.dockerfile"
+      },
+      "os_package_manager": "apt",
+      "build": "spack/ubuntu-bionic"
     }
   },
   "os_package_managers": {
View File
@@ -283,9 +283,12 @@ def __reduce__(self):
         database. If it is a spec, we'll evaluate
         ``spec.satisfies(query_spec)``

-    predicate_fn: optional predicate taking an InstallRecord as argument, and returning
-        whether that record is selected for the query. It can be used to craft criteria
-        that need some data for selection not provided by the Database itself.
+    known (bool or None): Specs that are "known" are those
+        for which Spack can locate a ``package.py`` file -- i.e.,
+        Spack "knows" how to install them. Specs that are unknown may
+        represent packages that existed in a previous version of
+        Spack, but have since either changed their name or
+        been removed

     installed (bool or InstallStatus or typing.Iterable or None):
         if ``True``, includes only installed

@@ -585,9 +588,6 @@ def _path(self, spec: "spack.spec.Spec") -> pathlib.Path:
         return self.dir / f"{spec.name}-{spec.dag_hash()}"


-SelectType = Callable[[InstallRecord], bool]
-
-
 class Database:
     #: Fields written for each install record
     record_fields: Tuple[str, ...] = DEFAULT_INSTALL_RECORD_FIELDS

@@ -1367,7 +1367,7 @@ def _deprecate(self, spec, deprecator):
         self._data[spec_key] = spec_rec

     @_autospec
-    def mark(self, spec: "spack.spec.Spec", key: str, value: Any) -> None:
+    def mark(self, spec, key, value):
         """Mark an arbitrary record on a spec."""
         with self.write_transaction():
             return self._mark(spec, key, value)

@@ -1516,7 +1516,7 @@ def get_by_hash(self, dag_hash, default=None, installed=any):
     def _query(
         self,
         query_spec=any,
-        predicate_fn: Optional[SelectType] = None,
+        known=any,
         installed=True,
         explicit=any,
         start_date=None,

@@ -1524,7 +1524,7 @@ def _query(
         hashes=None,
         in_buildcache=any,
         origin=None,
-    ) -> List["spack.spec.Spec"]:
+    ):
         """Run a query on the database."""

         # TODO: Specs are a lot like queries. Should there be a

@@ -1570,7 +1570,7 @@ def _query(
             if explicit is not any and rec.explicit != explicit:
                 continue

-            if predicate_fn is not None and not predicate_fn(rec):
+            if known is not any and known(rec.spec.name):
                 continue

             if start_date or end_date:

@@ -1655,14 +1655,14 @@ def query(self, *args, **kwargs):
     query.__doc__ = ""
     query.__doc__ += _QUERY_DOCSTRING

-    def query_one(self, query_spec, predicate_fn=None, installed=True):
+    def query_one(self, query_spec, known=any, installed=True):
         """Query for exactly one spec that matches the query spec.

         Raises an assertion error if more than one spec matches the
         query. Returns None if no installed package matches.
         """
-        concrete_specs = self.query(query_spec, predicate_fn=predicate_fn, installed=installed)
+        concrete_specs = self.query(query_spec, known=known, installed=installed)
         assert len(concrete_specs) <= 1
         return concrete_specs[0] if concrete_specs else None

@@ -1709,6 +1709,24 @@ def root(key, record):
             if id(rec.spec) not in needed and rec.installed
         ]

+    def update_explicit(self, spec, explicit):
+        """
+        Update the spec's explicit state in the database.
+
+        Args:
+            spec (spack.spec.Spec): the spec whose install record is being updated
+            explicit (bool): ``True`` if the package was requested explicitly
+                by the user, ``False`` if it was pulled in as a dependency of
+                an explicit package.
+        """
+        rec = self.get_record(spec)
+        if explicit != rec.explicit:
+            with self.write_transaction():
+                message = "{s.name}@{s.version} : marking the package {0}"
+                status = "explicit" if explicit else "implicit"
+                tty.debug(message.format(status, s=spec))
+                rec.explicit = explicit
+

 class UpstreamDatabaseLockingError(SpackError):
     """Raised when an operation would need to lock an upstream database"""
View File
@@ -97,7 +97,7 @@ class OpenMpi(Package):
 PatchesType = Optional[Union[Patcher, str, List[Union[Patcher, str]]]]


-SUPPORTED_LANGUAGES = ("fortran", "cxx", "c")
+SUPPORTED_LANGUAGES = ("fortran", "cxx")


 def _make_when_spec(value: WhenType) -> Optional["spack.spec.Spec"]:

@@ -662,7 +662,6 @@ def _execute_redistribute(
 @directive(("extendees", "dependencies"))
 def extends(spec, when=None, type=("build", "run"), patches=None):
     """Same as depends_on, but also adds this package to the extendee list.
-    In case of Python, also adds a dependency on python-venv.

     keyword arguments can be passed to extends() so that extension
     packages can pass parameters to the extendee's extension

@@ -678,11 +677,6 @@ def _execute_extends(pkg):
         _depends_on(pkg, spec, when=when, type=type, patches=patches)

         spec_obj = spack.spec.Spec(spec)
-
-        # When extending python, also add a dependency on python-venv. This is done so that
-        # Spack environment views are Python virtual environments.
-        if spec_obj.name == "python" and not pkg.name == "python-venv":
-            _depends_on(pkg, "python-venv", when=when, type=("build", "run"))

         # TODO: the values of the extendees dictionary are not used. Remove in next refactor.
         pkg.extendees[spec_obj.name] = (spec_obj, None)
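In package terms, the removed branch means that on the left-hand side a single extends("python") in a recipe also injects a python-venv dependency, which is what makes environment views behave like Python virtual environments. A hypothetical recipe showing where the directive sits (this only loads inside a Spack repository; the URL and checksum are placeholders):

    from spack.package import *

    class PyExample(Package):
        """Hypothetical extension package illustrating extends()."""

        homepage = "https://example.com"
        url = "https://example.com/example-1.0.tar.gz"

        version("1.0", sha256="0" * 64)  # placeholder checksum

        # On the left-hand side of this diff, this one directive also adds
        # a build/run dependency on python-venv.
        extends("python")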
View File
@@ -15,7 +15,6 @@
 import llnl.util.filesystem as fs
 import llnl.util.tty as tty
-from llnl.util.symlink import readlink

 import spack.config
 import spack.hash_types as ht

@@ -182,7 +181,7 @@ def deprecated_file_path(self, deprecated_spec, deprecator_spec=None):
         base_dir = (
             self.path_for_spec(deprecator_spec)
             if deprecator_spec
-            else readlink(deprecated_spec.prefix)
+            else os.readlink(deprecated_spec.prefix)
         )

         yaml_path = os.path.join(
View File
@@ -34,9 +34,6 @@
    * ``spec``: a string representation of the abstract spec that was concretized

 4. ``concrete_specs``: a dictionary containing the specs in the environment.
-
-5. ``include_concrete`` (dictionary): an optional dictionary that includes the roots
-   and concrete specs from the included environments, keyed by the path to that
-   environment

 Compatibility
 -------------

@@ -53,37 +50,26 @@
      - ``v2``
      - ``v3``
      - ``v4``
-     - ``v5``
    * - ``v0.12:0.14``
      -
      -
      -
      -
-     -
    * - ``v0.15:0.16``
      -
      -
      -
      -
-     -
    * - ``v0.17``
      -
      -
      -
      -
-     -
    * - ``v0.18:``
      -
      -
      -
      -
-     -
-   * - ``v0.22:``
-     -
-     -
-     -
-     -
-     -

 Version 1
 ---------

@@ -348,118 +334,6 @@
         }
     }
 }
-
-Version 5
----------
-
-Version 5 doesn't change the top-level lockfile format, but an optional dictionary is
-added. The dictionary has the ``root`` and ``concrete_specs`` of the included
-environments, which are keyed by the path to that environment. Since this is optional
-if the environment does not have any included environments ``include_concrete`` will
-not be a part of the lockfile.
-
-.. code-block:: json
-
-    {
-        "_meta": {
-            "file-type": "spack-lockfile",
-            "lockfile-version": 5,
-            "specfile-version": 3
-        },
-        "roots": [
-            {
-                "hash": "<dag_hash 1>",
-                "spec": "<abstract spec 1>"
-            },
-            {
-                "hash": "<dag_hash 2>",
-                "spec": "<abstract spec 2>"
-            }
-        ],
-        "concrete_specs": {
-            "<dag_hash 1>": {
-                "... <spec dict attributes> ...": { },
-                "dependencies": [
-                    {
-                        "name": "depname_1",
-                        "hash": "<dag_hash for depname_1>",
-                        "type": ["build", "link"]
-                    },
-                    {
-                        "name": "depname_2",
-                        "hash": "<dag_hash for depname_2>",
-                        "type": ["build", "link"]
-                    }
-                ],
-                "hash": "<dag_hash 1>",
-            },
-            "<daghash 2>": {
-                "... <spec dict attributes> ...": { },
-                "dependencies": [
-                    {
-                        "name": "depname_3",
-                        "hash": "<dag_hash for depname_3>",
-                        "type": ["build", "link"]
-                    },
-                    {
-                        "name": "depname_4",
-                        "hash": "<dag_hash for depname_4>",
-                        "type": ["build", "link"]
-                    }
-                ],
-                "hash": "<dag_hash 2>"
-            }
-        }
-        "include_concrete": {
-            "<path to environment>": {
-                "roots": [
-                    {
-                        "hash": "<dag_hash 1>",
-                        "spec": "<abstract spec 1>"
-                    },
-                    {
-                        "hash": "<dag_hash 2>",
-                        "spec": "<abstract spec 2>"
-                    }
-                ],
-                "concrete_specs": {
-                    "<dag_hash 1>": {
-                        "... <spec dict attributes> ...": { },
-                        "dependencies": [
-                            {
-                                "name": "depname_1",
-                                "hash": "<dag_hash for depname_1>",
-                                "type": ["build", "link"]
-                            },
-                            {
-                                "name": "depname_2",
-                                "hash": "<dag_hash for depname_2>",
-                                "type": ["build", "link"]
-                            }
-                        ],
-                        "hash": "<dag_hash 1>",
-                    },
-                    "<daghash 2>": {
-                        "... <spec dict attributes> ...": { },
-                        "dependencies": [
-                            {
-                                "name": "depname_3",
-                                "hash": "<dag_hash for depname_3>",
-                                "type": ["build", "link"]
-                            },
-                            {
-                                "name": "depname_4",
-                                "hash": "<dag_hash for depname_4>",
-                                "type": ["build", "link"]
-                            }
-                        ],
-                        "hash": "<dag_hash 2>"
-                    }
-                }
-            }
-        }
-    }

 """
 from .environment import (
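Since ``include_concrete`` is optional, lockfile consumers have to treat it as absent by default. A standalone sketch of pulling included roots out of a version 5 lockfile (the file name is hypothetical):

    import json

    with open("spack.lock") as f:       # hypothetical v5 lockfile
        lock = json.load(f)

    for env_path, data in lock.get("include_concrete", {}).items():
        roots = [r["spec"] for r in data.get("roots", [])]
        print(env_path, "->", roots)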
View File
@@ -9,13 +9,11 @@
 import os
 import re
-import shlex
 from enum import Enum
 from typing import List, Optional

 import spack.deptypes as dt
 import spack.environment.environment as ev
-import spack.paths
 import spack.spec
 import spack.traverse as traverse

@@ -228,7 +226,6 @@ def to_dict(self):
             "install_deps_target": self._target("install-deps"),
             "any_hash_target": self._target("%"),
             "jobserver_support": self.jobserver_support,
-            "spack_script": shlex.quote(spack.paths.spack_script),
             "adjacency_list": self.make_adjacency_list,
             "phony_convenience_targets": " ".join(self.phony_convenience_targets),
             "pkg_ids_variable": self.pkg_identifier_variable,
View File
@@ -16,13 +16,13 @@
 import urllib.parse
 import urllib.request
 import warnings
-from typing import Any, Dict, Iterable, List, Optional, Set, Tuple, Union
+from typing import Dict, Iterable, List, Optional, Set, Tuple, Union

 import llnl.util.filesystem as fs
 import llnl.util.tty as tty
 import llnl.util.tty.color as clr
 from llnl.util.link_tree import ConflictingSpecsError
-from llnl.util.symlink import readlink, symlink
+from llnl.util.symlink import symlink

 import spack.compilers
 import spack.concretize

@@ -30,7 +30,6 @@
 import spack.deptypes as dt
 import spack.error
 import spack.fetch_strategy
-import spack.filesystem_view as fsv
 import spack.hash_types as ht
 import spack.hooks
 import spack.main

@@ -53,6 +52,7 @@
 import spack.util.url
 import spack.version
 from spack import traverse
+from spack.filesystem_view import SimpleFilesystemView, inverse_view_func_parser, view_func_parser
 from spack.installer import PackageInstaller
 from spack.schema.env import TOP_LEVEL_KEY
 from spack.spec import Spec

@@ -159,8 +159,6 @@ def default_manifest_yaml():
 default_view_name = "default"
 # Default behavior to link all packages into views (vs. only root packages)
 default_view_link = "all"
-# The name for any included concrete specs
-included_concrete_name = "include_concrete"


 def installed_specs():

@@ -295,7 +293,6 @@ def create(
     init_file: Optional[Union[str, pathlib.Path]] = None,
     with_view: Optional[Union[str, pathlib.Path, bool]] = None,
    keep_relative: bool = False,
-    include_concrete: Optional[List[str]] = None,
 ) -> "Environment":
     """Create a managed environment in Spack and returns it.

@@ -312,15 +309,10 @@ def create(
             string, it specifies the path to the view
         keep_relative: if True, develop paths are copied verbatim into the new environment file,
             otherwise they are made absolute
-        include_concrete: list of concrete environment names/paths to be included
     """
     environment_dir = environment_dir_from_name(name, exists_ok=False)
     return create_in_dir(
-        environment_dir,
-        init_file=init_file,
-        with_view=with_view,
-        keep_relative=keep_relative,
-        include_concrete=include_concrete,
+        environment_dir, init_file=init_file, with_view=with_view, keep_relative=keep_relative
     )


@@ -329,7 +321,6 @@ def create_in_dir(
     init_file: Optional[Union[str, pathlib.Path]] = None,
     with_view: Optional[Union[str, pathlib.Path, bool]] = None,
     keep_relative: bool = False,
-    include_concrete: Optional[List[str]] = None,
 ) -> "Environment":
     """Create an environment in the directory passed as input and returns it.

@@ -343,7 +334,6 @@ def create_in_dir(
             string, it specifies the path to the view
         keep_relative: if True, develop paths are copied verbatim into the new environment file,
             otherwise they are made absolute
-        include_concrete: concrete environment names/paths to be included
     """
     initialize_environment_dir(root, envfile=init_file)

@@ -356,12 +346,6 @@ def create_in_dir(
         if with_view is not None:
             manifest.set_default_view(with_view)

-        if include_concrete is not None:
-            set_included_envs_to_env_paths(include_concrete)
-            validate_included_envs_exists(include_concrete)
-            validate_included_envs_concrete(include_concrete)
-            manifest.set_include_concrete(include_concrete)
-
         manifest.flush()

     except (spack.config.ConfigFormatError, SpackEnvironmentConfigError) as e:
@@ -435,67 +419,6 @@ def ensure_env_root_path_exists():
         fs.mkdirp(env_root_path())


-def set_included_envs_to_env_paths(include_concrete: List[str]) -> None:
-    """If the included environment(s) is the environment name
-    it is replaced by the path to the environment
-
-    Args:
-        include_concrete: list of env name or path to env"""
-
-    for i, env_name in enumerate(include_concrete):
-        if is_env_dir(env_name):
-            include_concrete[i] = env_name
-        elif exists(env_name):
-            include_concrete[i] = root(env_name)
-
-
-def validate_included_envs_exists(include_concrete: List[str]) -> None:
-    """Checks that all of the included environments exist
-
-    Args:
-        include_concrete: list of already existing concrete environments to include
-
-    Raises:
-        SpackEnvironmentError: if any of the included environments do not exist
-    """
-
-    missing_envs = set()
-
-    for i, env_name in enumerate(include_concrete):
-        if not is_env_dir(env_name):
-            missing_envs.add(env_name)
-
-    if missing_envs:
-        msg = "The following environment(s) are missing: {0}".format(", ".join(missing_envs))
-        raise SpackEnvironmentError(msg)
-
-
-def validate_included_envs_concrete(include_concrete: List[str]) -> None:
-    """Checks that all of the included environments are concrete
-
-    Args:
-        include_concrete: list of already existing concrete environments to include
-
-    Raises:
-        SpackEnvironmentError: if any of the included environments are not concrete
-    """
-
-    non_concrete_envs = set()
-
-    for env_path in include_concrete:
-        if not os.path.exists(Environment(env_path).lock_path):
-            non_concrete_envs.add(Environment(env_path).name)
-
-    if non_concrete_envs:
-        msg = "The following environment(s) are not concrete: {0}\n" "Please run:".format(
-            ", ".join(non_concrete_envs)
-        )
-        for env in non_concrete_envs:
-            msg += f"\n\t`spack -e {env} concretize`"
-        raise SpackEnvironmentError(msg)
-
-
 def all_environment_names():
     """List the names of environments that currently exist."""

     # just return empty if the env path does not exist. A read-only

@@ -606,7 +529,7 @@ def __init__(
         self.projections = projections
         self.select = select
         self.exclude = exclude
-        self.link_type = fsv.canonicalize_link_type(link_type)
+        self.link_type = view_func_parser(link_type)
         self.link = link

     def select_fn(self, spec):

@@ -640,7 +563,7 @@ def to_dict(self):
         if self.exclude:
             ret["exclude"] = self.exclude
         if self.link_type:
-            ret["link_type"] = self.link_type
+            ret["link_type"] = inverse_view_func_parser(self.link_type)
         if self.link != default_view_link:
             ret["link"] = self.link
         return ret

@@ -662,7 +585,7 @@ def _current_root(self):
         if not os.path.islink(self.root):
             return None

-        root = readlink(self.root)
+        root = os.readlink(self.root)
         if os.path.isabs(root):
             return root

@@ -690,7 +613,7 @@ def get_projection_for_spec(self, spec):
         to exist on the filesystem."""
         return self._view(self.root).get_projection_for_spec(spec)

-    def view(self, new: Optional[str] = None) -> fsv.SimpleFilesystemView:
+    def view(self, new: Optional[str] = None) -> SimpleFilesystemView:
         """
         Returns a view object for the *underlying* view directory. This means that the
         self.root symlink is followed, and that the view has to exist on the filesystem

@@ -710,14 +633,14 @@ def view(self, new: Optional[str] = None) -> fsv.SimpleFilesystemView:
             )
         return self._view(path)

-    def _view(self, root: str) -> fsv.SimpleFilesystemView:
+    def _view(self, root: str) -> SimpleFilesystemView:
         """Returns a view object for a given root dir."""
-        return fsv.SimpleFilesystemView(
+        return SimpleFilesystemView(
             root,
             spack.store.STORE.layout,
             ignore_conflicts=True,
             projections=self.projections,
-            link_type=self.link_type,
+            link=self.link_type,
         )

     def __contains__(self, spec):

@@ -898,18 +821,6 @@ def __init__(self, manifest_dir: Union[str, pathlib.Path]) -> None:
         self.specs_by_hash: Dict[str, Spec] = {}
         #: Repository for this environment (memoized)
         self._repo = None
-        #: Environment paths for concrete (lockfile) included environments
-        self.included_concrete_envs: List[str] = []
-        #: First-level included concretized spec data from/to the lockfile.
-        self.included_concrete_spec_data: Dict[str, Dict[str, List[str]]] = {}
-        #: User specs from included environments from the last concretization
-        self.included_concretized_user_specs: Dict[str, List[Spec]] = {}
-        #: Roots from included environments with the last concretization, in order
-        self.included_concretized_order: Dict[str, List[str]] = {}
-        #: Concretized specs by hash from the included environments
-        self.included_specs_by_hash: Dict[str, Dict[str, Spec]] = {}
         #: Previously active environment
         self._previous_active = None
         self._dev_specs = None

@@ -947,7 +858,7 @@ def _read(self):
         if os.path.exists(self.lock_path):
             with open(self.lock_path) as f:
-                read_lock_version = self._read_lockfile(f)["_meta"]["lockfile-version"]
+                read_lock_version = self._read_lockfile(f)
             if read_lock_version == 1:
                 tty.debug(f"Storing backup of {self.lock_path} at {self._lock_backup_v1_path}")
@@ -1015,20 +926,6 @@ def add_view(name, values):
         if self.views == dict():
             self.views[default_view_name] = ViewDescriptor(self.path, self.view_path_default)

-    def _process_concrete_includes(self):
-        """Extract and load into memory included concrete spec data."""
-        self.included_concrete_envs = self.manifest[TOP_LEVEL_KEY].get(included_concrete_name, [])
-
-        if self.included_concrete_envs:
-            if os.path.exists(self.lock_path):
-                with open(self.lock_path) as f:
-                    data = self._read_lockfile(f)
-
-                if included_concrete_name in data:
-                    self.included_concrete_spec_data = data[included_concrete_name]
-            else:
-                self.include_concrete_envs()
-
     def _construct_state_from_manifest(self):
         """Set up user specs and views from the manifest file."""
         self.spec_lists = collections.OrderedDict()

@@ -1045,31 +942,6 @@ def _construct_state_from_manifest(self):
         self.spec_lists[user_speclist_name] = user_specs

         self._process_view(spack.config.get("view", True))
-        self._process_concrete_includes()
-
-    def all_concretized_user_specs(self) -> List[Spec]:
-        """Returns all of the concretized user specs of the environment and
-        its included environment(s)."""
-        concretized_user_specs = self.concretized_user_specs[:]
-        for included_specs in self.included_concretized_user_specs.values():
-            for included in included_specs:
-                # Don't duplicate included spec(s)
-                if included not in concretized_user_specs:
-                    concretized_user_specs.append(included)
-
-        return concretized_user_specs
-
-    def all_concretized_orders(self) -> List[str]:
-        """Returns all of the concretized order of the environment and
-        its included environment(s)."""
-        concretized_order = self.concretized_order[:]
-        for included_concretized_order in self.included_concretized_order.values():
-            for included in included_concretized_order:
-                # Don't duplicate included spec(s)
-                if included not in concretized_order:
-                    concretized_order.append(included)
-
-        return concretized_order

     @property
     def user_specs(self):

@@ -1094,26 +966,6 @@ def _read_dev_specs(self):
                 dev_specs[name] = local_entry
         return dev_specs

-    @property
-    def included_user_specs(self) -> SpecList:
-        """Included concrete user (or root) specs from last concretization."""
-        spec_list = SpecList()
-
-        if not self.included_concrete_envs:
-            return spec_list
-
-        def add_root_specs(included_concrete_specs):
-            # add specs from the include *and* any nested includes it may have
-            for env, info in included_concrete_specs.items():
-                for root_list in info["roots"]:
-                    spec_list.add(root_list["spec"])
-
-                if "include_concrete" in info:
-                    add_root_specs(info["include_concrete"])
-
-        add_root_specs(self.included_concrete_spec_data)
-        return spec_list
-
     def clear(self, re_read=False):
         """Clear the contents of the environment

@@ -1125,15 +977,9 @@ def clear(self, re_read=False):
         self.spec_lists[user_speclist_name] = SpecList()

         self._dev_specs = {}
-        self.concretized_order = []  # roots of last concretize, in order
         self.concretized_user_specs = []  # user specs from last concretize
+        self.concretized_order = []  # roots of last concretize, in order
         self.specs_by_hash = {}  # concretized specs by hash
-        self.included_concrete_spec_data = {}  # concretized specs from lockfile of included envs
-        self.included_concretized_order = {}  # root specs of the included envs, keyed by env path
-        self.included_concretized_user_specs = {}  # user specs from last concretize's included env
-        self.included_specs_by_hash = {}  # concretized specs by hash from the included envs
         self.invalidate_repository_cache()
         self._previous_active = None  # previously active environment
         if not re_read:

@@ -1187,43 +1033,6 @@ def scope_name(self):
         """Name of the config scope of this environment's manifest file."""
         return self.manifest.scope_name

-    def include_concrete_envs(self):
-        """Copy and save the included envs' specs internally"""
-
-        root_hash_seen = set()
-        concrete_hash_seen = set()
-        self.included_concrete_spec_data = {}
-
-        for env_path in self.included_concrete_envs:
-            # Check that environment exists
-            if not is_env_dir(env_path):
-                raise SpackEnvironmentError(f"Unable to find env at {env_path}")
-
-            env = Environment(env_path)
-            self.included_concrete_spec_data[env_path] = {"roots": [], "concrete_specs": {}}
-
-            # Copy unique root specs from env
-            for root_dict in env._concrete_roots_dict():
-                if root_dict["hash"] not in root_hash_seen:
-                    self.included_concrete_spec_data[env_path]["roots"].append(root_dict)
-                    root_hash_seen.add(root_dict["hash"])
-
-            # Copy unique concrete specs from env
-            for dag_hash, spec_details in env._concrete_specs_dict().items():
-                if dag_hash not in concrete_hash_seen:
-                    self.included_concrete_spec_data[env_path]["concrete_specs"].update(
-                        {dag_hash: spec_details}
-                    )
-                    concrete_hash_seen.add(dag_hash)
-
-            # Copy transitive include data
-            transitive = env.included_concrete_spec_data
-            if transitive:
-                self.included_concrete_spec_data[env_path]["include_concrete"] = transitive
-
-        self._read_lockfile_dict(self._to_lockfile_dict())
-        self.write()
-
     def destroy(self):
         """Remove this environment from Spack entirely."""
         shutil.rmtree(self.path)

@@ -1423,10 +1232,6 @@ def concretize(self, force=False, tests=False):
             for spec in set(self.concretized_user_specs) - set(self.user_specs):
                 self.deconcretize(spec, concrete=False)

-        # If a combined env, check updated spec is in the linked envs
-        if self.included_concrete_envs:
-            self.include_concrete_envs()
-
         # Pick the right concretization strategy
         if self.unify == "when_possible":
             return self._concretize_together_where_possible(tests=tests)

@@ -1610,7 +1415,7 @@ def _concretize_separately(self, tests=False):
         # Ensure we don't try to bootstrap clingo in parallel
         if spack.config.get("config:concretizer", "clingo") == "clingo":
             with spack.bootstrap.ensure_bootstrap_configuration():
-                spack.bootstrap.ensure_clingo_importable_or_raise()
+                spack.bootstrap.ensure_core_dependencies()

         # Ensure all the indexes have been built or updated, since
         # otherwise the processes in the pool may timeout on waiting
@@ -1899,14 +1704,8 @@ def _partition_roots_by_install_status(self):
of per spec.""" of per spec."""
installed, uninstalled = [], [] installed, uninstalled = [], []
with spack.store.STORE.db.read_transaction(): with spack.store.STORE.db.read_transaction():
for concretized_hash in self.all_concretized_orders(): for concretized_hash in self.concretized_order:
if concretized_hash in self.specs_by_hash: spec = self.specs_by_hash[concretized_hash]
spec = self.specs_by_hash[concretized_hash]
else:
for env_path in self.included_specs_by_hash.keys():
if concretized_hash in self.included_specs_by_hash[env_path]:
spec = self.included_specs_by_hash[env_path][concretized_hash]
break
if not spec.installed or ( if not spec.installed or (
spec.satisfies("dev_path=*") or spec.satisfies("^dev_path=*") spec.satisfies("dev_path=*") or spec.satisfies("^dev_path=*")
): ):
@@ -1936,18 +1735,13 @@ def install_specs(self, specs: Optional[List[Spec]] = None, **install_args):
specs = specs if specs is not None else roots specs = specs if specs is not None else roots
# Extend the set of specs to overwrite with modified dev specs and their parents # Extend the set of specs to overwrite with modified dev specs and their parents
install_args["overwrite"] = { install_args["overwrite"] = (
*install_args.get("overwrite", ()), install_args.get("overwrite", []) + self._dev_specs_that_need_overwrite()
*self._dev_specs_that_need_overwrite(), )
}
# Only environment roots are marked explicit installs = [(spec.package, {**install_args, "explicit": spec in roots}) for spec in specs]
install_args["explicit"] = {
*install_args.get("explicit", ()),
*(s.dag_hash() for s in roots),
}
PackageInstaller([spec.package for spec in specs], install_args).install() PackageInstaller(installs).install()
def all_specs_generator(self) -> Iterable[Spec]: def all_specs_generator(self) -> Iterable[Spec]:
"""Returns a generator for all concrete specs""" """Returns a generator for all concrete specs"""
@@ -1991,14 +1785,8 @@ def added_specs(self):
def concretized_specs(self): def concretized_specs(self):
"""Tuples of (user spec, concrete spec) for all concrete specs.""" """Tuples of (user spec, concrete spec) for all concrete specs."""
for s, h in zip(self.all_concretized_user_specs(), self.all_concretized_orders()): for s, h in zip(self.concretized_user_specs, self.concretized_order):
if h in self.specs_by_hash: yield (s, self.specs_by_hash[h])
yield (s, self.specs_by_hash[h])
else:
for env_path in self.included_specs_by_hash.keys():
if h in self.included_specs_by_hash[env_path]:
yield (s, self.included_specs_by_hash[env_path][h])
break
def concrete_roots(self): def concrete_roots(self):
"""Same as concretized_specs, except it returns the list of concrete """Same as concretized_specs, except it returns the list of concrete
@@ -2127,7 +1915,8 @@ def _get_environment_specs(self, recurse_dependencies=True):
If these specs appear under different user_specs, only one copy If these specs appear under different user_specs, only one copy
is added to the list returned. is added to the list returned.
""" """
specs = [self.specs_by_hash[h] for h in self.all_concretized_orders()] specs = [self.specs_by_hash[h] for h in self.concretized_order]
if recurse_dependencies: if recurse_dependencies:
specs.extend( specs.extend(
traverse.traverse_nodes( traverse.traverse_nodes(
@@ -2137,23 +1926,16 @@ def _get_environment_specs(self, recurse_dependencies=True):
return specs return specs
def _concrete_specs_dict(self): def _to_lockfile_dict(self):
"""Create a dictionary to store a lockfile for this environment."""
concrete_specs = {} concrete_specs = {}
for s in traverse.traverse_nodes(self.specs_by_hash.values(), key=traverse.by_dag_hash): for s in traverse.traverse_nodes(self.specs_by_hash.values(), key=traverse.by_dag_hash):
spec_dict = s.node_dict_with_hashes(hash=ht.dag_hash) spec_dict = s.node_dict_with_hashes(hash=ht.dag_hash)
# Assumes no legacy formats, since this was just created. # Assumes no legacy formats, since this was just created.
spec_dict[ht.dag_hash.name] = s.dag_hash() spec_dict[ht.dag_hash.name] = s.dag_hash()
concrete_specs[s.dag_hash()] = spec_dict concrete_specs[s.dag_hash()] = spec_dict
return concrete_specs
def _concrete_roots_dict(self):
hash_spec_list = zip(self.concretized_order, self.concretized_user_specs) hash_spec_list = zip(self.concretized_order, self.concretized_user_specs)
return [{"hash": h, "spec": str(s)} for h, s in hash_spec_list]
def _to_lockfile_dict(self):
"""Create a dictionary to store a lockfile for this environment."""
concrete_specs = self._concrete_specs_dict()
root_specs = self._concrete_roots_dict()
spack_dict = {"version": spack.spack_version} spack_dict = {"version": spack.spack_version}
spack_commit = spack.main.get_spack_commit() spack_commit = spack.main.get_spack_commit()
@@ -2174,81 +1956,36 @@ def _to_lockfile_dict(self):
# spack version information # spack version information
"spack": spack_dict, "spack": spack_dict,
# users specs + hashes are the 'roots' of the environment # users specs + hashes are the 'roots' of the environment
"roots": root_specs, "roots": [{"hash": h, "spec": str(s)} for h, s in hash_spec_list],
# Concrete specs by hash, including dependencies # Concrete specs by hash, including dependencies
"concrete_specs": concrete_specs, "concrete_specs": concrete_specs,
} }
if self.included_concrete_envs:
data[included_concrete_name] = self.included_concrete_spec_data
return data return data
def _read_lockfile(self, file_or_json): def _read_lockfile(self, file_or_json):
"""Read a lockfile from a file or from a raw string.""" """Read a lockfile from a file or from a raw string."""
lockfile_dict = sjson.load(file_or_json) lockfile_dict = sjson.load(file_or_json)
self._read_lockfile_dict(lockfile_dict) self._read_lockfile_dict(lockfile_dict)
return lockfile_dict return lockfile_dict["_meta"]["lockfile-version"]
def set_included_concretized_user_specs(
self,
env_name: str,
env_info: Dict[str, Dict[str, Any]],
included_json_specs_by_hash: Dict[str, Dict[str, Any]],
) -> Dict[str, Dict[str, Any]]:
"""Sets all of the concretized user specs from included environments
to include those from nested included environments.
Args:
env_name: the name (technically the path) of the included environment
env_info: included concrete environment data
included_json_specs_by_hash: concrete spec data keyed by hash
Returns: updated specs_by_hash
"""
self.included_concretized_order[env_name] = []
self.included_concretized_user_specs[env_name] = []
def add_specs(name, info, specs_by_hash):
# Add specs from the environment as well as any of its nested
# environments.
for root_info in info["roots"]:
self.included_concretized_order[name].append(root_info["hash"])
self.included_concretized_user_specs[name].append(Spec(root_info["spec"]))
if "concrete_specs" in info:
specs_by_hash.update(info["concrete_specs"])
if included_concrete_name in info:
for included_name, included_info in info[included_concrete_name].items():
if included_name not in self.included_concretized_order:
self.included_concretized_order[included_name] = []
self.included_concretized_user_specs[included_name] = []
add_specs(included_name, included_info, specs_by_hash)
add_specs(env_name, env_info, included_json_specs_by_hash)
return included_json_specs_by_hash
def _read_lockfile_dict(self, d): def _read_lockfile_dict(self, d):
"""Read a lockfile dictionary into this environment.""" """Read a lockfile dictionary into this environment."""
self.specs_by_hash = {} self.specs_by_hash = {}
self.included_specs_by_hash = {}
self.included_concretized_user_specs = {}
self.included_concretized_order = {}
roots = d["roots"] roots = d["roots"]
self.concretized_user_specs = [Spec(r["spec"]) for r in roots] self.concretized_user_specs = [Spec(r["spec"]) for r in roots]
         self.concretized_order = [r["hash"] for r in roots]
         json_specs_by_hash = d["concrete_specs"]
-        included_json_specs_by_hash = {}
-        if included_concrete_name in d:
-            for env_name, env_info in d[included_concrete_name].items():
-                included_json_specs_by_hash.update(
-                    self.set_included_concretized_user_specs(
-                        env_name, env_info, included_json_specs_by_hash
-                    )
-                )
+        # Track specs by their lockfile key. Currently spack uses the finest
+        # grained hash as the lockfile key, while older formats used the build
+        # hash or a previous incarnation of the DAG hash (one that did not
+        # include build deps or package hash).
+        specs_by_hash = {}
+
+        # Track specs by their DAG hash, allows handling DAG hash collisions
+        first_seen = {}

         current_lockfile_format = d["_meta"]["lockfile-version"]
         try:
             reader = READER_CLS[current_lockfile_format]

@@ -2261,39 +1998,6 @@ def _read_lockfile_dict(self, d):
             msg += " You need to use a newer Spack version."
             raise SpackEnvironmentError(msg)

-        first_seen, self.concretized_order = self.filter_specs(
-            reader, json_specs_by_hash, self.concretized_order
-        )
-
-        for spec_dag_hash in self.concretized_order:
-            self.specs_by_hash[spec_dag_hash] = first_seen[spec_dag_hash]
-
-        if any(self.included_concretized_order.values()):
-            first_seen = {}
-
-            for env_name, concretized_order in self.included_concretized_order.items():
-                filtered_spec, self.included_concretized_order[env_name] = self.filter_specs(
-                    reader, included_json_specs_by_hash, concretized_order
-                )
-                first_seen.update(filtered_spec)
-
-            for env_path, spec_hashes in self.included_concretized_order.items():
-                self.included_specs_by_hash[env_path] = {}
-                for spec_dag_hash in spec_hashes:
-                    self.included_specs_by_hash[env_path].update(
-                        {spec_dag_hash: first_seen[spec_dag_hash]}
-                    )
-
-    def filter_specs(self, reader, json_specs_by_hash, order_concretized):
-        # Track specs by their lockfile key. Currently spack uses the finest
-        # grained hash as the lockfile key, while older formats used the build
-        # hash or a previous incarnation of the DAG hash (one that did not
-        # include build deps or package hash).
-        specs_by_hash = {}
-
-        # Track specs by their DAG hash, allows handling DAG hash collisions
-        first_seen = {}
-
         # First pass: Put each spec in the map ignoring dependencies
         for lockfile_key, node_dict in json_specs_by_hash.items():
             spec = reader.from_node_dict(node_dict)

@@ -2316,8 +2020,7 @@ def filter_specs(self, reader, json_specs_by_hash, order_concretized):
         # keep. This is only required as long as we support older lockfile
         # formats where the mapping from DAG hash to lockfile key is possibly
         # one-to-many.
-        for lockfile_key in order_concretized:
+        for lockfile_key in self.concretized_order:
             for s in specs_by_hash[lockfile_key].traverse():
                 if s.dag_hash() not in first_seen:
                     first_seen[s.dag_hash()] = s

@@ -2325,10 +2028,12 @@ def filter_specs(self, reader, json_specs_by_hash, order_concretized):
         # Now make sure concretized_order and our internal specs dict
         # contains the keys used by modern spack (i.e. the dag_hash
         # that includes build deps and package hash).
-        order_concretized = [specs_by_hash[h_key].dag_hash() for h_key in order_concretized]
-
-        return first_seen, order_concretized
+        self.concretized_order = [
+            specs_by_hash[h_key].dag_hash() for h_key in self.concretized_order
+        ]
+
+        for spec_dag_hash in self.concretized_order:
+            self.specs_by_hash[spec_dag_hash] = first_seen[spec_dag_hash]
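The hunk above folds the lockfile post-processing back inline: a first pass materializes every spec under its (possibly legacy) lockfile key, a second pass deduplicates by modern DAG hash. A standalone sketch of that two-pass rehash, with stand-in spec objects (the `traverse` and `dag_hash` methods are assumed, not imported from Spack):

def rehash_lockfile(json_specs_by_hash, concretized_order, from_node_dict):
    specs_by_hash = {}  # keyed by the lockfile key (may be a legacy hash)
    first_seen = {}     # keyed by the modern DAG hash; resolves collisions
    # First pass: materialize each spec, ignoring dependencies.
    for lockfile_key, node_dict in json_specs_by_hash.items():
        specs_by_hash[lockfile_key] = from_node_dict(node_dict)
    # Second pass: old keys can map many-to-one onto DAG hashes, so keep
    # only the first spec seen for each DAG hash.
    for lockfile_key in concretized_order:
        for spec in specs_by_hash[lockfile_key].traverse():
            first_seen.setdefault(spec.dag_hash(), spec)
    # Express the concretization order in terms of modern DAG hashes.
    return first_seen, [specs_by_hash[k].dag_hash() for k in concretized_order]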
     def write(self, regenerate: bool = True) -> None:
         """Writes an in-memory environment to its location on disk.

@@ -2341,7 +2046,7 @@ def write(self, regenerate: bool = True) -> None:
             regenerate: regenerate views and run post-write hooks as well as writing if True.
         """
         self.manifest_uptodate_or_warn()
-        if self.specs_by_hash or self.included_concrete_envs:
+        if self.specs_by_hash:
             self.ensure_env_directory_exists(dot_env=True)
             self.update_environment_repository()
             self.manifest.flush()
@@ -2840,19 +2545,6 @@ def override_user_spec(self, user_spec: str, idx: int) -> None:
             raise SpackEnvironmentError(msg) from e
         self.changed = True

-    def set_include_concrete(self, include_concrete: List[str]) -> None:
-        """Sets the included concrete environments in the manifest to the value(s) passed as input.
-
-        Args:
-            include_concrete: list of already existing concrete environments to include
-        """
-        self.pristine_configuration[included_concrete_name] = []
-
-        for env_path in include_concrete:
-            self.pristine_configuration[included_concrete_name].append(env_path)
-
-        self.changed = True
-
     def add_definition(self, user_spec: str, list_name: str) -> None:
         """Appends a user spec to the first active definition matching the name passed as argument.
@@ -3036,56 +2728,54 @@ def included_config_scopes(self) -> List[spack.config.ConfigScope]:
         for i, config_path in enumerate(reversed(includes)):
             # allow paths to contain spack config/environment variables, etc.
             config_path = substitute_path_variables(config_path)

             include_url = urllib.parse.urlparse(config_path)

-            # If scheme is not valid, config_path is not a url
-            # of a type Spack is generally aware
-            if spack.util.url.validate_scheme(include_url.scheme):
-                # Transform file:// URLs to direct includes.
-                if include_url.scheme == "file":
-                    config_path = urllib.request.url2pathname(include_url.path)
+            # Transform file:// URLs to direct includes.
+            if include_url.scheme == "file":
+                config_path = urllib.request.url2pathname(include_url.path)

             # Any other URL should be fetched.
             elif include_url.scheme in ("http", "https", "ftp"):
                 # Stage any remote configuration file(s)
                 staged_configs = (
                     os.listdir(self.config_stage_dir)
                     if os.path.exists(self.config_stage_dir)
                     else []
                 )
                 remote_path = urllib.request.url2pathname(include_url.path)
                 basename = os.path.basename(remote_path)
                 if basename in staged_configs:
                     # Do NOT re-stage configuration files over existing
                     # ones with the same name since there is a risk of
                     # losing changes (e.g., from 'spack config update').
                     tty.warn(
                         "Will not re-stage configuration from {0} to avoid "
                         "losing changes to the already staged file of the "
                         "same name.".format(remote_path)
                     )

                     # Recognize the configuration stage directory
                     # is flattened to ensure a single copy of each
                     # configuration file.
                     config_path = self.config_stage_dir
                     if basename.endswith(".yaml"):
                         config_path = os.path.join(config_path, basename)
                 else:
                     staged_path = spack.config.fetch_remote_configs(
                         config_path, str(self.config_stage_dir), skip_existing=True
                     )
                     if not staged_path:
                         raise SpackEnvironmentError(
                             "Unable to fetch remote configuration {0}".format(config_path)
                         )
                     config_path = staged_path

             elif include_url.scheme:
                 raise ValueError(
                     f"Unsupported URL scheme ({include_url.scheme}) for "
                     f"environment include: {config_path}"
                 )

             # treat relative paths as relative to the environment
             if not os.path.isabs(config_path):
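The include handling above is a three-way dispatch on the URL scheme. A minimal, self-contained sketch of the same dispatch (the "remote" branch stands in for the staging done by fetch_remote_configs, which is not reproduced here):

import urllib.parse
import urllib.request

def classify_include(config_path):
    url = urllib.parse.urlparse(config_path)
    if url.scheme == "file":
        # file:// URLs become direct local includes
        return ("local", urllib.request.url2pathname(url.path))
    if url.scheme in ("http", "https", "ftp"):
        # remote configuration must be staged before use
        return ("remote", config_path)
    if url.scheme:
        raise ValueError(f"Unsupported URL scheme ({url.scheme}): {config_path}")
    return ("local", config_path)  # plain path, possibly relative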


@@ -30,7 +30,6 @@
 import shutil
 import urllib.error
 import urllib.parse
-import urllib.request
 from pathlib import PurePath
 from typing import List, Optional

@@ -274,7 +273,10 @@ def __init__(self, url=None, checksum=None, **kwargs):
     @property
     def curl(self):
         if not self._curl:
-            self._curl = web_util.require_curl()
+            try:
+                self._curl = which("curl", required=True)
+            except CommandNotFoundError as exc:
+                tty.error(str(exc))
         return self._curl

     def source_id(self):
@@ -295,23 +297,27 @@ def candidate_urls(self):
     @_needs_stage
     def fetch(self):
         if self.archive_file:
-            tty.debug(f"Already downloaded {self.archive_file}")
+            tty.debug("Already downloaded {0}".format(self.archive_file))
             return

-        errors: List[Exception] = []
+        url = None
+        errors = []
         for url in self.candidate_urls:
+            if not web_util.url_exists(url):
+                tty.debug("URL does not exist: " + url)
+                continue
+
             try:
                 self._fetch_from_url(url)
                 break
             except FailedDownloadError as e:
-                errors.extend(e.exceptions)
-        else:
-            raise FailedDownloadError(*errors)
+                errors.append(str(e))
+
+        for msg in errors:
+            tty.debug(msg)

         if not self.archive_file:
-            raise FailedDownloadError(
-                RuntimeError(f"Missing archive {self.archive_file} after fetching")
-            )
+            raise FailedDownloadError(url)

     def _fetch_from_url(self, url):
         if spack.config.get("config:url_fetch_method") == "curl":
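The removed side of this hunk tries each candidate URL in turn and aggregates the per-URL failures into a single error object; the restored side logs strings instead. A sketch of the aggregation pattern, with a stand-in error class (not Spack's FailedDownloadError):

class DownloadError(Exception):
    def __init__(self, *exceptions):
        super().__init__("Failed to download")
        self.exceptions = exceptions  # every underlying cause, preserved

def fetch_first(urls, fetch_one):
    errors = []
    for url in urls:
        try:
            return fetch_one(url)
        except OSError as e:  # stand-in for the per-URL fetch error
            errors.append(e)
    # reached only when every candidate URL failed
    raise DownloadError(*errors)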
@@ -330,20 +336,19 @@ def _check_headers(self, headers):
     @_needs_stage
     def _fetch_urllib(self, url):
         save_file = self.stage.save_filename

-        request = urllib.request.Request(url, headers={"User-Agent": web_util.SPACK_USER_AGENT})
+        tty.msg("Fetching {0}".format(url))

+        # Run urllib but grab the mime type from the http headers
         try:
-            response = web_util.urlopen(request)
-        except (TimeoutError, urllib.error.URLError) as e:
+            url, headers, response = web_util.read_from_url(url)
+        except web_util.SpackWebError as e:
             # clean up archive on failure.
             if self.archive_file:
                 os.remove(self.archive_file)
             if os.path.lexists(save_file):
                 os.remove(save_file)
-            raise FailedDownloadError(e) from e
-
-        tty.msg(f"Fetching {url}")
+            msg = "urllib failed to fetch with error {0}".format(e)
+            raise FailedDownloadError(url, msg)

         if os.path.lexists(save_file):
             os.remove(save_file)

@@ -351,7 +356,7 @@ def _fetch_urllib(self, url):
         with open(save_file, "wb") as _open_file:
             shutil.copyfileobj(response, _open_file)

-        self._check_headers(str(response.headers))
+        self._check_headers(str(headers))

     @_needs_stage
     def _fetch_curl(self, url):

@@ -360,7 +365,7 @@ def _fetch_curl(self, url):
         if self.stage.save_filename:
             save_file = self.stage.save_filename
             partial_file = self.stage.save_filename + ".part"
-        tty.msg(f"Fetching {url}")
+        tty.msg("Fetching {0}".format(url))
         if partial_file:
             save_args = [
                 "-C",

@@ -400,8 +405,8 @@ def _fetch_curl(self, url):
         try:
             web_util.check_curl_code(curl.returncode)
-        except spack.error.FetchError as e:
-            raise FailedDownloadError(e) from e
+        except spack.error.FetchError as err:
+            raise spack.fetch_strategy.FailedDownloadError(url, str(err))

         self._check_headers(headers)

@@ -549,13 +554,13 @@ def fetch(self):
         try:
             response = self._urlopen(self.url)
-        except (TimeoutError, urllib.error.URLError) as e:
+        except urllib.error.URLError as e:
             # clean up archive on failure.
             if self.archive_file:
                 os.remove(self.archive_file)
             if os.path.lexists(file):
                 os.remove(file)
-            raise FailedDownloadError(e) from e
+            raise FailedDownloadError(self.url, f"Failed to fetch {self.url}: {e}") from e

         if os.path.lexists(file):
             os.remove(file)
@@ -1307,41 +1312,35 @@ def __init__(self, *args, **kwargs):
     @_needs_stage
     def fetch(self):
         if self.archive_file:
-            tty.debug(f"Already downloaded {self.archive_file}")
+            tty.debug("Already downloaded {0}".format(self.archive_file))
             return

         parsed_url = urllib.parse.urlparse(self.url)
         if parsed_url.scheme != "s3":
             raise spack.error.FetchError("S3FetchStrategy can only fetch from s3:// urls.")

+        tty.debug("Fetching {0}".format(self.url))
+
         basename = os.path.basename(parsed_url.path)
-        request = urllib.request.Request(
-            self.url, headers={"User-Agent": web_util.SPACK_USER_AGENT}
-        )

         with working_dir(self.stage.path):
-            try:
-                response = web_util.urlopen(request)
-            except (TimeoutError, urllib.error.URLError) as e:
-                raise FailedDownloadError(e) from e
-
-            tty.debug(f"Fetching {self.url}")
+            _, headers, stream = web_util.read_from_url(self.url)

             with open(basename, "wb") as f:
-                shutil.copyfileobj(response, f)
+                shutil.copyfileobj(stream, f)

-            content_type = web_util.get_header(response.headers, "Content-type")
+            content_type = web_util.get_header(headers, "Content-type")

         if content_type == "text/html":
             warn_content_type_mismatch(self.archive_file or "the archive")

         if self.stage.save_filename:
-            fs.rename(os.path.join(self.stage.path, basename), self.stage.save_filename)
+            llnl.util.filesystem.rename(
+                os.path.join(self.stage.path, basename), self.stage.save_filename
+            )

         if not self.archive_file:
-            raise FailedDownloadError(
-                RuntimeError(f"Missing archive {self.archive_file} after fetching")
-            )
+            raise FailedDownloadError(self.url)

     @fetcher
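Both variants of the S3 fetch stream the HTTP body straight to disk and then sniff the Content-Type. A generic sketch of that pattern using only the standard library (the User-Agent value is illustrative):

import shutil
import urllib.request

def stream_to_file(url, dest, user_agent="example-fetcher/1.0"):
    request = urllib.request.Request(url, headers={"User-Agent": user_agent})
    with urllib.request.urlopen(request) as response, open(dest, "wb") as f:
        # copy in chunks; the body is never held in memory all at once
        shutil.copyfileobj(response, f)
        return response.headers.get("Content-Type", "")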
@@ -1367,23 +1366,17 @@ def fetch(self):
         if parsed_url.scheme != "gs":
             raise spack.error.FetchError("GCSFetchStrategy can only fetch from gs:// urls.")

+        tty.debug("Fetching {0}".format(self.url))
+
         basename = os.path.basename(parsed_url.path)
-        request = urllib.request.Request(
-            self.url, headers={"User-Agent": web_util.SPACK_USER_AGENT}
-        )

         with working_dir(self.stage.path):
-            try:
-                response = web_util.urlopen(request)
-            except (TimeoutError, urllib.error.URLError) as e:
-                raise FailedDownloadError(e) from e
-
-            tty.debug(f"Fetching {self.url}")
+            _, headers, stream = web_util.read_from_url(self.url)

             with open(basename, "wb") as f:
-                shutil.copyfileobj(response, f)
+                shutil.copyfileobj(stream, f)

-            content_type = web_util.get_header(response.headers, "Content-type")
+            content_type = web_util.get_header(headers, "Content-type")

         if content_type == "text/html":
             warn_content_type_mismatch(self.archive_file or "the archive")

@@ -1392,9 +1385,7 @@ def fetch(self):
             os.rename(os.path.join(self.stage.path, basename), self.stage.save_filename)

         if not self.archive_file:
-            raise FailedDownloadError(
-                RuntimeError(f"Missing archive {self.archive_file} after fetching")
-            )
+            raise FailedDownloadError(self.url)

     @fetcher

@@ -1731,9 +1722,9 @@ class NoCacheError(spack.error.FetchError):
 class FailedDownloadError(spack.error.FetchError):
     """Raised when a download fails."""

-    def __init__(self, *exceptions: Exception):
-        super().__init__("Failed to download")
-        self.exceptions = exceptions
+    def __init__(self, url, msg=""):
+        super().__init__("Failed to fetch file from URL: %s" % url, msg)
+        self.url = url


 class NoArchiveFileError(spack.error.FetchError):
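The two class definitions differ in what a failure carries: a URL plus message string versus the full set of underlying exceptions. A sketch of the multi-cause variant and how a caller can unpack it (class and messages are illustrative):

class FailedDownload(Exception):
    def __init__(self, *exceptions):
        super().__init__("Failed to download")
        self.exceptions = exceptions

try:
    raise FailedDownload(TimeoutError("mirror 1 timed out"),
                         ConnectionError("mirror 2 refused"))
except FailedDownload as e:
    for cause in e.exceptions:
        print(type(cause).__name__, "-", cause)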


@@ -10,9 +10,8 @@
 import shutil
 import stat
 import sys
-from typing import Callable, Dict, Optional
+from typing import Optional

-from llnl.string import comma_or
 from llnl.util import tty
 from llnl.util.filesystem import (
     mkdirp,

@@ -33,7 +32,6 @@
 from llnl.util.tty.color import colorize

 import spack.config
-import spack.directory_layout
 import spack.paths
 import spack.projections
 import spack.relocate

@@ -51,20 +49,19 @@
 _projections_path = ".spack/projections.yaml"

-LinkCallbackType = Callable[[str, str, "FilesystemView", Optional[spack.spec.Spec]], None]

-def view_symlink(src: str, dst: str, *args, **kwargs) -> None:
+def view_symlink(src, dst, **kwargs):
+    # keyword arguments are irrelevant
+    # here to fit required call signature
     symlink(src, dst)


-def view_hardlink(src: str, dst: str, *args, **kwargs) -> None:
+def view_hardlink(src, dst, **kwargs):
+    # keyword arguments are irrelevant
+    # here to fit required call signature
     os.link(src, dst)


-def view_copy(
-    src: str, dst: str, view: "FilesystemView", spec: Optional[spack.spec.Spec] = None
-) -> None:
+def view_copy(src: str, dst: str, view, spec: Optional[spack.spec.Spec] = None):
     """
     Copy a file from src to dst.

@@ -107,40 +104,27 @@ def view_copy(
         tty.debug(f"Can't change the permissions for {dst}")


-#: supported string values for `link_type` in an env, mapped to canonical values
-_LINK_TYPES = {
-    "hardlink": "hardlink",
-    "hard": "hardlink",
-    "copy": "copy",
-    "relocate": "copy",
-    "add": "symlink",
-    "symlink": "symlink",
-    "soft": "symlink",
-}
-
-_VALID_LINK_TYPES = sorted(set(_LINK_TYPES.values()))
-
-
-def canonicalize_link_type(link_type: str) -> str:
-    """Return canonical"""
-    canonical = _LINK_TYPES.get(link_type)
-    if not canonical:
-        raise ValueError(
-            f"Invalid link type: '{link_type}. Must be one of {comma_or(_VALID_LINK_TYPES)}'"
-        )
-    return canonical
-
-
-def function_for_link_type(link_type: str) -> LinkCallbackType:
-    link_type = canonicalize_link_type(link_type)
-    if link_type == "hardlink":
+def view_func_parser(parsed_name):
+    # What method are we using for this view
+    if parsed_name in ("hardlink", "hard"):
         return view_hardlink
-    elif link_type == "symlink":
-        return view_symlink
-    elif link_type == "copy":
+    elif parsed_name in ("copy", "relocate"):
         return view_copy
+    elif parsed_name in ("add", "symlink", "soft"):
+        return view_symlink
+    else:
+        raise ValueError(f"invalid link type for view: '{parsed_name}'")

-    assert False, "invalid link type"  # need mypy Literal values

+def inverse_view_func_parser(view_type):
+    # get string based on view type
+    if view_type is view_hardlink:
+        link_name = "hardlink"
+    elif view_type is view_copy:
+        link_name = "copy"
+    else:
+        link_name = "symlink"
+    return link_name
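The removed helpers normalize the many accepted spellings of a view link type before dispatching. Restating that table as a tiny standalone snippet makes the mapping explicit (values mirror the hunk above):

_LINK_TYPES = {
    "hardlink": "hardlink", "hard": "hardlink",
    "copy": "copy", "relocate": "copy",
    "add": "symlink", "symlink": "symlink", "soft": "symlink",
}

def canonicalize_link_type(link_type):
    canonical = _LINK_TYPES.get(link_type)
    if not canonical:
        valid = sorted(set(_LINK_TYPES.values()))
        raise ValueError(f"Invalid link type: {link_type!r}; must be one of {valid}")
    return canonical

assert canonicalize_link_type("relocate") == "copy"
assert canonicalize_link_type("soft") == "symlink"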
 class FilesystemView:

@@ -156,16 +140,7 @@ class FilesystemView:
     directory structure.
     """

-    def __init__(
-        self,
-        root: str,
-        layout: spack.directory_layout.DirectoryLayout,
-        *,
-        projections: Optional[Dict] = None,
-        ignore_conflicts: bool = False,
-        verbose: bool = False,
-        link_type: str = "symlink",
-    ):
+    def __init__(self, root, layout, **kwargs):
         """
         Initialize a filesystem view under the given `root` directory with
         corresponding directory `layout`.

@@ -174,17 +149,15 @@ def __init__(
         """
         self._root = root
         self.layout = layout

-        self.projections = {} if projections is None else projections
-        self.ignore_conflicts = ignore_conflicts
-        self.verbose = verbose
+        self.projections = kwargs.get("projections", {})
+        self.ignore_conflicts = kwargs.get("ignore_conflicts", False)
+        self.verbose = kwargs.get("verbose", False)

         # Setup link function to include view
-        self.link_type = link_type
-        self._link = function_for_link_type(link_type)
-
-    def link(self, src: str, dst: str, spec: Optional[spack.spec.Spec] = None) -> None:
-        self._link(src, dst, self, spec)
+        link_func = kwargs.get("link", view_symlink)
+        self.link = ft.partial(link_func, view=self)
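The restored __init__ binds the view into its link callback with functools.partial, so later call sites only supply source and destination. A runnable toy version of that binding (ToyView and the print body are illustrative):

import functools as ft

def view_symlink(src, dst, **kwargs):
    # kwargs carries the view; a real implementation would symlink here
    print(f"link {src} -> {dst} (view={kwargs['view'].__class__.__name__})")

class ToyView:
    def __init__(self, link_func=view_symlink):
        # bind this view once; callers just do view.link(src, dst)
        self.link = ft.partial(link_func, view=self)

ToyView().link("/prefix/bin/tool", "/view/bin/tool")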
     def add_specs(self, *specs, **kwargs):
         """

@@ -282,24 +255,8 @@ class YamlFilesystemView(FilesystemView):
     Filesystem view to work with a yaml based directory layout.
     """

-    def __init__(
-        self,
-        root: str,
-        layout: spack.directory_layout.DirectoryLayout,
-        *,
-        projections: Optional[Dict] = None,
-        ignore_conflicts: bool = False,
-        verbose: bool = False,
-        link_type: str = "symlink",
-    ):
-        super().__init__(
-            root,
-            layout,
-            projections=projections,
-            ignore_conflicts=ignore_conflicts,
-            verbose=verbose,
-            link_type=link_type,
-        )
+    def __init__(self, root, layout, **kwargs):
+        super().__init__(root, layout, **kwargs)

     # Super class gets projections from the kwargs
     # YAML specific to get projections from YAML file

@@ -681,6 +638,9 @@ class SimpleFilesystemView(FilesystemView):
     """A simple and partial implementation of FilesystemView focused on performance and immutable
     views, where specs cannot be removed after they were added."""

+    def __init__(self, root, layout, **kwargs):
+        super().__init__(root, layout, **kwargs)
+
     def _sanity_check_view_projection(self, specs):
         """A very common issue is that we end up with two specs of the same package, that project
         to the same prefix. We want to catch that as early as possible and give a sensible error to

@@ -41,9 +41,8 @@ def _populate_hooks(cls):

     relative_names = list(list_modules(spack.paths.hooks_path))

-    # write_install_manifest should come after any mutation of the install prefix, and
-    # autopush should include the install manifest.
-    ensure_last(relative_names, "absolutify_elf_sonames", "write_install_manifest", "autopush")
+    # Ensure that write_install_manifest comes last
+    ensure_last(relative_names, "absolutify_elf_sonames", "write_install_manifest")

     for name in relative_names:
         module_name = __name__ + "." + name

@@ -12,10 +12,6 @@
 def post_install(spec, explicit):
     # Push package to all buildcaches with autopush==True

-    # Do nothing if spec is an external package
-    if spec.external:
-        return
-
     # Do nothing if package was not installed from source
     pkg = spec.package
     if pkg.installed_from_binary_cache:


@@ -1,8 +0,0 @@
-# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
-# Spack Project Developers. See the top-level COPYRIGHT file for details.
-#
-# SPDX-License-Identifier: (Apache-2.0 OR MIT)
-
-
-def post_install(spec, explicit=None):
-    spec.package.windows_establish_runtime_linkage()


@@ -440,7 +440,7 @@ def _process_external_package(pkg: "spack.package_base.PackageBase", explicit: bool):
             tty.debug(f"{pre} already registered in DB")
             record = spack.store.STORE.db.get_record(spec)
             if explicit and not record.explicit:
-                spack.store.STORE.db.mark(spec, "explicit", True)
+                spack.store.STORE.db.update_explicit(spec, explicit)

         except KeyError:
             # If not, register it and generate the module file.

@@ -488,7 +488,6 @@ def _process_binary_cache_tarball(
     with timer.measure("install"), spack.util.path.filter_padding():
         binary_distribution.extract_tarball(pkg.spec, download_result, force=False, timer=timer)

-        pkg.windows_establish_runtime_linkage()
         if hasattr(pkg, "_post_buildcache_install_hook"):
             pkg._post_buildcache_install_hook()

@@ -761,8 +760,12 @@ def __init__(self, pkg: "spack.package_base.PackageBase", install_args: dict):
         if not self.pkg.spec.concrete:
             raise ValueError(f"{self.pkg.name} must have a concrete spec")

-        self.pkg.stop_before_phase = install_args.get("stop_before")  # type: ignore[attr-defined] # noqa: E501
-        self.pkg.last_phase = install_args.get("stop_at")  # type: ignore[attr-defined]
+        # Cache the package phase options with the explicit package,
+        # popping the options to ensure installation of associated
+        # dependencies is NOT affected by these options.
+        self.pkg.stop_before_phase = install_args.pop("stop_before", None)  # type: ignore[attr-defined] # noqa: E501
+        self.pkg.last_phase = install_args.pop("stop_at", None)  # type: ignore[attr-defined]

         # Cache the package id for convenience
         self.pkg_id = package_id(pkg.spec)

@@ -1072,17 +1075,19 @@ def flag_installed(self, installed: List[str]) -> None:
     @property
     def explicit(self) -> bool:
-        return self.pkg.spec.dag_hash() in self.request.install_args.get("explicit", [])
+        """The package was explicitly requested by the user."""
+        return self.is_root and self.request.install_args.get("explicit", True)

     @property
-    def is_build_request(self) -> bool:
-        """The package was requested directly"""
+    def is_root(self) -> bool:
+        """The package was requested directly, but may or may not be explicit
+        in an environment."""
         return self.pkg == self.request.pkg

     @property
     def use_cache(self) -> bool:
         _use_cache = True
-        if self.is_build_request:
+        if self.is_root:
             return self.request.install_args.get("package_use_cache", _use_cache)
         else:
             return self.request.install_args.get("dependencies_use_cache", _use_cache)

@@ -1090,7 +1095,7 @@ def use_cache(self) -> bool:
     @property
     def cache_only(self) -> bool:
         _cache_only = False
-        if self.is_build_request:
+        if self.is_root:
             return self.request.install_args.get("package_cache_only", _cache_only)
         else:
             return self.request.install_args.get("dependencies_cache_only", _cache_only)
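use_cache and cache_only above pick between package_* and dependencies_* keys depending on whether the task is the root of its request. The lookup reduces to one line; the key names follow the hunk, while the helper itself is illustrative:

def lookup_option(install_args, is_root, name, default):
    prefix = "package_" if is_root else "dependencies_"
    return install_args.get(prefix + name, default)

# e.g. lookup_option(args, is_root=True, name="use_cache", default=True)
#      -> args.get("package_use_cache", True)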
@@ -1116,17 +1121,24 @@ def priority(self):
 class PackageInstaller:
     """
-    Class for managing the install process for a Spack instance based on a bottom-up DAG approach.
+    Class for managing the install process for a Spack instance based on a
+    bottom-up DAG approach.

-    This installer can coordinate concurrent batch and interactive, local and distributed (on a
-    shared file system) builds for the same Spack instance.
+    This installer can coordinate concurrent batch and interactive, local
+    and distributed (on a shared file system) builds for the same Spack
+    instance.
     """

-    def __init__(
-        self, packages: List["spack.package_base.PackageBase"], install_args: dict
-    ) -> None:
+    def __init__(self, installs: List[Tuple["spack.package_base.PackageBase", dict]] = []) -> None:
+        """Initialize the installer.
+
+        Args:
+            installs (list): list of tuples, where each
+                tuple consists of a package (PackageBase) and its associated
+                install arguments (dict)
+        """
         # List of build requests
-        self.build_requests = [BuildRequest(pkg, install_args) for pkg in packages]
+        self.build_requests = [BuildRequest(pkg, install_args) for pkg, install_args in installs]

         # Priority queue of build tasks
         self.build_pq: List[Tuple[Tuple[int, int], BuildTask]] = []

@@ -1363,8 +1375,8 @@ def _prepare_for_install(self, task: BuildTask) -> None:
             self._update_installed(task)

             # Only update the explicit entry once for the explicit package
-            if task.explicit and not rec.explicit:
-                spack.store.STORE.db.mark(task.pkg.spec, "explicit", True)
+            if task.explicit:
+                spack.store.STORE.db.update_explicit(task.pkg.spec, True)

     def _cleanup_all_tasks(self) -> None:
         """Cleanup all build tasks to include releasing their locks."""

@@ -1544,6 +1556,17 @@ def _add_tasks(self, request: BuildRequest, all_deps):
             tty.warn(f"Installation request refused: {str(err)}")
             return

+        # Skip out early if the spec is not being installed locally (i.e., if
+        # external or upstream).
+        #
+        # External and upstream packages need to get flagged as installed to
+        # ensure proper status tracking for environment build.
+        explicit = request.install_args.get("explicit", True)
+        not_local = _handle_external_and_upstream(request.pkg, explicit)
+        if not_local:
+            self._flag_installed(request.pkg)
+            return
+
         install_compilers = spack.config.get("config:install_missing_compilers", False)

         install_deps = request.install_args.get("install_deps")

@@ -1659,6 +1682,10 @@ def _install_task(self, task: BuildTask, install_status: InstallStatus) -> None:
         if not pkg.unit_test_check():
             return

+        # Injecting information to know if this installation request is the root one
+        # to determine in BuildProcessInstaller whether installation is explicit or not
+        install_args["is_root"] = task.is_root
+
         try:
             self._setup_install_dir(pkg)

@@ -1671,6 +1698,10 @@ def _install_task(self, task: BuildTask, install_status: InstallStatus) -> None:
             spack.package_base.PackageBase._verbose = spack.build_environment.start_build_process(
                 pkg, build_process, install_args
             )
+            # Currently this is how RPATH-like behavior is achieved on Windows, after install
+            # establish runtime linkage via Windows Runtime link object
+            # Note: this is a no-op on non Windows platforms
+            pkg.windows_establish_runtime_linkage()

             # Note: PARENT of the build process adds the new package to
             # the database, so that we don't need to re-read from file.
             spack.store.STORE.db.add(pkg.spec, spack.store.STORE.layout, explicit=explicit)

@@ -1970,8 +2001,8 @@ def install(self) -> None:
         self._init_queue()
         fail_fast_err = "Terminating after first install failure"
-        single_requested_spec = len(self.build_requests) == 1
-        failed_build_requests = []
+        single_explicit_spec = len(self.build_requests) == 1
+        failed_explicits = []

         install_status = InstallStatus(len(self.build_pq))

@@ -2020,10 +2051,11 @@ def install(self) -> None:
             # Skip the installation if the spec is not being installed locally
             # (i.e., if external or upstream) BUT flag it as installed since
             # some package likely depends on it.
-            if _handle_external_and_upstream(pkg, task.explicit):
-                term_status.clear()
-                self._flag_installed(pkg, task.dependents)
-                continue
+            if not task.explicit:
+                if _handle_external_and_upstream(pkg, False):
+                    term_status.clear()
+                    self._flag_installed(pkg, task.dependents)
+                    continue

             # Flag a failed spec. Do not need an (install) prefix lock since
             # assume using a separate (failed) prefix lock file.

@@ -2168,11 +2200,14 @@ def install(self) -> None:
                 if self.fail_fast:
                     raise InstallError(f"{fail_fast_err}: {str(exc)}", pkg=pkg)

-                # Terminate when a single build request has failed, or summarize errors later.
-                if task.is_build_request:
-                    if single_requested_spec:
-                        raise
-                    failed_build_requests.append((pkg, pkg_id, str(exc)))
+                # Terminate at this point if the single explicit spec has
+                # failed to install.
+                if single_explicit_spec and task.explicit:
+                    raise
+
+                # Track explicit spec id and error to summarize when done
+                if task.explicit:
+                    failed_explicits.append((pkg, pkg_id, str(exc)))

             finally:
                 # Remove the install prefix if anything went wrong during

@@ -2195,16 +2230,16 @@ def install(self) -> None:
             if request.install_args.get("install_package") and request.pkg_id not in self.installed
         ]

-        if failed_build_requests or missing:
-            for _, pkg_id, err in failed_build_requests:
+        if failed_explicits or missing:
+            for _, pkg_id, err in failed_explicits:
                 tty.error(f"{pkg_id}: {err}")

             for _, pkg_id in missing:
                 tty.error(f"{pkg_id}: Package was not installed")

-            if len(failed_build_requests) > 0:
-                pkg = failed_build_requests[0][0]
-                ids = [pkg_id for _, pkg_id, _ in failed_build_requests]
+            if len(failed_explicits) > 0:
+                pkg = failed_explicits[0][0]
+                ids = [pkg_id for _, pkg_id, _ in failed_explicits]
                 tty.debug(
                     "Associating installation failure with first failed "
                     f"explicit package ({ids[0]}) from {', '.join(ids)}"
                 )

@@ -2263,7 +2298,7 @@ def __init__(self, pkg: "spack.package_base.PackageBase", install_args: dict):
         self.verbose = bool(install_args.get("verbose", False))

         # whether installation was explicitly requested by the user
-        self.explicit = pkg.spec.dag_hash() in install_args.get("explicit", [])
+        self.explicit = install_args.get("is_root", False) and install_args.get("explicit", True)

         # env before starting installation
         self.unmodified_env = install_args.get("unmodified_env", {})


@@ -427,7 +427,7 @@ def make_argument_parser(**kwargs):
     parser.add_argument(
         "--color",
         action="store",
-        default=None,
+        default=os.environ.get("SPACK_COLOR", "auto"),
        choices=("always", "never", "auto"),
        help="when to colorize output (default: auto)",
    )

@@ -622,8 +622,7 @@ def setup_main_options(args):
        # with color
        color.try_enable_terminal_color_on_windows()
    # when to use color (takes always, auto, or never)
-   if args.color is not None:
-       color.set_color_when(args.color)
+   color.set_color_when(args.color)


def allows_unknown_args(command):


@@ -87,8 +87,9 @@ def from_url(url: str):
         """Create an anonymous mirror by URL. This method validates the URL."""
         if not urllib.parse.urlparse(url).scheme in supported_url_schemes:
             raise ValueError(
-                f'"{url}" is not a valid mirror URL. '
-                f"Scheme must be one of {supported_url_schemes}."
+                '"{}" is not a valid mirror URL. Scheme must be once of {}.'.format(
+                    url, ", ".join(supported_url_schemes)
+                )
             )
         return Mirror(url)

@@ -733,7 +734,7 @@ def require_mirror_name(mirror_name):
     """Find a mirror by name and raise if it does not exist"""
     mirror = spack.mirror.MirrorCollection().get(mirror_name)
     if not mirror:
-        raise ValueError(f'no mirror named "{mirror_name}"')
+        raise ValueError('no mirror named "{0}"'.format(mirror_name))
     return mirror


@@ -11,7 +11,7 @@
 import urllib.parse
 import urllib.request
 from http.client import HTTPResponse
-from typing import List, NamedTuple, Tuple
+from typing import NamedTuple, Tuple
 from urllib.request import Request

 import llnl.util.tty as tty

@@ -27,7 +27,6 @@
 import spack.stage
 import spack.traverse
 import spack.util.crypto
-import spack.util.url

 from .image import Digest, ImageReference

@@ -70,42 +69,6 @@ def with_query_param(url: str, param: str, value: str) -> str:
     )


-def list_tags(ref: ImageReference, _urlopen: spack.oci.opener.MaybeOpen = None) -> List[str]:
-    """Retrieves the list of tags associated with an image, handling pagination."""
-    _urlopen = _urlopen or spack.oci.opener.urlopen
-    tags = set()
-    fetch_url = ref.tags_url()
-
-    while True:
-        # Fetch tags
-        request = Request(url=fetch_url)
-        response = _urlopen(request)
-        spack.oci.opener.ensure_status(request, response, 200)
-        tags.update(json.load(response)["tags"])
-
-        # Check for pagination
-        link_header = response.headers["Link"]
-
-        if link_header is None:
-            break
-
-        tty.debug(f"OCI tag pagination: {link_header}")
-
-        rel_next_value = spack.util.url.parse_link_rel_next(link_header)
-
-        if rel_next_value is None:
-            break
-
-        rel_next = urllib.parse.urlparse(rel_next_value)
-
-        if rel_next.scheme not in ("https", ""):
-            break
-
-        fetch_url = ref.endpoint(rel_next_value)
-
-    return sorted(tags)
-
-
 def upload_blob(
     ref: ImageReference,
     file: str,
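The deleted list_tags walks an OCI registry's paginated tag listing by following RFC 5988 Link headers. A compact sketch of that loop; parse_link_rel_next is passed in as a callable because the real helper lives in spack.util.url and is not reproduced here:

import json
import urllib.parse
import urllib.request

def list_all_tags(tags_url, parse_link_rel_next):
    tags, fetch_url = set(), tags_url
    while True:
        with urllib.request.urlopen(fetch_url) as response:
            tags.update(json.load(response)["tags"])
            link_header = response.headers.get("Link")
        if link_header is None:
            break  # no more pages
        rel_next = parse_link_rel_next(link_header)
        if rel_next is None or urllib.parse.urlparse(rel_next).scheme not in ("https", ""):
            break  # refuse to follow missing, malformed, or downgraded links
        fetch_url = urllib.parse.urljoin(fetch_url, rel_next)
    return sorted(tags)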


@@ -398,7 +398,7 @@ def create_opener():
     opener = urllib.request.OpenerDirector()
     for handler in [
         urllib.request.UnknownHandler(),
-        urllib.request.HTTPSHandler(context=spack.util.web.ssl_create_default_context()),
+        urllib.request.HTTPSHandler(),
         spack.util.web.SpackHTTPDefaultErrorHandler(),
         urllib.request.HTTPRedirectHandler(),
         urllib.request.HTTPErrorProcessor(),

@@ -418,27 +418,18 @@ def ensure_status(request: urllib.request.Request, response: HTTPResponse, status):
     )


-def default_retry(f, retries: int = 5, sleep=None):
+def default_retry(f, retries: int = 3, sleep=None):
     sleep = sleep or time.sleep

     def wrapper(*args, **kwargs):
         for i in range(retries):
             try:
                 return f(*args, **kwargs)
-            except (urllib.error.URLError, TimeoutError) as e:
+            except urllib.error.HTTPError as e:
                 # Retry on internal server errors, and rate limit errors
                 # Potentially this could take into account the Retry-After header
                 # if registries support it
-                if i + 1 != retries and (
-                    (
-                        isinstance(e, urllib.error.HTTPError)
-                        and (500 <= e.code < 600 or e.code == 429)
-                    )
-                    or (
-                        isinstance(e, urllib.error.URLError) and isinstance(e.reason, TimeoutError)
-                    )
-                    or isinstance(e, TimeoutError)
-                ):
+                if i + 1 != retries and (500 <= e.code < 600 or e.code == 429):
                     # Exponential backoff
                     sleep(2**i)
                     continue
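Both versions of default_retry implement capped exponential backoff: attempt i sleeps 2**i seconds before the next try. A self-contained sketch that retries on any URLError or timeout; the hunk's finer status-code filtering (5xx and 429 only) is omitted for brevity:

import time
import urllib.error

def default_retry(f, retries=5, sleep=time.sleep):
    def wrapper(*args, **kwargs):
        for i in range(retries):
            try:
                return f(*args, **kwargs)
            except (urllib.error.URLError, TimeoutError):
                if i + 1 == retries:
                    raise  # attempts exhausted: propagate the last error
                sleep(2**i)  # back off 1s, 2s, 4s, ...
    return wrapper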


@@ -143,7 +143,6 @@ def __init__(self):
             "12": "monterey",
             "13": "ventura",
             "14": "sonoma",
-            "15": "sequoia",
         }

         version = macos_version()

@@ -39,7 +39,6 @@
 )
 from spack.build_systems.cargo import CargoPackage
 from spack.build_systems.cmake import CMakePackage, generator
-from spack.build_systems.compiler import CompilerPackage
 from spack.build_systems.cuda import CudaPackage
 from spack.build_systems.generic import Package
 from spack.build_systems.gnu import GNUMirrorPackage


@@ -161,11 +161,7 @@ def windows_establish_runtime_linkage(self):
         Performs symlinking to incorporate rpath dependencies to Windows runtime search paths
         """
-        # If spec is an external, we should not be modifying its bin directory, as we would
-        # be doing in this method
-        # Spack should in general not modify things it has not installed
-        # we can reasonably expect externals to have their link interface properly established
-        if sys.platform == "win32" and not self.spec.external:
+        if sys.platform == "win32":
             self.win_rpath.add_library_dependent(*self.win_add_library_dependent())
             self.win_rpath.add_rpath(*self.win_add_rpath())
             self.win_rpath.establish_link()

@@ -199,10 +195,10 @@ def __init__(cls, name, bases, attr_dict):
         # assumed to be detectable
         if hasattr(cls, "executables") or hasattr(cls, "libraries"):
             # Append a tag to each detectable package, so that finding them is faster
-            if not hasattr(cls, "tags"):
+            if hasattr(cls, "tags"):
+                getattr(cls, "tags").append(DetectablePackageMeta.TAG)
+            else:
                 setattr(cls, "tags", [DetectablePackageMeta.TAG])
-            elif DetectablePackageMeta.TAG not in cls.tags:
-                cls.tags.append(DetectablePackageMeta.TAG)

         @classmethod
         def platform_executables(cls):
@@ -1119,9 +1115,10 @@ def _make_stage(self):
             if not link_format:
                 link_format = "build-{arch}-{hash:7}"
             stage_link = self.spec.format_path(link_format)
-            source_stage = DevelopStage(compute_stage_name(self.spec), dev_path, stage_link)
-        else:
-            source_stage = self._make_root_stage(self.fetcher)
+            return DevelopStage(compute_stage_name(self.spec), dev_path, stage_link)
+
+        # To fetch the current version
+        source_stage = self._make_root_stage(self.fetcher)

         # all_stages is source + resources + patches
         all_stages = StageComposite()

@@ -1243,7 +1240,7 @@ def install_test_root(self):
         """Return the install test root directory."""
         tty.warn(
             "The 'pkg.install_test_root' property is deprecated with removal "
-            "expected v0.23. Use 'install_test_root(pkg)' instead."
+            "expected v0.22. Use 'install_test_root(pkg)' instead."
         )
         return install_test_root(self)

@@ -1450,8 +1447,10 @@ def do_fetch(self, mirror_only=False):
             return

         checksum = spack.config.get("config:checksum")
+        fetch = self.stage.needs_fetching
         if (
             checksum
+            and fetch
             and (self.version not in self.versions)
             and (not isinstance(self.version, GitVersion))
         ):

@@ -1558,11 +1557,13 @@ def do_patch(self):
                 tty.debug("Patching failed last time. Restaging.")
                 self.stage.restage()
             else:
-                # develop specs may have patch failures but should never be restaged
-                tty.warn(
-                    f"A patch failure was detected in {self.name}."
-                    " Build errors may occur due to this."
+                # develop specs/ DIYStages may have patch failures but
+                # should never be restaged
+                msg = (
+                    "A patch failure was detected in %s." % self.name
+                    + " Build errors may occur due to this."
                 )
+                tty.warn(msg)
                 return

         # If this file exists, then we already applied all the patches.

@@ -1876,10 +1877,7 @@ def do_install(self, **kwargs):
             verbose (bool): Display verbose build output (by default,
                 suppresses it)
         """
-        explicit = kwargs.get("explicit", True)
-        if isinstance(explicit, bool):
-            kwargs["explicit"] = {self.spec.dag_hash()} if explicit else set()
-        PackageInstaller([self], kwargs).install()
+        PackageInstaller([(self, kwargs)]).install()

     # TODO (post-34236): Update tests and all packages that use this as a
     # TODO (post-34236): package method to the routine made available to

@@ -1900,7 +1898,7 @@ def cache_extra_test_sources(self, srcs):
         """
         msg = (
             "'pkg.cache_extra_test_sources(srcs) is deprecated with removal "
-            "expected in v0.23. Use 'cache_extra_test_sources(pkg, srcs)' "
+            "expected in v0.22. Use 'cache_extra_test_sources(pkg, srcs)' "
             "instead."
         )
         warnings.warn(msg)
@@ -2448,18 +2446,9 @@ def rpath(self):
         # on Windows, libraries of runtime interest are typically
         # stored in the bin directory
-        # Do not include Windows system libraries in the rpath interface
-        # these libraries are handled automatically by VS/VCVARS and adding
-        # Spack derived system libs into the link path or address space of a program
-        # can result in conflicting versions, which makes Spack packages less useable
         if sys.platform == "win32":
             rpaths = [self.prefix.bin]
-            rpaths.extend(
-                d.prefix.bin
-                for d in deps
-                if os.path.isdir(d.prefix.bin)
-                and "windows-system" not in getattr(d.package, "tags", [])
-            )
+            rpaths.extend(d.prefix.bin for d in deps if os.path.isdir(d.prefix.bin))
         else:
             rpaths = [self.prefix.lib, self.prefix.lib64]
             rpaths.extend(d.prefix.lib for d in deps if os.path.isdir(d.prefix.lib))

@@ -2566,12 +2555,7 @@ class PackageStillNeededError(InstallError):
     """Raised when package is still needed by another on uninstall."""

     def __init__(self, spec, dependents):
-        spec_fmt = spack.spec.DEFAULT_FORMAT + " /{hash:7}"
-        dep_fmt = "{name}{@versions} /{hash:7}"
-        super().__init__(
-            f"Cannot uninstall {spec.format(spec_fmt)}, "
-            f"needed by {[dep.format(dep_fmt) for dep in dependents]}"
-        )
+        super().__init__("Cannot uninstall %s" % spec)
         self.spec = spec
         self.dependents = dependents


@@ -10,7 +10,6 @@
 import archspec.cpu

 import llnl.util.tty as tty
-from llnl.util.symlink import readlink

 import spack.target
 import spack.version

@@ -134,7 +133,7 @@ def craype_type_and_version(cls):
     # Take the default version from known symlink path
     default_path = os.path.join(craype_dir, "default")
     if os.path.islink(default_path):
-        version = spack.version.Version(readlink(default_path))
+        version = spack.version.Version(os.readlink(default_path))
         return (craype_type, version)

     # If no default version, sort available versions and return latest


@@ -16,7 +16,7 @@
 import llnl.util.lang
 import llnl.util.tty as tty
 from llnl.util.lang import memoized
-from llnl.util.symlink import readlink, symlink
+from llnl.util.symlink import symlink

 import spack.paths
 import spack.platforms

@@ -25,7 +25,6 @@
 import spack.store
 import spack.util.elf as elf
 import spack.util.executable as executable
-import spack.util.path

 from .relocate_text import BinaryFilePrefixReplacer, TextFilePrefixReplacer

@@ -566,7 +565,7 @@ def make_link_relative(new_links, orig_links):
         orig_links (list): original links
     """
     for new_link, orig_link in zip(new_links, orig_links):
-        target = readlink(orig_link)
+        target = os.readlink(orig_link)
         relative_target = os.path.relpath(target, os.path.dirname(orig_link))
         os.unlink(new_link)
         symlink(relative_target, new_link)

@@ -614,7 +613,7 @@ def relocate_links(links, prefix_to_prefix):
     """Relocate links to a new install prefix."""
     regex = re.compile("|".join(re.escape(p) for p in prefix_to_prefix.keys()))
     for link in links:
-        old_target = readlink(link)
+        old_target = os.readlink(link)
         match = regex.match(old_target)

         # No match.

@@ -241,7 +241,7 @@ def get_all_package_diffs(type, rev1="HEAD^1", rev2="HEAD"):

     Arguments:
-        type (str): String containing one or more of 'A', 'R', 'C'
+        type (str): String containing one or more of 'A', 'B', 'C'
         rev1 (str): Revision to compare against, default is 'HEAD^'
         rev2 (str): Revision to compare to rev1, default is 'HEAD'

@@ -264,7 +264,7 @@ def get_all_package_diffs(type, rev1="HEAD^1", rev2="HEAD"):
     lines = [] if not out else re.split(r"\s+", out)

     changed = set()
     for path in lines:
-        pkg_name, _, _ = path.partition("/")
+        pkg_name, _, _ = path.partition(os.sep)
         if pkg_name not in added and pkg_name not in removed:
             changed.add(pkg_name)


@@ -27,7 +27,7 @@
 from spack.error import SpackError
 from spack.util.crypto import checksum
 from spack.util.log_parse import parse_log_events
-from spack.util.web import ssl_create_default_context
+from spack.util.web import urllib_ssl_cert_handler

 from .base import Reporter
 from .extract import extract_test_parts

@@ -58,8 +58,7 @@
 # Initialize data structures common to each phase's report.
 CDASH_PHASES = set(MAP_PHASES_TO_CDASH.values())
 CDASH_PHASES.add("update")

-# CDash request timeout in seconds
-SPACK_CDASH_TIMEOUT = 45

 CDashConfiguration = collections.namedtuple(
     "CDashConfiguration", ["upload_url", "packages", "build", "site", "buildstamp", "track"]
 )

@@ -429,7 +428,7 @@ def upload(self, filename):
         # Compute md5 checksum for the contents of this file.
         md5sum = checksum(hashlib.md5, filename, block_size=8192)

-        opener = build_opener(HTTPSHandler(context=ssl_create_default_context()))
+        opener = build_opener(HTTPSHandler(context=urllib_ssl_cert_handler()))
         with open(filename, "rb") as f:
             params_dict = {
                 "build": self.buildname,

@@ -448,7 +447,7 @@ def upload(self, filename):
         # By default, urllib2 only support GET and POST.
         # CDash expects this file to be uploaded via PUT.
         request.get_method = lambda: "PUT"
-        response = opener.open(request, timeout=SPACK_CDASH_TIMEOUT)
+        response = opener.open(request)
         if self.current_package_name not in self.buildIds:
             resp_value = response.read()
             if isinstance(resp_value, bytes):


@@ -9,7 +9,7 @@
 import tempfile
 from collections import OrderedDict

-from llnl.util.symlink import readlink, symlink
+from llnl.util.symlink import symlink

 import spack.binary_distribution as bindist
 import spack.error

@@ -26,7 +26,7 @@ def _relocate_spliced_links(links, orig_prefix, new_prefix):
     in our case. This still needs to be called after the copy to destination
     because it expects the new directory structure to be in place."""
     for link in links:
-        link_target = readlink(os.path.join(orig_prefix, link))
+        link_target = os.readlink(os.path.join(orig_prefix, link))
         link_target = re.sub("^" + orig_prefix, new_prefix, link_target)
         new_link_path = os.path.join(new_prefix, link)
         os.unlink(new_link_path)
