Compare commits

2 commits (packages/l... → ci/find-bo...):

  56ca5b253a
  09c6243d45

.github/workflows/audit.yaml (4 changes, vendored)

@@ -28,7 +28,7 @@ jobs:
       run:
         shell: ${{ matrix.system.shell }}
     steps:
-      - uses: actions/checkout@44c2b7a8a4ea60a981eaca3cf939b5f4305c123b
+      - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
       - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
         with:
           python-version: ${{inputs.python_version}}
@@ -61,7 +61,7 @@ jobs:
           ./share/spack/qa/validate_last_exit.ps1
           spack -d audit externals
           ./share/spack/qa/validate_last_exit.ps1
-      - uses: codecov/codecov-action@5ecb98a3c6b747ed38dc09f787459979aebb39be
+      - uses: codecov/codecov-action@84508663e988701840491b86de86b666e8a86bed
        if: ${{ inputs.with_coverage == 'true' }}
        with:
          flags: unittests,audits

.github/workflows/bootstrap-test.sh (5 changes, vendored)

@@ -1,8 +1,7 @@
 #!/bin/bash
-set -e
+set -ex
 source share/spack/setup-env.sh
-$PYTHON bin/spack bootstrap disable github-actions-v0.4
 $PYTHON bin/spack bootstrap disable spack-install
-$PYTHON bin/spack $SPACK_FLAGS solve zlib
+$PYTHON bin/spack -d solve zlib
 tree $BOOTSTRAP/store
 exit 0

.github/workflows/bootstrap.yml (352 changes, vendored)

@@ -13,22 +13,118 @@ concurrency:
   cancel-in-progress: true
 
 jobs:
-  distros-clingo-sources:
+  fedora-clingo-sources:
     runs-on: ubuntu-latest
-    container: ${{ matrix.image }}
-    strategy:
-      matrix:
-        image: ["fedora:latest", "opensuse/leap:latest"]
+    container: "fedora:latest"
     steps:
-      - name: Setup Fedora
-        if: ${{ matrix.image == 'fedora:latest' }}
+      - name: Install dependencies
         run: |
           dnf install -y \
-            bzip2 curl file gcc-c++ gcc gcc-gfortran git gzip \
+            bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
             make patch unzip which xz python3 python3-devel tree \
             cmake bison bison-devel libstdc++-static
-      - name: Setup OpenSUSE
-        if: ${{ matrix.image == 'opensuse/leap:latest' }}
+      - name: Checkout
+        uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
+        with:
+          fetch-depth: 0
+      - name: Setup non-root user
+        run: |
+          # See [1] below
+          git config --global --add safe.directory /__w/spack/spack
+          useradd spack-test && mkdir -p ~spack-test
+          chown -R spack-test . ~spack-test
+      - name: Setup repo
+        shell: runuser -u spack-test -- bash {0}
+        run: |
+          git --version
+          . .github/workflows/setup_git.sh
+      - name: Bootstrap clingo
+        shell: runuser -u spack-test -- bash {0}
+        run: |
+          source share/spack/setup-env.sh
+          spack bootstrap disable github-actions-v0.5
+          spack bootstrap disable github-actions-v0.4
+          spack external find cmake bison
+          spack -d solve zlib
+          tree ~/.spack/bootstrap/store/
+
+  ubuntu-clingo-sources:
+    runs-on: ubuntu-latest
+    container: "ubuntu:latest"
+    steps:
+      - name: Install dependencies
+        env:
+          DEBIAN_FRONTEND: noninteractive
+        run: |
+          apt-get update -y && apt-get upgrade -y
+          apt-get install -y \
+            bzip2 curl file g++ gcc gfortran git gnupg2 gzip \
+            make patch unzip xz-utils python3 python3-dev tree \
+            cmake bison
+      - name: Checkout
+        uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
+        with:
+          fetch-depth: 0
+      - name: Setup non-root user
+        run: |
+          # See [1] below
+          git config --global --add safe.directory /__w/spack/spack
+          useradd spack-test && mkdir -p ~spack-test
+          chown -R spack-test . ~spack-test
+      - name: Setup repo
+        shell: runuser -u spack-test -- bash {0}
+        run: |
+          git --version
+          . .github/workflows/setup_git.sh
+      - name: Bootstrap clingo
+        shell: runuser -u spack-test -- bash {0}
+        run: |
+          source share/spack/setup-env.sh
+          spack bootstrap disable github-actions-v0.5
+          spack bootstrap disable github-actions-v0.4
+          spack external find cmake bison
+          spack -d solve zlib
+          tree ~/.spack/bootstrap/store/
+
+  ubuntu-clingo-binaries-and-patchelf:
+    runs-on: ubuntu-latest
+    container: "ubuntu:latest"
+    steps:
+      - name: Install dependencies
+        env:
+          DEBIAN_FRONTEND: noninteractive
+        run: |
+          apt-get update -y && apt-get upgrade -y
+          apt-get install -y \
+            bzip2 curl file g++ gcc gfortran git gnupg2 gzip \
+            make patch unzip xz-utils python3 python3-dev tree
+      - name: Checkout
+        uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
+        with:
+          fetch-depth: 0
+      - name: Setup non-root user
+        run: |
+          # See [1] below
+          git config --global --add safe.directory /__w/spack/spack
+          useradd spack-test && mkdir -p ~spack-test
+          chown -R spack-test . ~spack-test
+      - name: Setup repo
+        shell: runuser -u spack-test -- bash {0}
+        run: |
+          git --version
+          . .github/workflows/setup_git.sh
+      - name: Bootstrap clingo
+        shell: runuser -u spack-test -- bash {0}
+        run: |
+          source share/spack/setup-env.sh
+          spack -d solve zlib
+          tree ~/.spack/bootstrap/store/
+
+  opensuse-clingo-sources:
+    runs-on: ubuntu-latest
+    container: "opensuse/leap:latest"
     steps:
       - name: Install dependencies
         run: |
           # Harden CI by applying the workaround described here: https://www.suse.com/support/kb/doc/?id=000019505
           zypper update -y || zypper update -y
@@ -37,9 +133,15 @@ jobs:
           make patch unzip which xz python3 python3-devel tree \
           cmake bison
       - name: Checkout
-        uses: actions/checkout@44c2b7a8a4ea60a981eaca3cf939b5f4305c123b
+        uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
         with:
           fetch-depth: 0
       - name: Setup repo
         run: |
+          # See [1] below
+          git config --global --add safe.directory /__w/spack/spack
           git --version
           . .github/workflows/setup_git.sh
       - name: Bootstrap clingo
         run: |
           source share/spack/setup-env.sh
@@ -49,102 +151,77 @@ jobs:
           spack -d solve zlib
           tree ~/.spack/bootstrap/store/
 
-  clingo-sources:
-    runs-on: ${{ matrix.runner }}
-    strategy:
-      matrix:
-        runner: ['macos-13', 'macos-14', "ubuntu-latest"]
+  macos-clingo-sources:
+    runs-on: macos-latest
     steps:
-      - name: Setup macOS
-        if: ${{ matrix.runner != 'ubuntu-latest' }}
+      - name: Install dependencies
         run: |
-          brew install cmake bison tree
+          brew install cmake bison@2.7 tree
       - name: Checkout
-        uses: actions/checkout@44c2b7a8a4ea60a981eaca3cf939b5f4305c123b
-        with:
-          fetch-depth: 0
+        uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
       - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
         with:
          python-version: "3.12"
       - name: Bootstrap clingo
         run: |
           source share/spack/setup-env.sh
+          export PATH=/usr/local/opt/bison@2.7/bin:$PATH
           spack bootstrap disable github-actions-v0.5
           spack bootstrap disable github-actions-v0.4
           spack external find --not-buildable cmake bison
           spack -d solve zlib
           tree ~/.spack/bootstrap/store/
 
-  gnupg-sources:
-    runs-on: ${{ matrix.runner }}
+  macos-clingo-binaries:
+    runs-on: ${{ matrix.macos-version }}
     strategy:
       matrix:
-        runner: [ 'macos-13', 'macos-14', "ubuntu-latest" ]
+        macos-version: ['macos-11', 'macos-12']
     steps:
-      - name: Setup macOS
-        if: ${{ matrix.runner != 'ubuntu-latest' }}
+      - name: Install dependencies
         run: |
           brew install tree
-          # Remove GnuPG since we want to bootstrap it
-          sudo rm -rf /usr/local/bin/gpg
-      - name: Setup Ubuntu
-        if: ${{ matrix.runner == 'ubuntu-latest' }}
-        run: |
-          sudo rm -rf $(which gpg) $(which gpg2) $(which patchelf)
       - name: Checkout
-        uses: actions/checkout@44c2b7a8a4ea60a981eaca3cf939b5f4305c123b
-        with:
-          fetch-depth: 0
-      - name: Bootstrap GnuPG
-        run: |
-          source share/spack/setup-env.sh
-          spack solve zlib
-          spack bootstrap disable github-actions-v0.5
-          spack bootstrap disable github-actions-v0.4
-          spack -d gpg list
-          tree ~/.spack/bootstrap/store/
-
-  from-binaries:
-    runs-on: ${{ matrix.runner }}
-    strategy:
-      matrix:
-        runner: ['macos-13', 'macos-14', "ubuntu-latest"]
-    steps:
-      - name: Setup macOS
-        if: ${{ matrix.runner != 'ubuntu-latest' }}
-        run: |
-          brew install tree
-          # Remove GnuPG since we want to bootstrap it
-          sudo rm -rf /usr/local/bin/gpg
-      - name: Setup Ubuntu
-        if: ${{ matrix.runner == 'ubuntu-latest' }}
-        run: |
-          sudo rm -rf $(which gpg) $(which gpg2) $(which patchelf)
-      - name: Checkout
-        uses: actions/checkout@44c2b7a8a4ea60a981eaca3cf939b5f4305c123b
-        with:
-          fetch-depth: 0
-      - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
-        with:
-          python-version: |
-            3.8
-            3.9
-            3.10
-            3.11
-            3.12
-      - name: Set bootstrap sources
-        run: |
-          source share/spack/setup-env.sh
-          spack bootstrap disable github-actions-v0.4
-          spack bootstrap disable spack-install
+        uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
       - name: Bootstrap clingo
         run: |
-          set -e
-          for ver in '3.8' '3.9' '3.10' '3.11' '3.12' ; do
+          set -ex
+          for ver in '3.7' '3.8' '3.9' '3.10' '3.11' ; do
             not_found=1
             ver_dir="$(find $RUNNER_TOOL_CACHE/Python -wholename "*/${ver}.*/*/bin" | grep . || true)"
             echo "Testing $ver_dir"
             if [[ -d "$ver_dir" ]] ; then
               if $ver_dir/python --version ; then
                 export PYTHON="$ver_dir/python"
                 not_found=0
-                old_path="$PATH"
-                export PATH="$ver_dir:$PATH"
                 ./bin/spack-tmpconfig -b ./.github/workflows/bootstrap-test.sh
-                export PATH="$old_path"
               fi
             fi
-            # NOTE: test all pythons that exist, not all do on 12
           done
 
+  ubuntu-clingo-binaries:
+    runs-on: ubuntu-20.04
+    steps:
+      - name: Checkout
+        uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
+        with:
+          fetch-depth: 0
+      - name: Setup repo
+        run: |
+          git --version
+          . .github/workflows/setup_git.sh
+      - name: Bootstrap clingo
+        run: |
+          set -ex
+          for ver in '3.7' '3.8' '3.9' '3.10' '3.11' ; do
+            not_found=1
+            ver_dir="$(find $RUNNER_TOOL_CACHE/Python -wholename "*/${ver}.*/*/bin" | grep . || true)"
+            echo "Testing $ver_dir"
+            if [[ -d "$ver_dir" ]] ; then
+              echo "Testing $ver_dir"
+              if $ver_dir/python --version ; then
+                export PYTHON="$ver_dir/python"
+                not_found=0
@@ -159,9 +236,122 @@ jobs:
             exit 1
           fi
           done
 
+  ubuntu-gnupg-binaries:
+    runs-on: ubuntu-latest
+    container: "ubuntu:latest"
+    steps:
+      - name: Install dependencies
+        env:
+          DEBIAN_FRONTEND: noninteractive
+        run: |
+          apt-get update -y && apt-get upgrade -y
+          apt-get install -y \
+            bzip2 curl file g++ gcc patchelf gfortran git gzip \
+            make patch unzip xz-utils python3 python3-dev tree
+      - name: Checkout
+        uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
+        with:
+          fetch-depth: 0
+      - name: Setup non-root user
+        run: |
+          # See [1] below
+          git config --global --add safe.directory /__w/spack/spack
+          useradd spack-test && mkdir -p ~spack-test
+          chown -R spack-test . ~spack-test
+      - name: Setup repo
+        shell: runuser -u spack-test -- bash {0}
+        run: |
+          git --version
+          . .github/workflows/setup_git.sh
+      - name: Bootstrap GnuPG
+        shell: runuser -u spack-test -- bash {0}
+        run: |
+          source share/spack/setup-env.sh
+          spack bootstrap disable github-actions-v0.4
+          spack bootstrap disable spack-install
+          spack -d gpg list
+          tree ~/.spack/bootstrap/store/
+
+  ubuntu-gnupg-sources:
+    runs-on: ubuntu-latest
+    container: "ubuntu:latest"
+    steps:
+      - name: Install dependencies
+        env:
+          DEBIAN_FRONTEND: noninteractive
+        run: |
+          apt-get update -y && apt-get upgrade -y
+          apt-get install -y \
+            bzip2 curl file g++ gcc patchelf gfortran git gzip \
+            make patch unzip xz-utils python3 python3-dev tree \
+            gawk
+      - name: Checkout
+        uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
+        with:
+          fetch-depth: 0
+      - name: Setup non-root user
+        run: |
+          # See [1] below
+          git config --global --add safe.directory /__w/spack/spack
+          useradd spack-test && mkdir -p ~spack-test
+          chown -R spack-test . ~spack-test
+      - name: Setup repo
+        shell: runuser -u spack-test -- bash {0}
+        run: |
+          git --version
+          . .github/workflows/setup_git.sh
+      - name: Bootstrap GnuPG
+        shell: runuser -u spack-test -- bash {0}
+        run: |
+          source share/spack/setup-env.sh
+          spack solve zlib
+          spack bootstrap disable github-actions-v0.5
+          spack bootstrap disable github-actions-v0.4
+          spack -d gpg list
+          tree ~/.spack/bootstrap/store/
+
+  macos-gnupg-binaries:
+    runs-on: macos-latest
+    steps:
+      - name: Install dependencies
+        run: |
+          brew install tree
+          # Remove GnuPG since we want to bootstrap it
+          sudo rm -rf /usr/local/bin/gpg
+      - name: Checkout
+        uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
+      - name: Bootstrap GnuPG
+        run: |
+          source share/spack/setup-env.sh
+          spack bootstrap disable github-actions-v0.4
+          spack bootstrap disable spack-install
+          spack -d gpg list
+          tree ~/.spack/bootstrap/store/
+
+  macos-gnupg-sources:
+    runs-on: macos-latest
+    steps:
+      - name: Install dependencies
+        run: |
+          brew install gawk tree
+          # Remove GnuPG since we want to bootstrap it
+          sudo rm -rf /usr/local/bin/gpg
+      - name: Checkout
+        uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
+      - name: Bootstrap GnuPG
+        run: |
+          source share/spack/setup-env.sh
+          spack solve zlib
+          spack bootstrap disable github-actions-v0.5
+          spack bootstrap disable github-actions-v0.4
+          spack -d gpg list
+          tree ~/.spack/bootstrap/store/
+
 
 # [1] Distros that have patched git to resolve CVE-2022-24765 (e.g. Ubuntu patching v2.25.1)
 # introduce breaking behavior so we have to set `safe.directory` in gitconfig ourselves.
 # See:
 # - https://github.blog/2022-04-12-git-security-vulnerability-announced/
 # - https://github.com/actions/checkout/issues/760
 # - http://changelogs.ubuntu.com/changelogs/pool/main/g/git/git_2.25.1-1ubuntu3.3/changelog

.github/workflows/build-containers.yml (20 changes, vendored)

@@ -45,18 +45,19 @@ jobs:
           [leap15, 'linux/amd64,linux/arm64,linux/ppc64le', 'opensuse/leap:15'],
           [ubuntu-focal, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:20.04'],
           [ubuntu-jammy, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:22.04'],
-          [ubuntu-noble, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:24.04'],
           [almalinux8, 'linux/amd64,linux/arm64,linux/ppc64le', 'almalinux:8'],
           [almalinux9, 'linux/amd64,linux/arm64,linux/ppc64le', 'almalinux:9'],
           [rockylinux8, 'linux/amd64,linux/arm64', 'rockylinux:8'],
           [rockylinux9, 'linux/amd64,linux/arm64', 'rockylinux:9'],
+          [fedora37, 'linux/amd64,linux/arm64,linux/ppc64le', 'fedora:37'],
+          [fedora38, 'linux/amd64,linux/arm64,linux/ppc64le', 'fedora:38'],
           [fedora39, 'linux/amd64,linux/arm64,linux/ppc64le', 'fedora:39'],
           [fedora40, 'linux/amd64,linux/arm64,linux/ppc64le', 'fedora:40']]
     name: Build ${{ matrix.dockerfile[0] }}
     if: github.repository == 'spack/spack'
     steps:
       - name: Checkout
-        uses: actions/checkout@44c2b7a8a4ea60a981eaca3cf939b5f4305c123b
+        uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
 
       - uses: docker/metadata-action@8e5442c4ef9f78752691e2d8f8d19755c6f78e81
         id: docker_meta
@@ -88,9 +89,9 @@ jobs:
           fi
 
       - name: Upload Dockerfile
-        uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808
+        uses: actions/upload-artifact@a8a3f3ad30e3422c9c7b888a15615d19a852ae32
         with:
-          name: dockerfiles_${{ matrix.dockerfile[0] }}
+          name: dockerfiles
           path: dockerfiles
 
       - name: Set up QEMU
@@ -121,14 +122,3 @@ jobs:
           push: ${{ github.event_name != 'pull_request' }}
           tags: ${{ steps.docker_meta.outputs.tags }}
           labels: ${{ steps.docker_meta.outputs.labels }}
-
-  merge-dockerfiles:
-    runs-on: ubuntu-latest
-    needs: deploy-images
-    steps:
-      - name: Merge Artifacts
-        uses: actions/upload-artifact/merge@65462800fd760344b1a7b4382951275a0abb4808
-        with:
-          name: dockerfiles
-          pattern: dockerfiles_*
-          delete-merged: true

.github/workflows/ci.yaml (2 changes, vendored)

@@ -36,7 +36,7 @@ jobs:
       core: ${{ steps.filter.outputs.core }}
       packages: ${{ steps.filter.outputs.packages }}
     steps:
-      - uses: actions/checkout@44c2b7a8a4ea60a981eaca3cf939b5f4305c123b
+      - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
        if: ${{ github.event_name == 'push' }}
        with:
          fetch-depth: 0

.github/workflows/nightly-win-builds.yml (2 changes, vendored)

@@ -14,7 +14,7 @@ jobs:
   build-paraview-deps:
     runs-on: windows-latest
     steps:
-      - uses: actions/checkout@44c2b7a8a4ea60a981eaca3cf939b5f4305c123b
+      - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
        with:
          fetch-depth: 0
      - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d

.github/workflows/style/requirements.txt (2 changes, vendored)

@@ -1,4 +1,4 @@
-black==24.4.2
+black==24.4.0
 clingo==5.7.1
 flake8==7.0.0
 isort==5.13.2

.github/workflows/unit_tests.yaml (20 changes, vendored)

@@ -51,7 +51,7 @@ jobs:
           on_develop: false
 
     steps:
-      - uses: actions/checkout@44c2b7a8a4ea60a981eaca3cf939b5f4305c123b
+      - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
        with:
          fetch-depth: 0
      - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
@@ -91,7 +91,7 @@ jobs:
          UNIT_TEST_COVERAGE: ${{ matrix.python-version == '3.11' }}
        run: |
          share/spack/qa/run-unit-tests
-      - uses: codecov/codecov-action@5ecb98a3c6b747ed38dc09f787459979aebb39be
+      - uses: codecov/codecov-action@84508663e988701840491b86de86b666e8a86bed
        with:
          flags: unittests,linux,${{ matrix.concretizer }}
          token: ${{ secrets.CODECOV_TOKEN }}
@@ -100,7 +100,7 @@ jobs:
   shell:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@44c2b7a8a4ea60a981eaca3cf939b5f4305c123b
+      - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
        with:
          fetch-depth: 0
      - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
@@ -124,7 +124,7 @@ jobs:
          COVERAGE: true
        run: |
          share/spack/qa/run-shell-tests
-      - uses: codecov/codecov-action@5ecb98a3c6b747ed38dc09f787459979aebb39be
+      - uses: codecov/codecov-action@84508663e988701840491b86de86b666e8a86bed
        with:
          flags: shelltests,linux
          token: ${{ secrets.CODECOV_TOKEN }}
@@ -141,7 +141,7 @@ jobs:
          dnf install -y \
            bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
            make patch tcl unzip which xz
-      - uses: actions/checkout@44c2b7a8a4ea60a981eaca3cf939b5f4305c123b
+      - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
      - name: Setup repo and non-root user
        run: |
          git --version
@@ -160,7 +160,7 @@ jobs:
   clingo-cffi:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@44c2b7a8a4ea60a981eaca3cf939b5f4305c123b
+      - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
        with:
          fetch-depth: 0
      - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
@@ -185,7 +185,7 @@ jobs:
          SPACK_TEST_SOLVER: clingo
        run: |
          share/spack/qa/run-unit-tests
-      - uses: codecov/codecov-action@5ecb98a3c6b747ed38dc09f787459979aebb39be
+      - uses: codecov/codecov-action@84508663e988701840491b86de86b666e8a86bed
        with:
          flags: unittests,linux,clingo
          token: ${{ secrets.CODECOV_TOKEN }}
@@ -195,10 +195,10 @@ jobs:
     runs-on: ${{ matrix.os }}
     strategy:
       matrix:
-        os: [macos-13, macos-14]
+        os: [macos-latest, macos-14]
         python-version: ["3.11"]
     steps:
-      - uses: actions/checkout@44c2b7a8a4ea60a981eaca3cf939b5f4305c123b
+      - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
        with:
          fetch-depth: 0
      - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
@@ -223,7 +223,7 @@ jobs:
          $(which spack) solve zlib
          common_args=(--dist loadfile --tx '4*popen//python=./bin/spack-tmpconfig python -u ./bin/spack python' -x)
          $(which spack) unit-test --verbose --cov --cov-config=pyproject.toml --cov-report=xml:coverage.xml "${common_args[@]}"
-      - uses: codecov/codecov-action@5ecb98a3c6b747ed38dc09f787459979aebb39be
+      - uses: codecov/codecov-action@84508663e988701840491b86de86b666e8a86bed
        with:
          flags: unittests,macos
          token: ${{ secrets.CODECOV_TOKEN }}

.github/workflows/valid-style.yml (6 changes, vendored)

@@ -18,7 +18,7 @@ jobs:
   validate:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@44c2b7a8a4ea60a981eaca3cf939b5f4305c123b
+      - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
      - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
        with:
          python-version: '3.11'
@@ -35,7 +35,7 @@ jobs:
   style:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@44c2b7a8a4ea60a981eaca3cf939b5f4305c123b
+      - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
        with:
          fetch-depth: 0
      - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
@@ -70,7 +70,7 @@ jobs:
          dnf install -y \
            bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
            make patch tcl unzip which xz
-      - uses: actions/checkout@44c2b7a8a4ea60a981eaca3cf939b5f4305c123b
+      - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
      - name: Setup repo and non-root user
        run: |
          git --version

.github/workflows/windows_python.yml (10 changes, vendored)

@@ -15,7 +15,7 @@ jobs:
   unit-tests:
     runs-on: windows-latest
     steps:
-      - uses: actions/checkout@44c2b7a8a4ea60a981eaca3cf939b5f4305c123b
+      - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
        with:
          fetch-depth: 0
      - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
@@ -33,7 +33,7 @@ jobs:
          ./share/spack/qa/validate_last_exit.ps1
          coverage combine -a
          coverage xml
-      - uses: codecov/codecov-action@5ecb98a3c6b747ed38dc09f787459979aebb39be
+      - uses: codecov/codecov-action@84508663e988701840491b86de86b666e8a86bed
        with:
          flags: unittests,windows
          token: ${{ secrets.CODECOV_TOKEN }}
@@ -41,7 +41,7 @@ jobs:
   unit-tests-cmd:
     runs-on: windows-latest
     steps:
-      - uses: actions/checkout@44c2b7a8a4ea60a981eaca3cf939b5f4305c123b
+      - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
        with:
          fetch-depth: 0
      - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
@@ -59,7 +59,7 @@ jobs:
          ./share/spack/qa/validate_last_exit.ps1
          coverage combine -a
          coverage xml
-      - uses: codecov/codecov-action@5ecb98a3c6b747ed38dc09f787459979aebb39be
+      - uses: codecov/codecov-action@84508663e988701840491b86de86b666e8a86bed
        with:
          flags: unittests,windows
          token: ${{ secrets.CODECOV_TOKEN }}
@@ -67,7 +67,7 @@ jobs:
   build-abseil:
     runs-on: windows-latest
     steps:
-      - uses: actions/checkout@44c2b7a8a4ea60a981eaca3cf939b5f4305c123b
+      - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
        with:
          fetch-depth: 0
      - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d

CHANGELOG.md (45 changes)

@@ -1,48 +1,3 @@
-# v0.21.2 (2024-03-01)
-
-## Bugfixes
-
-- Containerize: accommodate nested or pre-existing spack-env paths (#41558)
-- Fix setup-env script, when going back and forth between instances (#40924)
-- Fix using fully-qualified namespaces from root specs (#41957)
-- Fix a bug when a required provider is requested for multiple virtuals (#42088)
-- OCI buildcaches:
-  - only push in parallel when forking (#42143)
-  - use pickleable errors (#42160)
-- Fix using sticky variants in externals (#42253)
-- Fix a rare issue with conditional requirements and multi-valued variants (#42566)
-
-## Package updates
-- rust: add v1.75, rework a few variants (#41161,#41903)
-- py-transformers: add v4.35.2 (#41266)
-- mgard: fix OpenMP on AppleClang (#42933)
-
-# v0.21.1 (2024-01-11)
-
-## New features
-- Add support for reading buildcaches created by Spack v0.22 (#41773)
-
-## Bugfixes
-
-- spack graph: fix coloring with environments (#41240)
-- spack info: sort variants in --variants-by-name (#41389)
-- Spec.format: error on old style format strings (#41934)
-- ASP-based solver:
-  - fix infinite recursion when computing concretization errors (#41061)
-  - don't error for type mismatch on preferences (#41138)
-  - don't emit spurious debug output (#41218)
-- Improve the error message for deprecated preferences (#41075)
-- Fix MSVC preview version breaking clingo build on Windows (#41185)
-- Fix multi-word aliases (#41126)
-- Add a warning for unconfigured compiler (#41213)
-- environment: fix an issue with deconcretization/reconcretization of specs (#41294)
-- buildcache: don't error if a patch is missing, when installing from binaries (#41986)
-- Multiple improvements to unit-tests (#41215,#41369,#41495,#41359,#41361,#41345,#41342,#41308,#41226)
-
-## Package updates
-- root: add a webgui patch to address security issue (#41404)
-- BerkeleyGW: update source urls (#38218)
-
 # v0.21.0 (2023-11-11)
 
 `v0.21.0` is a major feature release.

@@ -1,19 +0,0 @@
-# -------------------------------------------------------------------------
-# This file controls default concretization preferences for Spack.
-#
-# Settings here are versioned with Spack and are intended to provide
-# sensible defaults out of the box. Spack maintainers should edit this
-# file to keep it current.
-#
-# Users can override these settings by editing the following files.
-#
-# Per-spack-instance settings (overrides defaults):
-#   $SPACK_ROOT/etc/spack/packages.yaml
-#
-# Per-user settings (overrides default and site settings):
-#   ~/.spack/packages.yaml
-# -------------------------------------------------------------------------
-packages:
-  all:
-    providers:
-      iconv: [glibc, musl, libiconv]

@@ -1,19 +0,0 @@
-# -------------------------------------------------------------------------
-# This file controls default concretization preferences for Spack.
-#
-# Settings here are versioned with Spack and are intended to provide
-# sensible defaults out of the box. Spack maintainers should edit this
-# file to keep it current.
-#
-# Users can override these settings by editing the following files.
-#
-# Per-spack-instance settings (overrides defaults):
-#   $SPACK_ROOT/etc/spack/packages.yaml
-#
-# Per-user settings (overrides default and site settings):
-#   ~/.spack/packages.yaml
-# -------------------------------------------------------------------------
-packages:
-  all:
-    providers:
-      iconv: [glibc, musl, libiconv]

@@ -18,7 +18,6 @@ packages:
     compiler: [gcc, clang, oneapi, xl, nag, fj, aocc]
     providers:
       awk: [gawk]
-      armci: [armcimpi]
       blas: [openblas, amdblis]
       D: [ldc]
       daal: [intel-oneapi-daal]

lib/spack/docs/_templates/layout.html (12 changes, vendored)

@@ -1,12 +0,0 @@
-{% extends "!layout.html" %}
-
-{%- block extrahead %}
-  <!-- Google tag (gtag.js) -->
-  <script async src="https://www.googletagmanager.com/gtag/js?id=G-S0PQ7WV75K"></script>
-  <script>
-    window.dataLayer = window.dataLayer || [];
-    function gtag(){dataLayer.push(arguments);}
-    gtag('js', new Date());
-    gtag('config', 'G-S0PQ7WV75K');
-  </script>
-{% endblock %}

@@ -865,7 +865,7 @@ There are several different ways to use Spack packages once you have
 installed them. As you've seen, spack packages are installed into long
 paths with hashes, and you need a way to get them into your path. The
 easiest way is to use :ref:`spack load <cmd-spack-load>`, which is
-described in this section.
+described in the next section.
 
 Some more advanced ways to use Spack packages include:
 
@@ -959,86 +959,7 @@ use ``spack find --loaded``.
 You can also use ``spack load --list`` to get the same output, but it
 does not have the full set of query options that ``spack find`` offers.
 
-We'll learn more about Spack's spec syntax in :ref:`a later section <sec-specs>`.
-
-
-.. _extensions:
-
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-Python packages and virtual environments
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-Spack can install a large number of Python packages. Their names are
-typically prefixed with ``py-``. Installing and using them is no
-different from any other package:
-
-.. code-block:: console
-
-   $ spack install py-numpy
-   $ spack load py-numpy
-   $ python3
-   >>> import numpy
-
-The ``spack load`` command sets the ``PATH`` variable so that the right Python
-executable is used, and makes sure that ``numpy`` and its dependencies can be
-located in the ``PYTHONPATH``.
-
-Spack is different from other Python package managers in that it installs
-every package into its *own* prefix. This is in contrast to ``pip``, which
-installs all packages into the same prefix, be it in a virtual environment
-or not.
-
-For many users, **virtual environments** are more convenient than repeated
-``spack load`` commands, particularly when working with multiple Python
-packages. Fortunately Spack supports environments itself, which together
-with a view are no different from Python virtual environments.
-
-The recommended way of working with Python extensions such as ``py-numpy``
-is through :ref:`Environments <environments>`. The following example creates
-a Spack environment with ``numpy`` in the current working directory. It also
-puts a filesystem view in ``./view``, which is a more traditional combined
-prefix for all packages in the environment.
-
-.. code-block:: console
-
-   $ spack env create --with-view view --dir .
-   $ spack -e . add py-numpy
-   $ spack -e . concretize
-   $ spack -e . install
-
-Now you can activate the environment and start using the packages:
-
-.. code-block:: console
-
-   $ spack env activate .
-   $ python3
-   >>> import numpy
-
-The environment view is also a virtual environment, which is useful if you are
-sharing the environment with others who are unfamiliar with Spack. They can
-either use the Python executable directly:
-
-.. code-block:: console
-
-   $ ./view/bin/python3
-   >>> import numpy
-
-or use the activation script:
-
-.. code-block:: console
-
-   $ source ./view/bin/activate
-   $ python3
-   >>> import numpy
-
-In general, there should not be much difference between ``spack env activate``
-and using the virtual environment. The main advantage of ``spack env activate``
-is that it knows about more packages than just Python packages, and it may set
-additional runtime variables that are not covered by the virtual environment
-activation script.
-
-See :ref:`environments` for a more in-depth description of Spack
-environments and customizations to views.
+We'll learn more about Spack's spec syntax in the next section.
 
 
 .. _sec-specs:
@@ -1784,6 +1705,165 @@ check only local packages (as opposed to those used transparently from
 ``upstream`` spack instances) and the ``-j,--json`` option to output
 machine-readable json data for any errors.
 
+
+.. _extensions:
+
+---------------------------
+Extensions & Python support
+---------------------------
+
+Spack's installation model assumes that each package will live in its
+own install prefix. However, certain packages are typically installed
+*within* the directory hierarchy of other packages. For example,
+`Python <https://www.python.org>`_ packages are typically installed in the
+``$prefix/lib/python-2.7/site-packages`` directory.
+
+In Spack, installation prefixes are immutable, so this type of installation
+is not directly supported. However, it is possible to create views that
+allow you to merge install prefixes of multiple packages into a single new prefix.
+Views are a convenient way to get a more traditional filesystem structure.
+Using *extensions*, you can ensure that Python packages always share the
+same prefix in the view as Python itself. Suppose you have
+Python installed like so:
+
+.. code-block:: console
+
+   $ spack find python
+   ==> 1 installed packages.
+   -- linux-debian7-x86_64 / gcc@4.4.7 --------------------------------
+   python@2.7.8
+
+.. _cmd-spack-extensions:
+
+^^^^^^^^^^^^^^^^^^^^
+``spack extensions``
+^^^^^^^^^^^^^^^^^^^^
+
+You can find extensions for your Python installation like this:
+
+.. code-block:: console
+
+   $ spack extensions python
+   ==> python@2.7.8%gcc@4.4.7 arch=linux-debian7-x86_64-703c7a96
+   ==> 36 extensions:
+   geos          py-ipython     py-pexpect    py-pyside            py-sip
+   py-basemap    py-libxml2     py-pil        py-pytz              py-six
+   py-biopython  py-mako        py-pmw        py-rpy2              py-sympy
+   py-cython     py-matplotlib  py-pychecker  py-scientificpython  py-virtualenv
+   py-dateutil   py-mpi4py      py-pygments   py-scikit-learn
+   py-epydoc     py-mx          py-pylint     py-scipy
+   py-gnuplot    py-nose        py-pyparsing  py-setuptools
+   py-h5py       py-numpy       py-pyqt       py-shiboken
+
+   ==> 12 installed:
+   -- linux-debian7-x86_64 / gcc@4.4.7 --------------------------------
+   py-dateutil@2.4.0    py-nose@1.3.4       py-pyside@1.2.2
+   py-dateutil@2.4.0    py-numpy@1.9.1      py-pytz@2014.10
+   py-ipython@2.3.1     py-pygments@2.0.1   py-setuptools@11.3.1
+   py-matplotlib@1.4.2  py-pyparsing@2.0.3  py-six@1.9.0
+
+The extensions are a subset of what's returned by ``spack list``, and
+they are packages like any other. They are installed into their own
+prefixes, and you can see this with ``spack find --paths``:
+
+.. code-block:: console
+
+   $ spack find --paths py-numpy
+   ==> 1 installed packages.
+   -- linux-debian7-x86_64 / gcc@4.4.7 --------------------------------
+       py-numpy@1.9.1  ~/spack/opt/linux-debian7-x86_64/gcc@4.4.7/py-numpy@1.9.1-66733244
+
+However, even though this package is installed, you cannot use it
+directly when you run ``python``:
+
+.. code-block:: console
+
+   $ spack load python
+   $ python
+   Python 2.7.8 (default, Feb 17 2015, 01:35:25)
+   [GCC 4.4.7 20120313 (Red Hat 4.4.7-11)] on linux2
+   Type "help", "copyright", "credits" or "license" for more information.
+   >>> import numpy
+   Traceback (most recent call last):
+     File "<stdin>", line 1, in <module>
+   ImportError: No module named numpy
+   >>>
+
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Using Extensions in Environments
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+The recommended way of working with extensions such as ``py-numpy``
+above is through :ref:`Environments <environments>`. For example,
+the following creates an environment in the current working directory
+with a filesystem view in the ``./view`` directory:
+
+.. code-block:: console
+
+   $ spack env create --with-view view --dir .
+   $ spack -e . add py-numpy
+   $ spack -e . concretize
+   $ spack -e . install
+
+We recommend environments for two reasons. Firstly, environments
+can be activated (requires :ref:`shell-support`):
+
+.. code-block:: console
+
+   $ spack env activate .
+
+which sets all the right environment variables such as ``PATH`` and
+``PYTHONPATH``. This ensures that
+
+.. code-block:: console
+
+   $ python
+   >>> import numpy
+
+works. Secondly, even without shell support, the view ensures
+that Python can locate its extensions:
+
+.. code-block:: console
+
+   $ ./view/bin/python
+   >>> import numpy
+
+See :ref:`environments` for a more in-depth description of Spack
+environments and customizations to views.
+
+^^^^^^^^^^^^^^^^^^^^
+Using ``spack load``
+^^^^^^^^^^^^^^^^^^^^
+
+A more traditional way of using Spack and extensions is ``spack load``
+(requires :ref:`shell-support`). This will add the extension to ``PYTHONPATH``
+in your current shell, and Python itself will be available in the ``PATH``:
+
+.. code-block:: console
+
+   $ spack load py-numpy
+   $ python
+   >>> import numpy
+
+The loaded packages can be checked using ``spack find --loaded``
+
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Loading Extensions via Modules
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Apart from ``spack env activate`` and ``spack load``, you can load numpy
+through your environment modules (using ``environment-modules`` or
+``lmod``). This will also add the extension to the ``PYTHONPATH`` in
+your current shell.
+
+.. code-block:: console
+
+   $ module load <name of numpy module>
+
+If you do not know the name of the specific numpy module you wish to
+load, you can use the ``spack module tcl|lmod loads`` command to get
+the name of the module from the Spack spec.
+
 -----------------------
 Filesystem requirements
 -----------------------

@@ -21,86 +21,23 @@ is the following:
 Reuse already installed packages
 --------------------------------
 
-The ``reuse`` attribute controls how aggressively Spack reuses binary packages during concretization. The
-attribute can either be a single value, or an object for more complex configurations.
-
-In the former case ("single value") it allows Spack to:
-
-1. Reuse installed packages and buildcaches for all the specs to be concretized, when ``true``
-2. Reuse installed packages and buildcaches only for the dependencies of the root specs, when ``dependencies``
-3. Disregard reusing installed packages and buildcaches, when ``false``
-
-In case a finer control over which specs are reused is needed, then the value of this attribute can be
-an object, with the following keys:
-
-1. ``roots``: if ``true`` root specs are reused, if ``false`` only dependencies of root specs are reused
-2. ``from``: list of sources from which reused specs are taken
-
-Each source in ``from`` is itself an object:
-
-.. list-table:: Attributes for a source or reusable specs
-   :header-rows: 1
-
-   * - Attribute name
-     - Description
-   * - type (mandatory, string)
-     - Can be ``local``, ``buildcache``, or ``external``
-   * - include (optional, list of specs)
-     - If present, reusable specs must match at least one of the constraint in the list
-   * - exclude (optional, list of specs)
-     - If present, reusable specs must not match any of the constraint in the list.
-
-For instance, the following configuration:
-
-.. code-block:: yaml
-
-   concretizer:
-     reuse:
-       roots: true
-       from:
-       - type: local
-         include:
-         - "%gcc"
-         - "%clang"
-
-tells the concretizer to reuse all specs compiled with either ``gcc`` or ``clang``, that are installed
-in the local store. Any spec from remote buildcaches is disregarded.
-
-To reduce the boilerplate in configuration files, default values for the ``include`` and
-``exclude`` options can be pushed up one level:
-
-.. code-block:: yaml
-
-   concretizer:
-     reuse:
-       roots: true
-       include:
-       - "%gcc"
-       from:
-       - type: local
-       - type: buildcache
-       - type: local
-         include:
-         - "foo %oneapi"
-
-In the example above we reuse all specs compiled with ``gcc`` from the local store
-and remote buildcaches, and we also reuse ``foo %oneapi``. Note that the last source of
-specs override the default ``include`` attribute.
-
-For one-off concretizations, the are command line arguments for each of the simple "single value"
-configurations. This means a user can:
+The ``reuse`` attribute controls whether Spack will prefer to use installed packages (``true``), or
+whether it will do a "fresh" installation and prefer the latest settings from
+``package.py`` files and ``packages.yaml`` (``false``).
+You can use:
 
 .. code-block:: console
 
    % spack install --reuse <spec>
 
-to enable reuse for a single installation, or:
+to enable reuse for a single installation, and you can use:
 
 .. code-block:: console
 
    spack install --fresh <spec>
 
 to do a fresh install if ``reuse`` is enabled by default.
 ``reuse: dependencies`` is the default.
 
 .. seealso::

@@ -718,45 +718,23 @@ command-line tool, or C/C++/Fortran program with optional Python
 modules? The former should be prepended with ``py-``, while the
 latter should not.
 
-""""""""""""""""""""""""""""""
-``extends`` vs. ``depends_on``
-""""""""""""""""""""""""""""""
+""""""""""""""""""""""
+extends vs. depends_on
+""""""""""""""""""""""
 
 This is very similar to the naming dilemma above, with a slight twist.
 As mentioned in the :ref:`Packaging Guide <packaging_extensions>`,
 ``extends`` and ``depends_on`` are very similar, but ``extends`` ensures
 that the extension and extendee share the same prefix in views.
 This allows the user to import a Python module without
 having to add that module to ``PYTHONPATH``.
 
-Additionally, ``extends("python")`` adds a dependency on the package
-``python-venv``. This improves isolation from the system, whether
-it's during the build or at runtime: user and system site packages
-cannot accidentally be used by any package that ``extends("python")``.
-
-As a rule of thumb: if a package does not install any Python modules
-of its own, and merely puts a Python script in the ``bin`` directory,
-then there is no need for ``extends``. If the package installs modules
-in the ``site-packages`` directory, it requires ``extends``.
-
-"""""""""""""""""""""""""""""""""""""
-Executing ``python`` during the build
-"""""""""""""""""""""""""""""""""""""
-
-Whenever you need to execute a Python command or pass the path of the
-Python interpreter to the build system, it is best to use the global
-variable ``python`` directly. For example:
-
-.. code-block:: python
-
-   @run_before("install")
-   def recythonize(self):
-       python("setup.py", "clean")  # use the `python` global
-
-As mentioned in the previous section, ``extends("python")`` adds an
-automatic dependency on ``python-venv``, which is a virtual environment
-that guarantees build isolation. The ``python`` global always refers to
-the correct Python interpreter, whether the package uses ``extends("python")``
-or ``depends_on("python")``.
+When deciding between ``extends`` and ``depends_on``, the best rule of
+thumb is to check the installation prefix. If Python libraries are
+installed to ``<prefix>/lib/pythonX.Y/site-packages``, then you
+should use ``extends``. If Python libraries are installed elsewhere
+or the only files that get installed reside in ``<prefix>/bin``, then
+don't use ``extends``.
 
 ^^^^^^^^^^^^^^^^^^^^^
 Alternatives to Spack

@@ -150,7 +150,7 @@ this can expose you to attacks. Use at your own risk.
 --------------------
 
 Path to custom certificats for SSL verification. The value can be a
-filesytem path, or an environment variable that expands to an absolute file path.
+filesytem path, or an environment variable that expands to a file path.
 The default value is set to the environment variable ``SSL_CERT_FILE``
 to use the same syntax used by many other applications that automatically
 detect custom certificates.
@@ -160,9 +160,6 @@ in the subprocess calling ``curl``.
 If ``url_fetch_method:urllib`` then files and directories are supported i.e.
 ``config:ssl_certs:$SSL_CERT_FILE`` or ``config:ssl_certs:$SSL_CERT_DIR``
 will work.
-In all cases the expanded path must be absolute for Spack to use the certificates.
-Certificates relative to an environment can be created by prepending the path variable
-with the Spack configuration variable ``$env``.
 
 --------------------
 ``checksum``

@@ -194,15 +194,15 @@ The OS that are currently supported are summarized in the table below:
    * - Operating System
      - Base Image
      - Spack Image
+   * - Ubuntu 18.04
+     - ``ubuntu:18.04``
+     - ``spack/ubuntu-bionic``
    * - Ubuntu 20.04
      - ``ubuntu:20.04``
      - ``spack/ubuntu-focal``
    * - Ubuntu 22.04
      - ``ubuntu:22.04``
      - ``spack/ubuntu-jammy``
-   * - Ubuntu 24.04
-     - ``ubuntu:24.04``
-     - ``spack/ubuntu-noble``
    * - CentOS 7
      - ``centos:7``
      - ``spack/centos7``
@@ -227,6 +227,12 @@ The OS that are currently supported are summarized in the table below:
    * - Rocky Linux 9
      - ``rockylinux:9``
      - ``spack/rockylinux9``
+   * - Fedora Linux 37
+     - ``fedora:37``
+     - ``spack/fedora37``
+   * - Fedora Linux 38
+     - ``fedora:38``
+     - ``spack/fedora38``
    * - Fedora Linux 39
      - ``fedora:39``
      - ``spack/fedora39``

@@ -142,8 +142,12 @@ user's prompt to begin with the environment name in brackets.
|
||||
$ spack env activate -p myenv
|
||||
[myenv] $ ...
|
||||
|
||||
The ``activate`` command can also be used to create a new environment if it does not already
|
||||
exist.
|
||||
The ``activate`` command can also be used to create a new environment, if it is
|
||||
not already defined, by adding the ``--create`` flag. Managed and anonymous
|
||||
environments, anonymous environments are explained in the next section,
|
||||
can both be created using the same flags that `spack env create` accepts.
|
||||
If an environment already exists then spack will simply activate it and ignore the
|
||||
create specific flags.
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
@@ -172,36 +176,21 @@ environment will remove the view from the user environment.
|
||||
Anonymous Environments
|
||||
^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
Apart from managed environments, Spack also supports anonymous environments.
|
||||
|
||||
Anonymous environments can be placed in any directory of choice.
|
||||
|
||||
.. note::
|
||||
|
||||
When uninstalling packages, Spack asks the user to confirm the removal of packages
|
||||
that are still used in a managed environment. This is not the case for anonymous
|
||||
environments.
|
||||
|
||||
To create an anonymous environment, use one of the following commands:
|
||||
Any directory can be treated as an environment if it contains a file
|
||||
``spack.yaml``. To load an anonymous environment, use:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack env create --dir my_env
|
||||
$ spack env create ./my_env
|
||||
$ spack env activate -d /path/to/directory
|
||||
|
||||
As a shorthand, you can also create an anonymous environment upon activation if it does not
|
||||
already exist:
|
||||
Anonymous specs can be created in place using the command:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack env activate --create ./my_env
|
||||
|
||||
For convenience, Spack can also place an anonymous environment in a temporary directory for you:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack env activate --temp
|
||||
$ spack env create -d .
|
||||
|
||||
In this case Spack simply creates a ``spack.yaml`` file in the requested
|
||||
directory.
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Environment Sensitive Commands
|
||||
@@ -460,125 +449,6 @@ Sourcing that file in Bash will make the environment available to the
|
||||
user; and can be included in ``.bashrc`` files, etc. The ``loads``
|
||||
file may also be copied out of the environment, renamed, etc.
|
||||
|
||||
|
||||
.. _environment_include_concrete:
|
||||
|
||||
------------------------------
|
||||
Included Concrete Environments
|
||||
------------------------------
|
||||
|
||||
Spack environments can create an environment based off of information in already
|
||||
established environments. You can think of it as a combination of existing
|
||||
environments. It will gather information from the existing environment's
|
||||
``spack.lock`` and use that during the creation of this included concrete
|
||||
environment. When an included concrete environment is created it will generate
|
||||
a ``spack.lock`` file for the newly created environment.
|
||||
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Creating included environments
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
To create a combined concrete environment, you must have at least one existing
|
||||
concrete environment. You will use the command ``spack env create`` with the
|
||||
argument ``--include-concrete`` followed by the name or path of the environment
|
||||
you'd like to include. Here is an example of how to create a combined environment
|
||||
from the command line.
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack env create myenv
|
||||
$ spack -e myenv add python
|
||||
$ spack -e myenv concretize
|
||||
$ spack env create --include-concrete myenv included_env
|
||||
|
||||
|
||||
You can also include an environment directly in the ``spack.yaml`` file. It
|
||||
involves adding the ``include_concrete`` heading in the yaml followed by the
|
||||
absolute path to the independent environments.
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
spack:
|
||||
specs: []
|
||||
concretizer:
|
||||
unify: true
|
||||
include_concrete:
|
||||
- /absolute/path/to/environment1
|
||||
- /absolute/path/to/environment2
|
||||
|
||||
|
||||
Once the ``spack.yaml`` has been updated you must concretize the environment to
|
||||
get the concrete specs from the included environments.
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Updating an included environment
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
If changes were made to the base environment and you want that reflected in the
|
||||
included environment you will need to reconcretize both the base environment and the
|
||||
included environment for the change to be implemented. For example:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack env create myenv
|
||||
$ spack -e myenv add python
|
||||
$ spack -e myenv concretize
|
||||
$ spack env create --include-concrete myenv included_env
|
||||
|
||||
|
||||
$ spack -e myenv find
|
||||
==> In environment myenv
|
||||
==> Root specs
|
||||
python
|
||||
|
||||
==> 0 installed packages
|
||||
|
||||
|
||||
$ spack -e included_env find
|
||||
==> In environment included_env
|
||||
==> No root specs
|
||||
==> Included specs
|
||||
python
|
||||
|
||||
==> 0 installed packages
|
||||
|
||||
Here we see that ``included_env`` has access to the python package through
|
||||
the ``myenv`` environment. But if we were to add another spec to ``myenv``,
|
||||
``included_env`` will not be able to access the new information.
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack -e myenv add perl
|
||||
$ spack -e myenv concretize
|
||||
$ spack -e myenv find
|
||||
==> In environment myenv
|
||||
==> Root specs
|
||||
perl python
|
||||
|
||||
==> 0 installed packages
|
||||
|
||||
|
||||
$ spack -e included_env find
|
||||
==> In environment included_env
|
||||
==> No root specs
|
||||
==> Included specs
|
||||
python
|
||||
|
||||
==> 0 installed packages
|
||||
|
||||
It isn't until you run the ``spack concretize`` command that the combined
|
||||
environment will get the updated information from the reconcretized base environmennt.
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack -e included_env concretize
|
||||
$ spack -e included_env find
|
||||
==> In environment included_env
|
||||
==> No root specs
|
||||
==> Included specs
|
||||
perl python
|
||||
|
||||
==> 0 installed packages
|
||||
|
||||
.. _environment-configuration:
|
||||
|
||||
------------------------
|
||||
@@ -930,7 +800,6 @@ For example, the following environment has three root packages:
|
||||
This allows for a much-needed reduction in redundancy between packages
|
||||
and constraints.
|
||||
|
||||
|
||||
----------------
|
||||
Filesystem Views
|
||||
----------------
|
||||
@@ -1164,7 +1033,7 @@ other targets to depend on the environment installation.
|
||||
|
||||
A typical workflow is as follows:
|
||||
|
||||
.. code-block:: console
|
||||
.. code:: console
|
||||
|
||||
spack env create -d .
|
||||
spack -e . add perl
|
||||
@@ -1257,7 +1126,7 @@ its dependencies. This can be useful when certain flags should only apply to
dependencies. Below we show a use case where a spec is installed with verbose
output (``spack install --verbose``) while its dependencies are installed silently:

.. code-block:: console
.. code:: console

   $ spack env depfile -o Makefile

@@ -1279,7 +1148,7 @@ This can be accomplished through the generated ``[<prefix>/]SPACK_PACKAGE_IDS``
variable. Assuming we have an active and concrete environment, we generate the
associated ``Makefile`` with a prefix ``example``:

.. code-block:: console
.. code:: console

   $ spack env depfile -o env.mk --make-prefix example

@@ -478,13 +478,6 @@ prefix, you can add them to the ``extra_attributes`` field. Similarly,
all other fields from the compilers config can be added to the
``extra_attributes`` field for an external representing a compiler.

Note that the format for the ``paths`` field in the
``extra_attributes`` section is different than in the ``compilers``
config. For compilers configured as external packages, the section is
named ``compilers`` and the dictionary maps language names (``c``,
``cxx``, ``fortran``) to paths, rather than using the names ``cc``,
``fc``, and ``f77``.

.. code-block:: yaml

   packages:
@@ -500,10 +493,11 @@ named ``compilers`` and the dictionary maps language names (``c``,
     - spec: llvm+clang@15.0.0 arch=linux-rhel8-skylake
       prefix: /usr
       extra_attributes:
         compilers:
           c: /usr/bin/clang-with-suffix
         paths:
           cc: /usr/bin/clang-with-suffix
           cxx: /usr/bin/clang++-with-extra-info
           fortran: /usr/bin/gfortran
           fc: /usr/bin/gfortran
           f77: /usr/bin/gfortran
       extra_rpaths:
       - /usr/lib/llvm/

@@ -1578,8 +1572,6 @@ Microsoft Visual Studio
"""""""""""""""""""""""

Microsoft Visual Studio provides the only Windows C/C++ compiler that is currently supported by Spack.
Spack additionally requires the Windows SDK (including WGL) to be installed as part of your
Visual Studio installation, as it is required to build many packages from source.

We require several specific components to be included in the Visual Studio installation.
One is the C/C++ toolset, which can be selected as "Desktop development with C++" or "C++ build tools,"
@@ -1587,7 +1579,6 @@ depending on installation type (Professional, Build Tools, etc.) The other requ
"C++ CMake tools for Windows," which can be selected from among the optional packages.
This provides CMake and Ninja for use during Spack configuration.


If you already have Visual Studio installed, you can make sure these components are installed by
rerunning the installer. Next to your installation, select "Modify" and look at the
"Installation details" pane on the right.
@@ -4,10 +4,10 @@ sphinx_design==0.5.0
sphinx-rtd-theme==2.0.0
python-levenshtein==0.25.1
docutils==0.20.1
pygments==2.18.0
pygments==2.17.2
urllib3==2.2.1
pytest==8.2.0
pytest==8.1.1
isort==5.13.2
black==24.4.2
black==24.4.0
flake8==7.0.0
mypy==1.10.0
mypy==1.9.0
2
lib/spack/external/__init__.py
vendored
@@ -18,7 +18,7 @@

* Homepage: https://pypi.python.org/pypi/archspec
* Usage: Labeling, comparison and detection of microarchitectures
* Version: 0.2.4 (commit 48b92512b9ce203ded0ebd1ac41b42593e931f7c)
* Version: 0.2.3 (commit 7b8fe60b69e2861e7dac104bc1c183decfcd3daf)

astunparse
----------------
2
lib/spack/external/archspec/__init__.py
vendored
@@ -1,3 +1,3 @@
"""Init file to avoid namespace packages"""

__version__ = "0.2.4"
__version__ = "0.2.3"
9
lib/spack/external/archspec/cpu/__init__.py
vendored
@@ -5,10 +5,9 @@
"""The "cpu" package permits to query and compare different
CPU microarchitectures.
"""
from .detect import brand_string, host
from .detect import host
from .microarchitecture import (
    TARGETS,
    InvalidCompilerVersion,
    Microarchitecture,
    UnsupportedMicroarchitecture,
    generic_microarchitecture,
@@ -16,12 +15,10 @@
)

__all__ = [
    "brand_string",
    "host",
    "TARGETS",
    "InvalidCompilerVersion",
    "Microarchitecture",
    "UnsupportedMicroarchitecture",
    "TARGETS",
    "generic_microarchitecture",
    "host",
    "version_components",
]
42
lib/spack/external/archspec/cpu/detect.py
vendored
@@ -155,31 +155,6 @@ def _is_bit_set(self, register: int, bit: int) -> bool:
        mask = 1 << bit
        return register & mask > 0

    def brand_string(self) -> Optional[str]:
        """Returns the brand string, if available."""
        if self.highest_extension_support < 0x80000004:
            return None

        r1 = self.cpuid.registers_for(eax=0x80000002, ecx=0)
        r2 = self.cpuid.registers_for(eax=0x80000003, ecx=0)
        r3 = self.cpuid.registers_for(eax=0x80000004, ecx=0)
        result = struct.pack(
            "IIIIIIIIIIII",
            r1.eax,
            r1.ebx,
            r1.ecx,
            r1.edx,
            r2.eax,
            r2.ebx,
            r2.ecx,
            r2.edx,
            r3.eax,
            r3.ebx,
            r3.ecx,
            r3.edx,
        ).decode("utf-8")
        return result.strip("\x00")


@detection(operating_system="Windows")
def cpuid_info():
@@ -199,8 +174,8 @@ def _check_output(args, env):


WINDOWS_MAPPING = {
    "AMD64": X86_64,
    "ARM64": AARCH64,
    "AMD64": "x86_64",
    "ARM64": "aarch64",
}


@@ -434,16 +409,3 @@ def compatibility_check_for_riscv64(info, target):
    return (target == arch_root or arch_root in target.ancestors) and (
        target.name == info.name or target.vendor == "generic"
    )


def brand_string() -> Optional[str]:
    """Returns the brand string of the host, if detected, or None."""
    if platform.system() == "Darwin":
        return _check_output(
            ["sysctl", "-n", "machdep.cpu.brand_string"], env=_ensure_bin_usrbin_in_path()
        ).strip()

    if host().family == X86_64:
        return CpuidInfoCollector().brand_string()

    return None
@@ -208,8 +208,6 @@ def optimization_flags(self, compiler, version):
        """Returns a string containing the optimization flags that needs
        to be used to produce code optimized for this micro-architecture.

        The version is expected to be a string of dot separated digits.

        If there is no information on the compiler passed as argument the
        function returns an empty string. If it is known that the compiler
        version we want to use does not support this architecture the function
@@ -218,11 +216,6 @@ def optimization_flags(self, compiler, version):
        Args:
            compiler (str): name of the compiler to be used
            version (str): version of the compiler to be used

        Raises:
            UnsupportedMicroarchitecture: if the requested compiler does not support
                this micro-architecture.
            ValueError: if the version doesn't match the expected format
        """
        # If we don't have information on compiler at all return an empty string
        if compiler not in self.family.compilers:
@@ -239,14 +232,6 @@ def optimization_flags(self, compiler, version):
            msg = msg.format(compiler, best_target, best_target.family)
            raise UnsupportedMicroarchitecture(msg)

        # Check that the version matches the expected format
        if not re.match(r"^(?:\d+\.)*\d+$", version):
            msg = (
                "invalid format for the compiler version argument. "
                "Only dot separated digits are allowed."
            )
            raise InvalidCompilerVersion(msg)

        # If we have information on this compiler we need to check the
        # version being used
        compiler_info = self.compilers[compiler]
@@ -307,7 +292,7 @@ def generic_microarchitecture(name):
    Args:
        name (str): name of the micro-architecture
    """
    return Microarchitecture(name, parents=[], vendor="generic", features=set(), compilers={})
    return Microarchitecture(name, parents=[], vendor="generic", features=[], compilers={})


def version_components(version):
@@ -382,15 +367,7 @@ def fill_target_from_dict(name, data, targets):
TARGETS = LazyDictionary(_known_microarchitectures)


class ArchspecError(Exception):
    """Base class for errors within archspec"""


class UnsupportedMicroarchitecture(ArchspecError, ValueError):
class UnsupportedMicroarchitecture(ValueError):
    """Raised if a compiler version does not support optimization for a given
    micro-architecture.
    """


class InvalidCompilerVersion(ArchspecError, ValueError):
    """Raised when an invalid format is used for compiler versions in archspec."""
@@ -2937,6 +2937,8 @@
      "ilrcpc",
      "flagm",
      "ssbs",
      "paca",
      "pacg",
      "dcpodp",
      "svei8mm",
      "svebf16",
@@ -3064,6 +3066,8 @@
      "flagm",
      "ssbs",
      "sb",
      "paca",
      "pacg",
      "dcpodp",
      "sve2",
      "sveaes",
@@ -3077,7 +3081,8 @@
      "svebf16",
      "i8mm",
      "bf16",
      "dgh"
      "dgh",
      "bti"
    ],
    "compilers" : {
      "gcc": [
@@ -98,10 +98,3 @@ def path_filter_caller(*args, **kwargs):
    if _func:
        return holder_func(_func)
    return holder_func


def sanitize_win_longpath(path: str) -> str:
    """Strip Windows extended path prefix from strings
    Returns sanitized string.
    no-op if extended path prefix is not present"""
    return path.lstrip("\\\\?\\")
@@ -187,18 +187,12 @@ def polite_filename(filename: str) -> str:
    return _polite_antipattern().sub("_", filename)


def getuid() -> Union[str, int]:
    """Returns os getuid on non Windows
    On Windows returns 0 for admin users, login string otherwise
    This is in line with behavior from get_owner_uid which
    always returns the login string on Windows
    """
def getuid():
    if sys.platform == "win32":
        import ctypes

        # If not admin, use the string name of the login as a unique ID
        if ctypes.windll.shell32.IsUserAnAdmin() == 0:
            return os.getlogin()
            return 1
        return 0
    else:
        return os.getuid()
@@ -219,15 +213,6 @@ def _win_rename(src, dst):
    os.replace(src, dst)


@system_path_filter
def msdos_escape_parens(path):
    """MS-DOS interprets parens as grouping parameters even in a quoted string"""
    if sys.platform == "win32":
        return path.replace("(", "^(").replace(")", "^)")
    else:
        return path


@system_path_filter
def rename(src, dst):
    # On Windows, os.rename will fail if the destination file already exists
@@ -568,13 +553,7 @@ def exploding_archive_handler(tarball_container, stage):


@system_path_filter(arg_slice=slice(1))
def get_owner_uid(path, err_msg=None) -> Union[str, int]:
    """Returns owner UID of path destination
    On non Windows this is the value of st_uid
    On Windows this is the login string associated with the
    owning user.

    """
def get_owner_uid(path, err_msg=None):
    if not os.path.exists(path):
        mkdirp(path, mode=stat.S_IRWXU)

@@ -2450,10 +2429,9 @@ def add_library_dependent(self, *dest):
        """
        for pth in dest:
            if os.path.isfile(pth):
                new_pth = pathlib.Path(pth).parent
                self._additional_library_dependents.add(pathlib.Path(pth).parent)
            else:
                new_pth = pathlib.Path(pth)
            self._additional_library_dependents.add(new_pth)
                self._additional_library_dependents.add(pathlib.Path(pth))

    @property
    def rpaths(self):
@@ -11,7 +11,7 @@

from llnl.util import lang, tty

from ..path import sanitize_win_longpath, system_path_filter
from ..path import system_path_filter

if sys.platform == "win32":
    from win32file import CreateHardLink
@@ -247,9 +247,9 @@ def _windows_create_junction(source: str, link: str):
    out, err = proc.communicate()
    tty.debug(out.decode())
    if proc.returncode != 0:
        err_str = err.decode()
        tty.error(err_str)
        raise SymlinkError("Make junction command returned a non-zero return code.", err_str)
        err = err.decode()
        tty.error(err)
        raise SymlinkError("Make junction command returned a non-zero return code.", err)


def _windows_create_hard_link(path: str, link: str):
@@ -269,14 +269,14 @@ def _windows_create_hard_link(path: str, link: str):
    CreateHardLink(link, path)


def readlink(path: str, *, dir_fd=None):
def readlink(path: str):
    """Spack utility to override of os.readlink method to work cross platform"""
    if _windows_is_hardlink(path):
        return _windows_read_hard_link(path)
    elif _windows_is_junction(path):
        return _windows_read_junction(path)
    else:
        return sanitize_win_longpath(os.readlink(path, dir_fd=dir_fd))
        return os.readlink(path)


def _windows_read_hard_link(link: str) -> str:
@@ -59,7 +59,6 @@

To output an @, use '@@'. To output a } inside braces, use '}}'.
"""
import os
import re
import sys
from contextlib import contextmanager
@@ -102,29 +101,9 @@ def __init__(self, message):
# Mapping from color arguments to values for tty.set_color
color_when_values = {"always": True, "auto": None, "never": False}


def _color_when_value(when):
    """Raise a ValueError for an invalid color setting.

    Valid values are 'always', 'never', and 'auto', or equivalently,
    True, False, and None.
    """
    if when in color_when_values:
        return color_when_values[when]
    elif when not in color_when_values.values():
        raise ValueError("Invalid color setting: %s" % when)
    return when


def _color_from_environ() -> Optional[bool]:
    try:
        return _color_when_value(os.environ.get("SPACK_COLOR", "auto"))
    except ValueError:
        return None


#: When `None` colorize when stdout is tty, when `True` or `False` always or never colorize resp.
_force_color = _color_from_environ()
# Force color; None: Only color if stdout is a tty
# True: Always colorize output, False: Never colorize output
_force_color = None


def try_enable_terminal_color_on_windows():
@@ -185,6 +164,19 @@ def _err_check(result, func, args):
    debug("Unable to support color on Windows terminal")


def _color_when_value(when):
    """Raise a ValueError for an invalid color setting.

    Valid values are 'always', 'never', and 'auto', or equivalently,
    True, False, and None.
    """
    if when in color_when_values:
        return color_when_values[when]
    elif when not in color_when_values.values():
        raise ValueError("Invalid color setting: %s" % when)
    return when


def get_color_when():
    """Return whether commands should print color or not."""
    if _force_color is not None:
@@ -4,7 +4,7 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

#: PEP440 canonical <major>.<minor>.<micro>.<devN> string
__version__ = "0.23.0.dev0"
__version__ = "0.22.0.dev0"
spack_version = __version__

@@ -254,8 +254,8 @@ def _search_duplicate_specs_in_externals(error_cls):

@config_packages
def _deprecated_preferences(error_cls):
    """Search package preferences deprecated in v0.21 (and slated for removal in v0.23)"""
    # TODO (v0.23): remove this audit as the attributes will not be allowed in config
    """Search package preferences deprecated in v0.21 (and slated for removal in v0.22)"""
    # TODO (v0.22): remove this audit as the attributes will not be allowed in config
    errors = []
    packages_yaml = spack.config.CONFIG.get_config("packages")

@@ -29,7 +29,6 @@
import llnl.util.lang
import llnl.util.tty as tty
from llnl.util.filesystem import BaseDirectoryVisitor, mkdirp, visit_directory_tree
from llnl.util.symlink import readlink

import spack.caches
import spack.cmd
@@ -659,7 +658,7 @@ def get_buildfile_manifest(spec):
    # 2. paths are used as strings.
    for rel_path in visitor.symlinks:
        abs_path = os.path.join(root, rel_path)
        link = readlink(abs_path)
        link = os.readlink(abs_path)
        if os.path.isabs(link) and link.startswith(spack.store.STORE.layout.root):
            data["link_to_relocate"].append(rel_path)

@@ -2002,7 +2001,6 @@ def install_root_node(spec, unsigned=False, force=False, sha256=None):
    with spack.util.path.filter_padding():
        tty.msg('Installing "{0}" from a buildcache'.format(spec.format()))
        extract_tarball(spec, download_result, force)
        spec.package.windows_establish_runtime_linkage()
        spack.hooks.post_install(spec, False)
        spack.store.STORE.db.add(spec, spack.store.STORE.layout)

@@ -5,13 +5,7 @@
"""Function and classes needed to bootstrap Spack itself."""

from .config import ensure_bootstrap_configuration, is_bootstrapping, store_path
from .core import (
    all_core_root_specs,
    ensure_clingo_importable_or_raise,
    ensure_core_dependencies,
    ensure_gpg_in_path_or_raise,
    ensure_patchelf_in_path_or_raise,
)
from .core import all_core_root_specs, ensure_core_dependencies, ensure_patchelf_in_path_or_raise
from .environment import BootstrapEnvironment, ensure_environment_dependencies
from .status import status_message

@@ -19,8 +13,6 @@
    "is_bootstrapping",
    "ensure_bootstrap_configuration",
    "ensure_core_dependencies",
    "ensure_gpg_in_path_or_raise",
    "ensure_clingo_importable_or_raise",
    "ensure_patchelf_in_path_or_raise",
    "all_core_root_specs",
    "ensure_environment_dependencies",
@@ -54,14 +54,10 @@ def _try_import_from_store(
    installed_specs = spack.store.STORE.db.query(query_spec, installed=True)

    for candidate_spec in installed_specs:
        # previously bootstrapped specs may not have a python-venv dependency.
        if candidate_spec.dependencies("python-venv"):
            python, *_ = candidate_spec.dependencies("python-venv")
        else:
            python, *_ = candidate_spec.dependencies("python")
        pkg = candidate_spec["python"].package
        module_paths = [
            os.path.join(candidate_spec.prefix, python.package.purelib),
            os.path.join(candidate_spec.prefix, python.package.platlib),
            os.path.join(candidate_spec.prefix, pkg.purelib),
            os.path.join(candidate_spec.prefix, pkg.platlib),
        ]
        path_before = list(sys.path)

@@ -270,6 +270,10 @@ def try_import(self, module: str, abstract_spec_str: str) -> bool:
        with spack_python_interpreter():
            # Add hint to use frontend operating system on Cray
            concrete_spec = spack.spec.Spec(abstract_spec_str + " ^" + spec_for_current_python())
            # This is needed to help the old concretizer taking the `setuptools` dependency
            # only when bootstrapping from sources on Python 3.12
            if spec_for_current_python() == "python@3.12":
                concrete_spec.constrain("+force_setuptools")

            if module == "clingo":
                # TODO: remove when the old concretizer is deprecated  # pylint: disable=fixme
@@ -534,41 +538,6 @@ def ensure_patchelf_in_path_or_raise() -> spack.util.executable.Executable:
    )


def ensure_winsdk_external_or_raise() -> None:
    """Ensure the Windows SDK + WGL are available on system
    If both of these packages are found, the Spack user or bootstrap
    configuration (depending on where Spack is running)
    will be updated to include all versions and variants detected.
    If either the WDK or WSDK are not found, this method will raise
    a RuntimeError.

    **NOTE:** This modifies the Spack config in the current scope,
    either user or environment depending on the calling context.
    This is different from all other current bootstrap dependency
    checks.
    """
    if set(["win-sdk", "wgl"]).issubset(spack.config.get("packages").keys()):
        return
    externals = spack.detection.by_path(["win-sdk", "wgl"])
    if not set(["win-sdk", "wgl"]) == externals.keys():
        missing_packages_lst = []
        if "wgl" not in externals:
            missing_packages_lst.append("wgl")
        if "win-sdk" not in externals:
            missing_packages_lst.append("win-sdk")
        missing_packages = " & ".join(missing_packages_lst)
        raise RuntimeError(
            f"Unable to find the {missing_packages}, please install these packages \
via the Visual Studio installer \
before proceeding with Spack or provide the path to a non standard install with \
'spack external find --path'"
        )
    # wgl/sdk are not required for bootstrapping Spack, but
    # are required for building anything non trivial
    # add to user config so they can be used by subsequent Spack ops
    spack.detection.update_configuration(externals, buildable=False)


def ensure_core_dependencies() -> None:
    """Ensure the presence of all the core dependencies."""
    if sys.platform.lower() == "linux":
@@ -3,11 +3,13 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Bootstrap non-core Spack dependencies from an environment."""
import glob
import hashlib
import os
import pathlib
import sys
from typing import Iterable, List
import warnings
from typing import List

import archspec.cpu

@@ -26,16 +28,6 @@
class BootstrapEnvironment(spack.environment.Environment):
    """Environment to install dependencies of Spack for a given interpreter and architecture"""

    def __init__(self) -> None:
        if not self.spack_yaml().exists():
            self._write_spack_yaml_file()
        super().__init__(self.environment_root())

        # Remove python package roots created before python-venv was introduced
        for s in self.concrete_roots():
            if "python" in s.package.extendees and not s.dependencies("python-venv"):
                self.deconcretize(s)

    @classmethod
    def spack_dev_requirements(cls) -> List[str]:
        """Spack development requirements"""
@@ -67,19 +59,31 @@ def view_root(cls) -> pathlib.Path:
        return cls.environment_root().joinpath("view")

    @classmethod
    def bin_dir(cls) -> pathlib.Path:
        """Paths to be added to PATH"""
        return cls.view_root().joinpath("bin")
    def pythonpaths(cls) -> List[str]:
        """Paths to be added to sys.path or PYTHONPATH"""
        python_dir_part = f"python{'.'.join(str(x) for x in sys.version_info[:2])}"
        glob_expr = str(cls.view_root().joinpath("**", python_dir_part, "**"))
        result = glob.glob(glob_expr)
        if not result:
            msg = f"Cannot find any Python path in {cls.view_root()}"
            warnings.warn(msg)
        return result

    def python_dirs(self) -> Iterable[pathlib.Path]:
        python = next(s for s in self.all_specs_generator() if s.name == "python-venv").package
        return {self.view_root().joinpath(p) for p in (python.platlib, python.purelib)}
    @classmethod
    def bin_dirs(cls) -> List[pathlib.Path]:
        """Paths to be added to PATH"""
        return [cls.view_root().joinpath("bin")]

    @classmethod
    def spack_yaml(cls) -> pathlib.Path:
        """Environment spack.yaml file"""
        return cls.environment_root().joinpath("spack.yaml")

    def __init__(self) -> None:
        if not self.spack_yaml().exists():
            self._write_spack_yaml_file()
        super().__init__(self.environment_root())

    def update_installations(self) -> None:
        """Update the installations of this environment."""
        log_enabled = tty.is_debug() or tty.is_verbose()
@@ -96,13 +100,21 @@ def update_installations(self) -> None:
            self.install_all()
            self.write(regenerate=True)

    def load(self) -> None:
        """Update PATH and sys.path."""
        # Make executables available (shouldn't need PYTHONPATH)
        os.environ["PATH"] = f"{self.bin_dir()}{os.pathsep}{os.environ.get('PATH', '')}"

        # Spack itself imports pytest
        sys.path.extend(str(p) for p in self.python_dirs())
    def update_syspath_and_environ(self) -> None:
        """Update ``sys.path`` and the PATH, PYTHONPATH environment variables to point to
        the environment view.
        """
        # Do minimal modifications to sys.path and environment variables. In particular, pay
        # attention to have the smallest PYTHONPATH / sys.path possible, since that may impact
        # the performance of the current interpreter
        sys.path.extend(self.pythonpaths())
        os.environ["PATH"] = os.pathsep.join(
            [str(x) for x in self.bin_dirs()] + os.environ.get("PATH", "").split(os.pathsep)
        )
        os.environ["PYTHONPATH"] = os.pathsep.join(
            os.environ.get("PYTHONPATH", "").split(os.pathsep)
            + [str(x) for x in self.pythonpaths()]
        )

    def _write_spack_yaml_file(self) -> None:
        tty.msg(
@@ -152,4 +164,4 @@ def ensure_environment_dependencies() -> None:
    _add_externals_if_missing()
    with BootstrapEnvironment() as env:
        env.update_installations()
        env.load()
        env.update_syspath_and_environ()
@@ -39,11 +39,16 @@ def _maybe_set_python_hints(pkg: spack.package_base.PackageBase, args: List[str]
    """Set the PYTHON_EXECUTABLE, Python_EXECUTABLE, and Python3_EXECUTABLE CMake variables
    if the package has Python as build or link dep and ``find_python_hints`` is set to True. See
    ``find_python_hints`` for context."""
    if not getattr(pkg, "find_python_hints", False) or not pkg.spec.dependencies(
        "python", dt.BUILD | dt.LINK
    ):
    if not getattr(pkg, "find_python_hints", False):
        return
    python_executable = pkg.spec["python"].command.path
    pythons = pkg.spec.dependencies("python", dt.BUILD | dt.LINK)
    if len(pythons) != 1:
        return
    try:
        python_executable = pythons[0].package.command.path
    except RuntimeError:
        return

    args.extend(
        [
            CMakeBuilder.define("PYTHON_EXECUTABLE", python_executable),
@@ -1,144 +0,0 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import itertools
import os
import pathlib
import re
import sys
from typing import Dict, List, Sequence, Tuple, Union

import llnl.util.tty as tty
from llnl.util.lang import classproperty

import spack.compiler
import spack.package_base

# Local "type" for type hints
Path = Union[str, pathlib.Path]


class CompilerPackage(spack.package_base.PackageBase):
    """A Package mixin for all common logic for packages that implement compilers"""

    # TODO: how do these play nicely with other tags
    tags: Sequence[str] = ["compiler"]

    #: Optional suffix regexes for searching for this type of compiler.
    #: Suffixes are used by some frameworks, e.g. macports uses an '-mp-X.Y'
    #: version suffix for gcc.
    compiler_suffixes: List[str] = [r"-.*"]

    #: Optional prefix regexes for searching for this compiler
    compiler_prefixes: List[str] = []

    #: Compiler argument(s) that produces version information
    #: If multiple arguments, the earlier arguments must produce errors when invalid
    compiler_version_argument: Union[str, Tuple[str]] = "-dumpversion"

    #: Regex used to extract version from compiler's output
    compiler_version_regex: str = "(.*)"

    #: Static definition of languages supported by this class
    compiler_languages: Sequence[str] = ["c", "cxx", "fortran"]

    def __init__(self, spec: "spack.spec.Spec"):
        super().__init__(spec)
        msg = f"Supported languages for {spec} are not a subset of possible supported languages"
        msg += f" supports: {self.supported_languages}, valid values: {self.compiler_languages}"
        assert set(self.supported_languages) <= set(self.compiler_languages), msg

    @property
    def supported_languages(self) -> Sequence[str]:
        """Dynamic definition of languages supported by this package"""
        return self.compiler_languages

    @classproperty
    def compiler_names(cls) -> Sequence[str]:
        """Construct list of compiler names from per-language names"""
        names = []
        for language in cls.compiler_languages:
            names.extend(getattr(cls, f"{language}_names"))
        return names

    @classproperty
    def executables(cls) -> Sequence[str]:
        """Construct executables for external detection from names, prefixes, and suffixes."""
        regexp_fmt = r"^({0}){1}({2})$"
        prefixes = [""] + cls.compiler_prefixes
        suffixes = [""] + cls.compiler_suffixes
        if sys.platform == "win32":
            ext = r"\.(?:exe|bat)"
            suffixes += [suf + ext for suf in suffixes]
        return [
            regexp_fmt.format(prefix, re.escape(name), suffix)
            for prefix, name, suffix in itertools.product(prefixes, cls.compiler_names, suffixes)
        ]

    @classmethod
    def determine_version(cls, exe: Path):
        version_argument = cls.compiler_version_argument
        if isinstance(version_argument, str):
            version_argument = (version_argument,)

        for va in version_argument:
            try:
                output = spack.compiler.get_compiler_version_output(exe, va)
                match = re.search(cls.compiler_version_regex, output)
                if match:
                    return ".".join(match.groups())
            except spack.util.executable.ProcessError:
                pass
            except Exception as e:
                tty.debug(
                    f"[{__file__}] Cannot detect a valid version for the executable "
                    f"{str(exe)}, for package '{cls.name}': {e}"
                )

    @classmethod
    def compiler_bindir(cls, prefix: Path) -> Path:
        """Overridable method for the location of the compiler bindir within the prefix"""
        return os.path.join(prefix, "bin")

    @classmethod
    def determine_compiler_paths(cls, exes: Sequence[Path]) -> Dict[str, Path]:
        """Compute the paths to compiler executables associated with this package

        This is a helper method for ``determine_variants`` to compute the ``extra_attributes``
        to include with each spec object."""
        # There are often at least two copies (not symlinks) of each compiler executable in the
        # same directory: one with a canonical name, e.g. "gfortran", and another one with the
        # target prefix, e.g. "x86_64-pc-linux-gnu-gfortran". There also might be a copy of "gcc"
        # with the version suffix, e.g. "x86_64-pc-linux-gnu-gcc-6.3.0". To ensure the consistency
        # of values in the "paths" dictionary (i.e. we prefer all of them to reference copies
        # with canonical names if possible), we iterate over the executables in the reversed sorted
        # order:
        # First pass over languages identifies exes that are perfect matches for canonical names
        # Second pass checks for names with prefix/suffix
        # Second pass is sorted by language name length because longer named languages
        # e.g. cxx can often contain the names of shorter named languages
        # e.g. c (e.g. clang/clang++)
        paths = {}
        exes = sorted(exes, reverse=True)
        languages = {
            lang: getattr(cls, f"{lang}_names")
            for lang in sorted(cls.compiler_languages, key=len, reverse=True)
        }
        for exe in exes:
            for lang, names in languages.items():
                if os.path.basename(exe) in names:
                    paths[lang] = exe
                    break
            else:
                for lang, names in languages.items():
                    if any(name in os.path.basename(exe) for name in names):
                        paths[lang] = exe
                        break

        return paths

    @classmethod
    def determine_variants(cls, exes: Sequence[Path], version_str: str) -> Tuple:
        # path determination is separated so it can be reused in subclasses
        return "", {"compilers": cls.determine_compiler_paths(exes=exes)}
@@ -145,7 +145,7 @@ def install(self, pkg, spec, prefix):
        opts += self.nmake_install_args()
        if self.makefile_name:
            opts.append("/F{}".format(self.makefile_name))
        opts.append(self.define("PREFIX", fs.windows_sfn(prefix)))
        opts.append(self.define("PREFIX", prefix))
        with fs.working_dir(self.build_directory):
            inspect.getmodule(self.pkg).nmake(
                *opts, *self.install_targets, ignore_quotes=self.ignore_quotes
@@ -138,21 +138,16 @@ def view_file_conflicts(self, view, merge_map):
        return conflicts

    def add_files_to_view(self, view, merge_map, skip_if_exists=True):
        # Patch up shebangs if the package extends Python and we put a Python interpreter in the
        # view.
        if not self.extendee_spec:
            return super().add_files_to_view(view, merge_map, skip_if_exists)

        python, *_ = self.spec.dependencies("python-venv") or self.spec.dependencies("python")

        if python.external:
        # Patch up shebangs to the python linked in the view only if python is built by Spack.
        if not self.extendee_spec or self.extendee_spec.external:
            return super().add_files_to_view(view, merge_map, skip_if_exists)

        # We only patch shebangs in the bin directory.
        copied_files: Dict[Tuple[int, int], str] = {}  # File identifier -> source
        delayed_links: List[Tuple[str, str]] = []  # List of symlinks from merge map
        bin_dir = self.spec.prefix.bin

        bin_dir = self.spec.prefix.bin
        python_prefix = self.extendee_spec.prefix
        for src, dst in merge_map.items():
            if skip_if_exists and os.path.lexists(dst):
                continue
@@ -173,7 +168,7 @@ def add_files_to_view(self, view, merge_map, skip_if_exists=True):
                    copied_files[(s.st_dev, s.st_ino)] = dst
                    shutil.copy2(src, dst)
                    fs.filter_file(
                        python.prefix, os.path.abspath(view.get_projection_for_spec(self.spec)), dst
                        python_prefix, os.path.abspath(view.get_projection_for_spec(self.spec)), dst
                    )
                else:
                    view.link(src, dst)
@@ -204,13 +199,14 @@ def remove_files_from_view(self, view, merge_map):
            ignore_namespace = True

        bin_dir = self.spec.prefix.bin
        global_view = self.extendee_spec.prefix == view.get_projection_for_spec(self.spec)

        to_remove = []
        for src, dst in merge_map.items():
            if ignore_namespace and namespace_init(dst):
                continue

            if not fs.path_contains_subdirectory(src, bin_dir):
            if global_view or not fs.path_contains_subdirectory(src, bin_dir):
                to_remove.append(dst)
            else:
                os.remove(dst)
@@ -366,12 +362,6 @@ def list_url(cls) -> Optional[str]:  # type: ignore[override]
            return f"https://pypi.org/simple/{name}/"
        return None

    @property
    def python_spec(self):
        """Get python-venv if it exists or python otherwise."""
        python, *_ = self.spec.dependencies("python-venv") or self.spec.dependencies("python")
        return python

    @property
    def headers(self) -> HeaderList:
        """Discover header files in platlib."""
@@ -381,9 +371,8 @@ def headers(self) -> HeaderList:

        # Headers should only be in include or platlib, but no harm in checking purelib too
        include = self.prefix.join(self.spec["python"].package.include).join(name)
        python = self.python_spec
        platlib = self.prefix.join(python.package.platlib).join(name)
        purelib = self.prefix.join(python.package.purelib).join(name)
        platlib = self.prefix.join(self.spec["python"].package.platlib).join(name)
        purelib = self.prefix.join(self.spec["python"].package.purelib).join(name)

        headers_list = map(fs.find_all_headers, [include, platlib, purelib])
        headers = functools.reduce(operator.add, headers_list)
@@ -402,9 +391,8 @@ def libs(self) -> LibraryList:
        name = self.spec.name[3:]

        # Libraries should only be in platlib, but no harm in checking purelib too
        python = self.python_spec
        platlib = self.prefix.join(python.package.platlib).join(name)
        purelib = self.prefix.join(python.package.purelib).join(name)
        platlib = self.prefix.join(self.spec["python"].package.platlib).join(name)
        purelib = self.prefix.join(self.spec["python"].package.purelib).join(name)

        find_all_libraries = functools.partial(fs.find_all_libraries, recursive=True)
        libs_list = map(find_all_libraries, [platlib, purelib])
@@ -516,8 +504,6 @@ def global_options(self, spec: Spec, prefix: Prefix) -> Iterable[str]:

    def install(self, pkg: PythonPackage, spec: Spec, prefix: Prefix) -> None:
        """Install everything from build directory."""
        pip = spec["python"].command
        pip.add_default_arg("-m", "pip")

        args = PythonPipBuilder.std_args(pkg) + [f"--prefix={prefix}"]

@@ -533,6 +519,14 @@ def install(self, pkg: PythonPackage, spec: Spec, prefix: Prefix) -> None:
        else:
            args.append(".")

        pip = spec["python"].command
        # Hide user packages, since we don't have build isolation. This is
        # necessary because pip / setuptools may run hooks from arbitrary
        # packages during the build. There is no equivalent variable to hide
        # system packages, so this is not reliable for external Python.
        pip.add_default_env("PYTHONNOUSERSITE", "1")
        pip.add_default_arg("-m")
        pip.add_default_arg("pip")
        with fs.working_dir(self.build_directory):
            pip(*args)

@@ -1478,12 +1478,6 @@ def copy_test_logs_to_artifacts(test_stage, job_test_dir):
    copy_files_to_artifacts(os.path.join(test_stage, "*", "*.txt"), job_test_dir)


def win_quote(quote_str: str) -> str:
    if IS_WINDOWS:
        quote_str = f'"{quote_str}"'
    return quote_str


def download_and_extract_artifacts(url, work_dir):
    """Look for gitlab artifacts.zip at the given url, and attempt to download
    and extract the contents into the given work_dir
@@ -1948,9 +1942,9 @@ def compose_command_err_handling(args):
    # but we need to handle EXEs (git, etc) ourselves
    catch_exe_failure = (
        """
if ($LASTEXITCODE -ne 0){{
    throw 'Command {} has failed'
}}
if ($LASTEXITCODE -ne 0){
    throw "Command {} has failed"
}
"""
        if IS_WINDOWS
        else ""
@@ -2182,13 +2176,13 @@ def __init__(self, ci_cdash):
    def args(self):
        return [
            "--cdash-upload-url",
            win_quote(self.upload_url),
            self.upload_url,
            "--cdash-build",
            win_quote(self.build_name),
            self.build_name,
            "--cdash-site",
            win_quote(self.site),
            self.site,
            "--cdash-buildstamp",
            win_quote(self.build_stamp),
            self.build_stamp,
        ]

    @property  # type: ignore
@@ -31,6 +31,7 @@
level = "long"

SPACK_COMMAND = "spack"
MAKE_COMMAND = "make"
INSTALL_FAIL_CODE = 1
FAILED_CREATE_BUILDCACHE_CODE = 100

@@ -39,12 +40,6 @@ def deindent(desc):
    return desc.replace("    ", "")


def unicode_escape(path: str) -> str:
    """Returns transformed path with any unicode
    characters replaced with their corresponding escapes"""
    return path.encode("unicode-escape").decode("utf-8")


def setup_parser(subparser):
    setup_parser.parser = subparser
    subparsers = subparser.add_subparsers(help="CI sub-commands")
@@ -556,35 +551,75 @@ def ci_rebuild(args):
    # No hash match anywhere means we need to rebuild spec

    # Start with spack arguments
    spack_cmd = [SPACK_COMMAND, "--color=always", "--backtrace", "--verbose", "install"]
    spack_cmd = [SPACK_COMMAND, "--color=always", "--backtrace", "--verbose"]

    config = cfg.get("config")
    if not config["verify_ssl"]:
        spack_cmd.append("-k")

    install_args = [f'--use-buildcache={spack_ci.win_quote("package:never,dependencies:only")}']
    install_args = []

    can_verify = spack_ci.can_verify_binaries()
    verify_binaries = can_verify and spack_is_pr_pipeline is False
    if not verify_binaries:
        install_args.append("--no-check-signature")

    slash_hash = spack_ci.win_quote("/" + job_spec.dag_hash())
    slash_hash = "/{}".format(job_spec.dag_hash())

    # Arguments when installing dependencies from cache
    deps_install_args = install_args

    # Arguments when installing the root from sources
    deps_install_args = install_args + ["--only=dependencies"]
    root_install_args = install_args + ["--keep-stage", "--only=package"]

    root_install_args = install_args + [
        "--keep-stage",
        "--only=package",
        "--use-buildcache=package:never,dependencies:only",
    ]
    if cdash_handler:
        # Add additional arguments to `spack install` for CDash reporting.
        root_install_args.extend(cdash_handler.args())
    root_install_args.append(slash_hash)

    # ["x", "y"] -> "'x' 'y'"
    args_to_string = lambda args: " ".join("'{}'".format(arg) for arg in args)

    commands = [
        # apparently there's a race when spack bootstraps? do it up front once
        [SPACK_COMMAND, "-e", unicode_escape(env.path), "bootstrap", "now"],
        spack_cmd + deps_install_args + [slash_hash],
        spack_cmd + root_install_args + [slash_hash],
        [SPACK_COMMAND, "-e", env.path, "bootstrap", "now"],
        [
            SPACK_COMMAND,
            "-e",
            env.path,
            "env",
            "depfile",
            "-o",
            "Makefile",
            "--use-buildcache=package:never,dependencies:only",
            slash_hash,  # limit to spec we're building
        ],
        [
            # --output-sync requires GNU make 4.x.
            # Old make errors when you pass it a flag it doesn't recognize,
            # but it doesn't error or warn when you set unrecognized flags in
            # this variable.
            "export",
            "GNUMAKEFLAGS=--output-sync=recurse",
        ],
        [
            MAKE_COMMAND,
            "SPACK={}".format(args_to_string(spack_cmd)),
            "SPACK_COLOR=always",
            "SPACK_INSTALL_FLAGS={}".format(args_to_string(deps_install_args)),
            "-j$(nproc)",
            "install-deps/{}".format(
                spack.environment.depfile.MakefileSpec(job_spec).safe_format(
                    "{name}-{version}-{hash}"
                )
            ),
        ],
        spack_cmd + ["install"] + root_install_args,
    ]

    tty.debug("Installing {0} from source".format(job_spec.name))
    install_exit_code = spack_ci.process_command("install", commands, repro_dir)

@@ -563,13 +563,12 @@ def add_concretizer_args(subparser):
        help="reuse installed packages/buildcaches when possible",
    )
    subgroup.add_argument(
        "--fresh-roots",
        "--reuse-deps",
        action=ConfigSetAction,
        dest="concretizer:reuse",
        const="dependencies",
        default=None,
        help="concretize with fresh roots and reused dependencies",
        help="reuse installed dependencies only",
    )
    subgroup.add_argument(
        "--deprecated",
@@ -10,13 +10,13 @@
import sys
import tempfile
from pathlib import Path
from typing import List, Optional
from typing import Optional

import llnl.string as string
import llnl.util.filesystem as fs
import llnl.util.tty as tty
from llnl.util.tty.colify import colify
from llnl.util.tty.color import cescape, colorize
from llnl.util.tty.color import colorize

import spack.cmd
import spack.cmd.common
@@ -61,7 +61,14 @@
#
def env_create_setup_parser(subparser):
    """create a new environment"""
    subparser.add_argument("env_name", metavar="env", help="name or directory of environment")
    subparser.add_argument(
        "env_name",
        metavar="env",
        help=(
            "name of managed environment or directory of the anonymous env "
            "(when using --dir/-d) to activate"
        ),
    )
    subparser.add_argument(
        "-d", "--dir", action="store_true", help="create an environment in a specific directory"
    )
@@ -87,9 +94,6 @@ def env_create_setup_parser(subparser):
        default=None,
        help="either a lockfile (must end with '.json' or '.lock') or a manifest file",
    )
    subparser.add_argument(
        "--include-concrete", action="append", help="name of old environment to copy specs from"
    )


def env_create(args):
@@ -107,32 +111,19 @@ def env_create(args):
        # the environment should not include a view.
        with_view = None

    include_concrete = None
    if hasattr(args, "include_concrete"):
        include_concrete = args.include_concrete

    env = _env_create(
        args.env_name,
        init_file=args.envfile,
        dir=args.dir or os.path.sep in args.env_name or args.env_name in (".", ".."),
        dir=args.dir,
        with_view=with_view,
        keep_relative=args.keep_relative,
        include_concrete=include_concrete,
    )

    # Generate views, only really useful for environments created from spack.lock files.
    env.regenerate_views()


def _env_create(
    name_or_path: str,
    *,
    init_file: Optional[str] = None,
    dir: bool = False,
    with_view: Optional[str] = None,
    keep_relative: bool = False,
    include_concrete: Optional[List[str]] = None,
):
def _env_create(name_or_path, *, init_file=None, dir=False, with_view=None, keep_relative=False):
    """Create a new environment, with an optional yaml description.

    Arguments:
@@ -144,31 +135,22 @@ def _env_create(
        keep_relative (bool): if True, develop paths are copied verbatim into
            the new environment file, otherwise they may be made absolute if the
            new environment is in a different location
        include_concrete (list): list of the included concrete environments
    """
    if not dir:
        env = ev.create(
            name_or_path,
            init_file=init_file,
            with_view=with_view,
            keep_relative=keep_relative,
            include_concrete=include_concrete,
            name_or_path, init_file=init_file, with_view=with_view, keep_relative=keep_relative
        )
        tty.msg(
            colorize(
                f"Created environment @c{{{cescape(name_or_path)}}} in: @c{{{cescape(env.path)}}}"
            )
        )
    else:
        env = ev.create_in_dir(
            name_or_path,
            init_file=init_file,
            with_view=with_view,
            keep_relative=keep_relative,
            include_concrete=include_concrete,
        )
        tty.msg(colorize(f"Created independent environment in: @c{{{cescape(env.path)}}}"))
    tty.msg(f"Activate with: {colorize(f'@c{{spack env activate {cescape(name_or_path)}}}')}")
        tty.msg("Created environment '%s' in %s" % (name_or_path, env.path))
        tty.msg("You can activate this environment with:")
        tty.msg("    spack env activate %s" % (name_or_path))
        return env

    env = ev.create_in_dir(
        name_or_path, init_file=init_file, with_view=with_view, keep_relative=keep_relative
    )
    tty.msg("Created environment in %s" % env.path)
    tty.msg("You can activate this environment with:")
    tty.msg("    spack env activate %s" % env.path)
    return env


@@ -454,12 +436,6 @@ def env_remove_setup_parser(subparser):
    """remove an existing environment"""
    subparser.add_argument("rm_env", metavar="env", nargs="+", help="environment(s) to remove")
    arguments.add_common_arguments(subparser, ["yes_to_all"])
    subparser.add_argument(
        "-f",
        "--force",
        action="store_true",
        help="remove the environment even if it is included in another environment",
    )


def env_remove(args):
@@ -469,35 +445,13 @@ def env_remove(args):
    and manifests embedded in repositories should be removed manually.
    """
    read_envs = []
    valid_envs = []
    bad_envs = []
    invalid_envs = []

    for env_name in ev.all_environment_names():
    for env_name in args.rm_env:
        try:
            env = ev.read(env_name)
            valid_envs.append(env_name)

            if env_name in args.rm_env:
                read_envs.append(env)
            read_envs.append(env)
        except (spack.config.ConfigFormatError, ev.SpackEnvironmentConfigError):
            invalid_envs.append(env_name)

            if env_name in args.rm_env:
                bad_envs.append(env_name)
            bad_envs.append(env_name)

    # Check if env is linked to another before trying to remove
    for name in valid_envs:
        # don't check if environment is included to itself
        if name == env_name:
            continue
        environ = ev.Environment(ev.root(name))
        if ev.root(env_name) in environ.included_concrete_envs:
            msg = f'Environment "{env_name}" is being used by environment "{name}"'
            if args.force:
                tty.warn(msg)
            else:
                tty.die(msg)

    if not args.yes_to_all:
        environments = string.plural(len(args.rm_env), "environment", show_n=False)
@@ -3,7 +3,6 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import copy
import sys

import llnl.util.lang
@@ -272,27 +271,6 @@ def root_decorator(spec, string):

    print()

    if env.included_concrete_envs:
        tty.msg("Included specs")

        # Root specs cannot be displayed with prefixes, since those are not
        # set for abstract specs. Same for hashes
        root_args = copy.copy(args)
        root_args.paths = False

        # Roots are displayed with variants, etc. so that we can see
        # specifically what the user asked for.
        cmd.display_specs(
            env.included_user_specs,
            root_args,
            decorator=lambda s, f: color.colorize("@*{%s}" % f),
            namespace=True,
            show_flags=True,
            show_full_compiler=True,
            variants=True,
        )
        print()

    if args.show_concretized:
        tty.msg("Concretized roots")
        cmd.display_specs(env.specs_by_hash.values(), args, decorator=decorator)
@@ -23,7 +23,7 @@


# tutorial configuration parameters
tutorial_branch = "releases/v0.22"
tutorial_branch = "releases/v0.21"
tutorial_mirror = "file:///mirror"
tutorial_key = os.path.join(spack.paths.share_path, "keys", "tutorial.pub")

@@ -214,6 +214,8 @@ def unit_test(parser, args, unknown_args):

    # Ensure clingo is available before switching to the
    # mock configuration used by unit tests
    # Note: skip on windows here because for the moment,
    # clingo is wholly unsupported from bootstrap
    with spack.bootstrap.ensure_bootstrap_configuration():
        spack.bootstrap.ensure_core_dependencies()
        if pytest is None:
@@ -20,7 +20,6 @@

import spack.compilers
import spack.error
import spack.schema.environment
import spack.spec
import spack.util.executable
import spack.util.libc
@@ -684,8 +683,8 @@ def __str__(self):

    @contextlib.contextmanager
    def compiler_environment(self):
        # Avoid modifying os.environ if possible.
        if not self.modules and not self.environment:
        # yield immediately if no modules
        if not self.modules:
            yield
            return

@@ -702,9 +701,13 @@ def compiler_environment(self):
            spack.util.module_cmd.load_module(module)

            # apply other compiler environment changes
            spack.schema.environment.parse(self.environment).apply_modifications()
            env = spack.util.environment.EnvironmentModifications()
            env.extend(spack.schema.environment.parse(self.environment))
            env.apply_modifications()

            yield
        except BaseException:
            raise
        finally:
            # Restore environment regardless of whether inner code succeeded
            os.environ.clear()
@@ -164,56 +164,33 @@ def _compiler_config_from_package_config(config):


def _compiler_config_from_external(config):
    extra_attributes_key = "extra_attributes"
    compilers_key = "compilers"
    c_key, cxx_key, fortran_key = "c", "cxx", "fortran"

    # Allow `@x.y.z` instead of `@=x.y.z`
    spec = spack.spec.parse_with_version_concrete(config["spec"])

    # use str(spec.versions) to allow `@x.y.z` instead of `@=x.y.z`
    compiler_spec = spack.spec.CompilerSpec(
        package_name_to_compiler_name.get(spec.name, spec.name), spec.version
    )

    err_header = f"The external spec '{spec}' cannot be used as a compiler"
    extra_attributes = config.get("extra_attributes", {})
    prefix = config.get("prefix", None)

    # If extra_attributes is not there I might not want to use this entry as a compiler,
    # therefore just leave a debug message, but don't be loud with a warning.
    if extra_attributes_key not in config:
        tty.debug(f"[{__file__}] {err_header}: missing the '{extra_attributes_key}' key")
    compiler_class = class_for_compiler_name(compiler_spec.name)
    paths = extra_attributes.get("paths", {})
    compiler_langs = ["cc", "cxx", "fc", "f77"]
    for lang in compiler_langs:
        if paths.setdefault(lang, None):
            continue

        if not prefix:
            continue

        # Check for files that satisfy the naming scheme for this compiler
        bindir = os.path.join(prefix, "bin")
        for f, regex in itertools.product(os.listdir(bindir), compiler_class.search_regexps(lang)):
            if regex.match(f):
                paths[lang] = os.path.join(bindir, f)

    if all(v is None for v in paths.values()):
        return None
    extra_attributes = config[extra_attributes_key]

    # If I have 'extra_attributes' warn if 'compilers' is missing, or we don't have a C compiler
    if compilers_key not in extra_attributes:
        warnings.warn(
            f"{err_header}: missing the '{compilers_key}' key under '{extra_attributes_key}'"
        )
        return None
    attribute_compilers = extra_attributes[compilers_key]

    if c_key not in attribute_compilers:
        warnings.warn(
            f"{err_header}: missing the C compiler path under "
            f"'{extra_attributes_key}:{compilers_key}'"
        )
        return None
    c_compiler = attribute_compilers[c_key]

    # C++ and Fortran compilers are not mandatory, so let's just leave a debug trace
    if cxx_key not in attribute_compilers:
        tty.debug(f"[{__file__}] The external spec {spec} does not have a C++ compiler")

    if fortran_key not in attribute_compilers:
        tty.debug(f"[{__file__}] The external spec {spec} does not have a Fortran compiler")

    # compilers format has cc/fc/f77, externals format has "c/fortran"
    paths = {
        "cc": c_compiler,
        "cxx": attribute_compilers.get(cxx_key, None),
        "fc": attribute_compilers.get(fortran_key, None),
        "f77": attribute_compilers.get(fortran_key, None),
    }

    if not spec.architecture:
        host_platform = spack.platforms.host()

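When `paths` lacks an entry for a language, the loop above falls back to scanning `<prefix>/bin` for file names matching the compiler class's `search_regexps`. A standalone sketch of that discovery step (hypothetical helper and patterns, simplified from the code above):

    import os
    import re


    def find_compiler_binary(prefix, patterns=(r"^clang$", r"^clang-\d+$")):
        # Return the first file under <prefix>/bin whose name matches a pattern.
        bindir = os.path.join(prefix, "bin")
        if not os.path.isdir(bindir):
            return None
        regexes = [re.compile(p) for p in patterns]
        for name in os.listdir(bindir):
            if any(r.match(name) for r in regexes):
                return os.path.join(bindir, name)
        return None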
@@ -96,8 +96,6 @@ def verbose_flag(self):

    openmp_flag = "-fopenmp"

    # C++ flags based on CMake Modules/Compiler/Clang.cmake

    @property
    def cxx11_flag(self):
        if self.real_version < Version("3.3"):
@@ -122,24 +120,6 @@ def cxx17_flag(self):

        return "-std=c++17"

    @property
    def cxx20_flag(self):
        if self.real_version < Version("5.0"):
            raise UnsupportedCompilerFlag(self, "the C++20 standard", "cxx20_flag", "< 5.0")
        elif self.real_version < Version("11.0"):
            return "-std=c++2a"
        else:
            return "-std=c++20"

    @property
    def cxx23_flag(self):
        if self.real_version < Version("12.0"):
            raise UnsupportedCompilerFlag(self, "the C++23 standard", "cxx23_flag", "< 12.0")
        elif self.real_version < Version("17.0"):
            return "-std=c++2b"
        else:
            return "-std=c++23"

    @property
    def c99_flag(self):
        return "-std=c99"
@@ -162,10 +142,7 @@ def c17_flag(self):
    def c23_flag(self):
        if self.real_version < Version("9.0"):
            raise UnsupportedCompilerFlag(self, "the C23 standard", "c23_flag", "< 9.0")
        elif self.real_version < Version("18.0"):
            return "-std=c2x"
        else:
            return "-std=c23"
        return "-std=c2x"

    @property
    def cc_pic_flag(self):

@@ -74,10 +74,6 @@ class Concretizer:
    #: during concretization. Used for testing and for mirror creation
    check_for_compiler_existence = None

    #: Packages that the old concretizer cannot deal with correctly, and cannot build anyway.
    #: Those will not be considered as providers for virtuals.
    non_buildable_packages = {"glibc", "musl"}

    def __init__(self, abstract_spec=None):
        if Concretizer.check_for_compiler_existence is None:
            Concretizer.check_for_compiler_existence = not spack.config.get(
@@ -117,11 +113,7 @@ def _valid_virtuals_and_externals(self, spec):
        pref_key = lambda spec: 0  # no-op pref key

        if spec.virtual:
            candidates = [
                s
                for s in spack.repo.PATH.providers_for(spec)
                if s.name not in self.non_buildable_packages
            ]
            candidates = spack.repo.PATH.providers_for(spec)
            if not candidates:
                raise spack.error.UnsatisfiableProviderSpecError(candidates[0], spec)


@@ -34,6 +34,28 @@
        "image": "docker.io/fedora:39"
      }
    },
    "fedora:38": {
      "bootstrap": {
        "template": "container/fedora.dockerfile",
        "image": "docker.io/fedora:38"
      },
      "os_package_manager": "dnf",
      "build": "spack/fedora38",
      "final": {
        "image": "docker.io/fedora:38"
      }
    },
    "fedora:37": {
      "bootstrap": {
        "template": "container/fedora.dockerfile",
        "image": "docker.io/fedora:37"
      },
      "os_package_manager": "dnf",
      "build": "spack/fedora37",
      "final": {
        "image": "docker.io/fedora:37"
      }
    },
    "rockylinux:9": {
      "bootstrap": {
        "template": "container/rockylinux_9.dockerfile",
@@ -116,13 +138,6 @@
      },
      "os_package_manager": "apt"
    },
    "ubuntu:24.04": {
      "bootstrap": {
        "template": "container/ubuntu_2404.dockerfile"
      },
      "os_package_manager": "apt",
      "build": "spack/ubuntu-noble"
    },
    "ubuntu:22.04": {
      "bootstrap": {
        "template": "container/ubuntu_2204.dockerfile"
@@ -136,6 +151,13 @@
      },
      "build": "spack/ubuntu-focal",
      "os_package_manager": "apt"
    },
    "ubuntu:18.04": {
      "bootstrap": {
        "template": "container/ubuntu_1804.dockerfile"
      },
      "os_package_manager": "apt",
      "build": "spack/ubuntu-bionic"
    }
  },
  "os_package_managers": {

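Each entry pairs a bootstrap Dockerfile template with a build image and an OS package manager, and `spack containerize` picks the entry matching the requested base image. A quick way to inspect one entry, assuming the fragment above sits under a top-level `"images"` key as in Spack's `images.json` (illustrative only):

    import json

    with open("images.json") as f:
        images = json.load(f)["images"]

    entry = images["ubuntu:24.04"]
    print(entry["bootstrap"]["template"])  # container/ubuntu_2404.dockerfile
    print(entry["build"])                  # spack/ubuntu-noble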
@@ -662,7 +662,6 @@ def _execute_redistribute(
@directive(("extendees", "dependencies"))
def extends(spec, when=None, type=("build", "run"), patches=None):
    """Same as depends_on, but also adds this package to the extendee list.
    In case of Python, also adds a dependency on python-venv.

    keyword arguments can be passed to extends() so that extension
    packages can pass parameters to the extendee's extension
@@ -678,11 +677,6 @@ def _execute_extends(pkg):
        _depends_on(pkg, spec, when=when, type=type, patches=patches)
        spec_obj = spack.spec.Spec(spec)

        # When extending python, also add a dependency on python-venv. This is done so that
        # Spack environment views are Python virtual environments.
        if spec_obj.name == "python" and not pkg.name == "python-venv":
            _depends_on(pkg, "python-venv", when=when, type=("build", "run"))

        # TODO: the values of the extendees dictionary are not used. Remove in next refactor.
        pkg.extendees[spec_obj.name] = (spec_obj, None)


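In package terms, the effect of the added branch is that a package declaring `extends("python")` also picks up a `python-venv` build/run dependency. A hypothetical package sketch (name, URL, and checksum are placeholders):

    from spack.package import *


    class PyExample(Package):
        """Hypothetical extension package."""

        homepage = "https://example.com"
        url = "https://example.com/py-example-1.0.tar.gz"

        version("1.0", sha256="0000000000000000000000000000000000000000000000000000000000000000")

        # With the hunk above this also implies depends_on("python-venv").
        extends("python")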
@@ -34,9 +34,6 @@
* ``spec``: a string representation of the abstract spec that was concretized

4. ``concrete_specs``: a dictionary containing the specs in the environment.
5. ``include_concrete`` (dictionary): an optional dictionary that includes the roots
   and concrete specs from the included environments, keyed by the path to that
   environment

Compatibility
-------------
@@ -53,37 +50,26 @@
     - ``v2``
     - ``v3``
     - ``v4``
     - ``v5``
   * - ``v0.12:0.14``
     - ✅
     -
     -
     -
     -
   * - ``v0.15:0.16``
     - ✅
     - ✅
     -
     -
     -
   * - ``v0.17``
     - ✅
     - ✅
     - ✅
     -
     -
   * - ``v0.18:``
     - ✅
     - ✅
     - ✅
     - ✅
     -
   * - ``v0.22:``
     - ✅
     - ✅
     - ✅
     - ✅
     - ✅

Version 1
---------
@@ -348,118 +334,6 @@
        }
      }
    }


Version 5
---------

Version 5 doesn't change the top-level lockfile format, but an optional dictionary is
added. The dictionary has the ``root`` and ``concrete_specs`` of the included
environments, which are keyed by the path to that environment. Since this is optional
if the environment does not have any included environments ``include_concrete`` will
not be a part of the lockfile.

.. code-block:: json

   {
       "_meta": {
           "file-type": "spack-lockfile",
           "lockfile-version": 5,
           "specfile-version": 3
       },
       "roots": [
           {
               "hash": "<dag_hash 1>",
               "spec": "<abstract spec 1>"
           },
           {
               "hash": "<dag_hash 2>",
               "spec": "<abstract spec 2>"
           }
       ],
       "concrete_specs": {
           "<dag_hash 1>": {
               "... <spec dict attributes> ...": { },
               "dependencies": [
                   {
                       "name": "depname_1",
                       "hash": "<dag_hash for depname_1>",
                       "type": ["build", "link"]
                   },
                   {
                       "name": "depname_2",
                       "hash": "<dag_hash for depname_2>",
                       "type": ["build", "link"]
                   }
               ],
               "hash": "<dag_hash 1>",
           },
           "<daghash 2>": {
               "... <spec dict attributes> ...": { },
               "dependencies": [
                   {
                       "name": "depname_3",
                       "hash": "<dag_hash for depname_3>",
                       "type": ["build", "link"]
                   },
                   {
                       "name": "depname_4",
                       "hash": "<dag_hash for depname_4>",
                       "type": ["build", "link"]
                   }
               ],
               "hash": "<dag_hash 2>"
           }
       }
       "include_concrete": {
           "<path to environment>": {
               "roots": [
                   {
                       "hash": "<dag_hash 1>",
                       "spec": "<abstract spec 1>"
                   },
                   {
                       "hash": "<dag_hash 2>",
                       "spec": "<abstract spec 2>"
                   }
               ],
               "concrete_specs": {
                   "<dag_hash 1>": {
                       "... <spec dict attributes> ...": { },
                       "dependencies": [
                           {
                               "name": "depname_1",
                               "hash": "<dag_hash for depname_1>",
                               "type": ["build", "link"]
                           },
                           {
                               "name": "depname_2",
                               "hash": "<dag_hash for depname_2>",
                               "type": ["build", "link"]
                           }
                       ],
                       "hash": "<dag_hash 1>",
                   },
                   "<daghash 2>": {
                       "... <spec dict attributes> ...": { },
                       "dependencies": [
                           {
                               "name": "depname_3",
                               "hash": "<dag_hash for depname_3>",
                               "type": ["build", "link"]
                           },
                           {
                               "name": "depname_4",
                               "hash": "<dag_hash for depname_4>",
                               "type": ["build", "link"]
                           }
                       ],
                       "hash": "<dag_hash 2>"
                   }
               }
           }
       }
   }
"""
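Given that layout, pulling the included roots back out of a v5 lockfile is straightforward; a minimal sketch (illustrative, reading a `spack.lock` in the current directory):

    import json

    with open("spack.lock") as f:
        lock = json.load(f)

    if lock["_meta"]["lockfile-version"] >= 5:
        for env_path, data in lock.get("include_concrete", {}).items():
            roots = [r["spec"] for r in data["roots"]]
            print(env_path, "->", roots)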

from .environment import (

@@ -16,7 +16,7 @@
import urllib.parse
import urllib.request
import warnings
from typing import Any, Dict, Iterable, List, Optional, Set, Tuple, Union
from typing import Dict, Iterable, List, Optional, Set, Tuple, Union

import llnl.util.filesystem as fs
import llnl.util.tty as tty
@@ -159,8 +159,6 @@ def default_manifest_yaml():
default_view_name = "default"
# Default behavior to link all packages into views (vs. only root packages)
default_view_link = "all"
# The name for any included concrete specs
included_concrete_name = "include_concrete"


def installed_specs():
@@ -295,7 +293,6 @@ def create(
    init_file: Optional[Union[str, pathlib.Path]] = None,
    with_view: Optional[Union[str, pathlib.Path, bool]] = None,
    keep_relative: bool = False,
    include_concrete: Optional[List[str]] = None,
) -> "Environment":
    """Create a managed environment in Spack and returns it.

@@ -312,15 +309,10 @@
            string, it specifies the path to the view
        keep_relative: if True, develop paths are copied verbatim into the new environment file,
            otherwise they are made absolute
        include_concrete: list of concrete environment names/paths to be included
    """
    environment_dir = environment_dir_from_name(name, exists_ok=False)
    return create_in_dir(
        environment_dir,
        init_file=init_file,
        with_view=with_view,
        keep_relative=keep_relative,
        include_concrete=include_concrete,
        environment_dir, init_file=init_file, with_view=with_view, keep_relative=keep_relative
    )


@@ -329,7 +321,6 @@ def create_in_dir(
    init_file: Optional[Union[str, pathlib.Path]] = None,
    with_view: Optional[Union[str, pathlib.Path, bool]] = None,
    keep_relative: bool = False,
    include_concrete: Optional[List[str]] = None,
) -> "Environment":
    """Create an environment in the directory passed as input and returns it.

@@ -343,7 +334,6 @@
            string, it specifies the path to the view
        keep_relative: if True, develop paths are copied verbatim into the new environment file,
            otherwise they are made absolute
        include_concrete: concrete environment names/paths to be included
    """
    initialize_environment_dir(root, envfile=init_file)

@@ -356,12 +346,6 @@
        if with_view is not None:
            manifest.set_default_view(with_view)

        if include_concrete is not None:
            set_included_envs_to_env_paths(include_concrete)
            validate_included_envs_exists(include_concrete)
            validate_included_envs_concrete(include_concrete)
            manifest.set_include_concrete(include_concrete)

        manifest.flush()

    except (spack.config.ConfigFormatError, SpackEnvironmentConfigError) as e:
@@ -435,67 +419,6 @@ def ensure_env_root_path_exists():
    fs.mkdirp(env_root_path())


def set_included_envs_to_env_paths(include_concrete: List[str]) -> None:
    """If the included environment(s) is the environment name
    it is replaced by the path to the environment

    Args:
        include_concrete: list of env name or path to env"""

    for i, env_name in enumerate(include_concrete):
        if is_env_dir(env_name):
            include_concrete[i] = env_name
        elif exists(env_name):
            include_concrete[i] = root(env_name)


def validate_included_envs_exists(include_concrete: List[str]) -> None:
    """Checks that all of the included environments exist

    Args:
        include_concrete: list of already existing concrete environments to include

    Raises:
        SpackEnvironmentError: if any of the included environments do not exist
    """

    missing_envs = set()

    for i, env_name in enumerate(include_concrete):
        if not is_env_dir(env_name):
            missing_envs.add(env_name)

    if missing_envs:
        msg = "The following environment(s) are missing: {0}".format(", ".join(missing_envs))
        raise SpackEnvironmentError(msg)


def validate_included_envs_concrete(include_concrete: List[str]) -> None:
    """Checks that all of the included environments are concrete

    Args:
        include_concrete: list of already existing concrete environments to include

    Raises:
        SpackEnvironmentError: if any of the included environments are not concrete
    """

    non_concrete_envs = set()

    for env_path in include_concrete:
        if not os.path.exists(Environment(env_path).lock_path):
            non_concrete_envs.add(Environment(env_path).name)

    if non_concrete_envs:
        msg = "The following environment(s) are not concrete: {0}\n" "Please run:".format(
            ", ".join(non_concrete_envs)
        )
        for env in non_concrete_envs:
            msg += f"\n\t`spack -e {env} concretize`"

        raise SpackEnvironmentError(msg)


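Taken together, these helpers (removed by this hunk) normalize environment names to paths and then fail fast on anything missing or unconcretized. The intended call sequence, sketched with a hypothetical environment name:

    include_concrete = ["base"]                        # an existing environment name
    set_included_envs_to_env_paths(include_concrete)   # "base" -> path to that environment
    validate_included_envs_exists(include_concrete)    # SpackEnvironmentError if missing
    validate_included_envs_concrete(include_concrete)  # SpackEnvironmentError without spack.lock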
def all_environment_names():
    """List the names of environments that currently exist."""
    # just return empty if the env path does not exist. A read-only
@@ -898,18 +821,6 @@ def __init__(self, manifest_dir: Union[str, pathlib.Path]) -> None:
        self.specs_by_hash: Dict[str, Spec] = {}
        #: Repository for this environment (memoized)
        self._repo = None

        #: Environment paths for concrete (lockfile) included environments
        self.included_concrete_envs: List[str] = []
        #: First-level included concretized spec data from/to the lockfile.
        self.included_concrete_spec_data: Dict[str, Dict[str, List[str]]] = {}
        #: User specs from included environments from the last concretization
        self.included_concretized_user_specs: Dict[str, List[Spec]] = {}
        #: Roots from included environments with the last concretization, in order
        self.included_concretized_order: Dict[str, List[str]] = {}
        #: Concretized specs by hash from the included environments
        self.included_specs_by_hash: Dict[str, Dict[str, Spec]] = {}

        #: Previously active environment
        self._previous_active = None
        self._dev_specs = None
@@ -947,7 +858,7 @@ def _read(self):

        if os.path.exists(self.lock_path):
            with open(self.lock_path) as f:
                read_lock_version = self._read_lockfile(f)["_meta"]["lockfile-version"]
                read_lock_version = self._read_lockfile(f)

        if read_lock_version == 1:
            tty.debug(f"Storing backup of {self.lock_path} at {self._lock_backup_v1_path}")
@@ -1015,20 +926,6 @@ def add_view(name, values):
        if self.views == dict():
            self.views[default_view_name] = ViewDescriptor(self.path, self.view_path_default)

    def _process_concrete_includes(self):
        """Extract and load into memory included concrete spec data."""
        self.included_concrete_envs = self.manifest[TOP_LEVEL_KEY].get(included_concrete_name, [])

        if self.included_concrete_envs:
            if os.path.exists(self.lock_path):
                with open(self.lock_path) as f:
                    data = self._read_lockfile(f)

                if included_concrete_name in data:
                    self.included_concrete_spec_data = data[included_concrete_name]
            else:
                self.include_concrete_envs()

    def _construct_state_from_manifest(self):
        """Set up user specs and views from the manifest file."""
        self.spec_lists = collections.OrderedDict()
@@ -1045,31 +942,6 @@ def _construct_state_from_manifest(self):
        self.spec_lists[user_speclist_name] = user_specs

        self._process_view(spack.config.get("view", True))
        self._process_concrete_includes()

    def all_concretized_user_specs(self) -> List[Spec]:
        """Returns all of the concretized user specs of the environment and
        its included environment(s)."""
        concretized_user_specs = self.concretized_user_specs[:]
        for included_specs in self.included_concretized_user_specs.values():
            for included in included_specs:
                # Don't duplicate included spec(s)
                if included not in concretized_user_specs:
                    concretized_user_specs.append(included)

        return concretized_user_specs

    def all_concretized_orders(self) -> List[str]:
        """Returns all of the concretized order of the environment and
        its included environment(s)."""
        concretized_order = self.concretized_order[:]
        for included_concretized_order in self.included_concretized_order.values():
            for included in included_concretized_order:
                # Don't duplicate included spec(s)
                if included not in concretized_order:
                    concretized_order.append(included)

        return concretized_order

    @property
    def user_specs(self):
@@ -1094,26 +966,6 @@ def _read_dev_specs(self):
                dev_specs[name] = local_entry
        return dev_specs

    @property
    def included_user_specs(self) -> SpecList:
        """Included concrete user (or root) specs from last concretization."""
        spec_list = SpecList()

        if not self.included_concrete_envs:
            return spec_list

        def add_root_specs(included_concrete_specs):
            # add specs from the include *and* any nested includes it may have
            for env, info in included_concrete_specs.items():
                for root_list in info["roots"]:
                    spec_list.add(root_list["spec"])

                if "include_concrete" in info:
                    add_root_specs(info["include_concrete"])

        add_root_specs(self.included_concrete_spec_data)
        return spec_list

    def clear(self, re_read=False):
        """Clear the contents of the environment

@@ -1125,15 +977,9 @@ def clear(self, re_read=False):
        self.spec_lists[user_speclist_name] = SpecList()

        self._dev_specs = {}
        self.concretized_order = []  # roots of last concretize, in order
        self.concretized_user_specs = []  # user specs from last concretize
        self.concretized_order = []  # roots of last concretize, in order
        self.specs_by_hash = {}  # concretized specs by hash

        self.included_concrete_spec_data = {}  # concretized specs from lockfile of included envs
        self.included_concretized_order = {}  # root specs of the included envs, keyed by env path
        self.included_concretized_user_specs = {}  # user specs from last concretize's included env
        self.included_specs_by_hash = {}  # concretized specs by hash from the included envs

        self.invalidate_repository_cache()
        self._previous_active = None  # previously active environment
        if not re_read:
@@ -1187,55 +1033,6 @@ def scope_name(self):
        """Name of the config scope of this environment's manifest file."""
        return self.manifest.scope_name

    def include_concrete_envs(self):
        """Copy and save the included envs' specs internally"""

        lockfile_meta = None
        root_hash_seen = set()
        concrete_hash_seen = set()
        self.included_concrete_spec_data = {}

        for env_path in self.included_concrete_envs:
            # Check that environment exists
            if not is_env_dir(env_path):
                raise SpackEnvironmentError(f"Unable to find env at {env_path}")

            env = Environment(env_path)

            with open(env.lock_path) as f:
                lockfile_as_dict = env._read_lockfile(f)

            # Lockfile_meta must match each env and use at least format version 5
            if lockfile_meta is None:
                lockfile_meta = lockfile_as_dict["_meta"]
            elif lockfile_meta != lockfile_as_dict["_meta"]:
                raise SpackEnvironmentError("All lockfile _meta values must match")
            elif lockfile_meta["lockfile-version"] < 5:
                raise SpackEnvironmentError("The lockfile format must be at version 5 or higher")

            # Copy unique root specs from env
            self.included_concrete_spec_data[env_path] = {"roots": []}
            for root_dict in lockfile_as_dict["roots"]:
                if root_dict["hash"] not in root_hash_seen:
                    self.included_concrete_spec_data[env_path]["roots"].append(root_dict)
                    root_hash_seen.add(root_dict["hash"])

            # Copy unique concrete specs from env
            for concrete_spec in lockfile_as_dict["concrete_specs"]:
                if concrete_spec not in concrete_hash_seen:
                    self.included_concrete_spec_data[env_path].update(
                        {"concrete_specs": lockfile_as_dict["concrete_specs"]}
                    )
                    concrete_hash_seen.add(concrete_spec)

            if "include_concrete" in lockfile_as_dict.keys():
                self.included_concrete_spec_data[env_path]["include_concrete"] = lockfile_as_dict[
                    "include_concrete"
                ]

        self._read_lockfile_dict(self._to_lockfile_dict())
        self.write()

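The `root_hash_seen` bookkeeping above keeps only the first occurrence of each root hash across all included environments. The same dedup pattern in isolation (toy data):

    all_roots = [
        {"hash": "aaa", "spec": "zlib"},
        {"hash": "bbb", "spec": "cmake"},
        {"hash": "aaa", "spec": "zlib"},  # duplicated across two included envs
    ]

    seen = set()
    unique_roots = []
    for root in all_roots:
        if root["hash"] not in seen:
            unique_roots.append(root)
            seen.add(root["hash"])

    print([r["spec"] for r in unique_roots])  # ['zlib', 'cmake']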
    def destroy(self):
        """Remove this environment from Spack entirely."""
        shutil.rmtree(self.path)
@@ -1435,10 +1232,6 @@ def concretize(self, force=False, tests=False):
            for spec in set(self.concretized_user_specs) - set(self.user_specs):
                self.deconcretize(spec, concrete=False)

        # If a combined env, check updated spec is in the linked envs
        if self.included_concrete_envs:
            self.include_concrete_envs()

        # Pick the right concretization strategy
        if self.unify == "when_possible":
            return self._concretize_together_where_possible(tests=tests)
@@ -1622,7 +1415,7 @@ def _concretize_separately(self, tests=False):
        # Ensure we don't try to bootstrap clingo in parallel
        if spack.config.get("config:concretizer", "clingo") == "clingo":
            with spack.bootstrap.ensure_bootstrap_configuration():
                spack.bootstrap.ensure_clingo_importable_or_raise()
                spack.bootstrap.ensure_core_dependencies()

        # Ensure all the indexes have been built or updated, since
        # otherwise the processes in the pool may timeout on waiting
@@ -1911,14 +1704,8 @@ def _partition_roots_by_install_status(self):
        of per spec."""
        installed, uninstalled = [], []
        with spack.store.STORE.db.read_transaction():
            for concretized_hash in self.all_concretized_orders():
                if concretized_hash in self.specs_by_hash:
                    spec = self.specs_by_hash[concretized_hash]
                else:
                    for env_path in self.included_specs_by_hash.keys():
                        if concretized_hash in self.included_specs_by_hash[env_path]:
                            spec = self.included_specs_by_hash[env_path][concretized_hash]
                            break
            for concretized_hash in self.concretized_order:
                spec = self.specs_by_hash[concretized_hash]
                if not spec.installed or (
                    spec.satisfies("dev_path=*") or spec.satisfies("^dev_path=*")
                ):
@@ -1998,14 +1785,8 @@ def added_specs(self):

    def concretized_specs(self):
        """Tuples of (user spec, concrete spec) for all concrete specs."""
        for s, h in zip(self.all_concretized_user_specs(), self.all_concretized_orders()):
            if h in self.specs_by_hash:
                yield (s, self.specs_by_hash[h])
            else:
                for env_path in self.included_specs_by_hash.keys():
                    if h in self.included_specs_by_hash[env_path]:
                        yield (s, self.included_specs_by_hash[env_path][h])
                        break
        for s, h in zip(self.concretized_user_specs, self.concretized_order):
            yield (s, self.specs_by_hash[h])

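Both this generator and `_partition_roots_by_install_status` above do the same two-level lookup: the environment's own table first, then each included environment's table. In isolation (toy data):

    specs_by_hash = {"h1": "zlib"}
    included_specs_by_hash = {"/path/envA": {"h2": "cmake"}}

    def lookup(h):
        # Prefer the environment's own specs; fall back to included envs.
        if h in specs_by_hash:
            return specs_by_hash[h]
        for table in included_specs_by_hash.values():
            if h in table:
                return table[h]
        return None

    print(lookup("h2"))  # cmake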
    def concrete_roots(self):
        """Same as concretized_specs, except it returns the list of concrete
@@ -2134,7 +1915,8 @@ def _get_environment_specs(self, recurse_dependencies=True):
        If these specs appear under different user_specs, only one copy
        is added to the list returned.
        """
        specs = [self.specs_by_hash[h] for h in self.all_concretized_orders()]
        specs = [self.specs_by_hash[h] for h in self.concretized_order]

        if recurse_dependencies:
            specs.extend(
                traverse.traverse_nodes(
@@ -2179,76 +1961,31 @@ def _to_lockfile_dict(self):
            "concrete_specs": concrete_specs,
        }

        if self.included_concrete_envs:
            data[included_concrete_name] = self.included_concrete_spec_data

        return data

    def _read_lockfile(self, file_or_json):
        """Read a lockfile from a file or from a raw string."""
        lockfile_dict = sjson.load(file_or_json)
        self._read_lockfile_dict(lockfile_dict)
        return lockfile_dict

    def set_included_concretized_user_specs(
        self,
        env_name: str,
        env_info: Dict[str, Dict[str, Any]],
        included_json_specs_by_hash: Dict[str, Dict[str, Any]],
    ) -> Dict[str, Dict[str, Any]]:
        """Sets all of the concretized user specs from included environments
        to include those from nested included environments.

        Args:
            env_name: the name (technically the path) of the included environment
            env_info: included concrete environment data
            included_json_specs_by_hash: concrete spec data keyed by hash

        Returns: updated specs_by_hash
        """
        self.included_concretized_order[env_name] = []
        self.included_concretized_user_specs[env_name] = []

        def add_specs(name, info, specs_by_hash):
            # Add specs from the environment as well as any of its nested
            # environments.
            for root_info in info["roots"]:
                self.included_concretized_order[name].append(root_info["hash"])
                self.included_concretized_user_specs[name].append(Spec(root_info["spec"]))
            if "concrete_specs" in info:
                specs_by_hash.update(info["concrete_specs"])

            if included_concrete_name in info:
                for included_name, included_info in info[included_concrete_name].items():
                    if included_name not in self.included_concretized_order:
                        self.included_concretized_order[included_name] = []
                        self.included_concretized_user_specs[included_name] = []
                    add_specs(included_name, included_info, specs_by_hash)

        add_specs(env_name, env_info, included_json_specs_by_hash)
        return included_json_specs_by_hash
        return lockfile_dict["_meta"]["lockfile-version"]

    def _read_lockfile_dict(self, d):
        """Read a lockfile dictionary into this environment."""
        self.specs_by_hash = {}
        self.included_specs_by_hash = {}
        self.included_concretized_user_specs = {}
        self.included_concretized_order = {}

        roots = d["roots"]
        self.concretized_user_specs = [Spec(r["spec"]) for r in roots]
        self.concretized_order = [r["hash"] for r in roots]
        json_specs_by_hash = d["concrete_specs"]
        included_json_specs_by_hash = {}

        if included_concrete_name in d:
            for env_name, env_info in d[included_concrete_name].items():
                included_json_specs_by_hash.update(
                    self.set_included_concretized_user_specs(
                        env_name, env_info, included_json_specs_by_hash
                    )
                )
        # Track specs by their lockfile key. Currently spack uses the finest
        # grained hash as the lockfile key, while older formats used the build
        # hash or a previous incarnation of the DAG hash (one that did not
        # include build deps or package hash).
        specs_by_hash = {}

        # Track specs by their DAG hash, allows handling DAG hash collisions
        first_seen = {}
        current_lockfile_format = d["_meta"]["lockfile-version"]
        try:
            reader = READER_CLS[current_lockfile_format]
@@ -2261,39 +1998,6 @@ def _read_lockfile_dict(self, d):
            msg += " You need to use a newer Spack version."
            raise SpackEnvironmentError(msg)

        first_seen, self.concretized_order = self.filter_specs(
            reader, json_specs_by_hash, self.concretized_order
        )

        for spec_dag_hash in self.concretized_order:
            self.specs_by_hash[spec_dag_hash] = first_seen[spec_dag_hash]

        if any(self.included_concretized_order.values()):
            first_seen = {}

            for env_name, concretized_order in self.included_concretized_order.items():
                filtered_spec, self.included_concretized_order[env_name] = self.filter_specs(
                    reader, included_json_specs_by_hash, concretized_order
                )
                first_seen.update(filtered_spec)

            for env_path, spec_hashes in self.included_concretized_order.items():
                self.included_specs_by_hash[env_path] = {}
                for spec_dag_hash in spec_hashes:
                    self.included_specs_by_hash[env_path].update(
                        {spec_dag_hash: first_seen[spec_dag_hash]}
                    )

    def filter_specs(self, reader, json_specs_by_hash, order_concretized):
        # Track specs by their lockfile key. Currently spack uses the finest
        # grained hash as the lockfile key, while older formats used the build
        # hash or a previous incarnation of the DAG hash (one that did not
        # include build deps or package hash).
        specs_by_hash = {}

        # Track specs by their DAG hash, allows handling DAG hash collisions
        first_seen = {}

        # First pass: Put each spec in the map ignoring dependencies
        for lockfile_key, node_dict in json_specs_by_hash.items():
            spec = reader.from_node_dict(node_dict)
@@ -2316,8 +2020,7 @@ def filter_specs(self, reader, json_specs_by_hash, order_concretized):
        # keep. This is only required as long as we support older lockfile
        # formats where the mapping from DAG hash to lockfile key is possibly
        # one-to-many.

        for lockfile_key in order_concretized:
        for lockfile_key in self.concretized_order:
            for s in specs_by_hash[lockfile_key].traverse():
                if s.dag_hash() not in first_seen:
                    first_seen[s.dag_hash()] = s
@@ -2325,10 +2028,12 @@ def filter_specs(self, reader, json_specs_by_hash, order_concretized):
        # Now make sure concretized_order and our internal specs dict
        # contains the keys used by modern spack (i.e. the dag_hash
        # that includes build deps and package hash).
        self.concretized_order = [
            specs_by_hash[h_key].dag_hash() for h_key in self.concretized_order
        ]

        order_concretized = [specs_by_hash[h_key].dag_hash() for h_key in order_concretized]

        return first_seen, order_concretized
        for spec_dag_hash in self.concretized_order:
            self.specs_by_hash[spec_dag_hash] = first_seen[spec_dag_hash]

    def write(self, regenerate: bool = True) -> None:
        """Writes an in-memory environment to its location on disk.
@@ -2341,7 +2046,7 @@ def write(self, regenerate: bool = True) -> None:
            regenerate: regenerate views and run post-write hooks as well as writing if True.
        """
        self.manifest_uptodate_or_warn()
        if self.specs_by_hash or self.included_concrete_envs:
        if self.specs_by_hash:
            self.ensure_env_directory_exists(dot_env=True)
            self.update_environment_repository()
            self.manifest.flush()
@@ -2840,19 +2545,6 @@ def override_user_spec(self, user_spec: str, idx: int) -> None:
            raise SpackEnvironmentError(msg) from e
        self.changed = True

    def set_include_concrete(self, include_concrete: List[str]) -> None:
        """Sets the included concrete environments in the manifest to the value(s) passed as input.

        Args:
            include_concrete: list of already existing concrete environments to include
        """
        self.pristine_configuration[included_concrete_name] = []

        for env_path in include_concrete:
            self.pristine_configuration[included_concrete_name].append(env_path)

        self.changed = True

    def add_definition(self, user_spec: str, list_name: str) -> None:
        """Appends a user spec to the first active definition matching the name passed as argument.

@@ -3036,56 +2728,54 @@ def included_config_scopes(self) -> List[spack.config.ConfigScope]:
        for i, config_path in enumerate(reversed(includes)):
            # allow paths to contain spack config/environment variables, etc.
            config_path = substitute_path_variables(config_path)

            include_url = urllib.parse.urlparse(config_path)

            # If scheme is not valid, config_path is not a url
            # of a type Spack is generally aware
            if spack.util.url.validate_scheme(include_url.scheme):
                # Transform file:// URLs to direct includes.
                if include_url.scheme == "file":
                    config_path = urllib.request.url2pathname(include_url.path)
            # Transform file:// URLs to direct includes.
            if include_url.scheme == "file":
                config_path = urllib.request.url2pathname(include_url.path)

                # Any other URL should be fetched.
                elif include_url.scheme in ("http", "https", "ftp"):
                    # Stage any remote configuration file(s)
                    staged_configs = (
                        os.listdir(self.config_stage_dir)
                        if os.path.exists(self.config_stage_dir)
                        else []
            # Any other URL should be fetched.
            elif include_url.scheme in ("http", "https", "ftp"):
                # Stage any remote configuration file(s)
                staged_configs = (
                    os.listdir(self.config_stage_dir)
                    if os.path.exists(self.config_stage_dir)
                    else []
                )
                    remote_path = urllib.request.url2pathname(include_url.path)
                    basename = os.path.basename(remote_path)
                    if basename in staged_configs:
                        # Do NOT re-stage configuration files over existing
                        # ones with the same name since there is a risk of
                        # losing changes (e.g., from 'spack config update').
                        tty.warn(
                            "Will not re-stage configuration from {0} to avoid "
                            "losing changes to the already staged file of the "
                            "same name.".format(remote_path)
                        )
                remote_path = urllib.request.url2pathname(include_url.path)
                basename = os.path.basename(remote_path)
                if basename in staged_configs:
                    # Do NOT re-stage configuration files over existing
                    # ones with the same name since there is a risk of
                    # losing changes (e.g., from 'spack config update').
                    tty.warn(
                        "Will not re-stage configuration from {0} to avoid "
                        "losing changes to the already staged file of the "
                        "same name.".format(remote_path)
                    )

                    # Recognize the configuration stage directory
                    # is flattened to ensure a single copy of each
                    # configuration file.
                    config_path = self.config_stage_dir
                    if basename.endswith(".yaml"):
                        config_path = os.path.join(config_path, basename)
                else:
                    staged_path = spack.config.fetch_remote_configs(
                        config_path, str(self.config_stage_dir), skip_existing=True
                    )
                    if not staged_path:
                        raise SpackEnvironmentError(
                            "Unable to fetch remote configuration {0}".format(config_path)
                        )
                    config_path = staged_path

                elif include_url.scheme:
                    raise ValueError(
                        f"Unsupported URL scheme ({include_url.scheme}) for "
                        f"environment include: {config_path}"
                # Recognize the configuration stage directory
                # is flattened to ensure a single copy of each
                # configuration file.
                config_path = self.config_stage_dir
                if basename.endswith(".yaml"):
                    config_path = os.path.join(config_path, basename)
            else:
                staged_path = spack.config.fetch_remote_configs(
                    config_path, str(self.config_stage_dir), skip_existing=True
                )
                if not staged_path:
                    raise SpackEnvironmentError(
                        "Unable to fetch remote configuration {0}".format(config_path)
                    )
                config_path = staged_path

            elif include_url.scheme:
                raise ValueError(
                    f"Unsupported URL scheme ({include_url.scheme}) for "
                    f"environment include: {config_path}"
                )

            # treat relative paths as relative to the environment
            if not os.path.isabs(config_path):
@@ -12,10 +12,6 @@
def post_install(spec, explicit):
    # Push package to all buildcaches with autopush==True

    # Do nothing if spec is an external package
    if spec.external:
        return

    # Do nothing if package was not installed from source
    pkg = spec.package
    if pkg.installed_from_binary_cache:
@@ -1,8 +0,0 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)


def post_install(spec, explicit=None):
    spec.package.windows_establish_runtime_linkage()
@@ -488,7 +488,6 @@ def _process_binary_cache_tarball(

    with timer.measure("install"), spack.util.path.filter_padding():
        binary_distribution.extract_tarball(pkg.spec, download_result, force=False, timer=timer)
        pkg.windows_establish_runtime_linkage()

        if hasattr(pkg, "_post_buildcache_install_hook"):
            pkg._post_buildcache_install_hook()
@@ -1699,6 +1698,10 @@ def _install_task(self, task: BuildTask, install_status: InstallStatus) -> None:
            spack.package_base.PackageBase._verbose = spack.build_environment.start_build_process(
                pkg, build_process, install_args
            )
            # Currently this is how RPATH-like behavior is achieved on Windows, after install
            # establish runtime linkage via Windows Runtime link object
            # Note: this is a no-op on non Windows platforms
            pkg.windows_establish_runtime_linkage()
            # Note: PARENT of the build process adds the new package to
            # the database, so that we don't need to re-read from file.
            spack.store.STORE.db.add(pkg.spec, spack.store.STORE.layout, explicit=explicit)

@@ -427,7 +427,7 @@ def make_argument_parser(**kwargs):
    parser.add_argument(
        "--color",
        action="store",
        default=None,
        default=os.environ.get("SPACK_COLOR", "auto"),
        choices=("always", "never", "auto"),
        help="when to colorize output (default: auto)",
    )
@@ -622,8 +622,7 @@ def setup_main_options(args):
    # with color
    color.try_enable_terminal_color_on_windows()
    # when to use color (takes always, auto, or never)
    if args.color is not None:
        color.set_color_when(args.color)
    color.set_color_when(args.color)


def allows_unknown_args(command):

@@ -398,7 +398,7 @@ def create_opener():
    opener = urllib.request.OpenerDirector()
    for handler in [
        urllib.request.UnknownHandler(),
        urllib.request.HTTPSHandler(context=spack.util.web.ssl_create_default_context()),
        urllib.request.HTTPSHandler(),
        spack.util.web.SpackHTTPDefaultErrorHandler(),
        urllib.request.HTTPRedirectHandler(),
        urllib.request.HTTPErrorProcessor(),
@@ -418,27 +418,18 @@ def ensure_status(request: urllib.request.Request, response: HTTPResponse, statu
    )


def default_retry(f, retries: int = 5, sleep=None):
def default_retry(f, retries: int = 3, sleep=None):
    sleep = sleep or time.sleep

    def wrapper(*args, **kwargs):
        for i in range(retries):
            try:
                return f(*args, **kwargs)
            except (urllib.error.URLError, TimeoutError) as e:
            except urllib.error.HTTPError as e:
                # Retry on internal server errors, and rate limit errors
                # Potentially this could take into account the Retry-After header
                # if registries support it
                if i + 1 != retries and (
                    (
                        isinstance(e, urllib.error.HTTPError)
                        and (500 <= e.code < 600 or e.code == 429)
                    )
                    or (
                        isinstance(e, urllib.error.URLError) and isinstance(e.reason, TimeoutError)
                    )
                    or isinstance(e, TimeoutError)
                ):
                if i + 1 != retries and (500 <= e.code < 600 or e.code == 429):
                    # Exponential backoff
                    sleep(2**i)
                    continue

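The retry policy in this hunk is: retry only on rate limiting (429) and server errors (5xx), with exponential backoff of 1s, 2s, 4s, ... between attempts. A standalone sketch of the same policy (function name and URL handling are illustrative):

    import time
    import urllib.error
    import urllib.request


    def fetch_with_retry(url, retries=5):
        for i in range(retries):
            try:
                return urllib.request.urlopen(url)
            except urllib.error.HTTPError as e:
                retryable = 500 <= e.code < 600 or e.code == 429
                if i + 1 == retries or not retryable:
                    raise
                time.sleep(2**i)  # exponential backoff: 1s, 2s, 4s, ...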
@@ -39,7 +39,6 @@
)
from spack.build_systems.cargo import CargoPackage
from spack.build_systems.cmake import CMakePackage, generator
from spack.build_systems.compiler import CompilerPackage
from spack.build_systems.cuda import CudaPackage
from spack.build_systems.generic import Package
from spack.build_systems.gnu import GNUMirrorPackage

@@ -1240,7 +1240,7 @@ def install_test_root(self):
        """Return the install test root directory."""
        tty.warn(
            "The 'pkg.install_test_root' property is deprecated with removal "
            "expected v0.23. Use 'install_test_root(pkg)' instead."
            "expected v0.22. Use 'install_test_root(pkg)' instead."
        )
        return install_test_root(self)

@@ -1898,7 +1898,7 @@ def cache_extra_test_sources(self, srcs):
        """
        msg = (
            "'pkg.cache_extra_test_sources(srcs) is deprecated with removal "
            "expected in v0.23. Use 'cache_extra_test_sources(pkg, srcs)' "
            "expected in v0.22. Use 'cache_extra_test_sources(pkg, srcs)' "
            "instead."
        )
        warnings.warn(msg)
@@ -2555,12 +2555,7 @@ class PackageStillNeededError(InstallError):
    """Raised when package is still needed by another on uninstall."""

    def __init__(self, spec, dependents):
        spec_fmt = spack.spec.DEFAULT_FORMAT + " /{hash:7}"
        dep_fmt = "{name}{@versions} /{hash:7}"
        super().__init__(
            f"Cannot uninstall {spec.format(spec_fmt)}, "
            f"needed by {[dep.format(dep_fmt) for dep in dependents]}"
        )
        super().__init__("Cannot uninstall %s" % spec)
        self.spec = spec
        self.dependents = dependents


@@ -16,7 +16,7 @@
import llnl.util.lang
import llnl.util.tty as tty
from llnl.util.lang import memoized
from llnl.util.symlink import readlink, symlink
from llnl.util.symlink import symlink

import spack.paths
import spack.platforms
@@ -25,7 +25,6 @@
import spack.store
import spack.util.elf as elf
import spack.util.executable as executable
import spack.util.path

from .relocate_text import BinaryFilePrefixReplacer, TextFilePrefixReplacer

@@ -614,7 +613,7 @@ def relocate_links(links, prefix_to_prefix):
    """Relocate links to a new install prefix."""
    regex = re.compile("|".join(re.escape(p) for p in prefix_to_prefix.keys()))
    for link in links:
        old_target = readlink(link)
        old_target = os.readlink(link)
        match = regex.match(old_target)

        # No match.

@@ -27,7 +27,7 @@
from spack.error import SpackError
from spack.util.crypto import checksum
from spack.util.log_parse import parse_log_events
from spack.util.web import ssl_create_default_context
from spack.util.web import urllib_ssl_cert_handler

from .base import Reporter
from .extract import extract_test_parts
@@ -428,7 +428,7 @@ def upload(self, filename):
        # Compute md5 checksum for the contents of this file.
        md5sum = checksum(hashlib.md5, filename, block_size=8192)

        opener = build_opener(HTTPSHandler(context=ssl_create_default_context()))
        opener = build_opener(HTTPSHandler(context=urllib_ssl_cert_handler()))
        with open(filename, "rb") as f:
            params_dict = {
                "build": self.buildname,

@@ -9,40 +9,13 @@
"""
from typing import Any, Dict

LIST_OF_SPECS = {"type": "array", "items": {"type": "string"}}

properties: Dict[str, Any] = {
    "concretizer": {
        "type": "object",
        "additionalProperties": False,
        "properties": {
            "reuse": {
                "oneOf": [
                    {"type": "boolean"},
                    {"type": "string", "enum": ["dependencies"]},
                    {
                        "type": "object",
                        "properties": {
                            "roots": {"type": "boolean"},
                            "include": LIST_OF_SPECS,
                            "exclude": LIST_OF_SPECS,
                            "from": {
                                "type": "array",
                                "items": {
                                    "type": "object",
                                    "properties": {
                                        "type": {
                                            "type": "string",
                                            "enum": ["local", "buildcache", "external"],
                                        },
                                        "include": LIST_OF_SPECS,
                                        "exclude": LIST_OF_SPECS,
                                    },
                                },
                            },
                        },
                    },
                ]
                "oneOf": [{"type": "boolean"}, {"type": "string", "enum": ["dependencies"]}]
            },
            "enable_node_namespace": {"type": "boolean"},
            "targets": {

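For orientation, a `concretizer` config instance that the structured `reuse` schema above accepts could look like this Python dict (values are illustrative only; in practice this data comes from `concretizer.yaml`):

    reuse_config = {
        "concretizer": {
            "reuse": {
                "roots": True,
                "include": ["%gcc"],
                "exclude": ["mpich"],
                "from": [
                    {"type": "local"},
                    {"type": "buildcache", "include": ["zlib"]},
                ],
            }
        }
    }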
@@ -35,7 +35,6 @@
        {
            "include": {"type": "array", "default": [], "items": {"type": "string"}},
            "specs": spec_list_schema,
            "include_concrete": {"type": "array", "default": [], "items": {"type": "string"}},
        },
    ),
}

@@ -141,7 +141,7 @@
        "deprecatedProperties": {
            "properties": ["version"],
            "message": "setting version preferences in the 'all' section of packages.yaml "
            "is deprecated and will be removed in v0.23\n\n\tThese preferences "
            "is deprecated and will be removed in v0.22\n\n\tThese preferences "
            "will be ignored by Spack. You can set them only in package-specific sections "
            "of the same file.\n",
            "error": False,
@@ -197,7 +197,7 @@
            "properties": ["target", "compiler", "providers"],
            "message": "setting 'compiler:', 'target:' or 'provider:' preferences in "
            "a package-specific section of packages.yaml is deprecated, and will be "
            "removed in v0.23.\n\n\tThese preferences will be ignored by Spack, and "
            "removed in v0.22.\n\n\tThese preferences will be ignored by Spack, and "
            "can be set only in the 'all' section of the same file. "
            "You can run:\n\n\t\t$ spack audit configs\n\n\tto get better diagnostics, "
            "including files:lines where the deprecated attributes are used.\n\n"

@@ -6,7 +6,6 @@
import collections.abc
import copy
import enum
import functools
import itertools
import os
import pathlib
@@ -809,22 +808,12 @@ def solve(self, setup, specs, reuse=None, output=None, control=None, allow_depre
            A tuple of the solve result, the timer for the different phases of the
            solve, and the internal statistics from clingo.
        """
        # avoid circular import
        import spack.bootstrap

        output = output or DEFAULT_OUTPUT_CONFIGURATION
        timer = spack.util.timer.Timer()

        # Initialize the control object for the solver
        self.control = control or default_clingo_control()

        # ensure core deps are present on Windows
        # needs to modify active config scope, so cannot be run within
        # bootstrap config scope
        if sys.platform == "win32":
            tty.debug("Ensuring basic dependencies {win-sdk, wgl} available")
            spack.bootstrap.core.ensure_winsdk_external_or_raise()

        timer.start("setup")
        asp_problem = setup.setup(specs, reuse=reuse, allow_deprecated=allow_deprecated)
        if output.out is not None:
@@ -944,26 +933,14 @@ class ConcreteSpecsByHash(collections.abc.Mapping):

    def __init__(self) -> None:
        self.data: Dict[str, spack.spec.Spec] = {}
        self.explicit: Set[str] = set()

    def __getitem__(self, dag_hash: str) -> spack.spec.Spec:
        return self.data[dag_hash]

    def explicit_items(self) -> Iterator[Tuple[str, spack.spec.Spec]]:
        """Iterate on items that have been added explicitly, and not just as a dependency
        of other nodes.
        """
        for h, s in self.items():
            # We need to make an exception for gcc-runtime, until we can splice it.
            if h in self.explicit or s.name == "gcc-runtime":
                yield h, s

    def add(self, spec: spack.spec.Spec) -> bool:
        """Adds a new concrete spec to the mapping. Returns True if the spec was just added,
        False if the spec was already in the mapping.

        Calling this function marks the spec as added explicitly.

        Args:
            spec: spec to be added

@@ -978,7 +955,6 @@ def add(self, spec: spack.spec.Spec) -> bool:
            raise ValueError(msg)

        dag_hash = spec.dag_hash()
        self.explicit.add(dag_hash)
        if dag_hash in self.data:
            return False

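A toy illustration of the "explicit" bookkeeping above, much simplified: only hashes that go through `add()` are marked explicit, while entries injected any other way are not surfaced by `explicit_items()`:

    data, explicit = {}, set()

    def add(h, spec):
        # add() marks the hash explicit and stores the spec if it is new.
        explicit.add(h)
        if h in data:
            return False
        data[h] = spec
        return True

    add("h1", "zlib")      # added explicitly
    data["h2"] = "cmake"   # injected as a dependency, bypassing add()
    print([h for h in data if h in explicit])  # ['h1']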
@@ -1245,9 +1221,6 @@ def pkg_rules(self, pkg, tests):

    def trigger_rules(self):
        """Flushes all the trigger rules collected so far, and clears the cache."""
        if not self._trigger_cache:
            return

        self.gen.h2("Trigger conditions")
        for name in self._trigger_cache:
            cache = self._trigger_cache[name]
@@ -1261,9 +1234,6 @@ def trigger_rules(self):

    def effect_rules(self):
        """Flushes all the effect rules collected so far, and clears the cache."""
        if not self._effect_cache:
            return

        self.gen.h2("Imposed requirements")
        for name in self._effect_cache:
            cache = self._effect_cache[name]
@@ -1426,6 +1396,7 @@ def condition(
            raise ValueError(f"Must provide a name for anonymous condition: '{required_spec}'")

        with spec_with_name(required_spec, name):

            # Check if we can emit the requirements before updating the condition ID counter.
            # In this way, if a condition can't be emitted but the exception is handled in the
            # caller, we won't emit partial facts.
@@ -1643,31 +1614,6 @@ def external_packages(self):
        packages_yaml = _external_config_with_implicit_externals(spack.config.CONFIG)

        self.gen.h1("External packages")
        spec_filters = []
        concretizer_yaml = spack.config.get("concretizer")
        reuse_yaml = concretizer_yaml.get("reuse")
        if isinstance(reuse_yaml, typing.Mapping):
            default_include = reuse_yaml.get("include", [])
            default_exclude = reuse_yaml.get("exclude", [])
            libc_externals = list(all_libcs())
            for source in reuse_yaml.get("from", []):
                if source["type"] != "external":
                    continue

                include = source.get("include", default_include)
                if include:
                    # Since libcs are implicit externals, we need to implicitly include them
                    include = include + libc_externals
                exclude = source.get("exclude", default_exclude)
                spec_filters.append(
                    SpecFilter(
                        factory=lambda: [],
                        is_usable=lambda x: True,
                        include=include,
                        exclude=exclude,
                    )
                )

        for pkg_name, data in packages_yaml.items():
            if pkg_name == "all":
                continue
@@ -1676,6 +1622,7 @@ def external_packages(self):
            if pkg_name not in spack.repo.PATH:
                continue

            self.gen.h2("External package: {0}".format(pkg_name))
            # Check if the external package is buildable. If it is
            # not then "external(<pkg>)" is a fact, unless we can
            # reuse an already installed spec.
@@ -1685,17 +1632,7 @@ def external_packages(self):

            # Read a list of all the specs for this package
            externals = data.get("externals", [])
            candidate_specs = [
                spack.spec.parse_with_version_concrete(x["spec"]) for x in externals
            ]

            external_specs = []
            if spec_filters:
                for current_filter in spec_filters:
                    current_filter.factory = lambda: candidate_specs
                    external_specs.extend(current_filter.selected_specs())
            else:
                external_specs.extend(candidate_specs)
            external_specs = [spack.spec.parse_with_version_concrete(x["spec"]) for x in externals]

            # Order the external versions to prefer more recent versions
            # even if specs in packages.yaml are not ordered that way
@@ -2089,7 +2026,7 @@ def _supported_targets(self, compiler_name, compiler_version, targets):
            try:
                with warnings.catch_warnings():
                    warnings.simplefilter("ignore")
                    target.optimization_flags(compiler_name, str(compiler_version))
                    target.optimization_flags(compiler_name, compiler_version)
                supported.append(target)
            except archspec.cpu.UnsupportedMicroarchitecture:
                continue
@@ -2366,7 +2303,7 @@ def register_concrete_spec(self, spec, possible):

    def concrete_specs(self):
        """Emit facts for reusable specs"""
        for h, spec in self.reusable_and_possible.explicit_items():
        for h, spec in self.reusable_and_possible.items():
            # this indicates that there is a spec like this installed
            self.gen.fact(fn.installed_hash(spec.name, h))
            # this describes what constraints it imposes on the solve
@@ -3250,16 +3187,13 @@ class SpecBuilder:
            r"^.*_propagate$",
            r"^.*_satisfies$",
            r"^.*_set$",
            r"^compatible_libc$",
            r"^dependency_holds$",
            r"^external_conditions_hold$",
            r"^node_compiler$",
            r"^package_hash$",
            r"^root$",
            r"^track_dependencies$",
            r"^variant_default_value_from_cli$",
            r"^virtual_node$",
            r"^virtual_on_incoming_edges$",
            r"^virtual_root$",
        ]
    )
@@ -3630,165 +3564,25 @@ def _has_runtime_dependencies(spec: spack.spec.Spec) -> bool:
    return True


class SpecFilter:
    """Given a method to produce a list of specs, this class can filter them according to
    different criteria.
    """

    def __init__(
        self,
        factory: Callable[[], List[spack.spec.Spec]],
        is_usable: Callable[[spack.spec.Spec], bool],
        include: List[str],
        exclude: List[str],
    ) -> None:
        """
        Args:
            factory: factory to produce a list of specs
            is_usable: predicate that takes a spec in input and returns False if the spec
                should not be considered for this filter, True otherwise.
            include: if present, a "good" spec must match at least one entry in the list
            exclude: if present, a "good" spec must not match any entry in the list
        """
        self.factory = factory
        self.is_usable = is_usable
        self.include = include
        self.exclude = exclude

    def is_selected(self, s: spack.spec.Spec) -> bool:
        if not self.is_usable(s):
            return False

        if self.include and not any(s.satisfies(c) for c in self.include):
            return False

        if self.exclude and any(s.satisfies(c) for c in self.exclude):
            return False

        return True

    def selected_specs(self) -> List[spack.spec.Spec]:
        return [s for s in self.factory() if self.is_selected(s)]

@staticmethod
|
||||
def from_store(configuration, include, exclude) -> "SpecFilter":
|
||||
"""Constructs a filter that takes the specs from the current store."""
|
||||
packages = _external_config_with_implicit_externals(configuration)
|
||||
is_reusable = functools.partial(_is_reusable, packages=packages, local=True)
|
||||
factory = functools.partial(_specs_from_store, configuration=configuration)
|
||||
return SpecFilter(factory=factory, is_usable=is_reusable, include=include, exclude=exclude)
|
||||
|
||||
@staticmethod
|
||||
def from_buildcache(configuration, include, exclude) -> "SpecFilter":
|
||||
"""Constructs a filter that takes the specs from the configured buildcaches."""
|
||||
packages = _external_config_with_implicit_externals(configuration)
|
||||
is_reusable = functools.partial(_is_reusable, packages=packages, local=False)
|
||||
return SpecFilter(
|
||||
factory=_specs_from_mirror, is_usable=is_reusable, include=include, exclude=exclude
|
||||
)
|
||||
|
||||
|
||||
def _specs_from_store(configuration):
|
||||
store = spack.store.create(configuration)
|
||||
with store.db.read_transaction():
|
||||
return store.db.query(installed=True)
|
||||
|
||||
|
||||
def _specs_from_mirror():
|
||||
try:
|
||||
return spack.binary_distribution.update_cache_and_get_specs()
|
||||
except (spack.binary_distribution.FetchCacheError, IndexError):
|
||||
# this is raised when no mirrors had indices.
|
||||
# TODO: update mirror configuration so it can indicate that the
|
||||
# TODO: source cache (or any mirror really) doesn't have binaries.
|
||||
return []
|
||||
|
||||
|
||||
class ReuseStrategy(enum.Enum):
|
||||
ROOTS = enum.auto()
|
||||
DEPENDENCIES = enum.auto()
|
||||
NONE = enum.auto()
|
||||
|
||||
|
||||
class ReusableSpecsSelector:
|
||||
"""Selects specs that can be reused during concretization."""
|
||||
|
||||
def __init__(self, configuration: spack.config.Configuration) -> None:
|
||||
self.configuration = configuration
|
||||
self.store = spack.store.create(configuration)
|
||||
self.reuse_strategy = ReuseStrategy.ROOTS
|
||||
|
||||
reuse_yaml = self.configuration.get("concretizer:reuse", False)
|
||||
self.reuse_sources = []
|
||||
if not isinstance(reuse_yaml, typing.Mapping):
|
||||
if reuse_yaml is False:
|
||||
self.reuse_strategy = ReuseStrategy.NONE
|
||||
if reuse_yaml == "dependencies":
|
||||
self.reuse_strategy = ReuseStrategy.DEPENDENCIES
|
||||
self.reuse_sources.extend(
|
||||
[
|
||||
SpecFilter.from_store(
|
||||
configuration=self.configuration, include=[], exclude=[]
|
||||
),
|
||||
SpecFilter.from_buildcache(
|
||||
configuration=self.configuration, include=[], exclude=[]
|
||||
),
|
||||
]
|
||||
)
|
||||
else:
|
||||
roots = reuse_yaml.get("roots", True)
|
||||
if roots is True:
|
||||
self.reuse_strategy = ReuseStrategy.ROOTS
|
||||
else:
|
||||
self.reuse_strategy = ReuseStrategy.DEPENDENCIES
|
||||
default_include = reuse_yaml.get("include", [])
|
||||
default_exclude = reuse_yaml.get("exclude", [])
|
||||
default_sources = [{"type": "local"}, {"type": "buildcache"}]
|
||||
for source in reuse_yaml.get("from", default_sources):
|
||||
include = source.get("include", default_include)
|
||||
exclude = source.get("exclude", default_exclude)
|
||||
if source["type"] == "local":
|
||||
self.reuse_sources.append(
|
||||
SpecFilter.from_store(self.configuration, include=include, exclude=exclude)
|
||||
)
|
||||
elif source["type"] == "buildcache":
|
||||
self.reuse_sources.append(
|
||||
SpecFilter.from_buildcache(
|
||||
self.configuration, include=include, exclude=exclude
|
||||
)
|
||||
)
|
||||
|
||||
def reusable_specs(self, specs: List[spack.spec.Spec]) -> List[spack.spec.Spec]:
|
||||
if self.reuse_strategy == ReuseStrategy.NONE:
|
||||
return []
|
||||
|
||||
result = []
|
||||
for reuse_source in self.reuse_sources:
|
||||
result.extend(reuse_source.selected_specs())
|
||||
|
||||
# If we only want to reuse dependencies, remove the root specs
|
||||
if self.reuse_strategy == ReuseStrategy.DEPENDENCIES:
|
||||
result = [spec for spec in result if not any(root in spec for root in specs)]
|
||||
|
||||
return result
|
||||
|
||||
|
||||
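To make the new selection machinery concrete, here is a hedged sketch (not part of this diff) of how a reuse configuration is consumed; the constraints and package names are illustrative:

import spack.config
import spack.solver.asp

reuse_yaml = {
    "roots": True,  # reuse root specs as well as dependencies
    "from": [
        {"type": "local", "include": ["%gcc"]},          # store specs built with gcc only
        {"type": "buildcache", "exclude": ["openssl"]},  # never reuse openssl from mirrors
    ],
}
with spack.config.override("concretizer:reuse", reuse_yaml):
    selector = spack.solver.asp.ReusableSpecsSelector(spack.config.CONFIG)
    candidates = selector.reusable_specs(["mpileaks"])
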
class Solver:
    """This is the main external interface class for solving.

    It manages solver configuration and preferences in one place. It sets up the solve
    and passes the setup method to the driver, as well.

    Properties of interest:

    ``reuse (bool)``
        Whether to try to reuse existing installs/binaries

    """

    def __init__(self):
        self.driver = PyclingoDriver()
        self.selector = ReusableSpecsSelector(configuration=spack.config.CONFIG)
        if spack.platforms.host().name == "cray":
            msg = (
                "The Cray platform, i.e. 'platform=cray', will be removed in Spack v0.23. "
                "All Cray machines will then be detected as 'platform=linux'."
            )
            warnings.warn(msg)

        # These properties are settable via spack configuration, and overridable
        # by setting them directly as properties.
        self.reuse = spack.config.get("concretizer:reuse", True)

    @staticmethod
    def _check_input_and_extract_concrete_specs(specs):

@@ -3802,6 +3596,39 @@ def _check_input_and_extract_concrete_specs(specs):
            spack.spec.Spec.ensure_valid_variants(s)
        return reusable

    def _reusable_specs(self, specs):
        reusable_specs = []
        if self.reuse:
            packages = _external_config_with_implicit_externals(spack.config.CONFIG)
            # Specs from the local Database
            with spack.store.STORE.db.read_transaction():
                reusable_specs.extend(
                    s
                    for s in spack.store.STORE.db.query(installed=True)
                    if _is_reusable(s, packages, local=True)
                )

            # Specs from buildcaches
            try:
                reusable_specs.extend(
                    s
                    for s in spack.binary_distribution.update_cache_and_get_specs()
                    if _is_reusable(s, packages, local=False)
                )
            except (spack.binary_distribution.FetchCacheError, IndexError):
                # this is raised when no mirrors had indices.
                # TODO: update mirror configuration so it can indicate that the
                # TODO: source cache (or any mirror really) doesn't have binaries.
                pass

        # If we only want to reuse dependencies, remove the root specs
        if self.reuse == "dependencies":
            reusable_specs = [
                spec for spec in reusable_specs if not any(root in spec for root in specs)
            ]

        return reusable_specs

    def solve(
        self,
        specs,
@@ -3827,7 +3654,7 @@ def solve(
        # Check upfront that the variants are admissible
        specs = [s.lookup_hash() for s in specs]
        reusable_specs = self._check_input_and_extract_concrete_specs(specs)
        reusable_specs.extend(self.selector.reusable_specs(specs))
        reusable_specs.extend(self._reusable_specs(specs))
        setup = SpackSolverSetup(tests=tests)
        output = OutputConfiguration(timers=timers, stats=stats, out=out, setup_only=setup_only)
        result, _, _ = self.driver.solve(
@@ -3856,7 +3683,7 @@ def solve_in_rounds(
        """
        specs = [s.lookup_hash() for s in specs]
        reusable_specs = self._check_input_and_extract_concrete_specs(specs)
        reusable_specs.extend(self.selector.reusable_specs(specs))
        reusable_specs.extend(self._reusable_specs(specs))
        setup = SpackSolverSetup(tests=tests)

        # Tell clingo that we don't have to solve all the inputs at once

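A hedged sketch of driving the solver programmatically (keyword options and result handling are elided, and the package name is illustrative):

import spack.solver.asp
import spack.spec

solver = spack.solver.asp.Solver()
result = solver.solve([spack.spec.Spec("zlib")])  # reusable specs are gathered internally
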
@@ -1424,7 +1424,6 @@ opt_criterion(73, "deprecated versions used").
#minimize{
    1@73+Priority,PackageNode
    : attr("deprecated", PackageNode, _),
    not external(PackageNode),
    build_priority(PackageNode, Priority)
}.

@@ -195,7 +195,7 @@ def _bootstrap_clingo() -> ModuleType:
    import spack.bootstrap

    with spack.bootstrap.ensure_bootstrap_configuration():
        spack.bootstrap.ensure_clingo_importable_or_raise()
        spack.bootstrap.ensure_core_dependencies()
        clingo_mod = importlib.import_module("clingo")

    return clingo_mod

@@ -1030,13 +1030,16 @@ def clear(self):
        self.edges.clear()


def _command_default_handler(spec: "Spec"):
def _command_default_handler(descriptor, spec, cls):
    """Default handler when looking for the 'command' attribute.

    Tries to search for ``spec.name`` in the ``spec.home.bin`` directory.

    Parameters:
        spec: spec that is being queried
        descriptor (ForwardQueryToPackage): descriptor that triggered the call
        spec (Spec): spec that is being queried
        cls (type(spec)): type of spec, to match the signature of the
            descriptor ``__get__`` method

    Returns:
        Executable: An executable of the command
@@ -1049,17 +1052,22 @@ def _command_default_handler(spec: "Spec"):

    if fs.is_exe(path):
        return spack.util.executable.Executable(path)
    raise RuntimeError(f"Unable to locate {spec.name} command in {home.bin}")
    else:
        msg = "Unable to locate {0} command in {1}"
        raise RuntimeError(msg.format(spec.name, home.bin))


def _headers_default_handler(spec: "Spec"):
def _headers_default_handler(descriptor, spec, cls):
    """Default handler when looking for the 'headers' attribute.

    Tries to search for ``*.h`` files recursively starting from
    ``spec.package.home.include``.

    Parameters:
        spec: spec that is being queried
        descriptor (ForwardQueryToPackage): descriptor that triggered the call
        spec (Spec): spec that is being queried
        cls (type(spec)): type of spec, to match the signature of the
            descriptor ``__get__`` method

    Returns:
        HeaderList: The headers in ``prefix.include``
@@ -1072,10 +1080,12 @@ def _headers_default_handler(spec: "Spec"):

    if headers:
        return headers
    raise spack.error.NoHeadersError(f"Unable to locate {spec.name} headers in {home}")
    else:
        msg = "Unable to locate {0} headers in {1}"
        raise spack.error.NoHeadersError(msg.format(spec.name, home))


def _libs_default_handler(spec: "Spec"):
def _libs_default_handler(descriptor, spec, cls):
    """Default handler when looking for the 'libs' attribute.

    Tries to search for ``lib{spec.name}`` recursively starting from
@@ -1083,7 +1093,10 @@ def _libs_default_handler(spec: "Spec"):
    ``{spec.name}`` instead.

    Parameters:
        spec: spec that is being queried
        descriptor (ForwardQueryToPackage): descriptor that triggered the call
        spec (Spec): spec that is being queried
        cls (type(spec)): type of spec, to match the signature of the
            descriptor ``__get__`` method

    Returns:
        LibraryList: The libraries found
@@ -1122,33 +1135,27 @@ def _libs_default_handler(spec: "Spec"):
    if libs:
        return libs

    raise spack.error.NoLibrariesError(
        f"Unable to recursively locate {spec.name} libraries in {home}"
    )
    msg = "Unable to recursively locate {0} libraries in {1}"
    raise spack.error.NoLibrariesError(msg.format(spec.name, home))


class ForwardQueryToPackage:
    """Descriptor used to forward queries from Spec to Package"""

    def __init__(
        self,
        attribute_name: str,
        default_handler: Optional[Callable[["Spec"], Any]] = None,
        _indirect: bool = False,
    ) -> None:
    def __init__(self, attribute_name, default_handler=None):
        """Create a new descriptor.

        Parameters:
            attribute_name: name of the attribute to be searched for in the Package instance
            default_handler: default function to be called if the attribute was not found in the
                Package instance
            _indirect: temporarily added to redirect a query to another package.
            attribute_name (str): name of the attribute to be
                searched for in the Package instance
            default_handler (callable, optional): default function to be
                called if the attribute was not found in the Package
                instance
        """
        self.attribute_name = attribute_name
        self.default = default_handler
        self.indirect = _indirect

    def __get__(self, instance: "SpecBuildInterface", cls):
    def __get__(self, instance, cls):
        """Retrieves the property from Package using a well defined chain
        of responsibility.

@@ -1170,18 +1177,13 @@ def __get__(self, instance: "SpecBuildInterface", cls):
        indicating a query failure, e.g. that library files were not found in a
        'libs' query.
        """
        # TODO: this indirection exists solely for `spec["python"].command` to actually return
        # spec["python-venv"].command. It should be removed when `python` is a virtual.
        if self.indirect and instance.indirect_spec:
            pkg = instance.indirect_spec.package
        else:
            pkg = instance.wrapped_obj.package
        pkg = instance.package
        try:
            query = instance.last_query
        except AttributeError:
            # There has been no query yet: this means
            # a spec is trying to access its own attributes
            _ = instance.wrapped_obj[instance.wrapped_obj.name]  # NOQA: ignore=F841
            _ = instance[instance.name]  # NOQA: ignore=F841
            query = instance.last_query

        callbacks_chain = []
@@ -1193,8 +1195,7 @@ def __get__(self, instance: "SpecBuildInterface", cls):
        callbacks_chain.append(lambda: getattr(pkg, self.attribute_name))
        # Final resort : default callback
        if self.default is not None:
            _default = self.default  # make mypy happy
            callbacks_chain.append(lambda: _default(instance.wrapped_obj))
            callbacks_chain.append(lambda: self.default(self, instance, cls))

        # Trigger the callbacks in order, the first one producing a
        # value wins
@@ -1253,33 +1254,25 @@ def __set__(self, instance, value):
class SpecBuildInterface(lang.ObjectWrapper):
    # home is available in the base Package so no default is needed
    home = ForwardQueryToPackage("home", default_handler=None)
    headers = ForwardQueryToPackage("headers", default_handler=_headers_default_handler)
    libs = ForwardQueryToPackage("libs", default_handler=_libs_default_handler)
    command = ForwardQueryToPackage(
        "command", default_handler=_command_default_handler, _indirect=True
    )

    def __init__(self, spec: "Spec", name: str, query_parameters: List[str], _parent: "Spec"):
    command = ForwardQueryToPackage("command", default_handler=_command_default_handler)

    headers = ForwardQueryToPackage("headers", default_handler=_headers_default_handler)

    libs = ForwardQueryToPackage("libs", default_handler=_libs_default_handler)

    def __init__(self, spec, name, query_parameters):
        super().__init__(spec)
        # Adding new attributes goes after super() call since the ObjectWrapper
        # resets __dict__ to behave like the passed object
        original_spec = getattr(spec, "wrapped_obj", spec)
        self.wrapped_obj = original_spec
        self.token = original_spec, name, query_parameters, _parent
        self.token = original_spec, name, query_parameters
        is_virtual = spack.repo.PATH.is_virtual(name)
        self.last_query = QueryState(
            name=name, extra_parameters=query_parameters, isvirtual=is_virtual
        )

        # TODO: this ad-hoc logic makes `spec["python"].command` return
        # `spec["python-venv"].command` and should be removed when `python` is a virtual.
        self.indirect_spec = None
        if spec.name == "python":
            python_venvs = _parent.dependencies("python-venv")
            if not python_venvs:
                return
            self.indirect_spec = python_venvs[0]

    def __reduce__(self):
        return SpecBuildInterface, self.token

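In practice the descriptors above are what make dependency queries fall back to the default handlers; a hedged sketch, with the package names illustrative:

# `spec` is assumed to be a concrete Spec with an mpi provider in its DAG
libs = spec["mpi"].libs          # pkg.libs wins, else _libs_default_handler runs
headers = spec["mpi"].headers    # falls back to _headers_default_handler
python = spec["python"].command  # with _indirect=True this may resolve via python-venv
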
@@ -3905,13 +3898,9 @@ def satisfies(self, other: Union[str, "Spec"], deps: bool = True) -> bool:
            if not self._dependencies:
                return False

        # If we arrived here, the lhs root node satisfies the rhs root node. Now we need to check
        # all the edges that have an abstract parent, and verify that they match some edge in the
        # lhs.
        #
        # It might happen that the rhs brings in concrete sub-DAGs. For those we don't need to
        # verify the edge properties, because everything is encoded in the hash of the nodes that
        # will be verified later.
        # If we arrived here, then rhs is abstract. At the moment we don't care about the edge
        # structure of an abstract DAG, so we check if any edge could satisfy the properties
        # we ask for.
        lhs_edges: Dict[str, Set[DependencySpec]] = collections.defaultdict(set)
        for rhs_edge in other.traverse_edges(root=False, cover="edges"):
            # If we are checking for ^mpi we need to verify if there is any edge
@@ -3921,10 +3910,6 @@ def satisfies(self, other: Union[str, "Spec"], deps: bool = True) -> bool:
            if not rhs_edge.virtuals:
                continue

            # Skip edges from a concrete sub-DAG
            if rhs_edge.parent.concrete:
                continue

            if not lhs_edges:
                # Construct a map of the link/run subDAG + direct "build" edges,
                # keyed by dependency name

@@ -4144,7 +4129,7 @@ def version(self):
        raise spack.error.SpecError("Spec version is not concrete: " + str(self))
        return self.versions[0]

    def __getitem__(self, name: str):
    def __getitem__(self, name):
        """Get a dependency from the spec by its name. This call implicitly
        sets a query state in the package being retrieved. The behavior of
        packages may be influenced by additional query parameters that are
@@ -4153,7 +4138,7 @@ def __getitem__(self, name: str):
        Note that if a virtual package is queried a copy of the Spec is
        returned while for non-virtual a reference is returned.
        """
        query_parameters: List[str] = name.split(":")
        query_parameters = name.split(":")
        if len(query_parameters) > 2:
            raise KeyError("key has more than one ':' symbol. At most one is admitted.")

@@ -4176,7 +4161,7 @@ def __getitem__(self, name: str):
        )

        try:
            child: Spec = next(
            value = next(
                itertools.chain(
                    # Regular specs
                    (x for x in order() if x.name == name),
@@ -4193,9 +4178,9 @@ def __getitem__(self, name: str):
            raise KeyError(f"No spec with name {name} in {self}")

        if self._concrete:
            return SpecBuildInterface(child, name, query_parameters, _parent=self)
            return SpecBuildInterface(value, name, query_parameters)

        return child
        return value

    def __contains__(self, spec):
        """True if this spec or some dependency satisfies the spec.

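A hedged sketch of the lookup syntax this method accepts (the names and the query parameter are illustrative):

zlib = spec["zlib"]        # plain lookup by dependency name
mpi = spec["mpi:fortran"]  # at most one ':'-separated query parameter is allowed
# spec["a:b:c"] raises KeyError, since more than one ':' is rejected
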
@@ -760,6 +760,7 @@ def test_ci_rebuild_mock_success(
    rebuild_env = create_rebuild_env(tmpdir, pkg_name, broken_tests)

    monkeypatch.setattr(spack.cmd.ci, "SPACK_COMMAND", "echo")
    monkeypatch.setattr(spack.cmd.ci, "MAKE_COMMAND", "echo")

    with rebuild_env.env_dir.as_cwd():
        activate_rebuild_env(tmpdir, pkg_name, rebuild_env)
@@ -842,6 +843,7 @@ def test_ci_rebuild(
    ci_cmd("rebuild", "--tests", fail_on_error=False)

    monkeypatch.setattr(spack.cmd.ci, "SPACK_COMMAND", "notcommand")
    monkeypatch.setattr(spack.cmd.ci, "MAKE_COMMAND", "notcommand")
    monkeypatch.setattr(spack.cmd.ci, "INSTALL_FAIL_CODE", 127)

    with rebuild_env.env_dir.as_cwd():

@@ -123,13 +123,7 @@ def test_root_and_dep_match_returns_root(mock_packages, mutable_mock_env_path):


@pytest.mark.parametrize(
    "arg,conf",
    [
        ("--reuse", True),
        ("--fresh", False),
        ("--reuse-deps", "dependencies"),
        ("--fresh-roots", "dependencies"),
    ],
    "arg,conf", [("--reuse", True), ("--fresh", False), ("--reuse-deps", "dependencies")]
)
def test_concretizer_arguments(mutable_config, mock_packages, arg, conf):
    """Ensure that ConfigSetAction is doing the right thing."""

@@ -261,14 +261,15 @@ def test_compiler_list_empty(no_compilers_yaml, working_env, compilers_dir):
    [
        (
            {
                "spec": "gcc@=7.7.7 languages=c,cxx,fortran os=foobar target=x86_64",
                "spec": "gcc@=7.7.7 os=foobar target=x86_64",
                "prefix": "/path/to/fake",
                "modules": ["gcc/7.7.7", "foobar"],
                "extra_attributes": {
                    "compilers": {
                        "c": "/path/to/fake/gcc",
                    "paths": {
                        "cc": "/path/to/fake/gcc",
                        "cxx": "/path/to/fake/g++",
                        "fortran": "/path/to/fake/gfortran",
                        "fc": "/path/to/fake/gfortran",
                        "f77": "/path/to/fake/gfortran",
                    },
                    "flags": {"fflags": "-ffree-form"},
                },
@@ -284,7 +285,26 @@ def test_compiler_list_empty(no_compilers_yaml, working_env, compilers_dir):
\tmodules = ['gcc/7.7.7', 'foobar']
\toperating system = foobar
""",
        )
        ),
        (
            {
                "spec": "gcc@7.7.7",
                "prefix": "{prefix}",
                "modules": ["gcc/7.7.7", "foobar"],
                "extra_attributes": {"flags": {"fflags": "-ffree-form"}},
            },
            """gcc@7.7.7:
\tpaths:
\t\tcc = {compilers_dir}{sep}gcc-8{suffix}
\t\tcxx = {compilers_dir}{sep}g++-8{suffix}
\t\tf77 = {compilers_dir}{sep}gfortran-8{suffix}
\t\tfc = {compilers_dir}{sep}gfortran-8{suffix}
\tflags:
\t\tfflags = ['-ffree-form']
\tmodules = ['gcc/7.7.7', 'foobar']
\toperating system = debian6
""",
        ),
    ],
)
def test_compilers_shows_packages_yaml(

@@ -60,27 +60,6 @@
sep = os.sep


def setup_combined_multiple_env():
    env("create", "test1")
    test1 = ev.read("test1")
    with test1:
        add("zlib")
        test1.concretize()
        test1.write()

    env("create", "test2")
    test2 = ev.read("test2")
    with test2:
        add("libelf")
        test2.concretize()
        test2.write()

    env("create", "--include-concrete", "test1", "--include-concrete", "test2", "combined_env")
    combined = ev.read("combined_env")

    return test1, test2, combined

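For orientation, a hedged sketch (not part of this diff) of the manifest the helper above ends up creating for combined_env; the paths are illustrative, while the empty specs list and the include_concrete key match the assertions in the tests below:

spack:
  specs: []
  include_concrete:
  - /path/to/environments/test1
  - /path/to/environments/test2
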
@pytest.fixture()
def environment_from_manifest(tmp_path):
    """Returns a new environment named 'test' from the content of a manifest file."""
@@ -390,29 +369,6 @@ def test_env_install_single_spec(install_mockery, mock_fetch):
    assert e.specs_by_hash[e.concretized_order[0]].name == "cmake-client"


@pytest.mark.parametrize("unify", [True, False, "when_possible"])
def test_env_install_include_concrete_env(unify, install_mockery, mock_fetch):
    test1, test2, combined = setup_combined_multiple_env()

    combined.concretize()
    combined.write()

    combined.unify = unify

    with combined:
        install()

    test1_roots = test1.concretized_order
    test2_roots = test2.concretized_order
    combined_included_roots = combined.included_concretized_order

    for spec in combined.all_specs():
        assert spec.installed

    assert test1_roots == combined_included_roots[test1.path]
    assert test2_roots == combined_included_roots[test2.path]


def test_env_roots_marked_explicit(install_mockery, mock_fetch):
    install = SpackCommand("install")
    install("dependent-install")
@@ -601,41 +557,6 @@ def test_remove_command():
    assert "mpileaks@" not in find("--show-concretized")


def test_bad_remove_included_env():
    env("create", "test")
    test = ev.read("test")

    with test:
        add("mpileaks")

    test.concretize()
    test.write()

    env("create", "--include-concrete", "test", "combined_env")

    with pytest.raises(SpackCommandError):
        env("remove", "test")


def test_force_remove_included_env():
    env("create", "test")
    test = ev.read("test")

    with test:
        add("mpileaks")

    test.concretize()
    test.write()

    env("create", "--include-concrete", "test", "combined_env")

    rm_output = env("remove", "-f", "-y", "test")
    list_output = env("list")

    assert '"test" is being used by environment "combined_env"' in rm_output
    assert "test" not in list_output


def test_environment_status(capsys, tmpdir):
    with tmpdir.as_cwd():
        with capsys.disabled():

@@ -1715,275 +1636,6 @@ def test_env_without_view_install(tmpdir, mock_stage, mock_fetch, install_mocker
    check_mpileaks_and_deps_in_view(view_dir)


@pytest.mark.parametrize("env_name", [True, False])
def test_env_include_concrete_env_yaml(env_name):
    env("create", "test")
    test = ev.read("test")

    with test:
        add("mpileaks")
    test.concretize()
    test.write()

    environ = "test" if env_name else test.path

    env("create", "--include-concrete", environ, "combined_env")

    combined = ev.read("combined_env")
    combined_yaml = combined.manifest["spack"]

    assert "include_concrete" in combined_yaml
    assert test.path in combined_yaml["include_concrete"]


def test_env_bad_include_concrete_env():
    with pytest.raises(ev.SpackEnvironmentError):
        env("create", "--include-concrete", "nonexistant_env", "combined_env")


def test_env_not_concrete_include_concrete_env():
    env("create", "test")
    test = ev.read("test")

    with test:
        add("mpileaks")

    with pytest.raises(ev.SpackEnvironmentError):
        env("create", "--include-concrete", "test", "combined_env")


def test_env_multiple_include_concrete_envs():
    test1, test2, combined = setup_combined_multiple_env()

    combined_yaml = combined.manifest["spack"]

    assert test1.path in combined_yaml["include_concrete"][0]
    assert test2.path in combined_yaml["include_concrete"][1]

    # No local specs in the combined env
    assert not combined_yaml["specs"]


def test_env_include_concrete_envs_lockfile():
    test1, test2, combined = setup_combined_multiple_env()

    combined_yaml = combined.manifest["spack"]

    assert "include_concrete" in combined_yaml
    assert test1.path in combined_yaml["include_concrete"]

    with open(combined.lock_path) as f:
        lockfile_as_dict = combined._read_lockfile(f)

    assert set(
        entry["hash"] for entry in lockfile_as_dict["include_concrete"][test1.path]["roots"]
    ) == set(test1.specs_by_hash)
    assert set(
        entry["hash"] for entry in lockfile_as_dict["include_concrete"][test2.path]["roots"]
    ) == set(test2.specs_by_hash)


def test_env_include_concrete_add_env():
    test1, test2, combined = setup_combined_multiple_env()

    # create a new env & concretize it
    env("create", "new")
    new_env = ev.read("new")
    with new_env:
        add("mpileaks")

    new_env.concretize()
    new_env.write()

    # add new env to combined
    combined.included_concrete_envs.append(new_env.path)

    # assert things haven't changed yet
    with open(combined.lock_path) as f:
        lockfile_as_dict = combined._read_lockfile(f)

    assert new_env.path not in lockfile_as_dict["include_concrete"].keys()

    # concretize combined env with new env
    combined.concretize()
    combined.write()

    # assert changes
    with open(combined.lock_path) as f:
        lockfile_as_dict = combined._read_lockfile(f)

    assert new_env.path in lockfile_as_dict["include_concrete"].keys()


def test_env_include_concrete_remove_env():
    test1, test2, combined = setup_combined_multiple_env()

    # remove test2 from combined
    combined.included_concrete_envs = [test1.path]

    # assert test2 is still in combined's lockfile
    with open(combined.lock_path) as f:
        lockfile_as_dict = combined._read_lockfile(f)

    assert test2.path in lockfile_as_dict["include_concrete"].keys()

    # reconcretize combined
    combined.concretize()
    combined.write()

    # assert test2 is not in combined's lockfile
    with open(combined.lock_path) as f:
        lockfile_as_dict = combined._read_lockfile(f)

    assert test2.path not in lockfile_as_dict["include_concrete"].keys()


@pytest.mark.parametrize("unify", [True, False, "when_possible"])
def test_env_include_concrete_env_reconcretized(unify):
    """Double check to make sure that concrete_specs for the local specs is empty
    after reconcretizing.
    """
    _, _, combined = setup_combined_multiple_env()

    combined.unify = unify

    with open(combined.lock_path) as f:
        lockfile_as_dict = combined._read_lockfile(f)

    assert not lockfile_as_dict["roots"]
    assert not lockfile_as_dict["concrete_specs"]

    combined.concretize()
    combined.write()

    with open(combined.lock_path) as f:
        lockfile_as_dict = combined._read_lockfile(f)

    assert not lockfile_as_dict["roots"]
    assert not lockfile_as_dict["concrete_specs"]


def test_concretize_include_concrete_env():
    test1, _, combined = setup_combined_multiple_env()

    with test1:
        add("mpileaks")
    test1.concretize()
    test1.write()

    assert Spec("mpileaks") in test1.concretized_user_specs
    assert Spec("mpileaks") not in combined.included_concretized_user_specs[test1.path]

    combined.concretize()
    combined.write()

    assert Spec("mpileaks") in combined.included_concretized_user_specs[test1.path]


def test_concretize_nested_include_concrete_envs():
    env("create", "test1")
    test1 = ev.read("test1")
    with test1:
        add("zlib")
    test1.concretize()
    test1.write()

    env("create", "--include-concrete", "test1", "test2")
    test2 = ev.read("test2")
    with test2:
        add("libelf")
    test2.concretize()
    test2.write()

    env("create", "--include-concrete", "test2", "test3")
    test3 = ev.read("test3")

    with open(test3.lock_path) as f:
        lockfile_as_dict = test3._read_lockfile(f)

    assert test2.path in lockfile_as_dict["include_concrete"]
    assert test1.path in lockfile_as_dict["include_concrete"][test2.path]["include_concrete"]

    assert Spec("zlib") in test3.included_concretized_user_specs[test1.path]


def test_concretize_nested_included_concrete():
    """Confirm that nested included environments use specs concretized at
    environment creation time and change with reconcretization."""
    env("create", "test1")
    test1 = ev.read("test1")
    with test1:
        add("zlib")
    test1.concretize()
    test1.write()

    # test2 should include test1 with zlib
    env("create", "--include-concrete", "test1", "test2")
    test2 = ev.read("test2")
    with test2:
        add("libelf")
    test2.concretize()
    test2.write()

    assert Spec("zlib") in test2.included_concretized_user_specs[test1.path]

    # Modify/re-concretize test1 to replace zlib with mpileaks
    with test1:
        remove("zlib")
        add("mpileaks")
    test1.concretize()
    test1.write()

    # test3 should include the latest concretization of test1
    env("create", "--include-concrete", "test1", "test3")
    test3 = ev.read("test3")
    with test3:
        add("callpath")
    test3.concretize()
    test3.write()

    included_specs = test3.included_concretized_user_specs[test1.path]
    assert len(included_specs) == 1
    assert Spec("mpileaks") in included_specs

    # The last concretization of test4's included environments should have test2
    # with the original concretized test1 spec and test3 with the re-concretized
    # test1 spec.
    env("create", "--include-concrete", "test2", "--include-concrete", "test3", "test4")
    test4 = ev.read("test4")

    def included_included_spec(path1, path2):
        included_path1 = test4.included_concrete_spec_data[path1]
        included_path2 = included_path1["include_concrete"][path2]
        return included_path2["roots"][0]["spec"]

    included_test2_test1 = included_included_spec(test2.path, test1.path)
    assert "zlib" in included_test2_test1

    included_test3_test1 = included_included_spec(test3.path, test1.path)
    assert "mpileaks" in included_test3_test1

    # test4's concretized specs should reflect the original concretization.
    concrete_specs = [s for s, _ in test4.concretized_specs()]
    expected = [Spec(s) for s in ["libelf", "zlib", "mpileaks", "callpath"]]
    assert all(s in concrete_specs for s in expected)

    # Re-concretize test2 to reflect the new concretization of included test1
    # to remove zlib and write it out so it can be picked up by test4.
    # Re-concretize test4 to reflect the re-concretization of included test2
    # and ensure that its included specs are up-to-date
    test2.concretize()
    test2.write()
    test4.concretize()

    concrete_specs = [s for s, _ in test4.concretized_specs()]
    assert Spec("zlib") not in concrete_specs

    # Expecting mpileaks to appear only once
    expected = [Spec(s) for s in ["libelf", "mpileaks", "callpath"]]
    assert len(concrete_specs) == 3 and all(s in concrete_specs for s in expected)


def test_env_config_view_default(
    environment_from_manifest, mock_stage, mock_fetch, install_mockery
):

@@ -3638,7 +3290,7 @@ def test_create_and_activate_managed(tmp_path):
def test_create_and_activate_anonymous(tmp_path):
    with fs.working_dir(str(tmp_path)):
        env_dir = os.path.join(str(tmp_path), "foo")
        shell = env("activate", "--without-view", "--create", "--sh", env_dir)
        shell = env("activate", "--without-view", "--create", "--sh", "-d", env_dir)
        active_env_var = next(line for line in shell.splitlines() if ev.spack_env_var in line)
        assert str(env_dir) in active_env_var
        assert ev.is_env_dir(env_dir)

@@ -33,23 +33,21 @@ def check_output(ni):
        packages = extensions("-s", "packages", "python")
        installed = extensions("-s", "installed", "python")
        assert "==> python@2.7.11" in output
        assert "==> 3 extensions" in output
        assert "==> 2 extensions" in output
        assert "py-extension1" in output
        assert "py-extension2" in output
        assert "python-venv" in output

        assert "==> 3 extensions" in packages
        assert "==> 2 extensions" in packages
        assert "py-extension1" in packages
        assert "py-extension2" in packages
        assert "python-venv" in packages
        assert "installed" not in packages

        assert f"{ni if ni else 'None'} installed" in output
        assert f"{ni if ni else 'None'} installed" in installed
        assert ("%s installed" % (ni if ni else "None")) in output
        assert ("%s installed" % (ni if ni else "None")) in installed

    check_output(3)
    ext2.package.do_uninstall(force=True)
    check_output(2)
    ext2.package.do_uninstall(force=True)
    check_output(1)


def test_extensions_no_arguments(mock_packages):

@@ -349,87 +349,6 @@ def test_find_prefix_in_env(
    # Would throw error on regression


def test_find_specs_include_concrete_env(mutable_mock_env_path, config, mutable_mock_repo, tmpdir):
    path = tmpdir.join("spack.yaml")

    with tmpdir.as_cwd():
        with open(str(path), "w") as f:
            f.write(
                """\
spack:
  specs:
  - mpileaks
"""
            )
        env("create", "test1", "spack.yaml")

    test1 = ev.read("test1")
    test1.concretize()
    test1.write()

    with tmpdir.as_cwd():
        with open(str(path), "w") as f:
            f.write(
                """\
spack:
  specs:
  - libelf
"""
            )
        env("create", "test2", "spack.yaml")

    test2 = ev.read("test2")
    test2.concretize()
    test2.write()

    env("create", "--include-concrete", "test1", "--include-concrete", "test2", "combined_env")

    with ev.read("combined_env"):
        output = find()

    assert "No root specs" in output
    assert "Included specs" in output
    assert "mpileaks" in output
    assert "libelf" in output


def test_find_specs_nested_include_concrete_env(
    mutable_mock_env_path, config, mutable_mock_repo, tmpdir
):
    path = tmpdir.join("spack.yaml")

    with tmpdir.as_cwd():
        with open(str(path), "w") as f:
            f.write(
                """\
spack:
  specs:
  - mpileaks
"""
            )
        env("create", "test1", "spack.yaml")

    test1 = ev.read("test1")
    test1.concretize()
    test1.write()

    env("create", "--include-concrete", "test1", "test2")
    test2 = ev.read("test2")
    test2.add("libelf")
    test2.concretize()
    test2.write()

    env("create", "--include-concrete", "test2", "test3")

    with ev.read("test3"):
        output = find()

    assert "No root specs" in output
    assert "Included specs" in output
    assert "mpileaks" in output
    assert "libelf" in output


def test_find_loaded(database, working_env):
    output = find("--loaded", "--group")
    assert output == ""

|
@@ -384,18 +384,9 @@ def test_clang_flags():
|
||||
unsupported_flag_test("cxx17_flag", "clang@3.4")
|
||||
supported_flag_test("cxx17_flag", "-std=c++1z", "clang@3.5")
|
||||
supported_flag_test("cxx17_flag", "-std=c++17", "clang@5.0")
|
||||
unsupported_flag_test("cxx20_flag", "clang@4.0")
|
||||
supported_flag_test("cxx20_flag", "-std=c++2a", "clang@5.0")
|
||||
supported_flag_test("cxx20_flag", "-std=c++20", "clang@11.0")
|
||||
unsupported_flag_test("cxx23_flag", "clang@11.0")
|
||||
supported_flag_test("cxx23_flag", "-std=c++2b", "clang@12.0")
|
||||
supported_flag_test("cxx23_flag", "-std=c++23", "clang@17.0")
|
||||
supported_flag_test("c99_flag", "-std=c99", "clang@3.3")
|
||||
unsupported_flag_test("c11_flag", "clang@2.0")
|
||||
supported_flag_test("c11_flag", "-std=c11", "clang@6.1.0")
|
||||
unsupported_flag_test("c23_flag", "clang@8.0")
|
||||
supported_flag_test("c23_flag", "-std=c2x", "clang@9.0")
|
||||
supported_flag_test("c23_flag", "-std=c23", "clang@18.0")
|
||||
supported_flag_test("cc_pic_flag", "-fPIC", "clang@3.3")
|
||||
supported_flag_test("cxx_pic_flag", "-fPIC", "clang@3.3")
|
||||
supported_flag_test("f77_pic_flag", "-fPIC", "clang@3.3")
|
||||
@@ -952,17 +943,3 @@ def test_detection_requires_c_compiler(detected_versions, expected_length):
|
||||
"""
|
||||
result = spack.compilers.make_compiler_list(detected_versions)
|
||||
assert len(result) == expected_length
|
||||
|
||||
|
||||
def test_compiler_environment(working_env):
|
||||
"""Test whether environment modifications from compilers are applied in compiler_environment"""
|
||||
os.environ.pop("TEST", None)
|
||||
compiler = Compiler(
|
||||
"gcc@=13.2.0",
|
||||
operating_system="ubuntu20.04",
|
||||
target="x86_64",
|
||||
paths=["/test/bin/gcc", "/test/bin/g++"],
|
||||
environment={"set": {"TEST": "yes"}},
|
||||
)
|
||||
with compiler.compiler_environment():
|
||||
assert os.environ["TEST"] == "yes"
|
||||
|
@@ -1244,11 +1244,10 @@ def test_variant_not_default(self):

    @pytest.mark.regression("20055")
    @pytest.mark.only_clingo("Use case not supported by the original concretizer")
    def test_custom_compiler_version(self, mutable_config, compiler_factory, monkeypatch):
    def test_custom_compiler_version(self, mutable_config, compiler_factory):
        mutable_config.set(
            "compilers", [compiler_factory(spec="gcc@10foo", operating_system="redhat6")]
        )
        monkeypatch.setattr(spack.compiler.Compiler, "real_version", "10.2.1")
        s = Spec("a %gcc@10foo os=redhat6").concretized()
        assert "%gcc@10foo" in s

@@ -2449,103 +2448,6 @@ def _default_libc(self):
        s = Spec("a %gcc@=13.2.0").concretized()
        assert s.satisfies("%gcc@13.2.0")

    @pytest.mark.regression("43267")
    def test_spec_with_build_dep_from_json(self, tmp_path):
        """Tests that we can correctly concretize a spec, when we express its dependency as a
        concrete spec to be read from JSON.

        The bug was triggered by missing virtuals on edges that were trimmed from pure build
        dependencies.
        """
        build_dep = Spec("dttop").concretized()
        json_file = tmp_path / "build.json"
        json_file.write_text(build_dep.to_json())
        s = Spec(f"dtuse ^{str(json_file)}").concretized()
        assert s["dttop"].dag_hash() == build_dep.dag_hash()

    @pytest.mark.regression("44040")
    @pytest.mark.only_clingo("Use case not supported by the original concretizer")
    def test_exclude_specs_from_reuse(self, monkeypatch):
        """Tests that we can exclude a spec from reuse when concretizing, and that the spec
        is not added back to the solve as a dependency of another reusable spec.

        The expected spec is:

        o callpath@1.0
        |\
        | |\
        o | | mpich@3.0.4
        |/ /
        | o dyninst@8.2
        |/|
        | |\
        | | o libdwarf@20130729
        | |/|
        |/|/
        | o libelf@0.8.13
        |/
        o glibc@2.31
        """
        # Prepare a mock mirror that returns an old version of dyninst
        request_str = "callpath ^mpich"
        reused = Spec(f"{request_str} ^dyninst@8.1.1").concretized()
        monkeypatch.setattr(spack.solver.asp, "_specs_from_mirror", lambda: [reused])

        # Exclude dyninst from reuse, so we expect that the old version is not taken into account
        with spack.config.override(
            "concretizer:reuse", {"from": [{"type": "buildcache", "exclude": ["dyninst"]}]}
        ):
            result = Spec(request_str).concretized()

        assert result.dag_hash() != reused.dag_hash()
        assert result["mpich"].dag_hash() == reused["mpich"].dag_hash()
        assert result["dyninst"].dag_hash() != reused["dyninst"].dag_hash()
        assert result["dyninst"].satisfies("@=8.2")
        for dep in result["dyninst"].traverse(root=False):
            assert dep.dag_hash() == reused[dep.name].dag_hash()

    @pytest.mark.regression("44091")
    @pytest.mark.parametrize(
        "included_externals",
        [
            ["deprecated-versions"],
            # Try the empty list, to ensure that in that case everything will be included
            # since filtering should happen only when the list is non-empty
            [],
        ],
    )
    @pytest.mark.only_clingo("Use case not supported by the original concretizer")
    def test_include_specs_from_externals_and_libcs(
        self, included_externals, mutable_config, tmp_path
    ):
        """Tests that when we include specs from externals, we always include libcs."""
        mutable_config.set(
            "packages",
            {
                "deprecated-versions": {
                    "externals": [{"spec": "deprecated-versions@1.1.0", "prefix": str(tmp_path)}]
                }
            },
        )
        request_str = "deprecated-client"

        # When using the external the version is selected even if deprecated
        with spack.config.override(
            "concretizer:reuse", {"from": [{"type": "external", "include": included_externals}]}
        ):
            result = Spec(request_str).concretized()

        assert result["deprecated-versions"].satisfies("@1.1.0")

        # When excluding it, we pick the non-deprecated version
        with spack.config.override(
            "concretizer:reuse",
            {"from": [{"type": "external", "exclude": ["deprecated-versions"]}]},
        ):
            result = Spec(request_str).concretized()

        assert result["deprecated-versions"].satisfies("@1.0.0")

@pytest.fixture()
def duplicates_test_repository():

@@ -2921,78 +2823,3 @@ def test_concretization_version_order():
        Version("develop"),  # likely development version
        Version("2.0"),  # deprecated
    ]


@pytest.mark.only_clingo("Original concretizer cannot reuse specs")
@pytest.mark.parametrize(
    "roots,reuse_yaml,expected,not_expected,expected_length",
    [
        (
            ["mpileaks"],
            {"roots": True, "include": ["^mpich"]},
            ["^mpich"],
            ["^mpich2", "^zmpi"],
            2,
        ),
        (
            ["mpileaks"],
            {"roots": True, "include": ["externaltest"]},
            ["externaltest"],
            ["^mpich", "^mpich2", "^zmpi"],
            1,
        ),
    ],
)
@pytest.mark.usefixtures("database", "mock_store")
@pytest.mark.not_on_windows("Expected length is different on Windows")
def test_filtering_reused_specs(
    roots, reuse_yaml, expected, not_expected, expected_length, mutable_config, monkeypatch
):
    """Tests that we can select which specs are to be reused, using constraints as filters"""
    # Assume all specs have a runtime dependency
    monkeypatch.setattr(spack.solver.asp, "_has_runtime_dependencies", lambda x: True)
    mutable_config.set("concretizer:reuse", reuse_yaml)
    selector = spack.solver.asp.ReusableSpecsSelector(mutable_config)
    specs = selector.reusable_specs(roots)

    assert len(specs) == expected_length

    for constraint in expected:
        assert all(x.satisfies(constraint) for x in specs)

    for constraint in not_expected:
        assert all(not x.satisfies(constraint) for x in specs)


@pytest.mark.usefixtures("database", "mock_store")
@pytest.mark.parametrize(
    "reuse_yaml,expected_length",
    [({"from": [{"type": "local"}]}, 17), ({"from": [{"type": "buildcache"}]}, 0)],
)
@pytest.mark.not_on_windows("Expected length is different on Windows")
def test_selecting_reused_sources(reuse_yaml, expected_length, mutable_config, monkeypatch):
    """Tests that we can turn on/off sources of reusable specs"""
    # Assume all specs have a runtime dependency
    monkeypatch.setattr(spack.solver.asp, "_has_runtime_dependencies", lambda x: True)
    mutable_config.set("concretizer:reuse", reuse_yaml)
    selector = spack.solver.asp.ReusableSpecsSelector(mutable_config)
    specs = selector.reusable_specs(["mpileaks"])
    assert len(specs) == expected_length


@pytest.mark.parametrize(
    "specs,include,exclude,expected",
    [
        # "foo" discarded by include rules (everything compiled with GCC)
        (["cmake@3.27.9 %gcc", "foo %clang"], ["%gcc"], [], ["cmake@3.27.9 %gcc"]),
        # "cmake" discarded by exclude rules (everything compiled with GCC but cmake)
        (["cmake@3.27.9 %gcc", "foo %gcc"], ["%gcc"], ["cmake"], ["foo %gcc"]),
    ],
)
def test_spec_filters(specs, include, exclude, expected):
    specs = [Spec(x) for x in specs]
    expected = [Spec(x) for x in expected]
    f = spack.solver.asp.SpecFilter(
        factory=lambda: specs, is_usable=lambda x: True, include=include, exclude=exclude
    )
    assert f.selected_specs() == expected

@@ -763,28 +763,6 @@ def mutable_empty_config(tmpdir_factory, configuration_dir):
    yield cfg


# From https://github.com/pytest-dev/pytest/issues/363#issuecomment-1335631998
# Current suggested implementation from issue compatible with pytest >= 6.2
# this may be subject to change as new versions of Pytest are released
# and update the suggested solution
@pytest.fixture(scope="session")
def monkeypatch_session():
    with pytest.MonkeyPatch.context() as monkeypatch:
        yield monkeypatch


@pytest.fixture(scope="session", autouse=True)
def mock_wsdk_externals(monkeypatch_session):
    """Skip the check for required external packages on Windows during testing.
    Note: in general this covers all tests, however any session-scoped fixture
    involving concretization should include this fixture explicitly.
    """
    monkeypatch_session.setattr(
        spack.bootstrap.core, "ensure_winsdk_external_or_raise", _return_none
    )


@pytest.fixture(scope="function")
def concretize_scope(mutable_config, tmpdir):
    """Adds a scope for concretization preferences"""

@@ -864,13 +842,7 @@ def _store_dir_and_cache(tmpdir_factory):


@pytest.fixture(scope="session")
def mock_store(
    tmpdir_factory,
    mock_wsdk_externals,
    mock_repo_path,
    mock_configuration_scopes,
    _store_dir_and_cache,
):
def mock_store(tmpdir_factory, mock_repo_path, mock_configuration_scopes, _store_dir_and_cache):
    """Creates a read-only mock database with some packages installed; note
    that the ref count for dyninst here will be 3, as it's recycled
    across each install.
@@ -1694,7 +1666,7 @@ def mock_executable(tmp_path):
    """Factory to create a mock executable in a temporary directory that
    outputs a custom string when run.
    """
    shebang = "#!/bin/sh\n" if sys.platform != "win32" else "@ECHO OFF\n"
    shebang = "#!/bin/sh\n" if sys.platform != "win32" else "@ECHO OFF"

    def _factory(name, output, subdir=("bin",)):
        executable_dir = tmp_path.joinpath(*subdir)

@@ -17,7 +17,7 @@ def test_command(default_config, container_config_dir, capsys):
    with capsys.disabled():
        with fs.working_dir(container_config_dir):
            output = containerize()
    assert "FROM spack/ubuntu-jammy" in output
    assert "FROM spack/ubuntu-bionic" in output


def test_listing_possible_os():

@@ -14,7 +14,7 @@ def minimal_configuration():
            "specs": ["gromacs", "mpich", "fftw precision=float"],
            "container": {
                "format": "docker",
                "images": {"os": "ubuntu:22.04", "spack": "develop"},
                "images": {"os": "ubuntu:18.04", "spack": "develop"},
            },
        }
    }

@@ -21,11 +21,11 @@ def test_build_and_run_images(minimal_configuration):

    # Test the output of run property
    run = writer.run
    assert run.image == "ubuntu:22.04"
    assert run.image == "ubuntu:18.04"

    # Test the output of the build property
    build = writer.build
    assert build.image == "spack/ubuntu-jammy:develop"
    assert build.image == "spack/ubuntu-bionic:develop"


def test_packages(minimal_configuration):

@@ -12,8 +12,8 @@
@pytest.mark.parametrize(
    "image,spack_version,expected",
    [
        ("ubuntu:22.04", "develop", ("spack/ubuntu-jammy", "develop")),
        ("ubuntu:22.04", "0.14.0", ("spack/ubuntu-jammy", "0.14.0")),
        ("ubuntu:18.04", "develop", ("spack/ubuntu-bionic", "develop")),
        ("ubuntu:18.04", "0.14.0", ("spack/ubuntu-bionic", "0.14.0")),
    ],
)
def test_build_info(image, spack_version, expected):
@@ -21,7 +21,7 @@ def test_build_info(image, spack_version, expected):
    assert output == expected


@pytest.mark.parametrize("image", ["ubuntu:22.04"])
@pytest.mark.parametrize("image", ["ubuntu:18.04"])
def test_package_info(image):
    pkg_manager = spack.container.images.os_package_manager_for(image)
    update, install, clean = spack.container.images.commands_for(pkg_manager)

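A hedged usage sketch of the helpers exercised above (the image name follows the parametrization shown):

import spack.container.images

pkg_manager = spack.container.images.os_package_manager_for("ubuntu:22.04")
# e.g. the apt update/install/clean command strings for that base image
update, install, clean = spack.container.images.commands_for(pkg_manager)
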
@@ -415,7 +415,7 @@ def mock_verify_locations(self, cafile, capath, cadata):

    assert mock_cert == spack.config.get("config:ssl_certs", None)

    ssl_context = spack.util.web.ssl_create_default_context()
    ssl_context = spack.util.web.urllib_ssl_cert_handler()
    assert ssl_context.verify_mode == ssl.CERT_REQUIRED

@@ -12,8 +12,6 @@
from gzip import GzipFile
from typing import Callable, Dict, Tuple

from llnl.util.symlink import readlink


class ChecksumWriter(io.BufferedIOBase):
    """Checksum writer computes a checksum while writing to a file."""
@@ -195,14 +193,12 @@ def reproducible_tarfile_from_prefix(
            file_info = tarfile.TarInfo(path_to_name(entry.path))

            if entry.is_symlink():
                # strip off long path reg prefix on Windows
                link_dest = readlink(entry.path)
                file_info.linkname = link_dest
                file_info.type = tarfile.SYMTYPE
                file_info.linkname = os.readlink(entry.path)
                # According to POSIX: "the value of the file mode bits returned in the
                # st_mode field of the stat structure is unspecified." So we set it to
                # something sensible without lstat'ing the link.
                file_info.mode = 0o755
                file_info.type = tarfile.SYMTYPE
                tar.addfile(file_info)

            elif entry.is_file(follow_symlinks=False):

@@ -39,20 +39,6 @@ def add_default_arg(self, *args):
        """Add default argument(s) to the command."""
        self.exe.extend(args)

    def with_default_args(self, *args):
        """Same as add_default_arg, but returns a copy of the executable."""
        new = self.copy()
        new.add_default_arg(*args)
        return new

    def copy(self):
        """Return a copy of this Executable."""
        new = Executable(self.exe[0])
        new.exe[:] = self.exe
        new.default_env.update(self.default_env)
        new.default_envmod.extend(self.default_envmod)
        return new

    def add_default_env(self, key, value):
        """Set an environment variable when the command is run.

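A hedged usage sketch of the with_default_args helper shown above (the repository path is illustrative):

from spack.util.executable import which

git = which("git", required=True)
git_repo = git.with_default_args("-C", "/path/to/repo")  # copy with baked-in args
git_repo("log", "--oneline")  # runs: git -C /path/to/repo log --oneline
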
@@ -8,8 +8,6 @@
import os
import re

import llnl.util.filesystem

import spack.error
import spack.paths
import spack.util.executable
@@ -62,7 +60,7 @@ def init(gnupghome=None, force=False):

    # Set the executable objects for "gpg" and "gpgconf"
    with spack.bootstrap.ensure_bootstrap_configuration():
        spack.bootstrap.ensure_gpg_in_path_or_raise()
        spack.bootstrap.ensure_core_dependencies()
        GPG, GPGCONF = _gpg(), _gpgconf()

    GPG.add_default_env("GNUPGHOME", GNUPGHOME)
@@ -387,7 +385,7 @@ def _socket_dir(gpgconf):
    os.mkdir(var_run_user)
    os.chmod(var_run_user, 0o777)

    user_dir = os.path.join(var_run_user, str(llnl.util.filesystem.getuid()))
    user_dir = os.path.join(var_run_user, str(os.getuid()))

    if not os.path.exists(user_dir):
        os.mkdir(user_dir)

@@ -22,7 +22,7 @@ def _libc_from_ldd(ldd: str) -> Optional["spack.spec.Spec"]:
    except Exception:
        return None

    if not re.search(r"\b(?:gnu|glibc|arm)\b", stdout, re.IGNORECASE):
    if not re.search("gnu|glibc", stdout, re.IGNORECASE):
        return None

    version_str = re.match(r".+\(.+\) (.+)", stdout)
@@ -75,8 +75,8 @@ def libc_from_dynamic_linker(dynamic_linker: str) -> Optional["spack.spec.Spec"]
        return spec
    except Exception:
        return None
    elif re.search(r"\b(?:gnu|glibc|arm)\b", stdout, re.IGNORECASE):
        # output is like "ld.so (...) stable release version 2.33."
    elif re.search("gnu|glibc", stdout, re.IGNORECASE):
        # output is like "ld.so (...) stable release version 2.33." write a regex for it
        match = re.search(r"version (\d+\.\d+(?:\.\d+)?)", stdout)
        if not match:
            return None
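The regex change above is worth unpacking: \b(?:gnu|glibc|arm)\b anchors on word boundaries, so it cannot fire on substrings such as "gnutls" the way a bare gnu|glibc search does. A quick demonstration with a made-up ldd banner:

    import re

    banner = "ld.so (GNU libc) stable release version 2.33."

    # Bare alternation matches anywhere, including inside longer tokens:
    assert re.search("gnu|glibc", "gnutls-3.7", re.IGNORECASE)

    # Word-boundary alternation only matches whole tokens:
    assert not re.search(r"\b(?:gnu|glibc|arm)\b", "gnutls-3.7", re.IGNORECASE)
    assert re.search(r"\b(?:gnu|glibc|arm)\b", banner, re.IGNORECASE)

    # Version extraction as in the diff: pull "2.33" out of the banner.
    match = re.search(r"version (\d+\.\d+(?:\.\d+)?)", banner)
    assert match and match.group(1) == "2.33"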
@@ -76,7 +76,14 @@ def is_path_instead_of_url(path_or_url):
    """Historically some config files and spack commands used paths
    where urls should be used. This utility can be used to validate
    and promote paths to urls."""
    return not validate_scheme(urllib.parse.urlparse(path_or_url).scheme)
    scheme = urllib.parse.urlparse(path_or_url).scheme

    # On non-Windows, no scheme means it's likely a path
    if not sys.platform == "win32":
        return not scheme

    # On Windows, we may have drive letters.
    return "A" <= scheme <= "Z"


def format(parsed_url):
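The Windows branch above leans on a urllib.parse quirk: in "C:/Users/me" the drive letter parses as a URL scheme. Note that urlparse lowercases the scheme, so an uppercase range check can be surprising; the sketch below, an illustration rather than Spack's implementation, uses a length-and-alpha test instead:

    import urllib.parse

    def looks_like_drive_letter(path_or_url: str) -> bool:
        # urlparse("C:/Users/me").scheme == "c": the parser lowercases what
        # precedes the first ":", and a one-letter alphabetic scheme is far
        # more likely a Windows drive than a real URL scheme.
        scheme = urllib.parse.urlparse(path_or_url).scheme
        return len(scheme) == 1 and scheme.isalpha()

    assert looks_like_drive_letter("C:/Users/me/stage")
    assert not looks_like_drive_letter("https://mirror.spack.io")
    assert not looks_like_drive_letter("relative/path")  # empty scheme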
@@ -12,13 +12,12 @@
import re
import shutil
import ssl
import stat
import sys
import traceback
import urllib.parse
from html.parser import HTMLParser
from pathlib import Path, PurePosixPath
from typing import IO, Dict, Iterable, List, Optional, Set, Tuple, Union
from typing import IO, Dict, Iterable, List, Optional, Set, Union
from urllib.error import HTTPError, URLError
from urllib.request import HTTPSHandler, Request, build_opener

@@ -31,7 +30,7 @@
import spack.util.path
import spack.util.url as url_util

from .executable import CommandNotFoundError, Executable, which
from .executable import CommandNotFoundError, which
from .gcs import GCSBlob, GCSBucket, GCSHandler
from .s3 import UrllibS3Handler, get_s3_session
@@ -61,56 +60,64 @@ def http_error_default(self, req, fp, code, msg, hdrs):
        raise DetailedHTTPError(req, code, msg, hdrs, fp)


def custom_ssl_certs() -> Optional[Tuple[bool, str]]:
    """Returns a tuple (is_file, path) if custom SSL certifates are configured and valid."""
    ssl_certs = spack.config.get("config:ssl_certs")
    if not ssl_certs:
        return None
    path = spack.util.path.substitute_path_variables(ssl_certs)
    if not os.path.isabs(path):
        tty.debug(f"certs: relative path not allowed: {path}")
        return None
    try:
        st = os.stat(path)
    except OSError as e:
        tty.debug(f"certs: error checking path {path}: {e}")
        return None

    file_type = stat.S_IFMT(st.st_mode)

    if file_type != stat.S_IFREG and file_type != stat.S_IFDIR:
        tty.debug(f"certs: not a file or directory: {path}")
        return None

    return (file_type == stat.S_IFREG, path)
dbg_msg_no_ssl_cert_config = (
    "config:ssl_certs not in configuration. "
    "Default cert configuation and environment will be used."
)


def ssl_create_default_context():
    """Create the default SSL context for urllib with custom certificates if configured."""
    certs = custom_ssl_certs()
    if certs is None:
        return ssl.create_default_context()
    is_file, path = certs
    if is_file:
        tty.debug(f"urllib: certs: using cafile {path}")
        return ssl.create_default_context(cafile=path)
def urllib_ssl_cert_handler():
    """context for configuring ssl during urllib HTTPS operations"""
    custom_cert_var = spack.config.get("config:ssl_certs")
    if custom_cert_var:
        # custom certs will be a location, so expand env variables, paths etc
        certs = spack.util.path.canonicalize_path(custom_cert_var)
        tty.debug("URLLIB: Looking for custom SSL certs at {}".format(certs))
        if os.path.isfile(certs):
            tty.debug("URLLIB: Custom SSL certs file found at {}".format(certs))
            return ssl.create_default_context(cafile=certs)
        elif os.path.isdir(certs):
            tty.debug("URLLIB: Custom SSL certs directory found at {}".format(certs))
            return ssl.create_default_context(capath=certs)
        else:
            tty.debug("URLLIB: Custom SSL certs not found")
            return ssl.create_default_context()
    else:
        tty.debug(f"urllib: certs: using capath {path}")
        return ssl.create_default_context(capath=path)
        tty.debug(dbg_msg_no_ssl_cert_config)
        return ssl.create_default_context()


def set_curl_env_for_ssl_certs(curl: Executable) -> None:
    """configure curl to use custom certs in a file at runtime. See:
    https://curl.se/docs/sslcerts.html item 4"""
    certs = custom_ssl_certs()
    if certs is None:
        return
    is_file, path = certs
    if not is_file:
        tty.debug(f"curl: {path} is not a file: default certs will be used.")
        return
    tty.debug(f"curl: using CURL_CA_BUNDLE={path}")
    curl.add_default_env("CURL_CA_BUNDLE", path)
# curl requires different strategies for custom certs at runtime depending on if certs
# are stored as a file or a directory
def append_curl_env_for_ssl_certs(curl):
    """
    configure curl to use custom certs in a file at run time
    see: https://curl.se/docs/sslcerts.html item 4
    """
    custom_cert_var = spack.config.get("config:ssl_certs")
    if custom_cert_var:
        # custom certs will be a location, so expand env variables, paths etc
        certs = spack.util.path.canonicalize_path(custom_cert_var)
        tty.debug("CURL: Looking for custom SSL certs file at {}".format(certs))
        if os.path.isfile(certs):
            tty.debug(
                "CURL: Configuring curl to use custom"
                " certs from {} by setting "
                "CURL_CA_BUNDLE".format(certs)
            )
            curl.add_default_env("CURL_CA_BUNDLE", certs)
        elif os.path.isdir(certs):
            tty.warn(
                "CURL config:ssl_certs"
                " is a directory but cURL only supports files. Default certs will be used instead."
            )
        else:
            tty.debug(
                "CURL config:ssl_certs "
                "resolves to {}. This is not a file so default certs will be used.".format(certs)
            )
    else:
        tty.debug(dbg_msg_no_ssl_cert_config)


def _urlopen():
@@ -120,7 +127,7 @@ def _urlopen():

    # One opener with HTTPS ssl enabled
    with_ssl = build_opener(
        s3, gcs, HTTPSHandler(context=ssl_create_default_context()), error_handler
        s3, gcs, HTTPSHandler(context=urllib_ssl_cert_handler()), error_handler
    )

    # One opener with HTTPS ssl disabled
@@ -341,7 +348,7 @@ def _curl(curl=None):
    except CommandNotFoundError as exc:
        tty.error(str(exc))
        raise spack.error.FetchError("Missing required curl fetch method")
    set_curl_env_for_ssl_certs(curl)
    append_curl_env_for_ssl_certs(curl)
    return curl
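Both versions of the certificate plumbing above reduce to the same standard-library knobs: ssl.create_default_context(cafile=...) for a PEM bundle file, capath=... for a hashed certificate directory, and, for curl, only a bundle file passed through CURL_CA_BUNDLE (the diff's own warning notes curl accepts no directory here). A condensed sketch of that decision, with a hypothetical certificate path:

    import os
    import ssl

    def context_for_certs(certs: str) -> ssl.SSLContext:
        # File -> cafile (single PEM bundle); directory -> capath (OpenSSL
        # expects c_rehash-style hashed names inside); otherwise defaults.
        if os.path.isfile(certs):
            return ssl.create_default_context(cafile=certs)
        if os.path.isdir(certs):
            return ssl.create_default_context(capath=certs)
        return ssl.create_default_context()

    # curl has no capath analogue in this scheme; only a file works:
    env = {"CURL_CA_BUNDLE": "/etc/ssl/certs/ca-bundle.crt"}  # assumed path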
@@ -78,14 +78,24 @@ default:
        when: never
      - if: $SPACK_CI_ENABLE_STACKS =~ /.+/ && $SPACK_CI_STACK_NAME !~ $SPACK_CI_ENABLE_STACKS
        when: never
      - if: $CI_COMMIT_REF_NAME == "develop" || $CI_COMMIT_REF_NAME =~ /^releases\/v.*/
        # Pipelines on develop/release branches only rebuild what is missing from the mirror
      - if: $CI_COMMIT_REF_NAME == "develop"
        # Pipelines on develop only rebuild what is missing from the mirror
        when: always
        variables:
          SPACK_PIPELINE_TYPE: "spack_protected_branch"
          SPACK_COPY_BUILDCACHE: "${PROTECTED_MIRROR_PUSH_DOMAIN}/${CI_COMMIT_REF_NAME}"
          SPACK_REQUIRE_SIGNING: "True"
          OIDC_TOKEN_AUDIENCE: "protected_binary_mirror"
      - if: $CI_COMMIT_REF_NAME =~ /^releases\/v.*/
        # Pipelines on release branches always rebuild everything
        when: always
        variables:
          SPACK_PIPELINE_TYPE: "spack_protected_branch"
          SPACK_COPY_BUILDCACHE: "${PROTECTED_MIRROR_PUSH_DOMAIN}/${CI_COMMIT_REF_NAME}"
          SPACK_PRUNE_UNTOUCHED: "False"
          SPACK_PRUNE_UP_TO_DATE: "False"
          SPACK_REQUIRE_SIGNING: "True"
          OIDC_TOKEN_AUDIENCE: "protected_binary_mirror"
      - if: $CI_COMMIT_TAG =~ /^develop-[\d]{4}-[\d]{2}-[\d]{2}$/ || $CI_COMMIT_TAG =~ /^v.*/
        # Pipelines on tags (release or dev snapshots) only copy binaries from one mirror to another
        when: always
@@ -160,9 +170,6 @@ default:
  artifacts:
    paths:
      - "${CI_PROJECT_DIR}/jobs_scratch_dir"
      - "${CI_PROJECT_DIR}/tmp/_user_cache/cache/patches"
      - "${CI_PROJECT_DIR}/tmp/_user_cache/cache/providers"
      - "${CI_PROJECT_DIR}/tmp/_user_cache/cache/tags"
  variables:
    KUBERNETES_CPU_REQUEST: 4000m
    KUBERNETES_MEMORY_REQUEST: 16G
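The rules hunk above splits one develop-or-releases rule into two because GitLab evaluates rules top to bottom and stops at the first match; only a dedicated releases rule can attach its own variables (here, disabling both pruning modes so release pipelines rebuild everything). A small sketch of that first-match behavior, as a simplified model rather than GitLab itself:

    from typing import Dict, List, Optional, Tuple

    Rule = Tuple[str, Dict[str, str]]  # (ref pattern, variables to set)

    def first_match(ref: str, rules: List[Rule]) -> Optional[Dict[str, str]]:
        # Simplified model of GitLab rule evaluation: the first hit wins.
        for pattern, variables in rules:
            if ref == pattern or (
                pattern.endswith("*") and ref.startswith(pattern[:-1])
            ):
                return variables
        return None

    rules: List[Rule] = [
        ("develop", {"SPACK_PIPELINE_TYPE": "spack_protected_branch"}),
        ("releases/v*", {
            "SPACK_PIPELINE_TYPE": "spack_protected_branch",
            "SPACK_PRUNE_UNTOUCHED": "False",
            "SPACK_PRUNE_UP_TO_DATE": "False",
        }),
    ]

    assert first_match("releases/v0.21", rules)["SPACK_PRUNE_UNTOUCHED"] == "False"
    assert "SPACK_PRUNE_UNTOUCHED" not in first_match("develop", rules)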
@@ -13,7 +13,7 @@ ci:
    before_script-:
    - - cat /proc/loadavg || true
      - cat /proc/meminfo | grep 'MemTotal\|MemFree' || true
    - - touch ${SPACK_USER_CACHE_PATH}/cache/*/* # bump mtime of cache so it is not invalidated
    - - time spack list --count # ensure that spack's cache is populated
    - - spack env activate --without-view ${SPACK_CONCRETE_ENV_DIR}
      - spack compiler list
      - if [ -n "$SPACK_BUILD_JOBS" ]; then spack config add "config:build_jobs:$SPACK_BUILD_JOBS"; fi
@@ -119,7 +119,7 @@ ci:
      # Disable local configs to avoid issues on shell runners
      SPACK_DISABLE_LOCAL_CONFIG: "1"
    before_script:
    - - export SPACK_USER_CACHE_PATH="${CI_PROJECT_DIR}/tmp/_user_cache/"
    - - export SPACK_USER_CACHE_PATH="${CI_PROJECT_DIR}/_user_cache/"
      - uname -a || true
      - grep -E "vendor|model name" /proc/cpuinfo 2>/dev/null | sort -u || head -n10 /proc/cpuinfo 2>/dev/null || true
      - nproc || true
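The "- -" items above are not a typo: these CI configs nest script lines as lists of lists, which the pipeline generator flattens into the flat command list GitLab expects. A minimal flattening sketch, assuming a single level of nesting and not reflecting Spack's actual generator code:

    from typing import List, Union

    Lines = List[Union[str, List[str]]]

    def flatten_script(script: Lines) -> List[str]:
        # One level of nesting covers the configs shown above.
        flat: List[str] = []
        for item in script:
            if isinstance(item, list):
                flat.extend(item)
            else:
                flat.append(item)
        return flat

    before_script: Lines = [
        ["cat /proc/loadavg || true", "spack env activate --without-view ."],
        "spack compiler list",
    ]
    assert flatten_script(before_script)[-1] == "spack compiler list"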
@@ -241,6 +241,7 @@ ci:
    - mvapich2
    - netlib-scalapack
    - omega-h
    - openblas
    - openjpeg
    - openmpi
    - openpmd-api
@@ -393,6 +394,7 @@ ci:
    - ncurses
    - ninja
    - numactl
    - openblas
    - openjdk
    - openssh
    - openssl
@@ -10,7 +10,6 @@ set -e
# The best solution would be to have the compilers hash (or packages contents) be part of the
# individual packages hashes. I don't see this at the moment.
# Set to the latest tag including a recent oneapi compiler.
# NOTE: If we update this spack version in the future make sure the compiler version also updates.
spack_intel_compiler_commit="develop-2023-08-06"

set_pcluster_defaults() {
@@ -24,9 +23,10 @@ set_pcluster_defaults() {

setup_spack() {
    spack compiler add --scope site
    # Do not add autotools/buildtools packages. These versions need to be managed by spack or it will
    spack external find --scope site
    # Remove all autotools/buildtools packages. These versions need to be managed by spack or it will
    # eventually end up in a version mismatch (e.g. when compiling gmp).
    spack external find --scope site --tag core-packages
    spack tags build-tools | xargs -I {} spack config --scope site rm packages:{}
}

patch_compilers_yaml() {
@@ -99,7 +99,7 @@ install_compilers() {
    # The compilers needs to be in the same install tree as the rest of the software such that the path
    # relocation works correctly. This holds the danger that this part will fail when the current spack gets
    # incompatible with the one in $spack_intel_compiler_commit. Therefore, we make intel installations optional
    # in package.yaml files and add a fallback `%gcc` version for each application.
    # in package.yaml files.
    if [ "x86_64" == "$(arch)" ]; then
        (
            CURRENT_SPACK_ROOT=${SPACK_ROOT}
@@ -19,10 +19,7 @@ packages:
  llvm:
    variants: ~lldb
  mpas-model:
    require:
    - one_of:
      - "precision=single make_target=llvm %arm ^parallelio+pnetcdf"
      - "precision=single %gcc ^parallelio+pnetcdf"
    require: "precision=single make_target=llvm %arm ^parallelio+pnetcdf"
  mpich:
    require: "mpich pmi=pmi2 device=ch4 netmod=ofi +slurm"
  nvhpc:
Some files were not shown because too many files have changed in this diff.