Compare commits

1 Commit

Author: Wouter Deconinck
SHA1: 23197b78f9
Message: feat: spack graph --mermaid
Date: 2024-05-04 16:13:12 -07:00
703 changed files with 4566 additions and 10496 deletions
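The commit being compared adds a Mermaid output mode to ``spack graph``; the flag name comes from the commit message above, but the rendered output below is an assumption, not taken from this diff:

.. code-block:: console

   $ spack graph --mermaid zlib
   flowchart TD
     "zlib@1.3.1" --> "gcc-runtime@13.2.0"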

View File

@@ -28,7 +28,7 @@ jobs:
run:
shell: ${{ matrix.system.shell }}
steps:
- uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
with:
python-version: ${{inputs.python_version}}
@@ -61,7 +61,7 @@ jobs:
./share/spack/qa/validate_last_exit.ps1
spack -d audit externals
./share/spack/qa/validate_last_exit.ps1
- uses: codecov/codecov-action@125fc84a9a348dbcf27191600683ec096ec9021c
- uses: codecov/codecov-action@84508663e988701840491b86de86b666e8a86bed
if: ${{ inputs.with_coverage == 'true' }}
with:
flags: unittests,audits

View File

@@ -1,8 +1,7 @@
#!/bin/bash
set -e
set -ex
source share/spack/setup-env.sh
$PYTHON bin/spack bootstrap disable github-actions-v0.4
$PYTHON bin/spack bootstrap disable spack-install
$PYTHON bin/spack $SPACK_FLAGS solve zlib
$PYTHON bin/spack -d solve zlib
tree $BOOTSTRAP/store
exit 0
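For context, a hedged example of how this test script is driven; the invocation mirrors the ``spack-tmpconfig`` call used by the workflows later in this diff:

.. code-block:: console

   $ PYTHON=python3 ./bin/spack-tmpconfig -b ./.github/workflows/bootstrap-test.sh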

View File

@@ -13,22 +13,118 @@ concurrency:
cancel-in-progress: true
jobs:
distros-clingo-sources:
fedora-clingo-sources:
runs-on: ubuntu-latest
container: ${{ matrix.image }}
strategy:
matrix:
image: ["fedora:latest", "opensuse/leap:latest"]
container: "fedora:latest"
steps:
- name: Setup Fedora
if: ${{ matrix.image == 'fedora:latest' }}
- name: Install dependencies
run: |
dnf install -y \
bzip2 curl file gcc-c++ gcc gcc-gfortran git gzip \
bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
make patch unzip which xz python3 python3-devel tree \
cmake bison bison-devel libstdc++-static
- name: Setup OpenSUSE
if: ${{ matrix.image == 'opensuse/leap:latest' }}
- name: Checkout
uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
with:
fetch-depth: 0
- name: Setup non-root user
run: |
# See [1] below
git config --global --add safe.directory /__w/spack/spack
useradd spack-test && mkdir -p ~spack-test
chown -R spack-test . ~spack-test
- name: Setup repo
shell: runuser -u spack-test -- bash {0}
run: |
git --version
. .github/workflows/setup_git.sh
- name: Bootstrap clingo
shell: runuser -u spack-test -- bash {0}
run: |
source share/spack/setup-env.sh
spack bootstrap disable github-actions-v0.5
spack bootstrap disable github-actions-v0.4
spack external find cmake bison
spack -d solve zlib
tree ~/.spack/bootstrap/store/
ubuntu-clingo-sources:
runs-on: ubuntu-latest
container: "ubuntu:latest"
steps:
- name: Install dependencies
env:
DEBIAN_FRONTEND: noninteractive
run: |
apt-get update -y && apt-get upgrade -y
apt-get install -y \
bzip2 curl file g++ gcc gfortran git gnupg2 gzip \
make patch unzip xz-utils python3 python3-dev tree \
cmake bison
- name: Checkout
uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
with:
fetch-depth: 0
- name: Setup non-root user
run: |
# See [1] below
git config --global --add safe.directory /__w/spack/spack
useradd spack-test && mkdir -p ~spack-test
chown -R spack-test . ~spack-test
- name: Setup repo
shell: runuser -u spack-test -- bash {0}
run: |
git --version
. .github/workflows/setup_git.sh
- name: Bootstrap clingo
shell: runuser -u spack-test -- bash {0}
run: |
source share/spack/setup-env.sh
spack bootstrap disable github-actions-v0.5
spack bootstrap disable github-actions-v0.4
spack external find cmake bison
spack -d solve zlib
tree ~/.spack/bootstrap/store/
ubuntu-clingo-binaries-and-patchelf:
runs-on: ubuntu-latest
container: "ubuntu:latest"
steps:
- name: Install dependencies
env:
DEBIAN_FRONTEND: noninteractive
run: |
apt-get update -y && apt-get upgrade -y
apt-get install -y \
bzip2 curl file g++ gcc gfortran git gnupg2 gzip \
make patch unzip xz-utils python3 python3-dev tree
- name: Checkout
uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
with:
fetch-depth: 0
- name: Setup non-root user
run: |
# See [1] below
git config --global --add safe.directory /__w/spack/spack
useradd spack-test && mkdir -p ~spack-test
chown -R spack-test . ~spack-test
- name: Setup repo
shell: runuser -u spack-test -- bash {0}
run: |
git --version
. .github/workflows/setup_git.sh
- name: Bootstrap clingo
shell: runuser -u spack-test -- bash {0}
run: |
source share/spack/setup-env.sh
spack -d solve zlib
tree ~/.spack/bootstrap/store/
opensuse-clingo-sources:
runs-on: ubuntu-latest
container: "opensuse/leap:latest"
steps:
- name: Install dependencies
run: |
# Harden CI by applying the workaround described here: https://www.suse.com/support/kb/doc/?id=000019505
zypper update -y || zypper update -y
@@ -37,9 +133,15 @@ jobs:
make patch unzip which xz python3 python3-devel tree \
cmake bison
- name: Checkout
uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
with:
fetch-depth: 0
- name: Setup repo
run: |
# See [1] below
git config --global --add safe.directory /__w/spack/spack
git --version
. .github/workflows/setup_git.sh
- name: Bootstrap clingo
run: |
source share/spack/setup-env.sh
@@ -49,102 +151,77 @@ jobs:
spack -d solve zlib
tree ~/.spack/bootstrap/store/
clingo-sources:
runs-on: ${{ matrix.runner }}
strategy:
matrix:
runner: ['macos-13', 'macos-14', "ubuntu-latest"]
macos-clingo-sources:
runs-on: macos-latest
steps:
- name: Setup macOS
if: ${{ matrix.runner != 'ubuntu-latest' }}
- name: Install dependencies
run: |
brew install cmake bison tree
brew install cmake bison@2.7 tree
- name: Checkout
uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
with:
fetch-depth: 0
uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
with:
python-version: "3.12"
- name: Bootstrap clingo
run: |
source share/spack/setup-env.sh
export PATH=/usr/local/opt/bison@2.7/bin:$PATH
spack bootstrap disable github-actions-v0.5
spack bootstrap disable github-actions-v0.4
spack external find --not-buildable cmake bison
spack -d solve zlib
tree ~/.spack/bootstrap/store/
gnupg-sources:
runs-on: ${{ matrix.runner }}
macos-clingo-binaries:
runs-on: ${{ matrix.macos-version }}
strategy:
matrix:
runner: [ 'macos-13', 'macos-14', "ubuntu-latest" ]
macos-version: ['macos-11', 'macos-12']
steps:
- name: Setup macOS
if: ${{ matrix.runner != 'ubuntu-latest' }}
- name: Install dependencies
run: |
brew install tree
# Remove GnuPG since we want to bootstrap it
sudo rm -rf /usr/local/bin/gpg
- name: Setup Ubuntu
if: ${{ matrix.runner == 'ubuntu-latest' }}
run: |
sudo rm -rf $(which gpg) $(which gpg2) $(which patchelf)
- name: Checkout
uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
with:
fetch-depth: 0
- name: Bootstrap GnuPG
run: |
source share/spack/setup-env.sh
spack solve zlib
spack bootstrap disable github-actions-v0.5
spack bootstrap disable github-actions-v0.4
spack -d gpg list
tree ~/.spack/bootstrap/store/
from-binaries:
runs-on: ${{ matrix.runner }}
strategy:
matrix:
runner: ['macos-13', 'macos-14', "ubuntu-latest"]
steps:
- name: Setup macOS
if: ${{ matrix.runner != 'ubuntu-latest' }}
run: |
brew install tree
# Remove GnuPG since we want to bootstrap it
sudo rm -rf /usr/local/bin/gpg
- name: Setup Ubuntu
if: ${{ matrix.runner == 'ubuntu-latest' }}
run: |
sudo rm -rf $(which gpg) $(which gpg2) $(which patchelf)
- name: Checkout
uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
with:
fetch-depth: 0
- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
with:
python-version: |
3.8
3.9
3.10
3.11
3.12
- name: Set bootstrap sources
run: |
source share/spack/setup-env.sh
spack bootstrap disable github-actions-v0.4
spack bootstrap disable spack-install
uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
- name: Bootstrap clingo
run: |
set -e
for ver in '3.8' '3.9' '3.10' '3.11' '3.12' ; do
set -ex
for ver in '3.7' '3.8' '3.9' '3.10' '3.11' ; do
not_found=1
ver_dir="$(find $RUNNER_TOOL_CACHE/Python -wholename "*/${ver}.*/*/bin" | grep . || true)"
echo "Testing $ver_dir"
if [[ -d "$ver_dir" ]] ; then
if $ver_dir/python --version ; then
export PYTHON="$ver_dir/python"
not_found=0
old_path="$PATH"
export PATH="$ver_dir:$PATH"
./bin/spack-tmpconfig -b ./.github/workflows/bootstrap-test.sh
export PATH="$old_path"
fi
fi
# NOTE: test all pythons that exist, not all do on 12
done
ubuntu-clingo-binaries:
runs-on: ubuntu-20.04
steps:
- name: Checkout
uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
with:
fetch-depth: 0
- name: Setup repo
run: |
git --version
. .github/workflows/setup_git.sh
- name: Bootstrap clingo
run: |
set -ex
for ver in '3.7' '3.8' '3.9' '3.10' '3.11' ; do
not_found=1
ver_dir="$(find $RUNNER_TOOL_CACHE/Python -wholename "*/${ver}.*/*/bin" | grep . || true)"
echo "Testing $ver_dir"
if [[ -d "$ver_dir" ]] ; then
echo "Testing $ver_dir"
if $ver_dir/python --version ; then
export PYTHON="$ver_dir/python"
not_found=0
@@ -159,9 +236,122 @@ jobs:
exit 1
fi
done
ubuntu-gnupg-binaries:
runs-on: ubuntu-latest
container: "ubuntu:latest"
steps:
- name: Install dependencies
env:
DEBIAN_FRONTEND: noninteractive
run: |
apt-get update -y && apt-get upgrade -y
apt-get install -y \
bzip2 curl file g++ gcc patchelf gfortran git gzip \
make patch unzip xz-utils python3 python3-dev tree
- name: Checkout
uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
with:
fetch-depth: 0
- name: Setup non-root user
run: |
# See [1] below
git config --global --add safe.directory /__w/spack/spack
useradd spack-test && mkdir -p ~spack-test
chown -R spack-test . ~spack-test
- name: Setup repo
shell: runuser -u spack-test -- bash {0}
run: |
git --version
. .github/workflows/setup_git.sh
- name: Bootstrap GnuPG
shell: runuser -u spack-test -- bash {0}
run: |
source share/spack/setup-env.sh
spack bootstrap disable github-actions-v0.4
spack bootstrap disable spack-install
spack -d gpg list
tree ~/.spack/bootstrap/store/
ubuntu-gnupg-sources:
runs-on: ubuntu-latest
container: "ubuntu:latest"
steps:
- name: Install dependencies
env:
DEBIAN_FRONTEND: noninteractive
run: |
apt-get update -y && apt-get upgrade -y
apt-get install -y \
bzip2 curl file g++ gcc patchelf gfortran git gzip \
make patch unzip xz-utils python3 python3-dev tree \
gawk
- name: Checkout
uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
with:
fetch-depth: 0
- name: Setup non-root user
run: |
# See [1] below
git config --global --add safe.directory /__w/spack/spack
useradd spack-test && mkdir -p ~spack-test
chown -R spack-test . ~spack-test
- name: Setup repo
shell: runuser -u spack-test -- bash {0}
run: |
git --version
. .github/workflows/setup_git.sh
- name: Bootstrap GnuPG
shell: runuser -u spack-test -- bash {0}
run: |
source share/spack/setup-env.sh
spack solve zlib
spack bootstrap disable github-actions-v0.5
spack bootstrap disable github-actions-v0.4
spack -d gpg list
tree ~/.spack/bootstrap/store/
macos-gnupg-binaries:
runs-on: macos-latest
steps:
- name: Install dependencies
run: |
brew install tree
# Remove GnuPG since we want to bootstrap it
sudo rm -rf /usr/local/bin/gpg
- name: Checkout
uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
- name: Bootstrap GnuPG
run: |
source share/spack/setup-env.sh
spack bootstrap disable github-actions-v0.4
spack bootstrap disable spack-install
spack -d gpg list
tree ~/.spack/bootstrap/store/
macos-gnupg-sources:
runs-on: macos-latest
steps:
- name: Install dependencies
run: |
brew install gawk tree
# Remove GnuPG since we want to bootstrap it
sudo rm -rf /usr/local/bin/gpg
- name: Checkout
uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
- name: Bootstrap GnuPG
run: |
source share/spack/setup-env.sh
spack solve zlib
spack bootstrap disable github-actions-v0.5
spack bootstrap disable github-actions-v0.4
spack -d gpg list
tree ~/.spack/bootstrap/store/
# [1] Distros that have patched git to resolve CVE-2022-24765 (e.g. Ubuntu patching v2.25.1)
# introduce breaking behavior, so we have to set `safe.directory` in gitconfig ourselves.
# See:
# - https://github.blog/2022-04-12-git-security-vulnerability-announced/
# - https://github.com/actions/checkout/issues/760
# - http://changelogs.ubuntu.com/changelogs/pool/main/g/git/git_2.25.1-1ubuntu3.3/changelog

View File

@@ -50,13 +50,15 @@ jobs:
[almalinux9, 'linux/amd64,linux/arm64,linux/ppc64le', 'almalinux:9'],
[rockylinux8, 'linux/amd64,linux/arm64', 'rockylinux:8'],
[rockylinux9, 'linux/amd64,linux/arm64', 'rockylinux:9'],
[fedora37, 'linux/amd64,linux/arm64,linux/ppc64le', 'fedora:37'],
[fedora38, 'linux/amd64,linux/arm64,linux/ppc64le', 'fedora:38'],
[fedora39, 'linux/amd64,linux/arm64,linux/ppc64le', 'fedora:39'],
[fedora40, 'linux/amd64,linux/arm64,linux/ppc64le', 'fedora:40']]
name: Build ${{ matrix.dockerfile[0] }}
if: github.repository == 'spack/spack'
steps:
- name: Checkout
uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
- uses: docker/metadata-action@8e5442c4ef9f78752691e2d8f8d19755c6f78e81
id: docker_meta

View File

@@ -36,7 +36,7 @@ jobs:
core: ${{ steps.filter.outputs.core }}
packages: ${{ steps.filter.outputs.packages }}
steps:
- uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
if: ${{ github.event_name == 'push' }}
with:
fetch-depth: 0

View File

@@ -14,7 +14,7 @@ jobs:
build-paraview-deps:
runs-on: windows-latest
steps:
- uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
with:
fetch-depth: 0
- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d

View File

@@ -3,5 +3,5 @@ clingo==5.7.1
flake8==7.0.0
isort==5.13.2
mypy==1.8.0
types-six==1.16.21.20240513
types-six==1.16.21.9
vermin==1.6.0

View File

@@ -51,7 +51,7 @@ jobs:
on_develop: false
steps:
- uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
with:
fetch-depth: 0
- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
@@ -91,7 +91,7 @@ jobs:
UNIT_TEST_COVERAGE: ${{ matrix.python-version == '3.11' }}
run: |
share/spack/qa/run-unit-tests
- uses: codecov/codecov-action@125fc84a9a348dbcf27191600683ec096ec9021c
- uses: codecov/codecov-action@84508663e988701840491b86de86b666e8a86bed
with:
flags: unittests,linux,${{ matrix.concretizer }}
token: ${{ secrets.CODECOV_TOKEN }}
@@ -100,7 +100,7 @@ jobs:
shell:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
with:
fetch-depth: 0
- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
@@ -124,7 +124,7 @@ jobs:
COVERAGE: true
run: |
share/spack/qa/run-shell-tests
- uses: codecov/codecov-action@125fc84a9a348dbcf27191600683ec096ec9021c
- uses: codecov/codecov-action@84508663e988701840491b86de86b666e8a86bed
with:
flags: shelltests,linux
token: ${{ secrets.CODECOV_TOKEN }}
@@ -141,7 +141,7 @@ jobs:
dnf install -y \
bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
make patch tcl unzip which xz
- uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
- name: Setup repo and non-root user
run: |
git --version
@@ -160,7 +160,7 @@ jobs:
clingo-cffi:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
with:
fetch-depth: 0
- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
@@ -185,7 +185,7 @@ jobs:
SPACK_TEST_SOLVER: clingo
run: |
share/spack/qa/run-unit-tests
- uses: codecov/codecov-action@125fc84a9a348dbcf27191600683ec096ec9021c
- uses: codecov/codecov-action@84508663e988701840491b86de86b666e8a86bed
with:
flags: unittests,linux,clingo
token: ${{ secrets.CODECOV_TOKEN }}
@@ -195,10 +195,10 @@ jobs:
runs-on: ${{ matrix.os }}
strategy:
matrix:
os: [macos-13, macos-14]
os: [macos-latest, macos-14]
python-version: ["3.11"]
steps:
- uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
with:
fetch-depth: 0
- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
@@ -223,7 +223,7 @@ jobs:
$(which spack) solve zlib
common_args=(--dist loadfile --tx '4*popen//python=./bin/spack-tmpconfig python -u ./bin/spack python' -x)
$(which spack) unit-test --verbose --cov --cov-config=pyproject.toml --cov-report=xml:coverage.xml "${common_args[@]}"
- uses: codecov/codecov-action@125fc84a9a348dbcf27191600683ec096ec9021c
- uses: codecov/codecov-action@84508663e988701840491b86de86b666e8a86bed
with:
flags: unittests,macos
token: ${{ secrets.CODECOV_TOKEN }}

View File

@@ -18,7 +18,7 @@ jobs:
validate:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
with:
python-version: '3.11'
@@ -35,7 +35,7 @@ jobs:
style:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
with:
fetch-depth: 0
- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
@@ -70,7 +70,7 @@ jobs:
dnf install -y \
bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
make patch tcl unzip which xz
- uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
- name: Setup repo and non-root user
run: |
git --version

View File

@@ -15,7 +15,7 @@ jobs:
unit-tests:
runs-on: windows-latest
steps:
- uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
with:
fetch-depth: 0
- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
@@ -33,7 +33,7 @@ jobs:
./share/spack/qa/validate_last_exit.ps1
coverage combine -a
coverage xml
- uses: codecov/codecov-action@125fc84a9a348dbcf27191600683ec096ec9021c
- uses: codecov/codecov-action@84508663e988701840491b86de86b666e8a86bed
with:
flags: unittests,windows
token: ${{ secrets.CODECOV_TOKEN }}
@@ -41,7 +41,7 @@ jobs:
unit-tests-cmd:
runs-on: windows-latest
steps:
- uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
with:
fetch-depth: 0
- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
@@ -59,7 +59,7 @@ jobs:
./share/spack/qa/validate_last_exit.ps1
coverage combine -a
coverage xml
- uses: codecov/codecov-action@125fc84a9a348dbcf27191600683ec096ec9021c
- uses: codecov/codecov-action@84508663e988701840491b86de86b666e8a86bed
with:
flags: unittests,windows
token: ${{ secrets.CODECOV_TOKEN }}
@@ -67,7 +67,7 @@ jobs:
build-abseil:
runs-on: windows-latest
steps:
- uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
with:
fetch-depth: 0
- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d

View File

@@ -1,48 +1,3 @@
# v0.21.2 (2024-03-01)
## Bugfixes
- Containerize: accommodate nested or pre-existing spack-env paths (#41558)
- Fix setup-env script, when going back and forth between instances (#40924)
- Fix using fully-qualified namespaces from root specs (#41957)
- Fix a bug when a required provider is requested for multiple virtuals (#42088)
- OCI buildcaches:
- only push in parallel when forking (#42143)
- use pickleable errors (#42160)
- Fix using sticky variants in externals (#42253)
- Fix a rare issue with conditional requirements and multi-valued variants (#42566)
## Package updates
- rust: add v1.75, rework a few variants (#41161,#41903)
- py-transformers: add v4.35.2 (#41266)
- mgard: fix OpenMP on AppleClang (#42933)
# v0.21.1 (2024-01-11)
## New features
- Add support for reading buildcaches created by Spack v0.22 (#41773)
## Bugfixes
- spack graph: fix coloring with environments (#41240)
- spack info: sort variants in --variants-by-name (#41389)
- Spec.format: error on old style format strings (#41934)
- ASP-based solver:
- fix infinite recursion when computing concretization errors (#41061)
- don't error for type mismatch on preferences (#41138)
- don't emit spurious debug output (#41218)
- Improve the error message for deprecated preferences (#41075)
- Fix MSVC preview version breaking clingo build on Windows (#41185)
- Fix multi-word aliases (#41126)
- Add a warning for unconfigured compiler (#41213)
- environment: fix an issue with deconcretization/reconcretization of specs (#41294)
- buildcache: don't error if a patch is missing, when installing from binaries (#41986)
- Multiple improvements to unit-tests (#41215,#41369,#41495,#41359,#41361,#41345,#41342,#41308,#41226)
## Package updates
- root: add a webgui patch to address security issue (#41404)
- BerkeleyGW: update source urls (#38218)
# v0.21.0 (2023-11-11)
`v0.21.0` is a major feature release.

View File

@@ -144,5 +144,3 @@ switch($SpackSubCommand)
"unload" {Invoke-SpackLoad}
default {python "$Env:SPACK_ROOT/bin/spack" $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs}
}
exit $LASTEXITCODE

View File

@@ -38,9 +38,10 @@ packages:
lapack: [openblas, amdlibflame]
libc: [glibc, musl]
libgfortran: [ gcc-runtime ]
libglx: [mesa+glx]
libglx: [mesa+glx, mesa18+glx]
libifcore: [ intel-oneapi-runtime ]
libllvm: [llvm]
libosmesa: [mesa+osmesa, mesa18+osmesa]
lua-lang: [lua, lua-luajit-openresty, lua-luajit]
luajit: [lua-luajit-openresty, lua-luajit]
mariadb-client: [mariadb-c-client, mariadb]

View File

@@ -865,7 +865,7 @@ There are several different ways to use Spack packages once you have
installed them. As you've seen, spack packages are installed into long
paths with hashes, and you need a way to get them into your path. The
easiest way is to use :ref:`spack load <cmd-spack-load>`, which is
described in this section.
described in the next section.
Some more advanced ways to use Spack packages include:
@@ -959,86 +959,7 @@ use ``spack find --loaded``.
You can also use ``spack load --list`` to get the same output, but it
does not have the full set of query options that ``spack find`` offers.
We'll learn more about Spack's spec syntax in :ref:`a later section <sec-specs>`.
.. _extensions:
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Python packages and virtual environments
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Spack can install a large number of Python packages. Their names are
typically prefixed with ``py-``. Installing and using them is no
different from any other package:
.. code-block:: console
$ spack install py-numpy
$ spack load py-numpy
$ python3
>>> import numpy
The ``spack load`` command sets the ``PATH`` variable so that the right Python
executable is used, and makes sure that ``numpy`` and its dependencies can be
located in the ``PYTHONPATH``.
Spack is different from other Python package managers in that it installs
every package into its *own* prefix. This is in contrast to ``pip``, which
installs all packages into the same prefix, be it in a virtual environment
or not.
For many users, **virtual environments** are more convenient than repeated
``spack load`` commands, particularly when working with multiple Python
packages. Fortunately Spack supports environments itself, which together
with a view are no different from Python virtual environments.
The recommended way of working with Python extensions such as ``py-numpy``
is through :ref:`Environments <environments>`. The following example creates
a Spack environment with ``numpy`` in the current working directory. It also
puts a filesystem view in ``./view``, which is a more traditional combined
prefix for all packages in the environment.
.. code-block:: console
$ spack env create --with-view view --dir .
$ spack -e . add py-numpy
$ spack -e . concretize
$ spack -e . install
Now you can activate the environment and start using the packages:
.. code-block:: console
$ spack env activate .
$ python3
>>> import numpy
The environment view is also a virtual environment, which is useful if you are
sharing the environment with others who are unfamiliar with Spack. They can
either use the Python executable directly:
.. code-block:: console
$ ./view/bin/python3
>>> import numpy
or use the activation script:
.. code-block:: console
$ source ./view/bin/activate
$ python3
>>> import numpy
In general, there should not be much difference between ``spack env activate``
and using the virtual environment. The main advantage of ``spack env activate``
is that it knows about more packages than just Python packages, and it may set
additional runtime variables that are not covered by the virtual environment
activation script.
See :ref:`environments` for a more in-depth description of Spack
environments and customizations to views.
We'll learn more about Spack's spec syntax in the next section.
.. _sec-specs:
@@ -1784,6 +1705,165 @@ check only local packages (as opposed to those used transparently from
``upstream`` spack instances) and the ``-j,--json`` option to output
machine-readable json data for any errors.
.. _extensions:
---------------------------
Extensions & Python support
---------------------------
Spack's installation model assumes that each package will live in its
own install prefix. However, certain packages are typically installed
*within* the directory hierarchy of other packages. For example,
`Python <https://www.python.org>`_ packages are typically installed in the
``$prefix/lib/python-2.7/site-packages`` directory.
In Spack, installation prefixes are immutable, so this type of installation
is not directly supported. However, it is possible to create views that
allow you to merge install prefixes of multiple packages into a single new prefix.
Views are a convenient way to get a more traditional filesystem structure.
Using *extensions*, you can ensure that Python packages always share the
same prefix in the view as Python itself. Suppose you have
Python installed like so:
.. code-block:: console
$ spack find python
==> 1 installed packages.
-- linux-debian7-x86_64 / gcc@4.4.7 --------------------------------
python@2.7.8
.. _cmd-spack-extensions:
^^^^^^^^^^^^^^^^^^^^
``spack extensions``
^^^^^^^^^^^^^^^^^^^^
You can find extensions for your Python installation like this:
.. code-block:: console
$ spack extensions python
==> python@2.7.8%gcc@4.4.7 arch=linux-debian7-x86_64-703c7a96
==> 36 extensions:
geos py-ipython py-pexpect py-pyside py-sip
py-basemap py-libxml2 py-pil py-pytz py-six
py-biopython py-mako py-pmw py-rpy2 py-sympy
py-cython py-matplotlib py-pychecker py-scientificpython py-virtualenv
py-dateutil py-mpi4py py-pygments py-scikit-learn
py-epydoc py-mx py-pylint py-scipy
py-gnuplot py-nose py-pyparsing py-setuptools
py-h5py py-numpy py-pyqt py-shiboken
==> 12 installed:
-- linux-debian7-x86_64 / gcc@4.4.7 --------------------------------
py-dateutil@2.4.0 py-nose@1.3.4 py-pyside@1.2.2
py-dateutil@2.4.0 py-numpy@1.9.1 py-pytz@2014.10
py-ipython@2.3.1 py-pygments@2.0.1 py-setuptools@11.3.1
py-matplotlib@1.4.2 py-pyparsing@2.0.3 py-six@1.9.0
The extensions are a subset of what's returned by ``spack list``, and
they are packages like any other. They are installed into their own
prefixes, and you can see this with ``spack find --paths``:
.. code-block:: console
$ spack find --paths py-numpy
==> 1 installed packages.
-- linux-debian7-x86_64 / gcc@4.4.7 --------------------------------
py-numpy@1.9.1 ~/spack/opt/linux-debian7-x86_64/gcc@4.4.7/py-numpy@1.9.1-66733244
However, even though this package is installed, you cannot use it
directly when you run ``python``:
.. code-block:: console
$ spack load python
$ python
Python 2.7.8 (default, Feb 17 2015, 01:35:25)
[GCC 4.4.7 20120313 (Red Hat 4.4.7-11)] on linux2
Type "help", "copyright", "credits" or "license" for more information.
>>> import numpy
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
ImportError: No module named numpy
>>>
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Using Extensions in Environments
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
The recommended way of working with extensions such as ``py-numpy``
above is through :ref:`Environments <environments>`. For example,
the following creates an environment in the current working directory
with a filesystem view in the ``./view`` directory:
.. code-block:: console
$ spack env create --with-view view --dir .
$ spack -e . add py-numpy
$ spack -e . concretize
$ spack -e . install
We recommend environments for two reasons. Firstly, environments
can be activated (requires :ref:`shell-support`):
.. code-block:: console
$ spack env activate .
which sets all the right environment variables such as ``PATH`` and
``PYTHONPATH``. This ensures that
.. code-block:: console
$ python
>>> import numpy
works. Secondly, even without shell support, the view ensures
that Python can locate its extensions:
.. code-block:: console
$ ./view/bin/python
>>> import numpy
See :ref:`environments` for a more in-depth description of Spack
environments and customizations to views.
^^^^^^^^^^^^^^^^^^^^
Using ``spack load``
^^^^^^^^^^^^^^^^^^^^
A more traditional way of using Spack and extensions is ``spack load``
(requires :ref:`shell-support`). This will add the extension to ``PYTHONPATH``
in your current shell, and Python itself will be available in the ``PATH``:
.. code-block:: console
$ spack load py-numpy
$ python
>>> import numpy
The loaded packages can be checked using ``spack find --loaded``.
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Loading Extensions via Modules
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Apart from ``spack env activate`` and ``spack load``, you can load numpy
through your environment modules (using ``environment-modules`` or
``lmod``). This will also add the extension to the ``PYTHONPATH`` in
your current shell.
.. code-block:: console
$ module load <name of numpy module>
If you do not know the name of the specific numpy module you wish to
load, you can use the ``spack module tcl|lmod loads`` command to get
the name of the module from the Spack spec.
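A hedged sketch of that lookup (the module name shown is hypothetical and depends on your module configuration):

.. code-block:: console

   $ spack module tcl loads py-numpy
   module load py-numpy-1.9.1-gcc-4.4.7-qwerty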
-----------------------
Filesystem requirements
-----------------------

View File

@@ -147,15 +147,6 @@ example, the ``bash`` shell is used to run the ``autogen.sh`` script.
def autoreconf(self, spec, prefix):
which("bash")("autogen.sh")
If the ``package.py`` has build instructions in a separate
:ref:`builder class <multiple_build_systems>`, the signature for a phase changes slightly:
.. code-block:: python
class AutotoolsBuilder(AutotoolsBuilder):
def autoreconf(self, pkg, spec, prefix):
which("bash")("autogen.sh")
"""""""""""""""""""""""""""""""""""""""
patching configure or Makefile.in files
"""""""""""""""""""""""""""""""""""""""

View File

@@ -25,7 +25,7 @@ use Spack to build packages with the tools.
The Spack Python class ``IntelOneapiPackage`` is a base class that is
used by ``IntelOneapiCompilers``, ``IntelOneapiMkl``,
``IntelOneapiTbb`` and other classes to implement the oneAPI
packages. Search for ``oneAPI`` at `packages.spack.io <https://packages.spack.io>`_ for the full
packages. Search for ``oneAPI`` at `<packages.spack.io>`_ for the full
list of available oneAPI packages, or use::
spack list -d oneAPI

View File

@@ -718,45 +718,23 @@ command-line tool, or C/C++/Fortran program with optional Python
modules? The former should be prepended with ``py-``, while the
latter should not.
""""""""""""""""""""""""""""""
``extends`` vs. ``depends_on``
""""""""""""""""""""""""""""""
""""""""""""""""""""""
extends vs. depends_on
""""""""""""""""""""""
This is very similar to the naming dilemma above, with a slight twist.
As mentioned in the :ref:`Packaging Guide <packaging_extensions>`,
``extends`` and ``depends_on`` are very similar, but ``extends`` ensures
that the extension and extendee share the same prefix in views.
This allows the user to import a Python module without
having to add that module to ``PYTHONPATH``.
Additionally, ``extends("python")`` adds a dependency on the package
``python-venv``. This improves isolation from the system, whether
it's during the build or at runtime: user and system site packages
cannot accidentally be used by any package that ``extends("python")``.
As a rule of thumb: if a package does not install any Python modules
of its own, and merely puts a Python script in the ``bin`` directory,
then there is no need for ``extends``. If the package installs modules
in the ``site-packages`` directory, it requires ``extends``.
"""""""""""""""""""""""""""""""""""""
Executing ``python`` during the build
"""""""""""""""""""""""""""""""""""""
Whenever you need to execute a Python command or pass the path of the
Python interpreter to the build system, it is best to use the global
variable ``python`` directly. For example:
.. code-block:: python
@run_before("install")
def recythonize(self):
python("setup.py", "clean") # use the `python` global
As mentioned in the previous section, ``extends("python")`` adds an
automatic dependency on ``python-venv``, which is a virtual environment
that guarantees build isolation. The ``python`` global always refers to
the correct Python interpreter, whether the package uses ``extends("python")``
or ``depends_on("python")``.
When deciding between ``extends`` and ``depends_on``, the best rule of
thumb is to check the installation prefix. If Python libraries are
installed to ``<prefix>/lib/pythonX.Y/site-packages``, then you
should use ``extends``. If Python libraries are installed elsewhere
or the only files that get installed reside in ``<prefix>/bin``, then
don't use ``extends``.
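As a hedged illustration of that rule of thumb (package names are hypothetical, and note that the ``PythonPackage`` base class already declares ``extends("python")`` for you):

.. code-block:: python

   from spack.package import *

   class PyMyLib(Package):
       """Hypothetical: installs modules into site-packages, so it extends python."""
       extends("python")

   class MyCliTool(Package):
       """Hypothetical: only installs a script into bin/, so a plain dependency suffices."""
       depends_on("python", type=("build", "run"))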
^^^^^^^^^^^^^^^^^^^^^
Alternatives to Spack

View File

@@ -150,7 +150,7 @@ this can expose you to attacks. Use at your own risk.
--------------------
Path to custom certificates for SSL verification. The value can be a
filesystem path, or an environment variable that expands to an absolute file path.
filesystem path, or an environment variable that expands to a file path.
The default value is set to the environment variable ``SSL_CERT_FILE``
to use the same syntax used by many other applications that automatically
detect custom certificates.
@@ -160,9 +160,6 @@ in the subprocess calling ``curl``.
If ``url_fetch_method:urllib``, then files and directories are supported, i.e.
``config:ssl_certs:$SSL_CERT_FILE`` or ``config:ssl_certs:$SSL_CERT_DIR``
will work.
In all cases the expanded path must be absolute for Spack to use the certificates.
Certificate paths relative to an environment can be specified by prepending the path
with the Spack configuration variable ``$env``.
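A hedged ``config`` snippet matching the options described above:

.. code-block:: yaml

   config:
     ssl_certs: $SSL_CERT_FILE   # or an absolute path, e.g. /etc/ssl/certs/ca-bundle.crt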
--------------------
``checksum``

View File

@@ -194,6 +194,9 @@ The OS that are currently supported are summarized in the table below:
* - Operating System
- Base Image
- Spack Image
* - Ubuntu 18.04
- ``ubuntu:18.04``
- ``spack/ubuntu-bionic``
* - Ubuntu 20.04
- ``ubuntu:20.04``
- ``spack/ubuntu-focal``
@@ -227,6 +230,12 @@ The OS that are currently supported are summarized in the table below:
* - Rocky Linux 9
- ``rockylinux:9``
- ``spack/rockylinux9``
* - Fedora Linux 37
- ``fedora:37``
- ``spack/fedora37``
* - Fedora Linux 38
- ``fedora:38``
- ``spack/fedora38``
* - Fedora Linux 39
- ``fedora:39``
- ``spack/fedora39``
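As a hedged sketch, one of the base images from the table above can be selected in an environment's ``container`` section (the spec and image tag here are illustrative):

.. code-block:: yaml

   spack:
     specs: [zlib]
     container:
       format: docker
       images:
         os: ubuntu:20.04
         spack: develop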

View File

@@ -142,8 +142,12 @@ user's prompt to begin with the environment name in brackets.
$ spack env activate -p myenv
[myenv] $ ...
The ``activate`` command can also be used to create a new environment if it does not already
exist.
The ``activate`` command can also be used to create a new environment, if it is
not already defined, by adding the ``--create`` flag. Managed and anonymous
environments (anonymous environments are explained in the next section) can
both be created using the same flags that ``spack env create`` accepts. If the
environment already exists, Spack will simply activate it and ignore the
create-specific flags.
.. code-block:: console
@@ -172,36 +176,21 @@ environment will remove the view from the user environment.
Anonymous Environments
^^^^^^^^^^^^^^^^^^^^^^
Apart from managed environments, Spack also supports anonymous environments.
Anonymous environments can be placed in any directory of choice.
.. note::
When uninstalling packages, Spack asks the user to confirm the removal of packages
that are still used in a managed environment. This is not the case for anonymous
environments.
To create an anonymous environment, use one of the following commands:
Any directory can be treated as an environment if it contains a file
``spack.yaml``. To load an anonymous environment, use:
.. code-block:: console
$ spack env create --dir my_env
$ spack env create ./my_env
$ spack env activate -d /path/to/directory
As a shorthand, you can also create an anonymous environment upon activation if it does not
already exist:
Anonymous specs can be created in place using the command:
.. code-block:: console
$ spack env activate --create ./my_env
For convenience, Spack can also place an anonymous environment in a temporary directory for you:
.. code-block:: console
$ spack env activate --temp
$ spack env create -d .
In this case Spack simply creates a ``spack.yaml`` file in the requested
directory.
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Environment Sensitive Commands
@@ -460,125 +449,6 @@ Sourcing that file in Bash will make the environment available to the
user; and can be included in ``.bashrc`` files, etc. The ``loads``
file may also be copied out of the environment, renamed, etc.
.. _environment_include_concrete:
------------------------------
Included Concrete Environments
------------------------------
Spack can create an environment based on information from already
established environments. You can think of it as a combination of existing
environments. It will gather information from the existing environment's
``spack.lock`` and use that during the creation of this included concrete
environment. When an included concrete environment is created it will generate
a ``spack.lock`` file for the newly created environment.
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Creating included environments
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
To create a combined concrete environment, you must have at least one existing
concrete environment. You will use the command ``spack env create`` with the
argument ``--include-concrete`` followed by the name or path of the environment
you'd like to include. Here is an example of how to create a combined environment
from the command line.
.. code-block:: console
$ spack env create myenv
$ spack -e myenv add python
$ spack -e myenv concretize
$ spack env create --include-concrete myenv included_env
You can also include an environment directly in the ``spack.yaml`` file. It
involves adding the ``include_concrete`` heading in the yaml followed by the
absolute path to the independent environments.
.. code-block:: yaml
spack:
specs: []
concretizer:
unify: true
include_concrete:
- /absolute/path/to/environment1
- /absolute/path/to/environment2
Once the ``spack.yaml`` has been updated you must concretize the environment to
get the concrete specs from the included environments.
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Updating an included environment
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
If changes were made to the base environment and you want that reflected in the
included environment you will need to reconcretize both the base environment and the
included environment for the change to be implemented. For example:
.. code-block:: console
$ spack env create myenv
$ spack -e myenv add python
$ spack -e myenv concretize
$ spack env create --include-concrete myenv included_env
$ spack -e myenv find
==> In environment myenv
==> Root specs
python
==> 0 installed packages
$ spack -e included_env find
==> In environment included_env
==> No root specs
==> Included specs
python
==> 0 installed packages
Here we see that ``included_env`` has access to the python package through
the ``myenv`` environment. But if we were to add another spec to ``myenv``,
``included_env`` will not be able to access the new information.
.. code-block:: console
$ spack -e myenv add perl
$ spack -e myenv concretize
$ spack -e myenv find
==> In environment myenv
==> Root specs
perl python
==> 0 installed packages
$ spack -e included_env find
==> In environment included_env
==> No root specs
==> Included specs
python
==> 0 installed packages
It isn't until you run the ``spack concretize`` command that the combined
environment will get the updated information from the reconcretized base environment.
.. code-block:: console
$ spack -e included_env concretize
$ spack -e included_env find
==> In environment included_env
==> No root specs
==> Included specs
perl python
==> 0 installed packages
.. _environment-configuration:
------------------------
@@ -930,7 +800,6 @@ For example, the following environment has three root packages:
This allows for a much-needed reduction in redundancy between packages
and constraints.
----------------
Filesystem Views
----------------
@@ -1164,7 +1033,7 @@ other targets to depend on the environment installation.
A typical workflow is as follows:
.. code-block:: console
.. code:: console
spack env create -d .
spack -e . add perl
@@ -1257,7 +1126,7 @@ its dependencies. This can be useful when certain flags should only apply to
dependencies. Below we show a use case where a spec is installed with verbose
output (``spack install --verbose``) while its dependencies are installed silently:
.. code-block:: console
.. code:: console
$ spack env depfile -o Makefile
@@ -1279,7 +1148,7 @@ This can be accomplished through the generated ``[<prefix>/]SPACK_PACKAGE_IDS``
variable. Assuming we have an active and concrete environment, we generate the
associated ``Makefile`` with a prefix ``example``:
.. code-block:: console
.. code:: console
$ spack env depfile -o env.mk --make-prefix example
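A hedged sketch of a wrapper Makefile consuming that variable (the ``post-install`` target names are illustrative; ``SPEC`` is provided per target by the generated file):

.. code-block:: make

   SPACK ?= spack
   include env.mk   # generated by: spack env depfile -o env.mk --make-prefix example

   # fan out one task per installed package; install targets are named example/install/<id>
   post-install: $(addprefix example/post-install/,$(example/SPACK_PACKAGE_IDS))

   example/post-install/%: example/install/%
   	@echo "installed $(SPEC)"
   	@mkdir -p $(dir $@) && touch $@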

View File

@@ -478,13 +478,6 @@ prefix, you can add them to the ``extra_attributes`` field. Similarly,
all other fields from the compilers config can be added to the
``extra_attributes`` field for an external representing a compiler.
Note that the format for the ``paths`` field in the
``extra_attributes`` section is different than in the ``compilers``
config. For compilers configured as external packages, the section is
named ``compilers`` and the dictionary maps language names (``c``,
``cxx``, ``fortran``) to paths, rather than using the names ``cc``,
``fc``, and ``f77``.
.. code-block:: yaml
packages:
@@ -500,10 +493,11 @@ named ``compilers`` and the dictionary maps language names (``c``,
- spec: llvm+clang@15.0.0 arch=linux-rhel8-skylake
prefix: /usr
extra_attributes:
compilers:
c: /usr/bin/clang-with-suffix
paths:
cc: /usr/bin/clang-with-suffix
cxx: /usr/bin/clang++-with-extra-info
fortran: /usr/bin/gfortran
fc: /usr/bin/gfortran
f77: /usr/bin/gfortran
extra_rpaths:
- /usr/lib/llvm/

View File

@@ -476,3 +476,9 @@ implemented using Python's built-in `sys.path
:py:mod:`spack.repo` module implements a custom `Python importer
<https://docs.python.org/2/library/imp.html>`_.
.. warning::
The mechanism for extending packages is not yet extensively tested,
and extending packages across repositories imposes inter-repo
dependencies, which may be hard to manage. Use this feature at your
own risk, but let us know if you have a use case for it.

View File

@@ -4,10 +4,10 @@ sphinx_design==0.5.0
sphinx-rtd-theme==2.0.0
python-levenshtein==0.25.1
docutils==0.20.1
pygments==2.18.0
pygments==2.17.2
urllib3==2.2.1
pytest==8.2.1
pytest==8.2.0
isort==5.13.2
black==24.4.2
black==24.4.0
flake8==7.0.0
mypy==1.10.0
mypy==1.9.0

View File

@@ -18,7 +18,7 @@
* Homepage: https://pypi.python.org/pypi/archspec
* Usage: Labeling, comparison and detection of microarchitectures
* Version: 0.2.4 (commit 48b92512b9ce203ded0ebd1ac41b42593e931f7c)
* Version: 0.2.3 (commit 7b8fe60b69e2861e7dac104bc1c183decfcd3daf)
astunparse
----------------

View File

@@ -1,3 +1,3 @@
"""Init file to avoid namespace packages"""
__version__ = "0.2.4"
__version__ = "0.2.3"

View File

@@ -5,10 +5,9 @@
"""The "cpu" package permits to query and compare different
CPU microarchitectures.
"""
from .detect import brand_string, host
from .detect import host
from .microarchitecture import (
TARGETS,
InvalidCompilerVersion,
Microarchitecture,
UnsupportedMicroarchitecture,
generic_microarchitecture,
@@ -16,12 +15,10 @@
)
__all__ = [
"brand_string",
"host",
"TARGETS",
"InvalidCompilerVersion",
"Microarchitecture",
"UnsupportedMicroarchitecture",
"TARGETS",
"generic_microarchitecture",
"host",
"version_components",
]

View File

@@ -155,31 +155,6 @@ def _is_bit_set(self, register: int, bit: int) -> bool:
mask = 1 << bit
return register & mask > 0
def brand_string(self) -> Optional[str]:
"""Returns the brand string, if available."""
if self.highest_extension_support < 0x80000004:
return None
r1 = self.cpuid.registers_for(eax=0x80000002, ecx=0)
r2 = self.cpuid.registers_for(eax=0x80000003, ecx=0)
r3 = self.cpuid.registers_for(eax=0x80000004, ecx=0)
result = struct.pack(
"IIIIIIIIIIII",
r1.eax,
r1.ebx,
r1.ecx,
r1.edx,
r2.eax,
r2.ebx,
r2.ecx,
r2.edx,
r3.eax,
r3.ebx,
r3.ecx,
r3.edx,
).decode("utf-8")
return result.strip("\x00")
@detection(operating_system="Windows")
def cpuid_info():
@@ -199,8 +174,8 @@ def _check_output(args, env):
WINDOWS_MAPPING = {
"AMD64": X86_64,
"ARM64": AARCH64,
"AMD64": "x86_64",
"ARM64": "aarch64",
}
@@ -434,16 +409,3 @@ def compatibility_check_for_riscv64(info, target):
return (target == arch_root or arch_root in target.ancestors) and (
target.name == info.name or target.vendor == "generic"
)
def brand_string() -> Optional[str]:
"""Returns the brand string of the host, if detected, or None."""
if platform.system() == "Darwin":
return _check_output(
["sysctl", "-n", "machdep.cpu.brand_string"], env=_ensure_bin_usrbin_in_path()
).strip()
if host().family == X86_64:
return CpuidInfoCollector().brand_string()
return None
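A hedged usage sketch of the helper shown above, assuming the version of archspec that exports ``brand_string``:

.. code-block:: python

   import archspec.cpu

   # returns None when the brand string cannot be detected on this host
   print(archspec.cpu.brand_string())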

View File

@@ -208,8 +208,6 @@ def optimization_flags(self, compiler, version):
"""Returns a string containing the optimization flags that needs
to be used to produce code optimized for this micro-architecture.
The version is expected to be a string of dot separated digits.
If there is no information on the compiler passed as argument the
function returns an empty string. If it is known that the compiler
version we want to use does not support this architecture the function
@@ -218,11 +216,6 @@ def optimization_flags(self, compiler, version):
Args:
compiler (str): name of the compiler to be used
version (str): version of the compiler to be used
Raises:
UnsupportedMicroarchitecture: if the requested compiler does not support
this micro-architecture.
ValueError: if the version doesn't match the expected format
"""
# If we don't have information on compiler at all return an empty string
if compiler not in self.family.compilers:
@@ -239,14 +232,6 @@ def optimization_flags(self, compiler, version):
msg = msg.format(compiler, best_target, best_target.family)
raise UnsupportedMicroarchitecture(msg)
# Check that the version matches the expected format
if not re.match(r"^(?:\d+\.)*\d+$", version):
msg = (
"invalid format for the compiler version argument. "
"Only dot separated digits are allowed."
)
raise InvalidCompilerVersion(msg)
# If we have information on this compiler we need to check the
# version being used
compiler_info = self.compilers[compiler]
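A hedged usage sketch of the method documented above (the exact flags depend on the compiler tables shipped with archspec):

.. code-block:: python

   import archspec.cpu

   target = archspec.cpu.TARGETS["haswell"]
   print(target.optimization_flags("gcc", "12.2.0"))  # e.g. "-march=haswell -mtune=haswell"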
@@ -307,7 +292,7 @@ def generic_microarchitecture(name):
Args:
name (str): name of the micro-architecture
"""
return Microarchitecture(name, parents=[], vendor="generic", features=set(), compilers={})
return Microarchitecture(name, parents=[], vendor="generic", features=[], compilers={})
def version_components(version):
@@ -382,15 +367,7 @@ def fill_target_from_dict(name, data, targets):
TARGETS = LazyDictionary(_known_microarchitectures)
class ArchspecError(Exception):
"""Base class for errors within archspec"""
class UnsupportedMicroarchitecture(ArchspecError, ValueError):
class UnsupportedMicroarchitecture(ValueError):
"""Raised if a compiler version does not support optimization for a given
micro-architecture.
"""
class InvalidCompilerVersion(ArchspecError, ValueError):
"""Raised when an invalid format is used for compiler versions in archspec."""

View File

@@ -2937,6 +2937,8 @@
"ilrcpc",
"flagm",
"ssbs",
"paca",
"pacg",
"dcpodp",
"svei8mm",
"svebf16",
@@ -3064,6 +3066,8 @@
"flagm",
"ssbs",
"sb",
"paca",
"pacg",
"dcpodp",
"sve2",
"sveaes",
@@ -3077,7 +3081,8 @@
"svebf16",
"i8mm",
"bf16",
"dgh"
"dgh",
"bti"
],
"compilers" : {
"gcc": [

View File

@@ -98,10 +98,3 @@ def path_filter_caller(*args, **kwargs):
if _func:
return holder_func(_func)
return holder_func
def sanitize_win_longpath(path: str) -> str:
"""Strip Windows extended path prefix from strings
Returns sanitized string.
no-op if extended path prefix is not present"""
return path.lstrip("\\\\?\\")

View File

@@ -187,18 +187,12 @@ def polite_filename(filename: str) -> str:
return _polite_antipattern().sub("_", filename)
def getuid() -> Union[str, int]:
"""Returns os getuid on non Windows
On Windows returns 0 for admin users, login string otherwise
This is in line with behavior from get_owner_uid which
always returns the login string on Windows
"""
def getuid():
if sys.platform == "win32":
import ctypes
# If not admin, use the string name of the login as a unique ID
if ctypes.windll.shell32.IsUserAnAdmin() == 0:
return os.getlogin()
return 1
return 0
else:
return os.getuid()
@@ -219,15 +213,6 @@ def _win_rename(src, dst):
os.replace(src, dst)
@system_path_filter
def msdos_escape_parens(path):
"""MS-DOS interprets parens as grouping parameters even in a quoted string"""
if sys.platform == "win32":
return path.replace("(", "^(").replace(")", "^)")
else:
return path
@system_path_filter
def rename(src, dst):
# On Windows, os.rename will fail if the destination file already exists
@@ -568,13 +553,7 @@ def exploding_archive_handler(tarball_container, stage):
@system_path_filter(arg_slice=slice(1))
def get_owner_uid(path, err_msg=None) -> Union[str, int]:
"""Returns owner UID of path destination
On non Windows this is the value of st_uid
On Windows this is the login string associated with the
owning user.
"""
def get_owner_uid(path, err_msg=None):
if not os.path.exists(path):
mkdirp(path, mode=stat.S_IRWXU)
@@ -843,7 +822,7 @@ def copy_tree(
if islink(s):
link_target = resolve_link_target_relative_to_the_link(s)
if symlinks:
target = readlink(s)
target = os.readlink(s)
if os.path.isabs(target):
def escaped_path(path):
@@ -2450,10 +2429,9 @@ def add_library_dependent(self, *dest):
"""
for pth in dest:
if os.path.isfile(pth):
new_pth = pathlib.Path(pth).parent
self._additional_library_dependents.add(pathlib.Path(pth).parent)
else:
new_pth = pathlib.Path(pth)
self._additional_library_dependents.add(new_pth)
self._additional_library_dependents.add(pathlib.Path(pth))
@property
def rpaths(self):
@@ -2531,14 +2509,8 @@ def establish_link(self):
# for each binary install dir in self.pkg (i.e. pkg.prefix.bin, pkg.prefix.lib)
# install a symlink to each dependent library
# do not rpath for system libraries included in the dag
# we should not be modifying libraries managed by the Windows system
# as this will negatively impact linker behavior and can result in permission
# errors if those system libs are not modifiable by Spack
if "windows-system" not in getattr(self.pkg, "tags", []):
for library, lib_dir in itertools.product(self.rpaths, self.library_dependents):
self._link(library, lib_dir)
for library, lib_dir in itertools.product(self.rpaths, self.library_dependents):
self._link(library, lib_dir)
@system_path_filter

View File

@@ -11,7 +11,7 @@
from llnl.util import lang, tty
from ..path import sanitize_win_longpath, system_path_filter
from ..path import system_path_filter
if sys.platform == "win32":
from win32file import CreateHardLink
@@ -247,9 +247,9 @@ def _windows_create_junction(source: str, link: str):
out, err = proc.communicate()
tty.debug(out.decode())
if proc.returncode != 0:
err_str = err.decode()
tty.error(err_str)
raise SymlinkError("Make junction command returned a non-zero return code.", err_str)
err = err.decode()
tty.error(err)
raise SymlinkError("Make junction command returned a non-zero return code.", err)
def _windows_create_hard_link(path: str, link: str):
@@ -269,14 +269,14 @@ def _windows_create_hard_link(path: str, link: str):
CreateHardLink(link, path)
def readlink(path: str, *, dir_fd=None):
def readlink(path: str):
"""Spack utility to override of os.readlink method to work cross platform"""
if _windows_is_hardlink(path):
return _windows_read_hard_link(path)
elif _windows_is_junction(path):
return _windows_read_junction(path)
else:
return sanitize_win_longpath(os.readlink(path, dir_fd=dir_fd))
return os.readlink(path)
def _windows_read_hard_link(link: str) -> str:

View File

@@ -59,7 +59,6 @@
To output an @, use '@@'. To output a } inside braces, use '}}'.
"""
import os
import re
import sys
from contextlib import contextmanager
@@ -102,29 +101,9 @@ def __init__(self, message):
# Mapping from color arguments to values for tty.set_color
color_when_values = {"always": True, "auto": None, "never": False}
def _color_when_value(when):
"""Raise a ValueError for an invalid color setting.
Valid values are 'always', 'never', and 'auto', or equivalently,
True, False, and None.
"""
if when in color_when_values:
return color_when_values[when]
elif when not in color_when_values.values():
raise ValueError("Invalid color setting: %s" % when)
return when
def _color_from_environ() -> Optional[bool]:
try:
return _color_when_value(os.environ.get("SPACK_COLOR", "auto"))
except ValueError:
return None
#: When `None` colorize when stdout is tty, when `True` or `False` always or never colorize resp.
_force_color = _color_from_environ()
# Force color; None: Only color if stdout is a tty
# True: Always colorize output, False: Never colorize output
_force_color = None
def try_enable_terminal_color_on_windows():
@@ -185,6 +164,19 @@ def _err_check(result, func, args):
debug("Unable to support color on Windows terminal")
def _color_when_value(when):
"""Raise a ValueError for an invalid color setting.
Valid values are 'always', 'never', and 'auto', or equivalently,
True, False, and None.
"""
if when in color_when_values:
return color_when_values[when]
elif when not in color_when_values.values():
raise ValueError("Invalid color setting: %s" % when)
return when
def get_color_when():
"""Return whether commands should print color or not."""
if _force_color is not None:
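The ``SPACK_COLOR`` environment override read above can be exercised as a hedged example:

.. code-block:: console

   $ SPACK_COLOR=always spack find | less -R   # force color even through a pager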

View File

@@ -4,7 +4,7 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
#: PEP440 canonical <major>.<minor>.<micro>.<devN> string
__version__ = "0.23.0.dev0"
__version__ = "0.22.0.dev0"
spack_version = __version__

View File

@@ -254,8 +254,8 @@ def _search_duplicate_specs_in_externals(error_cls):
@config_packages
def _deprecated_preferences(error_cls):
"""Search package preferences deprecated in v0.21 (and slated for removal in v0.23)"""
# TODO (v0.23): remove this audit as the attributes will not be allowed in config
"""Search package preferences deprecated in v0.21 (and slated for removal in v0.22)"""
# TODO (v0.22): remove this audit as the attributes will not be allowed in config
errors = []
packages_yaml = spack.config.CONFIG.get_config("packages")
@@ -421,10 +421,6 @@ def _check_patch_urls(pkgs, error_cls):
r"^https?://(?:patch-diff\.)?github(?:usercontent)?\.com/"
r".+/.+/(?:commit|pull)/[a-fA-F0-9]+\.(?:patch|diff)"
)
github_pull_commits_re = (
r"^https?://(?:patch-diff\.)?github(?:usercontent)?\.com/"
r".+/.+/pull/\d+/commits/[a-fA-F0-9]+\.(?:patch|diff)"
)
# Only .diff URLs have stable/full hashes:
# https://forum.gitlab.com/t/patches-with-full-index/29313
gitlab_patch_url_re = (
@@ -440,24 +436,14 @@ def _check_patch_urls(pkgs, error_cls):
if not isinstance(patch, spack.patch.UrlPatch):
continue
if re.match(github_pull_commits_re, patch.url):
url = re.sub(r"/pull/\d+/commits/", r"/commit/", patch.url)
url = re.sub(r"^(.*)(?<!full_index=1)$", r"\1?full_index=1", url)
errors.append(
error_cls(
f"patch URL in package {pkg_cls.name} "
+ "must not be a pull request commit; "
+ f"instead use {url}",
[patch.url],
)
)
elif re.match(github_patch_url_re, patch.url):
if re.match(github_patch_url_re, patch.url):
full_index_arg = "?full_index=1"
if not patch.url.endswith(full_index_arg):
errors.append(
error_cls(
f"patch URL in package {pkg_cls.name} "
+ f"must end with {full_index_arg}",
"patch URL in package {0} must end with {1}".format(
pkg_cls.name, full_index_arg
),
[patch.url],
)
)
@@ -465,7 +451,9 @@ def _check_patch_urls(pkgs, error_cls):
if not patch.url.endswith(".diff"):
errors.append(
error_cls(
f"patch URL in package {pkg_cls.name} must end with .diff",
"patch URL in package {0} must end with .diff".format(
pkg_cls.name
),
[patch.url],
)
)
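A small self-contained sketch of the URL normalization this audit enforces (the regex is copied from the hunk above; the helper name and example URL are hypothetical): pull-request commit URLs are rewritten to plain commit URLs, and ?full_index=1 is appended so the patch checksum stays stable.

import re

github_pull_commits_re = (
    r"^https?://(?:patch-diff\.)?github(?:usercontent)?\.com/"
    r".+/.+/pull/\d+/commits/[a-fA-F0-9]+\.(?:patch|diff)"
)

def suggest_stable_patch_url(url: str) -> str:
    # Rewrite /pull/N/commits/<sha> to /commit/<sha> ...
    if re.match(github_pull_commits_re, url):
        url = re.sub(r"/pull/\d+/commits/", r"/commit/", url)
    # ... and make sure the full-index query parameter is present.
    if not url.endswith("?full_index=1"):
        url += "?full_index=1"
    return url

print(suggest_stable_patch_url(
    "https://github.com/org/repo/pull/123/commits/abc123.patch"
))
# -> https://github.com/org/repo/commit/abc123.patch?full_index=1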

View File

@@ -29,7 +29,6 @@
import llnl.util.lang
import llnl.util.tty as tty
from llnl.util.filesystem import BaseDirectoryVisitor, mkdirp, visit_directory_tree
from llnl.util.symlink import readlink
import spack.caches
import spack.cmd
@@ -659,7 +658,7 @@ def get_buildfile_manifest(spec):
# 2. paths are used as strings.
for rel_path in visitor.symlinks:
abs_path = os.path.join(root, rel_path)
link = readlink(abs_path)
link = os.readlink(abs_path)
if os.path.isabs(link) and link.startswith(spack.store.STORE.layout.root):
data["link_to_relocate"].append(rel_path)
@@ -2002,7 +2001,6 @@ def install_root_node(spec, unsigned=False, force=False, sha256=None):
with spack.util.path.filter_padding():
tty.msg('Installing "{0}" from a buildcache'.format(spec.format()))
extract_tarball(spec, download_result, force)
spec.package.windows_establish_runtime_linkage()
spack.hooks.post_install(spec, False)
spack.store.STORE.db.add(spec, spack.store.STORE.layout)

View File

@@ -5,13 +5,7 @@
"""Function and classes needed to bootstrap Spack itself."""
from .config import ensure_bootstrap_configuration, is_bootstrapping, store_path
from .core import (
all_core_root_specs,
ensure_clingo_importable_or_raise,
ensure_core_dependencies,
ensure_gpg_in_path_or_raise,
ensure_patchelf_in_path_or_raise,
)
from .core import all_core_root_specs, ensure_core_dependencies, ensure_patchelf_in_path_or_raise
from .environment import BootstrapEnvironment, ensure_environment_dependencies
from .status import status_message
@@ -19,8 +13,6 @@
"is_bootstrapping",
"ensure_bootstrap_configuration",
"ensure_core_dependencies",
"ensure_gpg_in_path_or_raise",
"ensure_clingo_importable_or_raise",
"ensure_patchelf_in_path_or_raise",
"all_core_root_specs",
"ensure_environment_dependencies",

View File

@@ -54,14 +54,10 @@ def _try_import_from_store(
installed_specs = spack.store.STORE.db.query(query_spec, installed=True)
for candidate_spec in installed_specs:
# previously bootstrapped specs may not have a python-venv dependency.
if candidate_spec.dependencies("python-venv"):
python, *_ = candidate_spec.dependencies("python-venv")
else:
python, *_ = candidate_spec.dependencies("python")
pkg = candidate_spec["python"].package
module_paths = [
os.path.join(candidate_spec.prefix, python.package.purelib),
os.path.join(candidate_spec.prefix, python.package.platlib),
os.path.join(candidate_spec.prefix, pkg.purelib),
os.path.join(candidate_spec.prefix, pkg.platlib),
]
path_before = list(sys.path)

View File

@@ -270,6 +270,10 @@ def try_import(self, module: str, abstract_spec_str: str) -> bool:
with spack_python_interpreter():
# Add hint to use frontend operating system on Cray
concrete_spec = spack.spec.Spec(abstract_spec_str + " ^" + spec_for_current_python())
# This is needed to help the old concretizer taking the `setuptools` dependency
# only when bootstrapping from sources on Python 3.12
if spec_for_current_python() == "python@3.12":
concrete_spec.constrain("+force_setuptools")
if module == "clingo":
# TODO: remove when the old concretizer is deprecated # pylint: disable=fixme

View File

@@ -3,11 +3,13 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Bootstrap non-core Spack dependencies from an environment."""
import glob
import hashlib
import os
import pathlib
import sys
from typing import Iterable, List
import warnings
from typing import List
import archspec.cpu
@@ -26,16 +28,6 @@
class BootstrapEnvironment(spack.environment.Environment):
"""Environment to install dependencies of Spack for a given interpreter and architecture"""
def __init__(self) -> None:
if not self.spack_yaml().exists():
self._write_spack_yaml_file()
super().__init__(self.environment_root())
# Remove python package roots created before python-venv was introduced
for s in self.concrete_roots():
if "python" in s.package.extendees and not s.dependencies("python-venv"):
self.deconcretize(s)
@classmethod
def spack_dev_requirements(cls) -> List[str]:
"""Spack development requirements"""
@@ -67,19 +59,31 @@ def view_root(cls) -> pathlib.Path:
return cls.environment_root().joinpath("view")
@classmethod
def bin_dir(cls) -> pathlib.Path:
"""Paths to be added to PATH"""
return cls.view_root().joinpath("bin")
def pythonpaths(cls) -> List[str]:
"""Paths to be added to sys.path or PYTHONPATH"""
python_dir_part = f"python{'.'.join(str(x) for x in sys.version_info[:2])}"
glob_expr = str(cls.view_root().joinpath("**", python_dir_part, "**"))
result = glob.glob(glob_expr)
if not result:
msg = f"Cannot find any Python path in {cls.view_root()}"
warnings.warn(msg)
return result
def python_dirs(self) -> Iterable[pathlib.Path]:
python = next(s for s in self.all_specs_generator() if s.name == "python-venv").package
return {self.view_root().joinpath(p) for p in (python.platlib, python.purelib)}
@classmethod
def bin_dirs(cls) -> List[pathlib.Path]:
"""Paths to be added to PATH"""
return [cls.view_root().joinpath("bin")]
@classmethod
def spack_yaml(cls) -> pathlib.Path:
"""Environment spack.yaml file"""
return cls.environment_root().joinpath("spack.yaml")
def __init__(self) -> None:
if not self.spack_yaml().exists():
self._write_spack_yaml_file()
super().__init__(self.environment_root())
def update_installations(self) -> None:
"""Update the installations of this environment."""
log_enabled = tty.is_debug() or tty.is_verbose()
@@ -96,13 +100,21 @@ def update_installations(self) -> None:
self.install_all()
self.write(regenerate=True)
def load(self) -> None:
"""Update PATH and sys.path."""
# Make executables available (shouldn't need PYTHONPATH)
os.environ["PATH"] = f"{self.bin_dir()}{os.pathsep}{os.environ.get('PATH', '')}"
# Spack itself imports pytest
sys.path.extend(str(p) for p in self.python_dirs())
def update_syspath_and_environ(self) -> None:
"""Update ``sys.path`` and the PATH, PYTHONPATH environment variables to point to
the environment view.
"""
# Make minimal modifications to sys.path and environment variables. In particular,
# keep PYTHONPATH / sys.path as small as possible, since their size may impact
# the performance of the current interpreter
sys.path.extend(self.pythonpaths())
os.environ["PATH"] = os.pathsep.join(
[str(x) for x in self.bin_dirs()] + os.environ.get("PATH", "").split(os.pathsep)
)
os.environ["PYTHONPATH"] = os.pathsep.join(
os.environ.get("PYTHONPATH", "").split(os.pathsep)
+ [str(x) for x in self.pythonpaths()]
)
def _write_spack_yaml_file(self) -> None:
tty.msg(
@@ -152,4 +164,4 @@ def ensure_environment_dependencies() -> None:
_add_externals_if_missing()
with BootstrapEnvironment() as env:
env.update_installations()
env.load()
env.update_syspath_and_environ()

View File

@@ -43,7 +43,7 @@
from collections import defaultdict
from enum import Flag, auto
from itertools import chain
from typing import Dict, List, Set, Tuple
from typing import List, Set, Tuple
import llnl.util.tty as tty
from llnl.string import plural
@@ -730,28 +730,12 @@ def _static_to_shared_library(arch, compiler, static_lib, shared_lib=None, **kwa
return compiler(*compiler_args, output=compiler_output)
def _get_rpath_deps_from_spec(
spec: spack.spec.Spec, transitive_rpaths: bool
) -> List[spack.spec.Spec]:
if not transitive_rpaths:
return spec.dependencies(deptype=dt.LINK)
by_name: Dict[str, spack.spec.Spec] = {}
for dep in spec.traverse(root=False, deptype=dt.LINK):
lookup = by_name.get(dep.name)
if lookup is None:
by_name[dep.name] = dep
elif lookup.version < dep.version:
by_name[dep.name] = dep
return list(by_name.values())
def get_rpath_deps(pkg: spack.package_base.PackageBase) -> List[spack.spec.Spec]:
"""Return immediate or transitive dependencies (depending on the package) that need to be
rpath'ed. If a package occurs multiple times, the newest version is kept."""
return _get_rpath_deps_from_spec(pkg.spec, pkg.transitive_rpaths)
def get_rpath_deps(pkg):
"""Return immediate or transitive RPATHs depending on the package."""
if pkg.transitive_rpaths:
return [d for d in pkg.spec.traverse(root=False, deptype=("link"))]
else:
return pkg.spec.dependencies(deptype="link")
def get_rpaths(pkg):

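The refactor above replaces a plain link-dependency traversal with a by-name deduplication that keeps the newest version of each package. A minimal standalone sketch of that rule (FakeSpec is a hypothetical stand-in for spack.spec.Spec, just enough for the demo):

from typing import Dict, List, NamedTuple

class FakeSpec(NamedTuple):
    # Hypothetical stand-in for spack.spec.Spec.
    name: str
    version: tuple

def dedupe_keep_newest(deps: List[FakeSpec]) -> List[FakeSpec]:
    by_name: Dict[str, FakeSpec] = {}
    for dep in deps:
        seen = by_name.get(dep.name)
        if seen is None or seen.version < dep.version:
            by_name[dep.name] = dep
    return list(by_name.values())

deps = [FakeSpec("zlib", (1, 2)), FakeSpec("zlib", (1, 3)), FakeSpec("bzip2", (1, 0))]
print(dedupe_keep_newest(deps))
# -> [FakeSpec(name='zlib', version=(1, 3)), FakeSpec(name='bzip2', version=(1, 0))]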
View File

@@ -39,11 +39,16 @@ def _maybe_set_python_hints(pkg: spack.package_base.PackageBase, args: List[str]
"""Set the PYTHON_EXECUTABLE, Python_EXECUTABLE, and Python3_EXECUTABLE CMake variables
if the package has Python as build or link dep and ``find_python_hints`` is set to True. See
``find_python_hints`` for context."""
if not getattr(pkg, "find_python_hints", False) or not pkg.spec.dependencies(
"python", dt.BUILD | dt.LINK
):
if not getattr(pkg, "find_python_hints", False):
return
python_executable = pkg.spec["python"].command.path
pythons = pkg.spec.dependencies("python", dt.BUILD | dt.LINK)
if len(pythons) != 1:
return
try:
python_executable = pythons[0].package.command.path
except RuntimeError:
return
args.extend(
[
CMakeBuilder.define("PYTHON_EXECUTABLE", python_executable),

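For context on the hunk above: when the hints apply, the net effect is three CMake cache definitions pointing at the same interpreter. A sketch with a hypothetical path; the exact -D type suffix is an assumption about what CMakeBuilder.define emits.

python_executable = "/usr/bin/python3"  # hypothetical interpreter path
args = [
    f"-DPYTHON_EXECUTABLE:STRING={python_executable}",
    f"-DPython_EXECUTABLE:STRING={python_executable}",
    f"-DPython3_EXECUTABLE:STRING={python_executable}",
]
print(args)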
View File

@@ -1,144 +0,0 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import itertools
import os
import pathlib
import re
import sys
from typing import Dict, List, Sequence, Tuple, Union
import llnl.util.tty as tty
from llnl.util.lang import classproperty
import spack.compiler
import spack.package_base
# Local "type" for type hints
Path = Union[str, pathlib.Path]
class CompilerPackage(spack.package_base.PackageBase):
"""A Package mixin for all common logic for packages that implement compilers"""
# TODO: how do these play nicely with other tags
tags: Sequence[str] = ["compiler"]
#: Optional suffix regexes for searching for this type of compiler.
#: Suffixes are used by some frameworks, e.g. macports uses an '-mp-X.Y'
#: version suffix for gcc.
compiler_suffixes: List[str] = [r"-.*"]
#: Optional prefix regexes for searching for this compiler
compiler_prefixes: List[str] = []
#: Compiler argument(s) that produces version information
#: If multiple arguments, the earlier arguments must produce errors when invalid
compiler_version_argument: Union[str, Tuple[str]] = "-dumpversion"
#: Regex used to extract version from compiler's output
compiler_version_regex: str = "(.*)"
#: Static definition of languages supported by this class
compiler_languages: Sequence[str] = ["c", "cxx", "fortran"]
def __init__(self, spec: "spack.spec.Spec"):
super().__init__(spec)
msg = f"Supported languages for {spec} are not a subset of possible supported languages"
msg += f" supports: {self.supported_languages}, valid values: {self.compiler_languages}"
assert set(self.supported_languages) <= set(self.compiler_languages), msg
@property
def supported_languages(self) -> Sequence[str]:
"""Dynamic definition of languages supported by this package"""
return self.compiler_languages
@classproperty
def compiler_names(cls) -> Sequence[str]:
"""Construct list of compiler names from per-language names"""
names = []
for language in cls.compiler_languages:
names.extend(getattr(cls, f"{language}_names"))
return names
@classproperty
def executables(cls) -> Sequence[str]:
"""Construct executables for external detection from names, prefixes, and suffixes."""
regexp_fmt = r"^({0}){1}({2})$"
prefixes = [""] + cls.compiler_prefixes
suffixes = [""] + cls.compiler_suffixes
if sys.platform == "win32":
ext = r"\.(?:exe|bat)"
suffixes += [suf + ext for suf in suffixes]
return [
regexp_fmt.format(prefix, re.escape(name), suffix)
for prefix, name, suffix in itertools.product(prefixes, cls.compiler_names, suffixes)
]
@classmethod
def determine_version(cls, exe: Path):
version_argument = cls.compiler_version_argument
if isinstance(version_argument, str):
version_argument = (version_argument,)
for va in version_argument:
try:
output = spack.compiler.get_compiler_version_output(exe, va)
match = re.search(cls.compiler_version_regex, output)
if match:
return ".".join(match.groups())
except spack.util.executable.ProcessError:
pass
except Exception as e:
tty.debug(
f"[{__file__}] Cannot detect a valid version for the executable "
f"{str(exe)}, for package '{cls.name}': {e}"
)
@classmethod
def compiler_bindir(cls, prefix: Path) -> Path:
"""Overridable method for the location of the compiler bindir within the preifx"""
return os.path.join(prefix, "bin")
@classmethod
def determine_compiler_paths(cls, exes: Sequence[Path]) -> Dict[str, Path]:
"""Compute the paths to compiler executables associated with this package
This is a helper method for ``determine_variants`` to compute the ``extra_attributes``
to include with each spec object."""
# There are often at least two copies (not symlinks) of each compiler executable in the
# same directory: one with a canonical name, e.g. "gfortran", and another one with the
# target prefix, e.g. "x86_64-pc-linux-gnu-gfortran". There also might be a copy of "gcc"
# with the version suffix, e.g. "x86_64-pc-linux-gnu-gcc-6.3.0". To ensure the consistency
# of values in the "paths" dictionary (i.e. we prefer all of them to reference copies
# with canonical names if possible), we iterate over the executables in reverse sorted
# order:
# The first pass over languages identifies exes that exactly match canonical names.
# The second pass checks for names with a prefix/suffix; it is ordered by descending
# language-name length, because longer language names (e.g. cxx) often contain the
# names of shorter ones (e.g. c, as in clang/clang++).
paths = {}
exes = sorted(exes, reverse=True)
languages = {
lang: getattr(cls, f"{lang}_names")
for lang in sorted(cls.compiler_languages, key=len, reverse=True)
}
for exe in exes:
for lang, names in languages.items():
if os.path.basename(exe) in names:
paths[lang] = exe
break
else:
for lang, names in languages.items():
if any(name in os.path.basename(exe) for name in names):
paths[lang] = exe
break
return paths
@classmethod
def determine_variants(cls, exes: Sequence[Path], version_str: str) -> Tuple:
# path determination is separated so it can be reused in subclasses
return "", {"compilers": cls.determine_compiler_paths(exes=exes)}

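A self-contained sketch of the regex construction used by the executables classproperty above, with a one-element name list for illustration (the suffix regex mirrors the class default shown above):

import itertools
import re

compiler_names = ["gcc"]          # per-language names, e.g. cls.c_names
compiler_prefixes = [""]          # "" plus any cls.compiler_prefixes
compiler_suffixes = ["", "-.*"]   # "" plus cls.compiler_suffixes

regexps = [
    r"^({0}){1}({2})$".format(prefix, re.escape(name), suffix)
    for prefix, name, suffix in itertools.product(
        compiler_prefixes, compiler_names, compiler_suffixes
    )
]
print(regexps)
# ['^()gcc()$', '^()gcc(-.*)$']
assert re.match(regexps[1], "gcc-12.3")  # matches macports-style suffixes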
View File

@@ -137,14 +137,11 @@ def cuda_flags(arch_list):
conflicts("%gcc@11.2:", when="+cuda ^cuda@:11.5")
conflicts("%gcc@12:", when="+cuda ^cuda@:11.8")
conflicts("%gcc@13:", when="+cuda ^cuda@:12.3")
conflicts("%gcc@14:", when="+cuda ^cuda@:12.4")
conflicts("%clang@12:", when="+cuda ^cuda@:11.4.0")
conflicts("%clang@13:", when="+cuda ^cuda@:11.5")
conflicts("%clang@14:", when="+cuda ^cuda@:11.7")
conflicts("%clang@15:", when="+cuda ^cuda@:12.0")
conflicts("%clang@16:", when="+cuda ^cuda@:12.1")
conflicts("%clang@17:", when="+cuda ^cuda@:12.3")
conflicts("%clang@18:", when="+cuda ^cuda@:12.4")
conflicts("%clang@16:", when="+cuda ^cuda@:12.3")
# https://gist.github.com/ax3l/9489132#gistcomment-3860114
conflicts("%gcc@10", when="+cuda ^cuda@:11.4.0")

View File

@@ -145,7 +145,7 @@ def install(self, pkg, spec, prefix):
opts += self.nmake_install_args()
if self.makefile_name:
opts.append("/F{}".format(self.makefile_name))
opts.append(self.define("PREFIX", fs.windows_sfn(prefix)))
opts.append(self.define("PREFIX", prefix))
with fs.working_dir(self.build_directory):
inspect.getmodule(self.pkg).nmake(
*opts, *self.install_targets, ignore_quotes=self.ignore_quotes

View File

@@ -138,21 +138,16 @@ def view_file_conflicts(self, view, merge_map):
return conflicts
def add_files_to_view(self, view, merge_map, skip_if_exists=True):
# Patch up shebangs if the package extends Python and we put a Python interpreter in the
# view.
if not self.extendee_spec:
return super().add_files_to_view(view, merge_map, skip_if_exists)
python, *_ = self.spec.dependencies("python-venv") or self.spec.dependencies("python")
if python.external:
# Patch up shebangs to the python linked in the view only if python is built by Spack.
if not self.extendee_spec or self.extendee_spec.external:
return super().add_files_to_view(view, merge_map, skip_if_exists)
# We only patch shebangs in the bin directory.
copied_files: Dict[Tuple[int, int], str] = {} # File identifier -> source
delayed_links: List[Tuple[str, str]] = [] # List of symlinks from merge map
bin_dir = self.spec.prefix.bin
bin_dir = self.spec.prefix.bin
python_prefix = self.extendee_spec.prefix
for src, dst in merge_map.items():
if skip_if_exists and os.path.lexists(dst):
continue
@@ -173,7 +168,7 @@ def add_files_to_view(self, view, merge_map, skip_if_exists=True):
copied_files[(s.st_dev, s.st_ino)] = dst
shutil.copy2(src, dst)
fs.filter_file(
python.prefix, os.path.abspath(view.get_projection_for_spec(self.spec)), dst
python_prefix, os.path.abspath(view.get_projection_for_spec(self.spec)), dst
)
else:
view.link(src, dst)
@@ -204,13 +199,14 @@ def remove_files_from_view(self, view, merge_map):
ignore_namespace = True
bin_dir = self.spec.prefix.bin
global_view = self.extendee_spec.prefix == view.get_projection_for_spec(self.spec)
to_remove = []
for src, dst in merge_map.items():
if ignore_namespace and namespace_init(dst):
continue
if not fs.path_contains_subdirectory(src, bin_dir):
if global_view or not fs.path_contains_subdirectory(src, bin_dir):
to_remove.append(dst)
else:
os.remove(dst)
@@ -366,12 +362,6 @@ def list_url(cls) -> Optional[str]: # type: ignore[override]
return f"https://pypi.org/simple/{name}/"
return None
@property
def python_spec(self):
"""Get python-venv if it exists or python otherwise."""
python, *_ = self.spec.dependencies("python-venv") or self.spec.dependencies("python")
return python
@property
def headers(self) -> HeaderList:
"""Discover header files in platlib."""
@@ -381,9 +371,8 @@ def headers(self) -> HeaderList:
# Headers should only be in include or platlib, but no harm in checking purelib too
include = self.prefix.join(self.spec["python"].package.include).join(name)
python = self.python_spec
platlib = self.prefix.join(python.package.platlib).join(name)
purelib = self.prefix.join(python.package.purelib).join(name)
platlib = self.prefix.join(self.spec["python"].package.platlib).join(name)
purelib = self.prefix.join(self.spec["python"].package.purelib).join(name)
headers_list = map(fs.find_all_headers, [include, platlib, purelib])
headers = functools.reduce(operator.add, headers_list)
@@ -402,9 +391,8 @@ def libs(self) -> LibraryList:
name = self.spec.name[3:]
# Libraries should only be in platlib, but no harm in checking purelib too
python = self.python_spec
platlib = self.prefix.join(python.package.platlib).join(name)
purelib = self.prefix.join(python.package.purelib).join(name)
platlib = self.prefix.join(self.spec["python"].package.platlib).join(name)
purelib = self.prefix.join(self.spec["python"].package.purelib).join(name)
find_all_libraries = functools.partial(fs.find_all_libraries, recursive=True)
libs_list = map(find_all_libraries, [platlib, purelib])
@@ -516,8 +504,6 @@ def global_options(self, spec: Spec, prefix: Prefix) -> Iterable[str]:
def install(self, pkg: PythonPackage, spec: Spec, prefix: Prefix) -> None:
"""Install everything from build directory."""
pip = spec["python"].command
pip.add_default_arg("-m", "pip")
args = PythonPipBuilder.std_args(pkg) + [f"--prefix={prefix}"]
@@ -533,6 +519,14 @@ def install(self, pkg: PythonPackage, spec: Spec, prefix: Prefix) -> None:
else:
args.append(".")
pip = spec["python"].command
# Hide user packages, since we don't have build isolation. This is
# necessary because pip / setuptools may run hooks from arbitrary
# packages during the build. There is no equivalent variable to hide
# system packages, so this is not reliable for external Python.
pip.add_default_env("PYTHONNOUSERSITE", "1")
pip.add_default_arg("-m")
pip.add_default_arg("pip")
with fs.working_dir(self.build_directory):
pip(*args)

View File

@@ -44,7 +44,6 @@
from spack import traverse
from spack.error import SpackError
from spack.reporters import CDash, CDashConfiguration
from spack.reporters.cdash import SPACK_CDASH_TIMEOUT
from spack.reporters.cdash import build_stamp as cdash_build_stamp
# See https://docs.gitlab.com/ee/ci/yaml/#retry for descriptions of conditions
@@ -684,22 +683,6 @@ def generate_gitlab_ci_yaml(
"instead.",
)
def ensure_expected_target_path(path):
"""Returns passed paths with all Windows path separators exchanged
for posix separators only if copy_only_pipeline is enabled
This is required as copy_only_pipelines are a unique scenario where
the generate job and child pipelines are run on different platforms.
To make this compatible w/ Windows, we cannot write Windows style path separators
that will be consumed on by the Posix copy job runner.
TODO (johnwparent): Refactor config + cli read/write to deal only in posix
style paths
"""
if copy_only_pipeline and path:
path = path.replace("\\", "/")
return path
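A quick standalone demonstration of the conversion described in the docstring above (copy_only_pipeline is passed in explicitly here rather than read from the enclosing scope):

def ensure_expected_target_path(path, copy_only_pipeline=True):
    # Exchange Windows separators for POSIX ones so the copy job can consume them.
    if copy_only_pipeline and path:
        path = path.replace("\\", "/")
    return path

print(ensure_expected_target_path("concrete_environment\\spack.lock"))
# -> concrete_environment/spack.lock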
pipeline_mirrors = spack.mirror.MirrorCollection(binary=True)
deprecated_mirror_config = False
buildcache_destination = None
@@ -823,7 +806,7 @@ def ensure_expected_target_path(path):
if scope not in include_scopes and scope not in env_includes:
include_scopes.insert(0, scope)
env_includes.extend(include_scopes)
env_yaml_root["spack"]["include"] = [ensure_expected_target_path(i) for i in env_includes]
env_yaml_root["spack"]["include"] = env_includes
if "gitlab-ci" in env_yaml_root["spack"] and "ci" not in env_yaml_root["spack"]:
env_yaml_root["spack"]["ci"] = env_yaml_root["spack"].pop("gitlab-ci")
@@ -1244,9 +1227,6 @@ def main_script_replacements(cmd):
"SPACK_REBUILD_EVERYTHING": str(rebuild_everything),
"SPACK_REQUIRE_SIGNING": os.environ.get("SPACK_REQUIRE_SIGNING", "False"),
}
output_vars = output_object["variables"]
for item, val in output_vars.items():
output_vars[item] = ensure_expected_target_path(val)
# TODO: Remove this block in Spack 0.23
if deprecated_mirror_config and remote_mirror_override:
@@ -1303,6 +1283,7 @@ def main_script_replacements(cmd):
sorted_output = {}
for output_key, output_value in sorted(output_object.items()):
sorted_output[output_key] = output_value
if known_broken_specs_encountered:
tty.error("This pipeline generated hashes known to be broken on develop:")
display_broken_spec_messages(broken_specs_url, known_broken_specs_encountered)
@@ -1497,12 +1478,6 @@ def copy_test_logs_to_artifacts(test_stage, job_test_dir):
copy_files_to_artifacts(os.path.join(test_stage, "*", "*.txt"), job_test_dir)
def win_quote(quote_str: str) -> str:
if IS_WINDOWS:
quote_str = f'"{quote_str}"'
return quote_str
def download_and_extract_artifacts(url, work_dir):
"""Look for gitlab artifacts.zip at the given url, and attempt to download
and extract the contents into the given work_dir
@@ -1525,7 +1500,7 @@ def download_and_extract_artifacts(url, work_dir):
request = Request(url, headers=headers)
request.get_method = lambda: "GET"
response = opener.open(request, timeout=SPACK_CDASH_TIMEOUT)
response = opener.open(request)
response_code = response.getcode()
if response_code != 200:
@@ -1967,9 +1942,9 @@ def compose_command_err_handling(args):
# but we need to handle EXEs (git, etc) ourselves
catch_exe_failure = (
"""
if ($LASTEXITCODE -ne 0){{
throw 'Command {} has failed'
}}
if ($LASTEXITCODE -ne 0){
throw "Command {} has failed"
}
"""
if IS_WINDOWS
else ""
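One side of the hunk above doubles the braces so they survive Python's str.format; a minimal demonstration of why that matters:

template = """
if ($LASTEXITCODE -ne 0){{
    throw 'Command {} has failed'
}}
"""
print(template.format("git fetch"))
# The doubled braces render as literal PowerShell braces;
# only the single {} is replaced by the command name.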
@@ -2201,13 +2176,13 @@ def __init__(self, ci_cdash):
def args(self):
return [
"--cdash-upload-url",
win_quote(self.upload_url),
self.upload_url,
"--cdash-build",
win_quote(self.build_name),
self.build_name,
"--cdash-site",
win_quote(self.site),
self.site,
"--cdash-buildstamp",
win_quote(self.build_stamp),
self.build_stamp,
]
@property # type: ignore
@@ -2273,7 +2248,7 @@ def create_buildgroup(self, opener, headers, url, group_name, group_type):
request = Request(url, data=enc_data, headers=headers)
response = opener.open(request, timeout=SPACK_CDASH_TIMEOUT)
response = opener.open(request)
response_code = response.getcode()
if response_code not in [200, 201]:
@@ -2319,7 +2294,7 @@ def populate_buildgroup(self, job_names):
request = Request(url, data=enc_data, headers=headers)
request.get_method = lambda: "PUT"
response = opener.open(request, timeout=SPACK_CDASH_TIMEOUT)
response = opener.open(request)
response_code = response.getcode()
if response_code != 200:

View File

@@ -13,6 +13,7 @@
import shutil
import sys
import tempfile
import urllib.request
from typing import Dict, List, Optional, Tuple, Union
import llnl.util.tty as tty
@@ -53,7 +54,6 @@
from spack.oci.oci import (
copy_missing_layers_with_retry,
get_manifest_and_config_with_retry,
list_tags,
upload_blob_with_retry,
upload_manifest_with_retry,
)
@@ -856,7 +856,10 @@ def _config_from_tag(image_ref: ImageReference, tag: str) -> Optional[dict]:
def _update_index_oci(image_ref: ImageReference, tmpdir: str, pool: MaybePool) -> None:
tags = list_tags(image_ref)
request = urllib.request.Request(url=image_ref.tags_url())
response = spack.oci.opener.urlopen(request)
spack.oci.opener.ensure_status(request, response, 200)
tags = json.load(response)["tags"]
# Fetch all image config files in parallel
spec_dicts = pool.starmap(

View File

@@ -31,6 +31,7 @@
level = "long"
SPACK_COMMAND = "spack"
MAKE_COMMAND = "make"
INSTALL_FAIL_CODE = 1
FAILED_CREATE_BUILDCACHE_CODE = 100
@@ -39,12 +40,6 @@ def deindent(desc):
return desc.replace(" ", "")
def unicode_escape(path: str) -> str:
"""Returns transformed path with any unicode
characters replaced with their corresponding escapes"""
return path.encode("unicode-escape").decode("utf-8")
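A quick demonstration of the escaping helper above (the path is hypothetical):

def unicode_escape(path: str) -> str:
    return path.encode("unicode-escape").decode("utf-8")

print(unicode_escape("C:\\Users\\tëst"))  # -> C:\\Users\\t\xebst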
def setup_parser(subparser):
setup_parser.parser = subparser
subparsers = subparser.add_subparsers(help="CI sub-commands")
@@ -556,35 +551,75 @@ def ci_rebuild(args):
# No hash match anywhere means we need to rebuild spec
# Start with spack arguments
spack_cmd = [SPACK_COMMAND, "--color=always", "--backtrace", "--verbose", "install"]
spack_cmd = [SPACK_COMMAND, "--color=always", "--backtrace", "--verbose"]
config = cfg.get("config")
if not config["verify_ssl"]:
spack_cmd.append("-k")
install_args = [f'--use-buildcache={spack_ci.win_quote("package:never,dependencies:only")}']
install_args = []
can_verify = spack_ci.can_verify_binaries()
verify_binaries = can_verify and spack_is_pr_pipeline is False
if not verify_binaries:
install_args.append("--no-check-signature")
slash_hash = spack_ci.win_quote("/" + job_spec.dag_hash())
slash_hash = "/{}".format(job_spec.dag_hash())
# Arguments when installing dependencies from cache
deps_install_args = install_args
# Arguments when installing the root from sources
deps_install_args = install_args + ["--only=dependencies"]
root_install_args = install_args + ["--keep-stage", "--only=package"]
root_install_args = install_args + [
"--keep-stage",
"--only=package",
"--use-buildcache=package:never,dependencies:only",
]
if cdash_handler:
# Add additional arguments to `spack install` for CDash reporting.
root_install_args.extend(cdash_handler.args())
root_install_args.append(slash_hash)
# ["x", "y"] -> "'x' 'y'"
args_to_string = lambda args: " ".join("'{}'".format(arg) for arg in args)
commands = [
# apparently there's a race when spack bootstraps? do it up front once
[SPACK_COMMAND, "-e", unicode_escape(env.path), "bootstrap", "now"],
spack_cmd + deps_install_args + [slash_hash],
spack_cmd + root_install_args + [slash_hash],
[SPACK_COMMAND, "-e", env.path, "bootstrap", "now"],
[
SPACK_COMMAND,
"-e",
env.path,
"env",
"depfile",
"-o",
"Makefile",
"--use-buildcache=package:never,dependencies:only",
slash_hash, # limit to spec we're building
],
[
# --output-sync requires GNU make 4.x.
# Old make errors when you pass it a flag it doesn't recognize,
# but it doesn't error or warn when you set unrecognized flags in
# this variable.
"export",
"GNUMAKEFLAGS=--output-sync=recurse",
],
[
MAKE_COMMAND,
"SPACK={}".format(args_to_string(spack_cmd)),
"SPACK_COLOR=always",
"SPACK_INSTALL_FLAGS={}".format(args_to_string(deps_install_args)),
"-j$(nproc)",
"install-deps/{}".format(
spack.environment.depfile.MakefileSpec(job_spec).safe_format(
"{name}-{version}-{hash}"
)
),
],
spack_cmd + ["install"] + root_install_args,
]
tty.debug("Installing {0} from source".format(job_spec.name))
install_exit_code = spack_ci.process_command("install", commands, repro_dir)

View File

@@ -10,13 +10,13 @@
import sys
import tempfile
from pathlib import Path
from typing import List, Optional
from typing import Optional
import llnl.string as string
import llnl.util.filesystem as fs
import llnl.util.tty as tty
from llnl.util.tty.colify import colify
from llnl.util.tty.color import cescape, colorize
from llnl.util.tty.color import colorize
import spack.cmd
import spack.cmd.common
@@ -61,7 +61,14 @@
#
def env_create_setup_parser(subparser):
"""create a new environment"""
subparser.add_argument("env_name", metavar="env", help="name or directory of environment")
subparser.add_argument(
"env_name",
metavar="env",
help=(
"name of managed environment or directory of the anonymous env "
"(when using --dir/-d) to activate"
),
)
subparser.add_argument(
"-d", "--dir", action="store_true", help="create an environment in a specific directory"
)
@@ -87,9 +94,6 @@ def env_create_setup_parser(subparser):
default=None,
help="either a lockfile (must end with '.json' or '.lock') or a manifest file",
)
subparser.add_argument(
"--include-concrete", action="append", help="name of old environment to copy specs from"
)
def env_create(args):
@@ -107,32 +111,19 @@ def env_create(args):
# the environment should not include a view.
with_view = None
include_concrete = None
if hasattr(args, "include_concrete"):
include_concrete = args.include_concrete
env = _env_create(
args.env_name,
init_file=args.envfile,
dir=args.dir or os.path.sep in args.env_name or args.env_name in (".", ".."),
dir=args.dir,
with_view=with_view,
keep_relative=args.keep_relative,
include_concrete=include_concrete,
)
# Generate views, only really useful for environments created from spack.lock files.
env.regenerate_views()
def _env_create(
name_or_path: str,
*,
init_file: Optional[str] = None,
dir: bool = False,
with_view: Optional[str] = None,
keep_relative: bool = False,
include_concrete: Optional[List[str]] = None,
):
def _env_create(name_or_path, *, init_file=None, dir=False, with_view=None, keep_relative=False):
"""Create a new environment, with an optional yaml description.
Arguments:
@@ -144,31 +135,22 @@ def _env_create(
keep_relative (bool): if True, develop paths are copied verbatim into
the new environment file, otherwise they may be made absolute if the
new environment is in a different location
include_concrete (list): list of the included concrete environments
"""
if not dir:
env = ev.create(
name_or_path,
init_file=init_file,
with_view=with_view,
keep_relative=keep_relative,
include_concrete=include_concrete,
name_or_path, init_file=init_file, with_view=with_view, keep_relative=keep_relative
)
tty.msg(
colorize(
f"Created environment @c{{{cescape(name_or_path)}}} in: @c{{{cescape(env.path)}}}"
)
)
else:
env = ev.create_in_dir(
name_or_path,
init_file=init_file,
with_view=with_view,
keep_relative=keep_relative,
include_concrete=include_concrete,
)
tty.msg(colorize(f"Created independent environment in: @c{{{cescape(env.path)}}}"))
tty.msg(f"Activate with: {colorize(f'@c{{spack env activate {cescape(name_or_path)}}}')}")
tty.msg("Created environment '%s' in %s" % (name_or_path, env.path))
tty.msg("You can activate this environment with:")
tty.msg(" spack env activate %s" % (name_or_path))
return env
env = ev.create_in_dir(
name_or_path, init_file=init_file, with_view=with_view, keep_relative=keep_relative
)
tty.msg("Created environment in %s" % env.path)
tty.msg("You can activate this environment with:")
tty.msg(" spack env activate %s" % env.path)
return env
@@ -454,12 +436,6 @@ def env_remove_setup_parser(subparser):
"""remove an existing environment"""
subparser.add_argument("rm_env", metavar="env", nargs="+", help="environment(s) to remove")
arguments.add_common_arguments(subparser, ["yes_to_all"])
subparser.add_argument(
"-f",
"--force",
action="store_true",
help="remove the environment even if it is included in another environment",
)
def env_remove(args):
@@ -469,35 +445,13 @@ def env_remove(args):
and manifests embedded in repositories should be removed manually.
"""
read_envs = []
valid_envs = []
bad_envs = []
invalid_envs = []
for env_name in ev.all_environment_names():
for env_name in args.rm_env:
try:
env = ev.read(env_name)
valid_envs.append(env_name)
if env_name in args.rm_env:
read_envs.append(env)
read_envs.append(env)
except (spack.config.ConfigFormatError, ev.SpackEnvironmentConfigError):
invalid_envs.append(env_name)
if env_name in args.rm_env:
bad_envs.append(env_name)
# Check if env is linked to another before trying to remove
for name in valid_envs:
# don't check if environment is included in itself
if name == env_name:
continue
environ = ev.Environment(ev.root(name))
if ev.root(env_name) in environ.included_concrete_envs:
msg = f'Environment "{env_name}" is being used by environment "{name}"'
if args.force:
tty.warn(msg)
else:
tty.die(msg)
bad_envs.append(env_name)
if not args.yes_to_all:
environments = string.plural(len(args.rm_env), "environment", show_n=False)

View File

@@ -3,7 +3,6 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import copy
import sys
import llnl.util.lang
@@ -272,27 +271,6 @@ def root_decorator(spec, string):
print()
if env.included_concrete_envs:
tty.msg("Included specs")
# Root specs cannot be displayed with prefixes, since those are not
# set for abstract specs. Same for hashes
root_args = copy.copy(args)
root_args.paths = False
# Roots are displayed with variants, etc. so that we can see
# specifically what the user asked for.
cmd.display_specs(
env.included_user_specs,
root_args,
decorator=lambda s, f: color.colorize("@*{%s}" % f),
namespace=True,
show_flags=True,
show_full_compiler=True,
variants=True,
)
print()
if args.show_concretized:
tty.msg("Concretized roots")
cmd.display_specs(env.specs_by_hash.values(), args, decorator=decorator)

View File

@@ -9,7 +9,7 @@
import spack.environment as ev
import spack.store
from spack.cmd.common import arguments
from spack.graph import DAGWithDependencyTypes, SimpleDAG, graph_ascii, graph_dot, static_graph_dot
from spack.graph import (
DAGWithDependencyTypes,
DotGraph,
MermaidGraph,
SimpleDAG,
graph_ascii,
graph_dot,
static_graph_dot,
)
description = "generate graphs of package dependency relationships"
section = "basic"
@@ -33,6 +33,9 @@ def setup_parser(subparser):
method.add_argument(
"-d", "--dot", action="store_true", help="generate graph in dot format and print to stdout"
)
method.add_argument(
"-m", "--mermaid", action="store_true", help="generate graph in mermaid format and print to stdout"
)
subparser.add_argument(
"-s",
@@ -85,10 +88,14 @@ def graph(parser, args):
static_graph_dot(specs, depflag=args.deptype)
return
if args.dot:
builder = SimpleDAG()
if args.dot or args.mermaid:
if args.dot:
graph = DotGraph()
if args.mermaid:
graph = MermaidGraph()
builder = SimpleDAG(graph=graph)
if args.color:
builder = DAGWithDependencyTypes()
builder = DAGWithDependencyTypes(graph=graph)
graph_dot(specs, builder=builder, depflag=args.deptype)
return
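The MermaidGraph class itself is not part of this hunk, so the following is only an assumed sketch of the kind of writer the new --mermaid flag selects, and of the output it could produce; it is not the implementation from this changeset.

class MermaidGraph:
    # Assumed sketch, not the class from this changeset.
    def __init__(self):
        self.lines = ["flowchart TD"]

    def node(self, key, label):
        self.lines.append(f'  {key}["{label}"]')

    def edge(self, parent, child):
        self.lines.append(f"  {parent} --> {child}")

    def render(self):
        return "\n".join(self.lines)

g = MermaidGraph()
g.node("zlib", "zlib@1.3.1")
g.node("gmake", "gmake@4.4.1")
g.edge("zlib", "gmake")
print(g.render())
# flowchart TD
#   zlib["zlib@1.3.1"]
#   gmake["gmake@4.4.1"]
#   zlib --> gmake

Usage, per the new option above, would be along the lines of spack graph --mermaid zlib.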

View File

@@ -23,7 +23,7 @@
# tutorial configuration parameters
tutorial_branch = "releases/v0.22"
tutorial_branch = "releases/v0.21"
tutorial_mirror = "file:///mirror"
tutorial_key = os.path.join(spack.paths.share_path, "keys", "tutorial.pub")

View File

@@ -151,8 +151,7 @@ def is_installed(spec):
key=lambda s: s.dag_hash(),
)
with spack.store.STORE.db.read_transaction():
return [spec for spec in specs if is_installed(spec)]
return [spec for spec in specs if is_installed(spec)]
def dependent_environments(
@@ -240,8 +239,6 @@ def get_uninstall_list(args, specs: List[spack.spec.Spec], env: Optional[ev.Envi
print()
tty.info("The following environments still reference these specs:")
colify([e.name for e in other_dependent_envs.keys()], indent=4)
if env:
msgs.append("use `spack remove` to remove the spec from the current environment")
msgs.append("use `spack env remove` to remove environments")
msgs.append("use `spack uninstall --force` to override")
print()

View File

@@ -156,15 +156,7 @@ def get_compiler_config_from_packages(
def _compiler_config_from_package_config(config):
compilers = []
for entry in config:
try:
compiler = _compiler_config_from_external(entry)
except Exception as e:
msg = "Reading compiler from packages config section failed\n"
msg += f" Compiler: {entry.get('spec', None)}\n"
msg += f" Prefix: {entry.get('prefix', None)}\n"
msg += f" Failure: {e}"
warnings.warn(msg)
compiler = None
compiler = _compiler_config_from_external(entry)
if compiler:
compilers.append(compiler)
@@ -172,56 +164,33 @@ def _compiler_config_from_package_config(config):
def _compiler_config_from_external(config):
extra_attributes_key = "extra_attributes"
compilers_key = "compilers"
c_key, cxx_key, fortran_key = "c", "cxx", "fortran"
# Allow `@x.y.z` instead of `@=x.y.z`
spec = spack.spec.parse_with_version_concrete(config["spec"])
# use str(spec.versions) to allow `@x.y.z` instead of `@=x.y.z`
compiler_spec = spack.spec.CompilerSpec(
package_name_to_compiler_name.get(spec.name, spec.name), spec.version
)
err_header = f"The external spec '{spec}' cannot be used as a compiler"
extra_attributes = config.get("extra_attributes", {})
prefix = config.get("prefix", None)
# If extra_attributes is missing, this entry may not be meant to be used as
# a compiler, so just leave a debug message rather than a loud warning.
if extra_attributes_key not in config:
tty.debug(f"[{__file__}] {err_header}: missing the '{extra_attributes_key}' key")
compiler_class = class_for_compiler_name(compiler_spec.name)
paths = extra_attributes.get("paths", {})
compiler_langs = ["cc", "cxx", "fc", "f77"]
for lang in compiler_langs:
if paths.setdefault(lang, None):
continue
if not prefix:
continue
# Check for files that satisfy the naming scheme for this compiler
bindir = os.path.join(prefix, "bin")
for f, regex in itertools.product(os.listdir(bindir), compiler_class.search_regexps(lang)):
if regex.match(f):
paths[lang] = os.path.join(bindir, f)
if all(v is None for v in paths.values()):
return None
extra_attributes = config[extra_attributes_key]
# If I have 'extra_attributes' warn if 'compilers' is missing, or we don't have a C compiler
if compilers_key not in extra_attributes:
warnings.warn(
f"{err_header}: missing the '{compilers_key}' key under '{extra_attributes_key}'"
)
return None
attribute_compilers = extra_attributes[compilers_key]
if c_key not in attribute_compilers:
warnings.warn(
f"{err_header}: missing the C compiler path under "
f"'{extra_attributes_key}:{compilers_key}'"
)
return None
c_compiler = attribute_compilers[c_key]
# C++ and Fortran compilers are not mandatory, so let's just leave a debug trace
if cxx_key not in attribute_compilers:
tty.debug(f"[{__file__}] The external spec {spec} does not have a C++ compiler")
if fortran_key not in attribute_compilers:
tty.debug(f"[{__file__}] The external spec {spec} does not have a Fortran compiler")
# compilers format has cc/fc/f77, externals format has "c/fortran"
paths = {
"cc": c_compiler,
"cxx": attribute_compilers.get(cxx_key, None),
"fc": attribute_compilers.get(fortran_key, None),
"f77": attribute_compilers.get(fortran_key, None),
}
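A minimal illustration of the key translation performed above, with hypothetical paths (the externals format names languages c/cxx/fortran, while the compilers format expects cc/cxx/fc/f77):

attribute_compilers = {"c": "/usr/bin/gcc", "cxx": "/usr/bin/g++"}  # hypothetical

paths = {
    "cc": attribute_compilers["c"],
    "cxx": attribute_compilers.get("cxx", None),
    "fc": attribute_compilers.get("fortran", None),
    "f77": attribute_compilers.get("fortran", None),
}
print(paths)
# {'cc': '/usr/bin/gcc', 'cxx': '/usr/bin/g++', 'fc': None, 'f77': None}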
if not spec.architecture:
host_platform = spack.platforms.host()

View File

@@ -96,8 +96,6 @@ def verbose_flag(self):
openmp_flag = "-fopenmp"
# C++ flags based on CMake Modules/Compiler/Clang.cmake
@property
def cxx11_flag(self):
if self.real_version < Version("3.3"):
@@ -122,24 +120,6 @@ def cxx17_flag(self):
return "-std=c++17"
@property
def cxx20_flag(self):
if self.real_version < Version("5.0"):
raise UnsupportedCompilerFlag(self, "the C++20 standard", "cxx20_flag", "< 5.0")
elif self.real_version < Version("11.0"):
return "-std=c++2a"
else:
return "-std=c++20"
@property
def cxx23_flag(self):
if self.real_version < Version("12.0"):
raise UnsupportedCompilerFlag(self, "the C++23 standard", "cxx23_flag", "< 12.0")
elif self.real_version < Version("17.0"):
return "-std=c++2b"
else:
return "-std=c++23"
@property
def c99_flag(self):
return "-std=c99"
@@ -162,10 +142,7 @@ def c17_flag(self):
def c23_flag(self):
if self.real_version < Version("9.0"):
raise UnsupportedCompilerFlag(self, "the C23 standard", "c23_flag", "< 9.0")
elif self.real_version < Version("18.0"):
return "-std=c2x"
else:
return "-std=c23"
return "-std=c2x"
@property
def cc_pic_flag(self):

View File

@@ -34,6 +34,28 @@
"image": "docker.io/fedora:39"
}
},
"fedora:38": {
"bootstrap": {
"template": "container/fedora.dockerfile",
"image": "docker.io/fedora:38"
},
"os_package_manager": "dnf",
"build": "spack/fedora38",
"final": {
"image": "docker.io/fedora:38"
}
},
"fedora:37": {
"bootstrap": {
"template": "container/fedora.dockerfile",
"image": "docker.io/fedora:37"
},
"os_package_manager": "dnf",
"build": "spack/fedora37",
"final": {
"image": "docker.io/fedora:37"
}
},
"rockylinux:9": {
"bootstrap": {
"template": "container/rockylinux_9.dockerfile",
@@ -136,6 +158,13 @@
},
"build": "spack/ubuntu-focal",
"os_package_manager": "apt"
},
"ubuntu:18.04": {
"bootstrap": {
"template": "container/ubuntu_1804.dockerfile"
},
"os_package_manager": "apt",
"build": "spack/ubuntu-bionic"
}
},
"os_package_managers": {

View File

@@ -662,7 +662,6 @@ def _execute_redistribute(
@directive(("extendees", "dependencies"))
def extends(spec, when=None, type=("build", "run"), patches=None):
"""Same as depends_on, but also adds this package to the extendee list.
In case of Python, also adds a dependency on python-venv.
keyword arguments can be passed to extends() so that extension
packages can pass parameters to the extendee's extension
@@ -678,11 +677,6 @@ def _execute_extends(pkg):
_depends_on(pkg, spec, when=when, type=type, patches=patches)
spec_obj = spack.spec.Spec(spec)
# When extending python, also add a dependency on python-venv. This is done so that
# Spack environment views are Python virtual environments.
if spec_obj.name == "python" and not pkg.name == "python-venv":
_depends_on(pkg, "python-venv", when=when, type=("build", "run"))
# TODO: the values of the extendees dictionary are not used. Remove in next refactor.
pkg.extendees[spec_obj.name] = (spec_obj, None)
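A minimal sketch of a package using the directive above (the package is hypothetical; the spack.package import is the usual package-API idiom):

from spack.package import *


class PyExample(Package):
    """Hypothetical package that extends Python."""

    homepage = "https://example.com"  # hypothetical
    url = "https://example.com/py-example-1.0.tar.gz"  # hypothetical

    version("1.0", sha256="0" * 64)  # placeholder checksum

    extends("python")
    # Under the python-venv logic shown above, extending python also adds
    # a build/run dependency on python-venv, so that environment views
    # behave like Python virtual environments.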

View File

@@ -15,7 +15,6 @@
import llnl.util.filesystem as fs
import llnl.util.tty as tty
from llnl.util.symlink import readlink
import spack.config
import spack.hash_types as ht
@@ -182,7 +181,7 @@ def deprecated_file_path(self, deprecated_spec, deprecator_spec=None):
base_dir = (
self.path_for_spec(deprecator_spec)
if deprecator_spec
else readlink(deprecated_spec.prefix)
else os.readlink(deprecated_spec.prefix)
)
yaml_path = os.path.join(

View File

@@ -34,9 +34,6 @@
* ``spec``: a string representation of the abstract spec that was concretized
4. ``concrete_specs``: a dictionary containing the specs in the environment.
5. ``include_concrete`` (dictionary): an optional dictionary that includes the roots
and concrete specs from the included environments, keyed by the path to that
environment
Compatibility
-------------
@@ -53,37 +50,26 @@
- ``v2``
- ``v3``
- ``v4``
- ``v5``
* - ``v0.12:0.14``
- ✅
-
-
-
-
* - ``v0.15:0.16``
- ✅
- ✅
-
-
-
* - ``v0.17``
- ✅
- ✅
- ✅
-
-
* - ``v0.18:``
- ✅
- ✅
- ✅
- ✅
-
* - ``v0.22:``
- ✅
- ✅
- ✅
- ✅
- ✅
Version 1
---------
@@ -348,118 +334,6 @@
}
}
}
Version 5
---------
Version 5 doesn't change the top-level lockfile format, but adds an optional
``include_concrete`` dictionary. The dictionary holds the ``roots`` and
``concrete_specs`` of the included environments, keyed by the path to each
environment. Since it is optional, ``include_concrete`` is omitted from the
lockfile when the environment does not include any other environments.
.. code-block:: json
{
"_meta": {
"file-type": "spack-lockfile",
"lockfile-version": 5,
"specfile-version": 3
},
"roots": [
{
"hash": "<dag_hash 1>",
"spec": "<abstract spec 1>"
},
{
"hash": "<dag_hash 2>",
"spec": "<abstract spec 2>"
}
],
"concrete_specs": {
"<dag_hash 1>": {
"... <spec dict attributes> ...": { },
"dependencies": [
{
"name": "depname_1",
"hash": "<dag_hash for depname_1>",
"type": ["build", "link"]
},
{
"name": "depname_2",
"hash": "<dag_hash for depname_2>",
"type": ["build", "link"]
}
],
"hash": "<dag_hash 1>",
},
"<daghash 2>": {
"... <spec dict attributes> ...": { },
"dependencies": [
{
"name": "depname_3",
"hash": "<dag_hash for depname_3>",
"type": ["build", "link"]
},
{
"name": "depname_4",
"hash": "<dag_hash for depname_4>",
"type": ["build", "link"]
}
],
"hash": "<dag_hash 2>"
}
},
"include_concrete": {
"<path to environment>": {
"roots": [
{
"hash": "<dag_hash 1>",
"spec": "<abstract spec 1>"
},
{
"hash": "<dag_hash 2>",
"spec": "<abstract spec 2>"
}
],
"concrete_specs": {
"<dag_hash 1>": {
"... <spec dict attributes> ...": { },
"dependencies": [
{
"name": "depname_1",
"hash": "<dag_hash for depname_1>",
"type": ["build", "link"]
},
{
"name": "depname_2",
"hash": "<dag_hash for depname_2>",
"type": ["build", "link"]
}
],
"hash": "<dag_hash 1>",
},
"<daghash 2>": {
"... <spec dict attributes> ...": { },
"dependencies": [
{
"name": "depname_3",
"hash": "<dag_hash for depname_3>",
"type": ["build", "link"]
},
{
"name": "depname_4",
"hash": "<dag_hash for depname_4>",
"type": ["build", "link"]
}
],
"hash": "<dag_hash 2>"
}
}
}
}
}
"""
from .environment import (

View File

@@ -16,13 +16,13 @@
import urllib.parse
import urllib.request
import warnings
from typing import Any, Dict, Iterable, List, Optional, Set, Tuple, Union
from typing import Dict, Iterable, List, Optional, Set, Tuple, Union
import llnl.util.filesystem as fs
import llnl.util.tty as tty
import llnl.util.tty.color as clr
from llnl.util.link_tree import ConflictingSpecsError
from llnl.util.symlink import readlink, symlink
from llnl.util.symlink import symlink
import spack.compilers
import spack.concretize
@@ -159,8 +159,6 @@ def default_manifest_yaml():
default_view_name = "default"
# Default behavior to link all packages into views (vs. only root packages)
default_view_link = "all"
# The name for any included concrete specs
included_concrete_name = "include_concrete"
def installed_specs():
@@ -295,7 +293,6 @@ def create(
init_file: Optional[Union[str, pathlib.Path]] = None,
with_view: Optional[Union[str, pathlib.Path, bool]] = None,
keep_relative: bool = False,
include_concrete: Optional[List[str]] = None,
) -> "Environment":
"""Create a managed environment in Spack and returns it.
@@ -312,15 +309,10 @@ def create(
string, it specifies the path to the view
keep_relative: if True, develop paths are copied verbatim into the new environment file,
otherwise they are made absolute
include_concrete: list of concrete environment names/paths to be included
"""
environment_dir = environment_dir_from_name(name, exists_ok=False)
return create_in_dir(
environment_dir,
init_file=init_file,
with_view=with_view,
keep_relative=keep_relative,
include_concrete=include_concrete,
environment_dir, init_file=init_file, with_view=with_view, keep_relative=keep_relative
)
@@ -329,7 +321,6 @@ def create_in_dir(
init_file: Optional[Union[str, pathlib.Path]] = None,
with_view: Optional[Union[str, pathlib.Path, bool]] = None,
keep_relative: bool = False,
include_concrete: Optional[List[str]] = None,
) -> "Environment":
"""Create an environment in the directory passed as input and returns it.
@@ -343,7 +334,6 @@ def create_in_dir(
string, it specifies the path to the view
keep_relative: if True, develop paths are copied verbatim into the new environment file,
otherwise they are made absolute
include_concrete: concrete environment names/paths to be included
"""
initialize_environment_dir(root, envfile=init_file)
@@ -356,12 +346,6 @@ def create_in_dir(
if with_view is not None:
manifest.set_default_view(with_view)
if include_concrete is not None:
set_included_envs_to_env_paths(include_concrete)
validate_included_envs_exists(include_concrete)
validate_included_envs_concrete(include_concrete)
manifest.set_include_concrete(include_concrete)
manifest.flush()
except (spack.config.ConfigFormatError, SpackEnvironmentConfigError) as e:
@@ -435,67 +419,6 @@ def ensure_env_root_path_exists():
fs.mkdirp(env_root_path())
def set_included_envs_to_env_paths(include_concrete: List[str]) -> None:
"""If the included environment(s) is the environment name
it is replaced by the path to the environment
Args:
include_concrete: list of env name or path to env"""
for i, env_name in enumerate(include_concrete):
if is_env_dir(env_name):
include_concrete[i] = env_name
elif exists(env_name):
include_concrete[i] = root(env_name)
def validate_included_envs_exists(include_concrete: List[str]) -> None:
"""Checks that all of the included environments exist
Args:
include_concrete: list of already existing concrete environments to include
Raises:
SpackEnvironmentError: if any of the included environments do not exist
"""
missing_envs = set()
for i, env_name in enumerate(include_concrete):
if not is_env_dir(env_name):
missing_envs.add(env_name)
if missing_envs:
msg = "The following environment(s) are missing: {0}".format(", ".join(missing_envs))
raise SpackEnvironmentError(msg)
def validate_included_envs_concrete(include_concrete: List[str]) -> None:
"""Checks that all of the included environments are concrete
Args:
include_concrete: list of already existing concrete environments to include
Raises:
SpackEnvironmentError: if any of the included environments are not concrete
"""
non_concrete_envs = set()
for env_path in include_concrete:
if not os.path.exists(Environment(env_path).lock_path):
non_concrete_envs.add(Environment(env_path).name)
if non_concrete_envs:
msg = "The following environment(s) are not concrete: {0}\n" "Please run:".format(
", ".join(non_concrete_envs)
)
for env in non_concrete_envs:
msg += f"\n\t`spack -e {env} concretize`"
raise SpackEnvironmentError(msg)
def all_environment_names():
"""List the names of environments that currently exist."""
# just return empty if the env path does not exist. A read-only
@@ -662,7 +585,7 @@ def _current_root(self):
if not os.path.islink(self.root):
return None
root = readlink(self.root)
root = os.readlink(self.root)
if os.path.isabs(root):
return root
@@ -898,18 +821,6 @@ def __init__(self, manifest_dir: Union[str, pathlib.Path]) -> None:
self.specs_by_hash: Dict[str, Spec] = {}
#: Repository for this environment (memoized)
self._repo = None
#: Environment paths for concrete (lockfile) included environments
self.included_concrete_envs: List[str] = []
#: First-level included concretized spec data from/to the lockfile.
self.included_concrete_spec_data: Dict[str, Dict[str, List[str]]] = {}
#: User specs from included environments from the last concretization
self.included_concretized_user_specs: Dict[str, List[Spec]] = {}
#: Roots from included environments with the last concretization, in order
self.included_concretized_order: Dict[str, List[str]] = {}
#: Concretized specs by hash from the included environments
self.included_specs_by_hash: Dict[str, Dict[str, Spec]] = {}
#: Previously active environment
self._previous_active = None
self._dev_specs = None
@@ -947,7 +858,7 @@ def _read(self):
if os.path.exists(self.lock_path):
with open(self.lock_path) as f:
read_lock_version = self._read_lockfile(f)["_meta"]["lockfile-version"]
read_lock_version = self._read_lockfile(f)
if read_lock_version == 1:
tty.debug(f"Storing backup of {self.lock_path} at {self._lock_backup_v1_path}")
@@ -1015,20 +926,6 @@ def add_view(name, values):
if self.views == dict():
self.views[default_view_name] = ViewDescriptor(self.path, self.view_path_default)
def _process_concrete_includes(self):
"""Extract and load into memory included concrete spec data."""
self.included_concrete_envs = self.manifest[TOP_LEVEL_KEY].get(included_concrete_name, [])
if self.included_concrete_envs:
if os.path.exists(self.lock_path):
with open(self.lock_path) as f:
data = self._read_lockfile(f)
if included_concrete_name in data:
self.included_concrete_spec_data = data[included_concrete_name]
else:
self.include_concrete_envs()
def _construct_state_from_manifest(self):
"""Set up user specs and views from the manifest file."""
self.spec_lists = collections.OrderedDict()
@@ -1045,31 +942,6 @@ def _construct_state_from_manifest(self):
self.spec_lists[user_speclist_name] = user_specs
self._process_view(spack.config.get("view", True))
self._process_concrete_includes()
def all_concretized_user_specs(self) -> List[Spec]:
"""Returns all of the concretized user specs of the environment and
its included environment(s)."""
concretized_user_specs = self.concretized_user_specs[:]
for included_specs in self.included_concretized_user_specs.values():
for included in included_specs:
# Don't duplicate included spec(s)
if included not in concretized_user_specs:
concretized_user_specs.append(included)
return concretized_user_specs
def all_concretized_orders(self) -> List[str]:
"""Returns all of the concretized order of the environment and
its included environment(s)."""
concretized_order = self.concretized_order[:]
for included_concretized_order in self.included_concretized_order.values():
for included in included_concretized_order:
# Don't duplicate included spec(s)
if included not in concretized_order:
concretized_order.append(included)
return concretized_order
@property
def user_specs(self):
@@ -1094,26 +966,6 @@ def _read_dev_specs(self):
dev_specs[name] = local_entry
return dev_specs
@property
def included_user_specs(self) -> SpecList:
"""Included concrete user (or root) specs from last concretization."""
spec_list = SpecList()
if not self.included_concrete_envs:
return spec_list
def add_root_specs(included_concrete_specs):
# add specs from the include *and* any nested includes it may have
for env, info in included_concrete_specs.items():
for root_list in info["roots"]:
spec_list.add(root_list["spec"])
if "include_concrete" in info:
add_root_specs(info["include_concrete"])
add_root_specs(self.included_concrete_spec_data)
return spec_list
def clear(self, re_read=False):
"""Clear the contents of the environment
@@ -1125,15 +977,9 @@ def clear(self, re_read=False):
self.spec_lists[user_speclist_name] = SpecList()
self._dev_specs = {}
self.concretized_order = [] # roots of last concretize, in order
self.concretized_user_specs = [] # user specs from last concretize
self.concretized_order = [] # roots of last concretize, in order
self.specs_by_hash = {} # concretized specs by hash
self.included_concrete_spec_data = {} # concretized specs from lockfile of included envs
self.included_concretized_order = {} # root specs of the included envs, keyed by env path
self.included_concretized_user_specs = {} # user specs from last concretize's included env
self.included_specs_by_hash = {} # concretized specs by hash from the included envs
self.invalidate_repository_cache()
self._previous_active = None # previously active environment
if not re_read:
@@ -1187,55 +1033,6 @@ def scope_name(self):
"""Name of the config scope of this environment's manifest file."""
return self.manifest.scope_name
def include_concrete_envs(self):
"""Copy and save the included envs' specs internally"""
lockfile_meta = None
root_hash_seen = set()
concrete_hash_seen = set()
self.included_concrete_spec_data = {}
for env_path in self.included_concrete_envs:
# Check that environment exists
if not is_env_dir(env_path):
raise SpackEnvironmentError(f"Unable to find env at {env_path}")
env = Environment(env_path)
with open(env.lock_path) as f:
lockfile_as_dict = env._read_lockfile(f)
# Lockfile_meta must match each env and use at least format version 5
if lockfile_meta is None:
lockfile_meta = lockfile_as_dict["_meta"]
elif lockfile_meta != lockfile_as_dict["_meta"]:
raise SpackEnvironmentError("All lockfile _meta values must match")
elif lockfile_meta["lockfile-version"] < 5:
raise SpackEnvironmentError("The lockfile format must be at version 5 or higher")
# Copy unique root specs from env
self.included_concrete_spec_data[env_path] = {"roots": []}
for root_dict in lockfile_as_dict["roots"]:
if root_dict["hash"] not in root_hash_seen:
self.included_concrete_spec_data[env_path]["roots"].append(root_dict)
root_hash_seen.add(root_dict["hash"])
# Copy unique concrete specs from env
for concrete_spec in lockfile_as_dict["concrete_specs"]:
if concrete_spec not in concrete_hash_seen:
self.included_concrete_spec_data[env_path].update(
{"concrete_specs": lockfile_as_dict["concrete_specs"]}
)
concrete_hash_seen.add(concrete_spec)
if "include_concrete" in lockfile_as_dict.keys():
self.included_concrete_spec_data[env_path]["include_concrete"] = lockfile_as_dict[
"include_concrete"
]
self._read_lockfile_dict(self._to_lockfile_dict())
self.write()
def destroy(self):
"""Remove this environment from Spack entirely."""
shutil.rmtree(self.path)
@@ -1435,10 +1232,6 @@ def concretize(self, force=False, tests=False):
for spec in set(self.concretized_user_specs) - set(self.user_specs):
self.deconcretize(spec, concrete=False)
# If a combined env, check updated spec is in the linked envs
if self.included_concrete_envs:
self.include_concrete_envs()
# Pick the right concretization strategy
if self.unify == "when_possible":
return self._concretize_together_where_possible(tests=tests)
@@ -1622,7 +1415,7 @@ def _concretize_separately(self, tests=False):
# Ensure we don't try to bootstrap clingo in parallel
if spack.config.get("config:concretizer", "clingo") == "clingo":
with spack.bootstrap.ensure_bootstrap_configuration():
spack.bootstrap.ensure_clingo_importable_or_raise()
spack.bootstrap.ensure_core_dependencies()
# Ensure all the indexes have been built or updated, since
# otherwise the processes in the pool may timeout on waiting
@@ -1911,14 +1704,8 @@ def _partition_roots_by_install_status(self):
of per spec."""
installed, uninstalled = [], []
with spack.store.STORE.db.read_transaction():
for concretized_hash in self.all_concretized_orders():
if concretized_hash in self.specs_by_hash:
spec = self.specs_by_hash[concretized_hash]
else:
for env_path in self.included_specs_by_hash.keys():
if concretized_hash in self.included_specs_by_hash[env_path]:
spec = self.included_specs_by_hash[env_path][concretized_hash]
break
for concretized_hash in self.concretized_order:
spec = self.specs_by_hash[concretized_hash]
if not spec.installed or (
spec.satisfies("dev_path=*") or spec.satisfies("^dev_path=*")
):
@@ -1998,14 +1785,8 @@ def added_specs(self):
def concretized_specs(self):
"""Tuples of (user spec, concrete spec) for all concrete specs."""
for s, h in zip(self.all_concretized_user_specs(), self.all_concretized_orders()):
if h in self.specs_by_hash:
yield (s, self.specs_by_hash[h])
else:
for env_path in self.included_specs_by_hash.keys():
if h in self.included_specs_by_hash[env_path]:
yield (s, self.included_specs_by_hash[env_path][h])
break
for s, h in zip(self.concretized_user_specs, self.concretized_order):
yield (s, self.specs_by_hash[h])
def concrete_roots(self):
"""Same as concretized_specs, except it returns the list of concrete
@@ -2134,7 +1915,8 @@ def _get_environment_specs(self, recurse_dependencies=True):
If these specs appear under different user_specs, only one copy
is added to the list returned.
"""
specs = [self.specs_by_hash[h] for h in self.all_concretized_orders()]
specs = [self.specs_by_hash[h] for h in self.concretized_order]
if recurse_dependencies:
specs.extend(
traverse.traverse_nodes(
@@ -2179,76 +1961,31 @@ def _to_lockfile_dict(self):
"concrete_specs": concrete_specs,
}
if self.included_concrete_envs:
data[included_concrete_name] = self.included_concrete_spec_data
return data
def _read_lockfile(self, file_or_json):
"""Read a lockfile from a file or from a raw string."""
lockfile_dict = sjson.load(file_or_json)
self._read_lockfile_dict(lockfile_dict)
return lockfile_dict
def set_included_concretized_user_specs(
self,
env_name: str,
env_info: Dict[str, Dict[str, Any]],
included_json_specs_by_hash: Dict[str, Dict[str, Any]],
) -> Dict[str, Dict[str, Any]]:
"""Sets all of the concretized user specs from included environments
to include those from nested included environments.
Args:
env_name: the name (technically the path) of the included environment
env_info: included concrete environment data
included_json_specs_by_hash: concrete spec data keyed by hash
Returns: updated specs_by_hash
"""
self.included_concretized_order[env_name] = []
self.included_concretized_user_specs[env_name] = []
def add_specs(name, info, specs_by_hash):
# Add specs from the environment as well as any of its nested
# environments.
for root_info in info["roots"]:
self.included_concretized_order[name].append(root_info["hash"])
self.included_concretized_user_specs[name].append(Spec(root_info["spec"]))
if "concrete_specs" in info:
specs_by_hash.update(info["concrete_specs"])
if included_concrete_name in info:
for included_name, included_info in info[included_concrete_name].items():
if included_name not in self.included_concretized_order:
self.included_concretized_order[included_name] = []
self.included_concretized_user_specs[included_name] = []
add_specs(included_name, included_info, specs_by_hash)
add_specs(env_name, env_info, included_json_specs_by_hash)
return included_json_specs_by_hash
return lockfile_dict["_meta"]["lockfile-version"]
def _read_lockfile_dict(self, d):
"""Read a lockfile dictionary into this environment."""
self.specs_by_hash = {}
self.included_specs_by_hash = {}
self.included_concretized_user_specs = {}
self.included_concretized_order = {}
roots = d["roots"]
self.concretized_user_specs = [Spec(r["spec"]) for r in roots]
self.concretized_order = [r["hash"] for r in roots]
json_specs_by_hash = d["concrete_specs"]
included_json_specs_by_hash = {}
if included_concrete_name in d:
for env_name, env_info in d[included_concrete_name].items():
included_json_specs_by_hash.update(
self.set_included_concretized_user_specs(
env_name, env_info, included_json_specs_by_hash
)
)
# Track specs by their lockfile key. Currently spack uses the finest
# grained hash as the lockfile key, while older formats used the build
# hash or a previous incarnation of the DAG hash (one that did not
# include build deps or package hash).
specs_by_hash = {}
# Track specs by their DAG hash, allows handling DAG hash collisions
first_seen = {}
current_lockfile_format = d["_meta"]["lockfile-version"]
try:
reader = READER_CLS[current_lockfile_format]
@@ -2261,39 +1998,6 @@ def _read_lockfile_dict(self, d):
msg += " You need to use a newer Spack version."
raise SpackEnvironmentError(msg)
first_seen, self.concretized_order = self.filter_specs(
reader, json_specs_by_hash, self.concretized_order
)
for spec_dag_hash in self.concretized_order:
self.specs_by_hash[spec_dag_hash] = first_seen[spec_dag_hash]
if any(self.included_concretized_order.values()):
first_seen = {}
for env_name, concretized_order in self.included_concretized_order.items():
filtered_spec, self.included_concretized_order[env_name] = self.filter_specs(
reader, included_json_specs_by_hash, concretized_order
)
first_seen.update(filtered_spec)
for env_path, spec_hashes in self.included_concretized_order.items():
self.included_specs_by_hash[env_path] = {}
for spec_dag_hash in spec_hashes:
self.included_specs_by_hash[env_path].update(
{spec_dag_hash: first_seen[spec_dag_hash]}
)
def filter_specs(self, reader, json_specs_by_hash, order_concretized):
# Track specs by their lockfile key. Currently spack uses the finest
# grained hash as the lockfile key, while older formats used the build
# hash or a previous incarnation of the DAG hash (one that did not
# include build deps or package hash).
specs_by_hash = {}
# Track specs by their DAG hash, allows handling DAG hash collisions
first_seen = {}
# First pass: Put each spec in the map ignoring dependencies
for lockfile_key, node_dict in json_specs_by_hash.items():
spec = reader.from_node_dict(node_dict)
@@ -2316,8 +2020,7 @@ def filter_specs(self, reader, json_specs_by_hash, order_concretized):
# keep. This is only required as long as we support older lockfile
# formats where the mapping from DAG hash to lockfile key is possibly
# one-to-many.
for lockfile_key in order_concretized:
for lockfile_key in self.concretized_order:
for s in specs_by_hash[lockfile_key].traverse():
if s.dag_hash() not in first_seen:
first_seen[s.dag_hash()] = s
@@ -2325,10 +2028,12 @@ def filter_specs(self, reader, json_specs_by_hash, order_concretized):
# Now make sure concretized_order and our internal specs dict
# contains the keys used by modern spack (i.e. the dag_hash
# that includes build deps and package hash).
self.concretized_order = [
specs_by_hash[h_key].dag_hash() for h_key in self.concretized_order
]
order_concretized = [specs_by_hash[h_key].dag_hash() for h_key in order_concretized]
return first_seen, order_concretized
for spec_dag_hash in self.concretized_order:
self.specs_by_hash[spec_dag_hash] = first_seen[spec_dag_hash]
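
A standalone sketch of the two-pass hash translation above, with simplified helper names; the real code also re-links dependency edges between the passes, elided here:

def translate_lockfile_keys(json_specs_by_hash, order_concretized, from_node_dict):
    # Pass 1: materialize one Spec per lockfile key (dependency re-linking elided).
    specs_by_hash = {k: from_node_dict(v) for k, v in json_specs_by_hash.items()}
    # Pass 2: walk each root in order; the first spec seen for a given modern
    # DAG hash wins, resolving the old many-to-one lockfile-key mapping.
    first_seen = {}
    for lockfile_key in order_concretized:
        for s in specs_by_hash[lockfile_key].traverse():
            if s.dag_hash() not in first_seen:
                first_seen[s.dag_hash()] = s
    # Re-key the concretization order on the modern DAG hash.
    return first_seen, [specs_by_hash[k].dag_hash() for k in order_concretized]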
def write(self, regenerate: bool = True) -> None:
"""Writes an in-memory environment to its location on disk.
@@ -2341,7 +2046,7 @@ def write(self, regenerate: bool = True) -> None:
regenerate: regenerate views and run post-write hooks as well as writing if True.
"""
self.manifest_uptodate_or_warn()
if self.specs_by_hash or self.included_concrete_envs:
if self.specs_by_hash:
self.ensure_env_directory_exists(dot_env=True)
self.update_environment_repository()
self.manifest.flush()
@@ -2840,19 +2545,6 @@ def override_user_spec(self, user_spec: str, idx: int) -> None:
raise SpackEnvironmentError(msg) from e
self.changed = True
def set_include_concrete(self, include_concrete: List[str]) -> None:
"""Sets the included concrete environments in the manifest to the value(s) passed as input.
Args:
include_concrete: list of already existing concrete environments to include
"""
self.pristine_configuration[included_concrete_name] = []
for env_path in include_concrete:
self.pristine_configuration[included_concrete_name].append(env_path)
self.changed = True
def add_definition(self, user_spec: str, list_name: str) -> None:
"""Appends a user spec to the first active definition matching the name passed as argument.
@@ -3036,56 +2728,54 @@ def included_config_scopes(self) -> List[spack.config.ConfigScope]:
for i, config_path in enumerate(reversed(includes)):
# allow paths to contain spack config/environment variables, etc.
config_path = substitute_path_variables(config_path)
include_url = urllib.parse.urlparse(config_path)
# If scheme is not valid, config_path is not a url
# of a type Spack is generally aware
if spack.util.url.validate_scheme(include_url.scheme):
# Transform file:// URLs to direct includes.
if include_url.scheme == "file":
config_path = urllib.request.url2pathname(include_url.path)
# Transform file:// URLs to direct includes.
if include_url.scheme == "file":
config_path = urllib.request.url2pathname(include_url.path)
# Any other URL should be fetched.
elif include_url.scheme in ("http", "https", "ftp"):
# Stage any remote configuration file(s)
staged_configs = (
os.listdir(self.config_stage_dir)
if os.path.exists(self.config_stage_dir)
else []
# Any other URL should be fetched.
elif include_url.scheme in ("http", "https", "ftp"):
# Stage any remote configuration file(s)
staged_configs = (
os.listdir(self.config_stage_dir)
if os.path.exists(self.config_stage_dir)
else []
)
remote_path = urllib.request.url2pathname(include_url.path)
basename = os.path.basename(remote_path)
if basename in staged_configs:
# Do NOT re-stage configuration files over existing
# ones with the same name since there is a risk of
# losing changes (e.g., from 'spack config update').
tty.warn(
"Will not re-stage configuration from {0} to avoid "
"losing changes to the already staged file of the "
"same name.".format(remote_path)
)
remote_path = urllib.request.url2pathname(include_url.path)
basename = os.path.basename(remote_path)
if basename in staged_configs:
# Do NOT re-stage configuration files over existing
# ones with the same name since there is a risk of
# losing changes (e.g., from 'spack config update').
tty.warn(
"Will not re-stage configuration from {0} to avoid "
"losing changes to the already staged file of the "
"same name.".format(remote_path)
)
# Recognize the configuration stage directory
# is flattened to ensure a single copy of each
# configuration file.
config_path = self.config_stage_dir
if basename.endswith(".yaml"):
config_path = os.path.join(config_path, basename)
else:
staged_path = spack.config.fetch_remote_configs(
config_path, str(self.config_stage_dir), skip_existing=True
)
if not staged_path:
raise SpackEnvironmentError(
"Unable to fetch remote configuration {0}".format(config_path)
)
config_path = staged_path
elif include_url.scheme:
raise ValueError(
f"Unsupported URL scheme ({include_url.scheme}) for "
f"environment include: {config_path}"
# Recognize the configuration stage directory
# is flattened to ensure a single copy of each
# configuration file.
config_path = self.config_stage_dir
if basename.endswith(".yaml"):
config_path = os.path.join(config_path, basename)
else:
staged_path = spack.config.fetch_remote_configs(
config_path, str(self.config_stage_dir), skip_existing=True
)
if not staged_path:
raise SpackEnvironmentError(
"Unable to fetch remote configuration {0}".format(config_path)
)
config_path = staged_path
elif include_url.scheme:
raise ValueError(
f"Unsupported URL scheme ({include_url.scheme}) for "
f"environment include: {config_path}"
)
# treat relative paths as relative to the environment
if not os.path.isabs(config_path):

View File

@@ -34,7 +34,7 @@
/
o boost
graph_dot() will output a graph of a spec (or multiple specs) in dot format.
graph_dot() will output a graph of a spec (or multiple specs) in dot or mermaid format.
"""
import enum
import sys
@@ -446,10 +446,27 @@ def graph_ascii(
graph.write(spec, color=color, out=out)
class DotGraphBuilder:
"""Visit edges of a graph a build DOT options for nodes and edges"""
class DotGraph:
"""Configuration for DOT graphs"""
def __init__(self):
self.label = "label="
self.template = "misc/graph.dot"
class MermaidGraph:
"""Configuration for Mermaid graphs"""
def __init__(self):
self.label = ""
self.template = "misc/graph.md"
class GraphBuilder:
"""Visit edges of a graph a build options for nodes and edges"""
def __init__(self, graph=DotGraph()):
self.graph: Union[DotGraph, MermaidGraph] = graph
self.nodes: Set[Tuple[str, str]] = set()
self.edges: Set[Tuple[str, str, str]] = set()
@@ -472,40 +489,40 @@ def edge_entry(self, edge: spack.spec.DependencySpec) -> Tuple[str, str, str]:
raise NotImplementedError("Need to be implemented by derived classes")
def context(self):
"""Return the context to be used to render the DOT graph template"""
"""Return the context to be used to render the graph template"""
result = {"nodes": self.nodes, "edges": self.edges}
return result
def render(self) -> str:
"""Return a string with the output in DOT format"""
"""Return a string with the output in format"""
environment = spack.tengine.make_environment()
template = environment.get_template("misc/graph.dot")
template = environment.get_template(self.graph.template)
return template.render(self.context())
class SimpleDAG(DotGraphBuilder):
"""Simple DOT graph, with nodes colored uniformly and edges without properties"""
class SimpleDAG(GraphBuilder):
"""Simple graph, with nodes colored uniformly and edges without properties"""
def node_entry(self, node):
format_option = "{name}{@version}{%compiler}{/hash:7}"
return node.dag_hash(), f'[label="{node.format(format_option)}"]'
return node.dag_hash(), f'[{self.graph.label}"{node.format(format_option)}"]'
def edge_entry(self, edge):
return edge.parent.dag_hash(), edge.spec.dag_hash(), None
class StaticDag(DotGraphBuilder):
"""DOT graph for possible dependencies"""
class StaticDag(GraphBuilder):
"""Graph for possible dependencies"""
def node_entry(self, node):
return node.name, f'[label="{node.name}"]'
return node.name, f'[{self.graph.label}"{node.name}"]'
def edge_entry(self, edge):
return edge.parent.name, edge.spec.name, None
class DAGWithDependencyTypes(DotGraphBuilder):
"""DOT graph with link,run nodes grouped together and edges colored according to
class DAGWithDependencyTypes(GraphBuilder):
"""Graph with link,run nodes grouped together and edges colored according to
the dependency types.
"""
@@ -521,7 +538,7 @@ def visit(self, edge):
def node_entry(self, node):
node_str = node.format("{name}{@version}{%compiler}{/hash:7}")
options = f'[label="{node_str}", group="build_dependencies", fillcolor="coral"]'
options = f'[{self.graph.label}"{node_str}", group="build_dependencies", fillcolor="coral"]'
if node.dag_hash() in self.main_unified_space:
options = f'[label="{node_str}", group="main_psid"]'
return node.dag_hash(), options
@@ -574,7 +591,7 @@ def static_graph_dot(
def graph_dot(
specs: List[spack.spec.Spec],
builder: Optional[DotGraphBuilder] = None,
builder: Optional[GraphBuilder] = None,
depflag: dt.DepFlag = dt.ALL,
out: Optional[TextIO] = None,
):
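
Putting the new classes together, a sketch of rendering Mermaid output with graph_dot, which accepts any GraphBuilder; Spec("zlib").concretized() is assumed here only as a convenient way to get a concrete spec:

import io

import spack.graph
import spack.spec

spec = spack.spec.Spec("zlib").concretized()  # any concrete spec works

out = io.StringIO()
# A MermaidGraph config swaps the misc/graph.dot template for misc/graph.md
# and drops the DOT-specific "label=" prefix on node options.
builder = spack.graph.SimpleDAG(graph=spack.graph.MermaidGraph())
spack.graph.graph_dot([spec], builder=builder, out=out)
print(out.getvalue())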

View File

@@ -1,8 +0,0 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
def post_install(spec, explicit=None):
spec.package.windows_establish_runtime_linkage()

View File

@@ -488,7 +488,6 @@ def _process_binary_cache_tarball(
with timer.measure("install"), spack.util.path.filter_padding():
binary_distribution.extract_tarball(pkg.spec, download_result, force=False, timer=timer)
pkg.windows_establish_runtime_linkage()
if hasattr(pkg, "_post_buildcache_install_hook"):
pkg._post_buildcache_install_hook()
@@ -1699,6 +1698,10 @@ def _install_task(self, task: BuildTask, install_status: InstallStatus) -> None:
spack.package_base.PackageBase._verbose = spack.build_environment.start_build_process(
pkg, build_process, install_args
)
# Currently this is how RPATH-like behavior is achieved on Windows, after install
# establish runtime linkage via Windows Runtime link object
# Note: this is a no-op on non Windows platforms
pkg.windows_establish_runtime_linkage()
# Note: PARENT of the build process adds the new package to
# the database, so that we don't need to re-read from file.
spack.store.STORE.db.add(pkg.spec, spack.store.STORE.layout, explicit=explicit)

View File

@@ -427,7 +427,7 @@ def make_argument_parser(**kwargs):
parser.add_argument(
"--color",
action="store",
default=None,
default=os.environ.get("SPACK_COLOR", "auto"),
choices=("always", "never", "auto"),
help="when to colorize output (default: auto)",
)
@@ -622,8 +622,7 @@ def setup_main_options(args):
# with color
color.try_enable_terminal_color_on_windows()
# when to use color (takes always, auto, or never)
if args.color is not None:
color.set_color_when(args.color)
color.set_color_when(args.color)
def allows_unknown_args(command):
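
The color-default change above is easy to reproduce in isolation; a minimal sketch of an option seeded from the environment, where an explicit --color on the command line still wins:

import argparse
import os

parser = argparse.ArgumentParser()
parser.add_argument(
    "--color",
    action="store",
    default=os.environ.get("SPACK_COLOR", "auto"),
    choices=("always", "never", "auto"),
)
args = parser.parse_args([])
print(args.color)  # "auto" unless SPACK_COLOR is set in the environment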

View File

@@ -11,7 +11,7 @@
import urllib.parse
import urllib.request
from http.client import HTTPResponse
from typing import List, NamedTuple, Tuple
from typing import NamedTuple, Tuple
from urllib.request import Request
import llnl.util.tty as tty
@@ -27,7 +27,6 @@
import spack.stage
import spack.traverse
import spack.util.crypto
import spack.util.url
from .image import Digest, ImageReference
@@ -70,42 +69,6 @@ def with_query_param(url: str, param: str, value: str) -> str:
)
def list_tags(ref: ImageReference, _urlopen: spack.oci.opener.MaybeOpen = None) -> List[str]:
"""Retrieves the list of tags associated with an image, handling pagination."""
_urlopen = _urlopen or spack.oci.opener.urlopen
tags = set()
fetch_url = ref.tags_url()
while True:
# Fetch tags
request = Request(url=fetch_url)
response = _urlopen(request)
spack.oci.opener.ensure_status(request, response, 200)
tags.update(json.load(response)["tags"])
# Check for pagination
link_header = response.headers["Link"]
if link_header is None:
break
tty.debug(f"OCI tag pagination: {link_header}")
rel_next_value = spack.util.url.parse_link_rel_next(link_header)
if rel_next_value is None:
break
rel_next = urllib.parse.urlparse(rel_next_value)
if rel_next.scheme not in ("https", ""):
break
fetch_url = ref.endpoint(rel_next_value)
return sorted(tags)
def upload_blob(
ref: ImageReference,
file: str,

View File

@@ -398,7 +398,7 @@ def create_opener():
opener = urllib.request.OpenerDirector()
for handler in [
urllib.request.UnknownHandler(),
urllib.request.HTTPSHandler(context=spack.util.web.ssl_create_default_context()),
urllib.request.HTTPSHandler(),
spack.util.web.SpackHTTPDefaultErrorHandler(),
urllib.request.HTTPRedirectHandler(),
urllib.request.HTTPErrorProcessor(),
@@ -418,27 +418,18 @@ def ensure_status(request: urllib.request.Request, response: HTTPResponse, statu
)
def default_retry(f, retries: int = 5, sleep=None):
def default_retry(f, retries: int = 3, sleep=None):
sleep = sleep or time.sleep
def wrapper(*args, **kwargs):
for i in range(retries):
try:
return f(*args, **kwargs)
except (urllib.error.URLError, TimeoutError) as e:
except urllib.error.HTTPError as e:
# Retry on internal server errors, and rate limit errors
# Potentially this could take into account the Retry-After header
# if registries support it
if i + 1 != retries and (
(
isinstance(e, urllib.error.HTTPError)
and (500 <= e.code < 600 or e.code == 429)
)
or (
isinstance(e, urllib.error.URLError) and isinstance(e.reason, TimeoutError)
)
or isinstance(e, TimeoutError)
):
if i + 1 != retries and (500 <= e.code < 600 or e.code == 429):
# Exponential backoff
sleep(2**i)
continue
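
A self-contained sketch of the narrowed retry policy (HTTP 5xx and 429 only, exponential backoff); the final re-raise is an assumption from the surrounding context:

import time
import urllib.error

def default_retry(f, retries: int = 3, sleep=None):
    sleep = sleep or time.sleep

    def wrapper(*args, **kwargs):
        for i in range(retries):
            try:
                return f(*args, **kwargs)
            except urllib.error.HTTPError as e:
                # Retry only on server errors and rate limiting; back off 1s, 2s, ...
                if i + 1 != retries and (500 <= e.code < 600 or e.code == 429):
                    sleep(2**i)
                    continue
                raise

    return wrapper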

View File

@@ -39,7 +39,6 @@
)
from spack.build_systems.cargo import CargoPackage
from spack.build_systems.cmake import CMakePackage, generator
from spack.build_systems.compiler import CompilerPackage
from spack.build_systems.cuda import CudaPackage
from spack.build_systems.generic import Package
from spack.build_systems.gnu import GNUMirrorPackage

View File

@@ -161,11 +161,7 @@ def windows_establish_runtime_linkage(self):
Performs symlinking to incorporate rpath dependencies to Windows runtime search paths
"""
# If spec is an external, we should not be modifying its bin directory, as we would
# be doing in this method
# Spack should in general not modify things it has not installed
# we can reasonably expect externals to have their link interface properly established
if sys.platform == "win32" and not self.spec.external:
if sys.platform == "win32":
self.win_rpath.add_library_dependent(*self.win_add_library_dependent())
self.win_rpath.add_rpath(*self.win_add_rpath())
self.win_rpath.establish_link()
@@ -1244,7 +1240,7 @@ def install_test_root(self):
"""Return the install test root directory."""
tty.warn(
"The 'pkg.install_test_root' property is deprecated with removal "
"expected v0.23. Use 'install_test_root(pkg)' instead."
"expected v0.22. Use 'install_test_root(pkg)' instead."
)
return install_test_root(self)
@@ -1902,7 +1898,7 @@ def cache_extra_test_sources(self, srcs):
"""
msg = (
"'pkg.cache_extra_test_sources(srcs) is deprecated with removal "
"expected in v0.23. Use 'cache_extra_test_sources(pkg, srcs)' "
"expected in v0.22. Use 'cache_extra_test_sources(pkg, srcs)' "
"instead."
)
warnings.warn(msg)
@@ -2450,18 +2446,9 @@ def rpath(self):
# on Windows, libraries of runtime interest are typically
# stored in the bin directory
# Do not include Windows system libraries in the rpath interface
# these libraries are handled automatically by VS/VCVARS and adding
# Spack derived system libs into the link path or address space of a program
# can result in conflicting versions, which makes Spack packages less useable
if sys.platform == "win32":
rpaths = [self.prefix.bin]
rpaths.extend(
d.prefix.bin
for d in deps
if os.path.isdir(d.prefix.bin)
and "windows-system" not in getattr(d.package, "tags", [])
)
rpaths.extend(d.prefix.bin for d in deps if os.path.isdir(d.prefix.bin))
else:
rpaths = [self.prefix.lib, self.prefix.lib64]
rpaths.extend(d.prefix.lib for d in deps if os.path.isdir(d.prefix.lib))

View File

@@ -10,7 +10,6 @@
import archspec.cpu
import llnl.util.tty as tty
from llnl.util.symlink import readlink
import spack.target
import spack.version
@@ -134,7 +133,7 @@ def craype_type_and_version(cls):
# Take the default version from known symlink path
default_path = os.path.join(craype_dir, "default")
if os.path.islink(default_path):
version = spack.version.Version(readlink(default_path))
version = spack.version.Version(os.readlink(default_path))
return (craype_type, version)
# If no default version, sort available versions and return latest

View File

@@ -16,7 +16,7 @@
import llnl.util.lang
import llnl.util.tty as tty
from llnl.util.lang import memoized
from llnl.util.symlink import readlink, symlink
from llnl.util.symlink import symlink
import spack.paths
import spack.platforms
@@ -25,7 +25,6 @@
import spack.store
import spack.util.elf as elf
import spack.util.executable as executable
import spack.util.path
from .relocate_text import BinaryFilePrefixReplacer, TextFilePrefixReplacer
@@ -566,7 +565,7 @@ def make_link_relative(new_links, orig_links):
orig_links (list): original links
"""
for new_link, orig_link in zip(new_links, orig_links):
target = readlink(orig_link)
target = os.readlink(orig_link)
relative_target = os.path.relpath(target, os.path.dirname(orig_link))
os.unlink(new_link)
symlink(relative_target, new_link)
@@ -614,7 +613,7 @@ def relocate_links(links, prefix_to_prefix):
"""Relocate links to a new install prefix."""
regex = re.compile("|".join(re.escape(p) for p in prefix_to_prefix.keys()))
for link in links:
old_target = readlink(link)
old_target = os.readlink(link)
match = regex.match(old_target)
# No match.

View File

@@ -241,7 +241,7 @@ def get_all_package_diffs(type, rev1="HEAD^1", rev2="HEAD"):
Arguments:
type (str): String containing one or more of 'A', 'R', 'C'
type (str): String containing one or more of 'A', 'B', 'C'
rev1 (str): Revision to compare against, default is 'HEAD^'
rev2 (str): Revision to compare to rev1, default is 'HEAD'
@@ -264,7 +264,7 @@ def get_all_package_diffs(type, rev1="HEAD^1", rev2="HEAD"):
lines = [] if not out else re.split(r"\s+", out)
changed = set()
for path in lines:
pkg_name, _, _ = path.partition("/")
pkg_name, _, _ = path.partition(os.sep)
if pkg_name not in added and pkg_name not in removed:
changed.add(pkg_name)

View File

@@ -27,7 +27,7 @@
from spack.error import SpackError
from spack.util.crypto import checksum
from spack.util.log_parse import parse_log_events
from spack.util.web import ssl_create_default_context
from spack.util.web import urllib_ssl_cert_handler
from .base import Reporter
from .extract import extract_test_parts
@@ -58,8 +58,7 @@
# Initialize data structures common to each phase's report.
CDASH_PHASES = set(MAP_PHASES_TO_CDASH.values())
CDASH_PHASES.add("update")
# CDash request timeout in seconds
SPACK_CDASH_TIMEOUT = 45
CDashConfiguration = collections.namedtuple(
"CDashConfiguration", ["upload_url", "packages", "build", "site", "buildstamp", "track"]
@@ -429,7 +428,7 @@ def upload(self, filename):
# Compute md5 checksum for the contents of this file.
md5sum = checksum(hashlib.md5, filename, block_size=8192)
opener = build_opener(HTTPSHandler(context=ssl_create_default_context()))
opener = build_opener(HTTPSHandler(context=urllib_ssl_cert_handler()))
with open(filename, "rb") as f:
params_dict = {
"build": self.buildname,
@@ -448,7 +447,7 @@ def upload(self, filename):
# By default, urllib2 only support GET and POST.
# CDash expects this file to be uploaded via PUT.
request.get_method = lambda: "PUT"
response = opener.open(request, timeout=SPACK_CDASH_TIMEOUT)
response = opener.open(request)
if self.current_package_name not in self.buildIds:
resp_value = response.read()
if isinstance(resp_value, bytes):
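
The timeout is threaded through the standard-library opener; a minimal sketch using only urllib and ssl, with a hypothetical endpoint and the network call left commented so the snippet runs offline:

import ssl
from urllib.request import HTTPSHandler, Request, build_opener

SPACK_CDASH_TIMEOUT = 45  # seconds, as defined above

opener = build_opener(HTTPSHandler(context=ssl.create_default_context()))
request = Request("https://cdash.example.com/submit.php")  # hypothetical endpoint
request.get_method = lambda: "PUT"  # CDash expects uploads via PUT
# The timeout bounds the whole request so a stalled server cannot hang a CI job:
# response = opener.open(request, timeout=SPACK_CDASH_TIMEOUT)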

View File

@@ -9,7 +9,7 @@
import tempfile
from collections import OrderedDict
from llnl.util.symlink import readlink, symlink
from llnl.util.symlink import symlink
import spack.binary_distribution as bindist
import spack.error
@@ -26,7 +26,7 @@ def _relocate_spliced_links(links, orig_prefix, new_prefix):
in our case. This still needs to be called after the copy to destination
because it expects the new directory structure to be in place."""
for link in links:
link_target = readlink(os.path.join(orig_prefix, link))
link_target = os.readlink(os.path.join(orig_prefix, link))
link_target = re.sub("^" + orig_prefix, new_prefix, link_target)
new_link_path = os.path.join(new_prefix, link)
os.unlink(new_link_path)

View File

@@ -35,7 +35,6 @@
{
"include": {"type": "array", "default": [], "items": {"type": "string"}},
"specs": spec_list_schema,
"include_concrete": {"type": "array", "default": [], "items": {"type": "string"}},
},
),
}

View File

@@ -141,7 +141,7 @@
"deprecatedProperties": {
"properties": ["version"],
"message": "setting version preferences in the 'all' section of packages.yaml "
"is deprecated and will be removed in v0.23\n\n\tThese preferences "
"is deprecated and will be removed in v0.22\n\n\tThese preferences "
"will be ignored by Spack. You can set them only in package-specific sections "
"of the same file.\n",
"error": False,
@@ -197,7 +197,7 @@
"properties": ["target", "compiler", "providers"],
"message": "setting 'compiler:', 'target:' or 'provider:' preferences in "
"a package-specific section of packages.yaml is deprecated, and will be "
"removed in v0.23.\n\n\tThese preferences will be ignored by Spack, and "
"removed in v0.22.\n\n\tThese preferences will be ignored by Spack, and "
"can be set only in the 'all' section of the same file. "
"You can run:\n\n\t\t$ spack audit configs\n\n\tto get better diagnostics, "
"including files:lines where the deprecated attributes are used.\n\n"

View File

@@ -314,10 +314,6 @@ def using_libc_compatibility() -> bool:
return spack.platforms.host().name == "linux"
def c_compiler_runs(compiler: spack.compiler.Compiler) -> bool:
return compiler.compiler_verbose_output is not None
def extend_flag_list(flag_list, new_flags):
"""Extend a list of flags, preserving order and precedence.
@@ -948,26 +944,14 @@ class ConcreteSpecsByHash(collections.abc.Mapping):
def __init__(self) -> None:
self.data: Dict[str, spack.spec.Spec] = {}
self.explicit: Set[str] = set()
def __getitem__(self, dag_hash: str) -> spack.spec.Spec:
return self.data[dag_hash]
def explicit_items(self) -> Iterator[Tuple[str, spack.spec.Spec]]:
"""Iterate on items that have been added explicitly, and not just as a dependency
of other nodes.
"""
for h, s in self.items():
# We need to make an exception for gcc-runtime, until we can splice it.
if h in self.explicit or s.name == "gcc-runtime":
yield h, s
def add(self, spec: spack.spec.Spec) -> bool:
"""Adds a new concrete spec to the mapping. Returns True if the spec was just added,
False if the spec was already in the mapping.
Calling this function marks the spec as added explicitly.
Args:
spec: spec to be added
@@ -982,7 +966,6 @@ def add(self, spec: spack.spec.Spec) -> bool:
raise ValueError(msg)
dag_hash = spec.dag_hash()
self.explicit.add(dag_hash)
if dag_hash in self.data:
return False
@@ -1653,15 +1636,11 @@ def external_packages(self):
if isinstance(reuse_yaml, typing.Mapping):
default_include = reuse_yaml.get("include", [])
default_exclude = reuse_yaml.get("exclude", [])
libc_externals = list(all_libcs())
for source in reuse_yaml.get("from", []):
if source["type"] != "external":
continue
include = source.get("include", default_include)
if include:
# Since libcs are implicit externals, we need to implicitly include them
include = include + libc_externals
exclude = source.get("exclude", default_exclude)
spec_filters.append(
SpecFilter(
@@ -2093,7 +2072,7 @@ def _supported_targets(self, compiler_name, compiler_version, targets):
try:
with warnings.catch_warnings():
warnings.simplefilter("ignore")
target.optimization_flags(compiler_name, str(compiler_version))
target.optimization_flags(compiler_name, compiler_version)
supported.append(target)
except archspec.cpu.UnsupportedMicroarchitecture:
continue
@@ -2370,7 +2349,7 @@ def register_concrete_spec(self, spec, possible):
def concrete_specs(self):
"""Emit facts for reusable specs"""
for h, spec in self.reusable_and_possible.explicit_items():
for h, spec in self.reusable_and_possible.items():
# this indicates that there is a spec like this installed
self.gen.fact(fn.installed_hash(spec.name, h))
# this describes what constraints it imposes on the solve
@@ -2979,13 +2958,6 @@ class CompilerParser:
def __init__(self, configuration) -> None:
self.compilers: Set[KnownCompiler] = set()
for c in all_compilers_in_config(configuration):
if using_libc_compatibility() and not c_compiler_runs(c):
tty.debug(
f"the C compiler {c.cc} does not exist, or does not run correctly."
f" The compiler {c.spec} will not be used during concretization."
)
continue
if using_libc_compatibility() and not c.default_libc:
warnings.warn(
f"cannot detect libc from {c.spec}. The compiler will not be used "
@@ -3261,16 +3233,13 @@ class SpecBuilder:
r"^.*_propagate$",
r"^.*_satisfies$",
r"^.*_set$",
r"^compatible_libc$",
r"^dependency_holds$",
r"^external_conditions_hold$",
r"^node_compiler$",
r"^package_hash$",
r"^root$",
r"^track_dependencies$",
r"^variant_default_value_from_cli$",
r"^virtual_node$",
r"^virtual_on_incoming_edges$",
r"^virtual_root$",
]
)
@@ -3794,12 +3763,6 @@ class Solver:
def __init__(self):
self.driver = PyclingoDriver()
self.selector = ReusableSpecsSelector(configuration=spack.config.CONFIG)
if spack.platforms.host().name == "cray":
msg = (
"The Cray platform, i.e. 'platform=cray', will be removed in Spack v0.23. "
"All Cray machines will be then detected as 'platform=linux'."
)
warnings.warn(msg)
@staticmethod
def _check_input_and_extract_concrete_specs(specs):

View File

@@ -1424,7 +1424,6 @@ opt_criterion(73, "deprecated versions used").
#minimize{
1@73+Priority,PackageNode
: attr("deprecated", PackageNode, _),
not external(PackageNode),
build_priority(PackageNode, Priority)
}.
@@ -1432,11 +1431,11 @@ opt_criterion(73, "deprecated versions used").
% 1. Version weight
% 2. Number of variants with a non default value, if not set
% for the root package.
opt_criterion(70, "version badness (roots)").
opt_criterion(70, "version weight").
#minimize{ 0@270: #true }.
#minimize{ 0@70: #true }.
#minimize {
Weight@70+Priority,PackageNode
Weight@70+Priority
: attr("root", PackageNode),
version_weight(PackageNode, Weight),
build_priority(PackageNode, Priority)
@@ -1526,14 +1525,13 @@ opt_criterion(30, "non-preferred OS's").
}.
% Choose more recent versions for nodes
opt_criterion(25, "version badness (non roots)").
opt_criterion(25, "version badness").
#minimize{ 0@225: #true }.
#minimize{ 0@25: #true }.
#minimize{
Weight@25+Priority,node(X, Package)
: version_weight(node(X, Package), Weight),
build_priority(node(X, Package), Priority),
not attr("root", node(X, Package)),
not runtime(Package)
}.

View File

@@ -195,7 +195,7 @@ def _bootstrap_clingo() -> ModuleType:
import spack.bootstrap
with spack.bootstrap.ensure_bootstrap_configuration():
spack.bootstrap.ensure_clingo_importable_or_raise()
spack.bootstrap.ensure_core_dependencies()
clingo_mod = importlib.import_module("clingo")
return clingo_mod

View File

@@ -1030,13 +1030,16 @@ def clear(self):
self.edges.clear()
def _command_default_handler(spec: "Spec"):
def _command_default_handler(descriptor, spec, cls):
"""Default handler when looking for the 'command' attribute.
Tries to search for ``spec.name`` in the ``spec.home.bin`` directory.
Parameters:
spec: spec that is being queried
descriptor (ForwardQueryToPackage): descriptor that triggered the call
spec (Spec): spec that is being queried
cls (type(spec)): type of spec, to match the signature of the
descriptor ``__get__`` method
Returns:
Executable: An executable of the command
@@ -1049,17 +1052,22 @@ def _command_default_handler(spec: "Spec"):
if fs.is_exe(path):
return spack.util.executable.Executable(path)
raise RuntimeError(f"Unable to locate {spec.name} command in {home.bin}")
else:
msg = "Unable to locate {0} command in {1}"
raise RuntimeError(msg.format(spec.name, home.bin))
def _headers_default_handler(spec: "Spec"):
def _headers_default_handler(descriptor, spec, cls):
"""Default handler when looking for the 'headers' attribute.
Tries to search for ``*.h`` files recursively starting from
``spec.package.home.include``.
Parameters:
spec: spec that is being queried
descriptor (ForwardQueryToPackage): descriptor that triggered the call
spec (Spec): spec that is being queried
cls (type(spec)): type of spec, to match the signature of the
descriptor ``__get__`` method
Returns:
HeaderList: The headers in ``prefix.include``
@@ -1072,10 +1080,12 @@ def _headers_default_handler(spec: "Spec"):
if headers:
return headers
raise spack.error.NoHeadersError(f"Unable to locate {spec.name} headers in {home}")
else:
msg = "Unable to locate {0} headers in {1}"
raise spack.error.NoHeadersError(msg.format(spec.name, home))
def _libs_default_handler(spec: "Spec"):
def _libs_default_handler(descriptor, spec, cls):
"""Default handler when looking for the 'libs' attribute.
Tries to search for ``lib{spec.name}`` recursively starting from
@@ -1083,7 +1093,10 @@ def _libs_default_handler(spec: "Spec"):
``{spec.name}`` instead.
Parameters:
spec: spec that is being queried
descriptor (ForwardQueryToPackage): descriptor that triggered the call
spec (Spec): spec that is being queried
cls (type(spec)): type of spec, to match the signature of the
descriptor ``__get__`` method
Returns:
LibraryList: The libraries found
@@ -1122,33 +1135,27 @@ def _libs_default_handler(spec: "Spec"):
if libs:
return libs
raise spack.error.NoLibrariesError(
f"Unable to recursively locate {spec.name} libraries in {home}"
)
msg = "Unable to recursively locate {0} libraries in {1}"
raise spack.error.NoLibrariesError(msg.format(spec.name, home))
class ForwardQueryToPackage:
"""Descriptor used to forward queries from Spec to Package"""
def __init__(
self,
attribute_name: str,
default_handler: Optional[Callable[["Spec"], Any]] = None,
_indirect: bool = False,
) -> None:
def __init__(self, attribute_name, default_handler=None):
"""Create a new descriptor.
Parameters:
attribute_name: name of the attribute to be searched for in the Package instance
default_handler: default function to be called if the attribute was not found in the
Package instance
_indirect: temporarily added to redirect a query to another package.
attribute_name (str): name of the attribute to be
searched for in the Package instance
default_handler (callable, optional): default function to be
called if the attribute was not found in the Package
instance
"""
self.attribute_name = attribute_name
self.default = default_handler
self.indirect = _indirect
def __get__(self, instance: "SpecBuildInterface", cls):
def __get__(self, instance, cls):
"""Retrieves the property from Package using a well defined chain
of responsibility.
@@ -1170,18 +1177,13 @@ def __get__(self, instance: "SpecBuildInterface", cls):
indicating a query failure, e.g. that library files were not found in a
'libs' query.
"""
# TODO: this indirection exists solely for `spec["python"].command` to actually return
# spec["python-venv"].command. It should be removed when `python` is a virtual.
if self.indirect and instance.indirect_spec:
pkg = instance.indirect_spec.package
else:
pkg = instance.wrapped_obj.package
pkg = instance.package
try:
query = instance.last_query
except AttributeError:
# There has been no query yet: this means
# a spec is trying to access its own attributes
_ = instance.wrapped_obj[instance.wrapped_obj.name] # NOQA: ignore=F841
_ = instance[instance.name] # NOQA: ignore=F841
query = instance.last_query
callbacks_chain = []
@@ -1193,8 +1195,7 @@ def __get__(self, instance: "SpecBuildInterface", cls):
callbacks_chain.append(lambda: getattr(pkg, self.attribute_name))
# Final resort : default callback
if self.default is not None:
_default = self.default # make mypy happy
callbacks_chain.append(lambda: _default(instance.wrapped_obj))
callbacks_chain.append(lambda: self.default(self, instance, cls))
# Trigger the callbacks in order, the first one producing a
# value wins
@@ -1253,33 +1254,25 @@ def __set__(self, instance, value):
class SpecBuildInterface(lang.ObjectWrapper):
# home is available in the base Package so no default is needed
home = ForwardQueryToPackage("home", default_handler=None)
headers = ForwardQueryToPackage("headers", default_handler=_headers_default_handler)
libs = ForwardQueryToPackage("libs", default_handler=_libs_default_handler)
command = ForwardQueryToPackage(
"command", default_handler=_command_default_handler, _indirect=True
)
def __init__(self, spec: "Spec", name: str, query_parameters: List[str], _parent: "Spec"):
command = ForwardQueryToPackage("command", default_handler=_command_default_handler)
headers = ForwardQueryToPackage("headers", default_handler=_headers_default_handler)
libs = ForwardQueryToPackage("libs", default_handler=_libs_default_handler)
def __init__(self, spec, name, query_parameters):
super().__init__(spec)
# Adding new attributes goes after super() call since the ObjectWrapper
# resets __dict__ to behave like the passed object
original_spec = getattr(spec, "wrapped_obj", spec)
self.wrapped_obj = original_spec
self.token = original_spec, name, query_parameters, _parent
self.token = original_spec, name, query_parameters
is_virtual = spack.repo.PATH.is_virtual(name)
self.last_query = QueryState(
name=name, extra_parameters=query_parameters, isvirtual=is_virtual
)
# TODO: this ad-hoc logic makes `spec["python"].command` return
# `spec["python-venv"].command` and should be removed when `python` is a virtual.
self.indirect_spec = None
if spec.name == "python":
python_venvs = _parent.dependencies("python-venv")
if not python_venvs:
return
self.indirect_spec = python_venvs[0]
def __reduce__(self):
return SpecBuildInterface, self.token
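
The chain-of-responsibility pattern behind ForwardQueryToPackage can be shown in miniature; the names below are generic illustrations, not Spack's API:

class ForwardingDescriptor:
    """Try a sequence of callbacks; the first one that does not raise wins."""

    def __init__(self, attribute_name, default_handler=None):
        self.attribute_name = attribute_name
        self.default = default_handler

    def __get__(self, instance, cls):
        # First try the wrapped package object, then fall back to the default.
        callbacks = [lambda: getattr(instance.package, self.attribute_name)]
        if self.default is not None:
            callbacks.append(lambda: self.default(instance))
        for callback in callbacks:
            try:
                return callback()
            except AttributeError:
                continue
        raise AttributeError(self.attribute_name)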
@@ -4144,7 +4137,7 @@ def version(self):
raise spack.error.SpecError("Spec version is not concrete: " + str(self))
return self.versions[0]
def __getitem__(self, name: str):
def __getitem__(self, name):
"""Get a dependency from the spec by its name. This call implicitly
sets a query state in the package being retrieved. The behavior of
packages may be influenced by additional query parameters that are
@@ -4153,7 +4146,7 @@ def __getitem__(self, name: str):
Note that if a virtual package is queried a copy of the Spec is
returned while for non-virtual a reference is returned.
"""
query_parameters: List[str] = name.split(":")
query_parameters = name.split(":")
if len(query_parameters) > 2:
raise KeyError("key has more than one ':' symbol. At most one is admitted.")
@@ -4176,7 +4169,7 @@ def __getitem__(self, name: str):
)
try:
child: Spec = next(
value = next(
itertools.chain(
# Regular specs
(x for x in order() if x.name == name),
@@ -4193,9 +4186,9 @@ def __getitem__(self, name: str):
raise KeyError(f"No spec with name {name} in {self}")
if self._concrete:
return SpecBuildInterface(child, name, query_parameters, _parent=self)
return SpecBuildInterface(value, name, query_parameters)
return child
return value
def __contains__(self, spec):
"""True if this spec or some dependency satisfies the spec.

View File

@@ -19,8 +19,6 @@
(["missing-dependency"], ["PKG-DIRECTIVES", "PKG-PROPERTIES"]),
# The package use a non existing variant in a depends_on directive
(["wrong-variant-in-depends-on"], ["PKG-DIRECTIVES", "PKG-PROPERTIES"]),
# This package has a GitHub pull request commit patch URL
(["invalid-github-pull-commits-patch-url"], ["PKG-DIRECTIVES", "PKG-PROPERTIES"]),
# This package has a GitHub patch URL without full_index=1
(["invalid-github-patch-url"], ["PKG-DIRECTIVES", "PKG-PROPERTIES"]),
# This package has invalid GitLab patch URLs

View File

@@ -22,7 +22,6 @@
import archspec.cpu
from llnl.util.filesystem import join_path, visit_directory_tree
from llnl.util.symlink import readlink
import spack.binary_distribution as bindist
import spack.caches
@@ -1063,10 +1062,10 @@ def test_tarball_common_prefix(dummy_prefix, tmpdir):
assert set(os.listdir(os.path.join("prefix2", "share"))) == {"file"}
# Relative symlink should still be correct
assert readlink(os.path.join("prefix2", "bin", "relative_app_link")) == "app"
assert os.readlink(os.path.join("prefix2", "bin", "relative_app_link")) == "app"
# Absolute symlink should remain absolute -- this is for relocation to fix up.
assert readlink(os.path.join("prefix2", "bin", "absolute_app_link")) == os.path.join(
assert os.readlink(os.path.join("prefix2", "bin", "absolute_app_link")) == os.path.join(
dummy_prefix, "bin", "app"
)

View File

@@ -14,7 +14,6 @@
import spack.build_environment
import spack.config
import spack.deptypes as dt
import spack.package_base
import spack.spec
import spack.util.spack_yaml as syaml
@@ -717,21 +716,3 @@ def test_build_system_globals_only_set_on_root_during_build(default_mock_concret
for depth, spec in root.traverse(depth=True, root=True):
for variable in build_variables:
assert hasattr(spec.package.module, variable) == should_be_set(depth)
def test_rpath_with_duplicate_link_deps():
"""If we have two instances of one package in the same link sub-dag, only the newest version is
rpath'ed. This is for runtime support without splicing."""
runtime_1 = spack.spec.Spec("runtime@=1.0")
runtime_2 = spack.spec.Spec("runtime@=2.0")
child = spack.spec.Spec("child@=1.0")
root = spack.spec.Spec("root@=1.0")
root.add_dependency_edge(child, depflag=dt.LINK, virtuals=())
root.add_dependency_edge(runtime_2, depflag=dt.LINK, virtuals=())
child.add_dependency_edge(runtime_1, depflag=dt.LINK, virtuals=())
rpath_deps = spack.build_environment._get_rpath_deps_from_spec(root, transitive_rpaths=True)
assert child in rpath_deps
assert runtime_2 in rpath_deps
assert runtime_1 not in rpath_deps

View File

@@ -51,7 +51,7 @@ def __init__(self, response_code=200, content_to_read=[]):
self._content = content_to_read
self._read = [False for c in content_to_read]
def open(self, request, data=None, timeout=object()):
def open(self, request):
return self
def getcode(self):

View File

@@ -760,6 +760,7 @@ def test_ci_rebuild_mock_success(
rebuild_env = create_rebuild_env(tmpdir, pkg_name, broken_tests)
monkeypatch.setattr(spack.cmd.ci, "SPACK_COMMAND", "echo")
monkeypatch.setattr(spack.cmd.ci, "MAKE_COMMAND", "echo")
with rebuild_env.env_dir.as_cwd():
activate_rebuild_env(tmpdir, pkg_name, rebuild_env)
@@ -842,6 +843,7 @@ def test_ci_rebuild(
ci_cmd("rebuild", "--tests", fail_on_error=False)
monkeypatch.setattr(spack.cmd.ci, "SPACK_COMMAND", "notcommand")
monkeypatch.setattr(spack.cmd.ci, "MAKE_COMMAND", "notcommand")
monkeypatch.setattr(spack.cmd.ci, "INSTALL_FAIL_CODE", 127)
with rebuild_env.env_dir.as_cwd():

View File

@@ -261,14 +261,15 @@ def test_compiler_list_empty(no_compilers_yaml, working_env, compilers_dir):
[
(
{
"spec": "gcc@=7.7.7 languages=c,cxx,fortran os=foobar target=x86_64",
"spec": "gcc@=7.7.7 os=foobar target=x86_64",
"prefix": "/path/to/fake",
"modules": ["gcc/7.7.7", "foobar"],
"extra_attributes": {
"compilers": {
"c": "/path/to/fake/gcc",
"paths": {
"cc": "/path/to/fake/gcc",
"cxx": "/path/to/fake/g++",
"fortran": "/path/to/fake/gfortran",
"fc": "/path/to/fake/gfortran",
"f77": "/path/to/fake/gfortran",
},
"flags": {"fflags": "-ffree-form"},
},
@@ -284,7 +285,26 @@ def test_compiler_list_empty(no_compilers_yaml, working_env, compilers_dir):
\tmodules = ['gcc/7.7.7', 'foobar']
\toperating system = foobar
""",
)
),
(
{
"spec": "gcc@7.7.7",
"prefix": "{prefix}",
"modules": ["gcc/7.7.7", "foobar"],
"extra_attributes": {"flags": {"fflags": "-ffree-form"}},
},
"""gcc@7.7.7:
\tpaths:
\t\tcc = {compilers_dir}{sep}gcc-8{suffix}
\t\tcxx = {compilers_dir}{sep}g++-8{suffix}
\t\tf77 = {compilers_dir}{sep}gfortran-8{suffix}
\t\tfc = {compilers_dir}{sep}gfortran-8{suffix}
\tflags:
\t\tfflags = ['-ffree-form']
\tmodules = ['gcc/7.7.7', 'foobar']
\toperating system = debian6
""",
),
],
)
def test_compilers_shows_packages_yaml(

View File

@@ -15,7 +15,6 @@
import llnl.util.filesystem as fs
import llnl.util.link_tree
import llnl.util.tty as tty
from llnl.util.symlink import readlink
import spack.cmd.env
import spack.config
@@ -61,27 +60,6 @@
sep = os.sep
def setup_combined_multiple_env():
env("create", "test1")
test1 = ev.read("test1")
with test1:
add("zlib")
test1.concretize()
test1.write()
env("create", "test2")
test2 = ev.read("test2")
with test2:
add("libelf")
test2.concretize()
test2.write()
env("create", "--include-concrete", "test1", "--include-concrete", "test2", "combined_env")
combined = ev.read("combined_env")
return test1, test2, combined
@pytest.fixture()
def environment_from_manifest(tmp_path):
"""Returns a new environment named 'test' from the content of a manifest file."""
@@ -391,29 +369,6 @@ def test_env_install_single_spec(install_mockery, mock_fetch):
assert e.specs_by_hash[e.concretized_order[0]].name == "cmake-client"
@pytest.mark.parametrize("unify", [True, False, "when_possible"])
def test_env_install_include_concrete_env(unify, install_mockery, mock_fetch):
test1, test2, combined = setup_combined_multiple_env()
combined.concretize()
combined.write()
combined.unify = unify
with combined:
install()
test1_roots = test1.concretized_order
test2_roots = test2.concretized_order
combined_included_roots = combined.included_concretized_order
for spec in combined.all_specs():
assert spec.installed
assert test1_roots == combined_included_roots[test1.path]
assert test2_roots == combined_included_roots[test2.path]
def test_env_roots_marked_explicit(install_mockery, mock_fetch):
install = SpackCommand("install")
install("dependent-install")
@@ -602,41 +557,6 @@ def test_remove_command():
assert "mpileaks@" not in find("--show-concretized")
def test_bad_remove_included_env():
env("create", "test")
test = ev.read("test")
with test:
add("mpileaks")
test.concretize()
test.write()
env("create", "--include-concrete", "test", "combined_env")
with pytest.raises(SpackCommandError):
env("remove", "test")
def test_force_remove_included_env():
env("create", "test")
test = ev.read("test")
with test:
add("mpileaks")
test.concretize()
test.write()
env("create", "--include-concrete", "test", "combined_env")
rm_output = env("remove", "-f", "-y", "test")
list_output = env("list")
assert '"test" is being used by environment "combined_env"' in rm_output
assert "test" not in list_output
def test_environment_status(capsys, tmpdir):
with tmpdir.as_cwd():
with capsys.disabled():
@@ -1716,275 +1636,6 @@ def test_env_without_view_install(tmpdir, mock_stage, mock_fetch, install_mocker
check_mpileaks_and_deps_in_view(view_dir)
@pytest.mark.parametrize("env_name", [True, False])
def test_env_include_concrete_env_yaml(env_name):
env("create", "test")
test = ev.read("test")
with test:
add("mpileaks")
test.concretize()
test.write()
environ = "test" if env_name else test.path
env("create", "--include-concrete", environ, "combined_env")
combined = ev.read("combined_env")
combined_yaml = combined.manifest["spack"]
assert "include_concrete" in combined_yaml
assert test.path in combined_yaml["include_concrete"]
def test_env_bad_include_concrete_env():
with pytest.raises(ev.SpackEnvironmentError):
env("create", "--include-concrete", "nonexistant_env", "combined_env")
def test_env_not_concrete_include_concrete_env():
env("create", "test")
test = ev.read("test")
with test:
add("mpileaks")
with pytest.raises(ev.SpackEnvironmentError):
env("create", "--include-concrete", "test", "combined_env")
def test_env_multiple_include_concrete_envs():
test1, test2, combined = setup_combined_multiple_env()
combined_yaml = combined.manifest["spack"]
assert test1.path in combined_yaml["include_concrete"][0]
assert test2.path in combined_yaml["include_concrete"][1]
# No local specs in the combined env
assert not combined_yaml["specs"]
def test_env_include_concrete_envs_lockfile():
test1, test2, combined = setup_combined_multiple_env()
combined_yaml = combined.manifest["spack"]
assert "include_concrete" in combined_yaml
assert test1.path in combined_yaml["include_concrete"]
with open(combined.lock_path) as f:
lockfile_as_dict = combined._read_lockfile(f)
assert set(
entry["hash"] for entry in lockfile_as_dict["include_concrete"][test1.path]["roots"]
) == set(test1.specs_by_hash)
assert set(
entry["hash"] for entry in lockfile_as_dict["include_concrete"][test2.path]["roots"]
) == set(test2.specs_by_hash)
def test_env_include_concrete_add_env():
test1, test2, combined = setup_combined_multiple_env()
# create new env & concretize
env("create", "new")
new_env = ev.read("new")
with new_env:
add("mpileaks")
new_env.concretize()
new_env.write()
# add new env to combined
combined.included_concrete_envs.append(new_env.path)
# assert things haven't changed yet
with open(combined.lock_path) as f:
lockfile_as_dict = combined._read_lockfile(f)
assert new_env.path not in lockfile_as_dict["include_concrete"].keys()
# concretize combined env with new env
combined.concretize()
combined.write()
# assert changes
with open(combined.lock_path) as f:
lockfile_as_dict = combined._read_lockfile(f)
assert new_env.path in lockfile_as_dict["include_concrete"].keys()
def test_env_include_concrete_remove_env():
test1, test2, combined = setup_combined_multiple_env()
# remove test2 from combined
combined.included_concrete_envs = [test1.path]
# assert test2 is still in combined's lockfile
with open(combined.lock_path) as f:
lockfile_as_dict = combined._read_lockfile(f)
assert test2.path in lockfile_as_dict["include_concrete"].keys()
# reconcretize combined
combined.concretize()
combined.write()
# assert test2 is not in combined's lockfile
with open(combined.lock_path) as f:
lockfile_as_dict = combined._read_lockfile(f)
assert test2.path not in lockfile_as_dict["include_concrete"].keys()
@pytest.mark.parametrize("unify", [True, False, "when_possible"])
def test_env_include_concrete_env_reconcretized(unify):
"""Double check to make sure that concrete_specs for the local specs is empty
after recocnretizing.
"""
_, _, combined = setup_combined_multiple_env()
combined.unify = unify
with open(combined.lock_path) as f:
lockfile_as_dict = combined._read_lockfile(f)
assert not lockfile_as_dict["roots"]
assert not lockfile_as_dict["concrete_specs"]
combined.concretize()
combined.write()
with open(combined.lock_path) as f:
lockfile_as_dict = combined._read_lockfile(f)
assert not lockfile_as_dict["roots"]
assert not lockfile_as_dict["concrete_specs"]
def test_concretize_include_concrete_env():
test1, _, combined = setup_combined_multiple_env()
with test1:
add("mpileaks")
test1.concretize()
test1.write()
assert Spec("mpileaks") in test1.concretized_user_specs
assert Spec("mpileaks") not in combined.included_concretized_user_specs[test1.path]
combined.concretize()
combined.write()
assert Spec("mpileaks") in combined.included_concretized_user_specs[test1.path]
def test_concretize_nested_include_concrete_envs():
env("create", "test1")
test1 = ev.read("test1")
with test1:
add("zlib")
test1.concretize()
test1.write()
env("create", "--include-concrete", "test1", "test2")
test2 = ev.read("test2")
with test2:
add("libelf")
test2.concretize()
test2.write()
env("create", "--include-concrete", "test2", "test3")
test3 = ev.read("test3")
with open(test3.lock_path) as f:
lockfile_as_dict = test3._read_lockfile(f)
assert test2.path in lockfile_as_dict["include_concrete"]
assert test1.path in lockfile_as_dict["include_concrete"][test2.path]["include_concrete"]
assert Spec("zlib") in test3.included_concretized_user_specs[test1.path]
def test_concretize_nested_included_concrete():
"""Confirm that nested included environments use specs concretized at
environment creation time and change with reconcretization."""
env("create", "test1")
test1 = ev.read("test1")
with test1:
add("zlib")
test1.concretize()
test1.write()
# test2 should include test1 with zlib
env("create", "--include-concrete", "test1", "test2")
test2 = ev.read("test2")
with test2:
add("libelf")
test2.concretize()
test2.write()
assert Spec("zlib") in test2.included_concretized_user_specs[test1.path]
# Modify/re-concretize test1 to replace zlib with mpileaks
with test1:
remove("zlib")
add("mpileaks")
test1.concretize()
test1.write()
# test3 should include the latest concretization of test1
env("create", "--include-concrete", "test1", "test3")
test3 = ev.read("test3")
with test3:
add("callpath")
test3.concretize()
test3.write()
included_specs = test3.included_concretized_user_specs[test1.path]
assert len(included_specs) == 1
assert Spec("mpileaks") in included_specs
# The last concretization of test4's included environments should have test2
# with the original concretized test1 spec and test3 with the re-concretized
# test1 spec.
env("create", "--include-concrete", "test2", "--include-concrete", "test3", "test4")
test4 = ev.read("test4")
def included_included_spec(path1, path2):
included_path1 = test4.included_concrete_spec_data[path1]
included_path2 = included_path1["include_concrete"][path2]
return included_path2["roots"][0]["spec"]
included_test2_test1 = included_included_spec(test2.path, test1.path)
assert "zlib" in included_test2_test1
included_test3_test1 = included_included_spec(test3.path, test1.path)
assert "mpileaks" in included_test3_test1
# test4's concretized specs should reflect the original concretization.
concrete_specs = [s for s, _ in test4.concretized_specs()]
expected = [Spec(s) for s in ["libelf", "zlib", "mpileaks", "callpath"]]
assert all(s in concrete_specs for s in expected)
# Re-concretize test2 to reflect the new concretization of included test1
# to remove zlib and write it out so it can be picked up by test4.
# Re-concretize test4 to reflect the re-concretization of included test2
# and ensure that its included specs are up-to-date
test2.concretize()
test2.write()
test4.concretize()
concrete_specs = [s for s, _ in test4.concretized_specs()]
assert Spec("zlib") not in concrete_specs
# Expecting mpileaks to appear only once
expected = [Spec(s) for s in ["libelf", "mpileaks", "callpath"]]
assert len(concrete_specs) == 3 and all(s in concrete_specs for s in expected)
def test_env_config_view_default(
environment_from_manifest, mock_stage, mock_fetch, install_mockery
):
@@ -3639,7 +3290,7 @@ def test_create_and_activate_managed(tmp_path):
def test_create_and_activate_anonymous(tmp_path):
with fs.working_dir(str(tmp_path)):
env_dir = os.path.join(str(tmp_path), "foo")
shell = env("activate", "--without-view", "--create", "--sh", env_dir)
shell = env("activate", "--without-view", "--create", "--sh", "-d", env_dir)
active_env_var = next(line for line in shell.splitlines() if ev.spack_env_var in line)
assert str(env_dir) in active_env_var
assert ev.is_env_dir(env_dir)
@@ -4415,8 +4066,8 @@ def test_env_view_resolves_identical_file_conflicts(tmp_path, install_mockery, m
# view-file/bin/
# x # expect this x to be linked
assert readlink(tmp_path / "view" / "bin" / "x") == bottom.bin.x
assert readlink(tmp_path / "view" / "bin" / "y") == top.bin.y
assert os.readlink(tmp_path / "view" / "bin" / "x") == bottom.bin.x
assert os.readlink(tmp_path / "view" / "bin" / "y") == top.bin.y
def test_env_view_ignores_different_file_conflicts(tmp_path, install_mockery, mock_fetch):
@@ -4427,4 +4078,4 @@ def test_env_view_ignores_different_file_conflicts(tmp_path, install_mockery, mo
install()
prefix_dependent = e.matching_spec("view-ignore-conflict").prefix
# The dependent's file is linked into the view
assert readlink(tmp_path / "view" / "bin" / "x") == prefix_dependent.bin.x
assert os.readlink(tmp_path / "view" / "bin" / "x") == prefix_dependent.bin.x

View File

@@ -33,23 +33,21 @@ def check_output(ni):
packages = extensions("-s", "packages", "python")
installed = extensions("-s", "installed", "python")
assert "==> python@2.7.11" in output
assert "==> 3 extensions" in output
assert "==> 2 extensions" in output
assert "py-extension1" in output
assert "py-extension2" in output
assert "python-venv" in output
assert "==> 3 extensions" in packages
assert "==> 2 extensions" in packages
assert "py-extension1" in packages
assert "py-extension2" in packages
assert "python-venv" in packages
assert "installed" not in packages
assert f"{ni if ni else 'None'} installed" in output
assert f"{ni if ni else 'None'} installed" in installed
assert ("%s installed" % (ni if ni else "None")) in output
assert ("%s installed" % (ni if ni else "None")) in installed
check_output(3)
ext2.package.do_uninstall(force=True)
check_output(2)
ext2.package.do_uninstall(force=True)
check_output(1)
def test_extensions_no_arguments(mock_packages):

View File

@@ -349,87 +349,6 @@ def test_find_prefix_in_env(
# Would throw error on regression
def test_find_specs_include_concrete_env(mutable_mock_env_path, config, mutable_mock_repo, tmpdir):
path = tmpdir.join("spack.yaml")
with tmpdir.as_cwd():
with open(str(path), "w") as f:
f.write(
"""\
spack:
specs:
- mpileaks
"""
)
env("create", "test1", "spack.yaml")
test1 = ev.read("test1")
test1.concretize()
test1.write()
with tmpdir.as_cwd():
with open(str(path), "w") as f:
f.write(
"""\
spack:
specs:
- libelf
"""
)
env("create", "test2", "spack.yaml")
test2 = ev.read("test2")
test2.concretize()
test2.write()
env("create", "--include-concrete", "test1", "--include-concrete", "test2", "combined_env")
with ev.read("combined_env"):
output = find()
assert "No root specs" in output
assert "Included specs" in output
assert "mpileaks" in output
assert "libelf" in output
def test_find_specs_nested_include_concrete_env(
mutable_mock_env_path, config, mutable_mock_repo, tmpdir
):
path = tmpdir.join("spack.yaml")
with tmpdir.as_cwd():
with open(str(path), "w") as f:
f.write(
"""\
spack:
specs:
- mpileaks
"""
)
env("create", "test1", "spack.yaml")
test1 = ev.read("test1")
test1.concretize()
test1.write()
env("create", "--include-concrete", "test1", "test2")
test2 = ev.read("test2")
test2.add("libelf")
test2.concretize()
test2.write()
env("create", "--include-concrete", "test2", "test3")
with ev.read("test3"):
output = find()
assert "No root specs" in output
assert "Included specs" in output
assert "mpileaks" in output
assert "libelf" in output
def test_find_loaded(database, working_env):
output = find("--loaded", "--group")
assert output == ""

View File

@@ -384,18 +384,9 @@ def test_clang_flags():
unsupported_flag_test("cxx17_flag", "clang@3.4")
supported_flag_test("cxx17_flag", "-std=c++1z", "clang@3.5")
supported_flag_test("cxx17_flag", "-std=c++17", "clang@5.0")
unsupported_flag_test("cxx20_flag", "clang@4.0")
supported_flag_test("cxx20_flag", "-std=c++2a", "clang@5.0")
supported_flag_test("cxx20_flag", "-std=c++20", "clang@11.0")
unsupported_flag_test("cxx23_flag", "clang@11.0")
supported_flag_test("cxx23_flag", "-std=c++2b", "clang@12.0")
supported_flag_test("cxx23_flag", "-std=c++23", "clang@17.0")
supported_flag_test("c99_flag", "-std=c99", "clang@3.3")
unsupported_flag_test("c11_flag", "clang@2.0")
supported_flag_test("c11_flag", "-std=c11", "clang@6.1.0")
unsupported_flag_test("c23_flag", "clang@8.0")
supported_flag_test("c23_flag", "-std=c2x", "clang@9.0")
supported_flag_test("c23_flag", "-std=c23", "clang@18.0")
supported_flag_test("cc_pic_flag", "-fPIC", "clang@3.3")
supported_flag_test("cxx_pic_flag", "-fPIC", "clang@3.3")
supported_flag_test("f77_pic_flag", "-fPIC", "clang@3.3")

View File

@@ -1244,11 +1244,10 @@ def test_variant_not_default(self):
@pytest.mark.regression("20055")
@pytest.mark.only_clingo("Use case not supported by the original concretizer")
def test_custom_compiler_version(self, mutable_config, compiler_factory, monkeypatch):
def test_custom_compiler_version(self, mutable_config, compiler_factory):
mutable_config.set(
"compilers", [compiler_factory(spec="gcc@10foo", operating_system="redhat6")]
)
monkeypatch.setattr(spack.compiler.Compiler, "real_version", "10.2.1")
s = Spec("a %gcc@10foo os=redhat6").concretized()
assert "%gcc@10foo" in s
@@ -1914,11 +1913,11 @@ def test_version_weight_and_provenance(self):
libc_offset = 1 if spack.solver.asp.using_libc_compatibility() else 0
criteria = [
(num_specs - 1 - libc_offset, None, "number of packages to build (vs. reuse)"),
(2, 0, "version badness (non roots)"),
(2, 0, "version badness"),
]
for criterion in criteria:
assert criterion in result.criteria, criterion
assert criterion in result.criteria, result_spec
assert result_spec.satisfies("^b@1.0")
@pytest.mark.only_clingo("Use case not supported by the original concretizer")
@@ -2463,89 +2462,6 @@ def test_spec_with_build_dep_from_json(self, tmp_path):
s = Spec(f"dtuse ^{str(json_file)}").concretized()
assert s["dttop"].dag_hash() == build_dep.dag_hash()
@pytest.mark.regression("44040")
@pytest.mark.only_clingo("Use case not supported by the original concretizer")
def test_exclude_specs_from_reuse(self, monkeypatch):
"""Tests that we can exclude a spec from reuse when concretizing, and that the spec
is not added back to the solve as a dependency of another reusable spec.
The expected spec is:
o callpath@1.0
|\
| |\
o | | mpich@3.0.4
|/ /
| o dyninst@8.2
|/|
| |\
| | o libdwarf@20130729
| |/|
|/|/
| o libelf@0.8.13
|/
o glibc@2.31
"""
# Prepare a mock mirror that returns an old version of dyninst
request_str = "callpath ^mpich"
reused = Spec(f"{request_str} ^dyninst@8.1.1").concretized()
monkeypatch.setattr(spack.solver.asp, "_specs_from_mirror", lambda: [reused])
# Exclude dyninst from reuse, so we expect that the old version is not taken into account
with spack.config.override(
"concretizer:reuse", {"from": [{"type": "buildcache", "exclude": ["dyninst"]}]}
):
result = Spec(request_str).concretized()
assert result.dag_hash() != reused.dag_hash()
assert result["mpich"].dag_hash() == reused["mpich"].dag_hash()
assert result["dyninst"].dag_hash() != reused["dyninst"].dag_hash()
assert result["dyninst"].satisfies("@=8.2")
for dep in result["dyninst"].traverse(root=False):
assert dep.dag_hash() == reused[dep.name].dag_hash()
@pytest.mark.regression("44091")
@pytest.mark.parametrize(
"included_externals",
[
["deprecated-versions"],
# Also try the empty list: everything should be included in that case,
# since filtering only happens when the list is non-empty
[],
],
)
@pytest.mark.only_clingo("Use case not supported by the original concretizer")
def test_include_specs_from_externals_and_libcs(
self, included_externals, mutable_config, tmp_path
):
"""Tests that when we include specs from externals, we always include libcs."""
mutable_config.set(
"packages",
{
"deprecated-versions": {
"externals": [{"spec": "deprecated-versions@1.1.0", "prefix": str(tmp_path)}]
}
},
)
request_str = "deprecated-client"
# When using the external the version is selected even if deprecated
with spack.config.override(
"concretizer:reuse", {"from": [{"type": "external", "include": included_externals}]}
):
result = Spec(request_str).concretized()
assert result["deprecated-versions"].satisfies("@1.1.0")
# When excluding it, we pick the non-deprecated version
with spack.config.override(
"concretizer:reuse",
{"from": [{"type": "external", "exclude": ["deprecated-versions"]}]},
):
result = Spec(request_str).concretized()
assert result["deprecated-versions"].satisfies("@1.0.0")
@pytest.fixture()
def duplicates_test_repository():

View File

@@ -1492,26 +1492,3 @@ def test_config_path_dsl(path, it_should_work, expected_parsed):
else:
with pytest.raises(ValueError):
spack.config.ConfigPath._validate(path)
def test_compiler_parsing_errors(tmpdir):
content = """\
packages:
gcc:
externals:
- spec: gcc@8.5.0 languages='c,c++,fortran'
prefix: /usr
extra_attributes:
compilers:
c: /usr/bin/gcc
cxx: /usr/bin/g++
fortran: /usr/bin/gfortran
"""
testscope = join_path(tmpdir.strpath, "packages.yaml")
with open(testscope, "w") as f:
f.write(content)
with spack.config.use_configuration(tmpdir.strpath):
compilers = spack.compilers.get_compiler_config_from_packages(spack.config.CONFIG)
assert spack.spec.Spec(compilers[0]["compiler"]["spec"]).satisfies("gcc@8.5.0")
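Only compilers[0]["compiler"]["spec"] is pinned down by the assertion, so most of the shape below is guesswork: a hedged sketch of one entry that get_compiler_config_from_packages might return, with every key other than "spec" marked as an assumption.

# Hedged sketch of one returned entry. Only ["compiler"]["spec"] is implied
# by the assertion above; the remaining keys are invented for illustration.
compilers = [
    {
        "compiler": {
            "spec": "gcc@8.5.0",
            # assumed fields, mirroring the packages.yaml stanza above:
            "paths": {
                "cc": "/usr/bin/gcc",
                "cxx": "/usr/bin/g++",
                "fc": "/usr/bin/gfortran",
            },
        }
    }
]
assert compilers[0]["compiler"]["spec"] == "gcc@8.5.0"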

View File

@@ -1694,7 +1694,7 @@ def mock_executable(tmp_path):
"""Factory to create a mock executable in a temporary directory that
outputs a custom string when run.
"""
shebang = "#!/bin/sh\n" if sys.platform != "win32" else "@ECHO OFF\n"
shebang = "#!/bin/sh\n" if sys.platform != "win32" else "@ECHO OFF"
def _factory(name, output, subdir=("bin",)):
executable_dir = tmp_path.joinpath(*subdir)
@@ -2053,11 +2053,3 @@ def _true(x):
@pytest.fixture()
def do_not_check_runtimes_on_reuse(monkeypatch):
monkeypatch.setattr(spack.solver.asp, "_has_runtime_dependencies", _true)
@pytest.fixture(autouse=True, scope="session")
def _c_compiler_always_exists():
fn = spack.solver.asp.c_compiler_runs
spack.solver.asp.c_compiler_runs = _true
yield
spack.solver.asp.c_compiler_runs = fn

View File

@@ -17,7 +17,7 @@ def test_command(default_config, container_config_dir, capsys):
with capsys.disabled():
with fs.working_dir(container_config_dir):
output = containerize()
assert "FROM spack/ubuntu-jammy" in output
assert "FROM spack/ubuntu-bionic" in output
def test_listing_possible_os():

View File

@@ -14,7 +14,7 @@ def minimal_configuration():
"specs": ["gromacs", "mpich", "fftw precision=float"],
"container": {
"format": "docker",
"images": {"os": "ubuntu:22.04", "spack": "develop"},
"images": {"os": "ubuntu:18.04", "spack": "develop"},
},
}
}

View File

@@ -21,11 +21,11 @@ def test_build_and_run_images(minimal_configuration):
# Test the output of run property
run = writer.run
assert run.image == "ubuntu:22.04"
assert run.image == "ubuntu:18.04"
# Test the output of the build property
build = writer.build
assert build.image == "spack/ubuntu-jammy:develop"
assert build.image == "spack/ubuntu-bionic:develop"
def test_packages(minimal_configuration):

View File

@@ -12,8 +12,8 @@
@pytest.mark.parametrize(
"image,spack_version,expected",
[
("ubuntu:22.04", "develop", ("spack/ubuntu-jammy", "develop")),
("ubuntu:22.04", "0.14.0", ("spack/ubuntu-jammy", "0.14.0")),
("ubuntu:18.04", "develop", ("spack/ubuntu-bionic", "develop")),
("ubuntu:18.04", "0.14.0", ("spack/ubuntu-bionic", "0.14.0")),
],
)
def test_build_info(image, spack_version, expected):
@@ -21,7 +21,7 @@ def test_build_info(image, spack_version, expected):
assert output == expected
@pytest.mark.parametrize("image", ["ubuntu:22.04"])
@pytest.mark.parametrize("image", ["ubuntu:18.04"])
def test_package_info(image):
pkg_manager = spack.container.images.os_package_manager_for(image)
update, install, clean = spack.container.images.commands_for(pkg_manager)
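The image renames in these hunks track Ubuntu codenames (18.04 is bionic, 22.04 is jammy). A toy version of the lookup that build_info performs, reduced to the two entries the tests exercise; the real module resolves images from its own data, so treat the table as an assumption:

# Toy (run image, spack version) -> (build image, tag) lookup matching the
# parametrized expectations above; table entries beyond these are unknown.
_BUILD_IMAGES = {
    "ubuntu:18.04": "spack/ubuntu-bionic",
    "ubuntu:22.04": "spack/ubuntu-jammy",
}

def build_info(image: str, spack_version: str) -> tuple:
    return _BUILD_IMAGES[image], spack_version

assert build_info("ubuntu:22.04", "develop") == ("spack/ubuntu-jammy", "develop")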

View File

@@ -813,33 +813,3 @@ def test_deconcretize_then_concretize_does_not_error(mutable_mock_env_path, mock
assert len(e.concrete_roots()) == 3
all_root_hashes = set(x.dag_hash() for x in e.concrete_roots())
assert len(all_root_hashes) == 2
@pytest.mark.regression("44216")
@pytest.mark.only_clingo()
def test_root_version_weights_for_old_versions(mutable_mock_env_path, mock_packages):
"""Tests that, when we select two old versions of root specs that have the same version
optimization penalty, both are considered.
"""
mutable_mock_env_path.mkdir()
spack_yaml = mutable_mock_env_path / ev.manifest_name
spack_yaml.write_text(
"""spack:
specs:
# allow any version but the most recent
- bowtie@:1.3
# allows only the third most recent, so penalty is 2
- gcc@1
concretizer:
unify: true
"""
)
e = ev.Environment(mutable_mock_env_path)
with e:
e.concretize()
bowtie = [x for x in e.concrete_roots() if x.name == "bowtie"][0]
gcc = [x for x in e.concrete_roots() if x.name == "gcc"][0]
assert bowtie.satisfies("@=1.3.0")
assert gcc.satisfies("@=1.0")
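The manifest comment quantifies the penalty informally; one consistent reading is that versions are ranked newest-first and choosing the version at index i costs i. A worked toy example under that assumption (not the solver's actual weighting):

# Toy illustration of "allows only the third most recent, so penalty is 2",
# assuming the penalty is the index in a newest-first ranking.
def version_penalty(versions_newest_first, chosen):
    return versions_newest_first.index(chosen)

gcc_versions = ["3.0", "2.0", "1.0"]  # invented list; only the ordering matters
assert version_penalty(gcc_versions, "1.0") == 2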

View File

@@ -14,7 +14,7 @@
import pytest
import llnl.util.filesystem as fs
from llnl.util.symlink import islink, readlink, symlink
from llnl.util.symlink import islink, symlink
import spack.paths
@@ -181,7 +181,7 @@ def test_symlinks_true(self, stage):
assert os.path.exists("dest/a/b2")
with fs.working_dir("dest/a"):
assert os.path.exists(readlink("b2"))
assert os.path.exists(os.readlink("b2"))
assert os.path.realpath("dest/f/2") == os.path.abspath("dest/a/b/2")
assert os.path.realpath("dest/2") == os.path.abspath("dest/1")
@@ -281,7 +281,7 @@ def test_allow_broken_symlinks(self, stage):
symlink("nonexistant.txt", "source/broken", allow_broken_symlinks=True)
fs.install_tree("source", "dest", symlinks=True, allow_broken_symlinks=True)
assert os.path.islink("dest/broken")
assert not os.path.exists(readlink("dest/broken"))
assert not os.path.exists(os.readlink("dest/broken"))
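These hunks (and the module-command hunk below) replace spack's llnl.util.symlink.readlink wrapper with plain os.readlink. Why the wrapper existed is not stated in this diff; a plausible motivation is smoothing over links that os.readlink cannot resolve on some platforms. A sketch of such a fallback, offered as an assumption rather than spack's actual implementation:

import os

def readlink(path):
    # Hypothetical fallback wrapper (assumed rationale, not spack's code):
    # resolve the real path when os.readlink cannot handle the link type.
    try:
        return os.readlink(path)
    except OSError:
        return os.path.realpath(path)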
def test_glob_src(self, stage):
"""Test using a glob as the source."""

View File

@@ -7,8 +7,6 @@
import pytest
from llnl.util.symlink import readlink
import spack.cmd.modules
import spack.config
import spack.error
@@ -80,7 +78,7 @@ def test_modules_default_symlink(
link_path = os.path.join(os.path.dirname(mock_module_filename), "default")
assert os.path.islink(link_path)
assert readlink(link_path) == mock_module_filename
assert os.readlink(link_path) == mock_module_filename
generator.remove()
assert not os.path.lexists(link_path)

Some files were not shown because too many files have changed in this diff