Compare commits

16 Commits

develop-20 ... features/e

Commits (SHA1):
1b4ff30665
c10797718a
81ea29b007
d8666a7fdf
7c41bba6f8
20e9fe3785
401218b4f1
adfc1c0896
f4402c1cde
c1d6d93388
e9012c7781
59acfe4f0b
004ff9d4e2
9d20be5fe5
edc07dab27
acde8ef104
10  .github/dependabot.yml  vendored

@@ -5,13 +5,3 @@ updates:
directory: "/"
schedule:
interval: "daily"
# Requirements to build documentation
- package-ecosystem: "pip"
directory: "/lib/spack/docs"
schedule:
interval: "daily"
# Requirements to run style checks
- package-ecosystem: "pip"
directory: "/.github/workflows/style"
schedule:
interval: "daily"
13  .github/workflows/audit.yaml  vendored

@@ -17,13 +17,10 @@ concurrency:
jobs:
# Run audits on all the packages in the built-in repository
package-audits:
runs-on: ${{ matrix.operating_system }}
strategy:
matrix:
operating_system: ["ubuntu-latest", "macos-latest"]
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2
- uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # @v2
- uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
- uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 # @v2
with:
python-version: ${{inputs.python_version}}
- name: Install Python packages

@@ -34,7 +31,6 @@ jobs:
run: |
. share/spack/setup-env.sh
coverage run $(which spack) audit packages
coverage run $(which spack) audit externals
coverage combine
coverage xml
- name: Package audits (without coverage)

@@ -42,8 +38,7 @@ jobs:
run: |
. share/spack/setup-env.sh
$(which spack) audit packages
$(which spack) audit externals
- uses: codecov/codecov-action@eaaf4bedf32dbdc6b720b63067d99c4d77d6047d # @v2.1.0
if: ${{ inputs.with_coverage == 'true' }}
with:
flags: unittests,audits
flags: unittests,linux,audits
43  .github/workflows/bootstrap.yml  vendored

@@ -24,7 +24,7 @@ jobs:
make patch unzip which xz python3 python3-devel tree \
cmake bison bison-devel libstdc++-static
- name: Checkout
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
with:
fetch-depth: 0
- name: Setup non-root user

@@ -42,8 +42,8 @@ jobs:
shell: runuser -u spack-test -- bash {0}
run: |
source share/spack/setup-env.sh
spack bootstrap disable github-actions-v0.5
spack bootstrap disable github-actions-v0.4
spack bootstrap disable github-actions-v0.3
spack external find cmake bison
spack -d solve zlib
tree ~/.spack/bootstrap/store/

@@ -62,7 +62,7 @@ jobs:
make patch unzip xz-utils python3 python3-dev tree \
cmake bison
- name: Checkout
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
with:
fetch-depth: 0
- name: Setup non-root user

@@ -80,8 +80,8 @@ jobs:
shell: runuser -u spack-test -- bash {0}
run: |
source share/spack/setup-env.sh
spack bootstrap disable github-actions-v0.5
spack bootstrap disable github-actions-v0.4
spack bootstrap disable github-actions-v0.3
spack external find cmake bison
spack -d solve zlib
tree ~/.spack/bootstrap/store/

@@ -99,7 +99,7 @@ jobs:
bzip2 curl file g++ gcc gfortran git gnupg2 gzip \
make patch unzip xz-utils python3 python3-dev tree
- name: Checkout
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
with:
fetch-depth: 0
- name: Setup non-root user

@@ -133,7 +133,7 @@ jobs:
make patch unzip which xz python3 python3-devel tree \
cmake bison
- name: Checkout
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
with:
fetch-depth: 0
- name: Setup repo

@@ -145,8 +145,8 @@ jobs:
- name: Bootstrap clingo
run: |
source share/spack/setup-env.sh
spack bootstrap disable github-actions-v0.5
spack bootstrap disable github-actions-v0.4
spack bootstrap disable github-actions-v0.3
spack external find cmake bison
spack -d solve zlib
tree ~/.spack/bootstrap/store/

@@ -158,16 +158,13 @@ jobs:
run: |
brew install cmake bison@2.7 tree
- name: Checkout
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
- uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # @v2
with:
python-version: "3.12"
uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
- name: Bootstrap clingo
run: |
source share/spack/setup-env.sh
export PATH=/usr/local/opt/bison@2.7/bin:$PATH
spack bootstrap disable github-actions-v0.5
spack bootstrap disable github-actions-v0.4
spack bootstrap disable github-actions-v0.3
spack external find --not-buildable cmake bison
spack -d solve zlib
tree ~/.spack/bootstrap/store/

@@ -182,11 +179,11 @@ jobs:
run: |
brew install tree
- name: Checkout
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
- name: Bootstrap clingo
run: |
set -ex
for ver in '3.7' '3.8' '3.9' '3.10' '3.11' ; do
for ver in '3.6' '3.7' '3.8' '3.9' '3.10' ; do
not_found=1
ver_dir="$(find $RUNNER_TOOL_CACHE/Python -wholename "*/${ver}.*/*/bin" | grep . || true)"
echo "Testing $ver_dir"

@@ -207,7 +204,7 @@ jobs:
runs-on: ubuntu-20.04
steps:
- name: Checkout
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
with:
fetch-depth: 0
- name: Setup repo

@@ -217,7 +214,7 @@ jobs:
- name: Bootstrap clingo
run: |
set -ex
for ver in '3.7' '3.8' '3.9' '3.10' '3.11' ; do
for ver in '3.6' '3.7' '3.8' '3.9' '3.10' ; do
not_found=1
ver_dir="$(find $RUNNER_TOOL_CACHE/Python -wholename "*/${ver}.*/*/bin" | grep . || true)"
echo "Testing $ver_dir"

@@ -250,7 +247,7 @@ jobs:
bzip2 curl file g++ gcc patchelf gfortran git gzip \
make patch unzip xz-utils python3 python3-dev tree
- name: Checkout
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
with:
fetch-depth: 0
- name: Setup non-root user

@@ -268,7 +265,6 @@ jobs:
shell: runuser -u spack-test -- bash {0}
run: |
source share/spack/setup-env.sh
spack bootstrap disable github-actions-v0.4
spack bootstrap disable spack-install
spack -d gpg list
tree ~/.spack/bootstrap/store/

@@ -287,7 +283,7 @@ jobs:
make patch unzip xz-utils python3 python3-dev tree \
gawk
- name: Checkout
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
with:
fetch-depth: 0
- name: Setup non-root user

@@ -306,8 +302,8 @@ jobs:
run: |
source share/spack/setup-env.sh
spack solve zlib
spack bootstrap disable github-actions-v0.5
spack bootstrap disable github-actions-v0.4
spack bootstrap disable github-actions-v0.3
spack -d gpg list
tree ~/.spack/bootstrap/store/

@@ -320,11 +316,10 @@ jobs:
# Remove GnuPG since we want to bootstrap it
sudo rm -rf /usr/local/bin/gpg
- name: Checkout
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
- name: Bootstrap GnuPG
run: |
source share/spack/setup-env.sh
spack bootstrap disable github-actions-v0.4
spack bootstrap disable spack-install
spack -d gpg list
tree ~/.spack/bootstrap/store/

@@ -338,13 +333,13 @@ jobs:
# Remove GnuPG since we want to bootstrap it
sudo rm -rf /usr/local/bin/gpg
- name: Checkout
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
- name: Bootstrap GnuPG
run: |
source share/spack/setup-env.sh
spack solve zlib
spack bootstrap disable github-actions-v0.5
spack bootstrap disable github-actions-v0.4
spack bootstrap disable github-actions-v0.3
spack -d gpg list
tree ~/.spack/bootstrap/store/
52  .github/workflows/build-containers.yml  vendored

@@ -38,39 +38,38 @@ jobs:
# Meaning of the various items in the matrix list
# 0: Container name (e.g. ubuntu-bionic)
# 1: Platforms to build for
# 2: Base image (e.g. ubuntu:22.04)
# 2: Base image (e.g. ubuntu:18.04)
dockerfile: [[amazon-linux, 'linux/amd64,linux/arm64', 'amazonlinux:2'],
[centos7, 'linux/amd64,linux/arm64,linux/ppc64le', 'centos:7'],
[centos-stream, 'linux/amd64,linux/arm64,linux/ppc64le', 'centos:stream'],
[leap15, 'linux/amd64,linux/arm64,linux/ppc64le', 'opensuse/leap:15'],
[ubuntu-bionic, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:18.04'],
[ubuntu-focal, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:20.04'],
[ubuntu-jammy, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:22.04'],
[almalinux8, 'linux/amd64,linux/arm64,linux/ppc64le', 'almalinux:8'],
[almalinux9, 'linux/amd64,linux/arm64,linux/ppc64le', 'almalinux:9'],
[rockylinux8, 'linux/amd64,linux/arm64', 'rockylinux:8'],
[rockylinux9, 'linux/amd64,linux/arm64', 'rockylinux:9'],
[rockylinux9, 'linux/amd64,linux/arm64,linux/ppc64le', 'rockylinux:9'],
[fedora37, 'linux/amd64,linux/arm64,linux/ppc64le', 'fedora:37'],
[fedora38, 'linux/amd64,linux/arm64,linux/ppc64le', 'fedora:38']]
name: Build ${{ matrix.dockerfile[0] }}
if: github.repository == 'spack/spack'
steps:
- name: Checkout
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2
uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2

- uses: docker/metadata-action@e6428a5c4e294a61438ed7f43155db912025b6b3
id: docker_meta
with:
images: |
ghcr.io/${{ github.repository_owner }}/${{ matrix.dockerfile[0] }}
${{ github.repository_owner }}/${{ matrix.dockerfile[0] }}
tags: |
type=schedule,pattern=nightly
type=schedule,pattern=develop
type=semver,pattern={{version}}
type=semver,pattern={{major}}.{{minor}}
type=semver,pattern={{major}}
type=ref,event=branch
type=ref,event=pr
- name: Set Container Tag Normal (Nightly)
run: |
container="${{ matrix.dockerfile[0] }}:latest"
echo "container=${container}" >> $GITHUB_ENV
echo "versioned=${container}" >> $GITHUB_ENV

# On a new release create a container with the same tag as the release.
- name: Set Container Tag on Release
if: github.event_name == 'release'
run: |
versioned="${{matrix.dockerfile[0]}}:${GITHUB_REF##*/}"
echo "versioned=${versioned}" >> $GITHUB_ENV

- name: Generate the Dockerfile
env:

@@ -87,19 +86,19 @@ jobs:
fi

- name: Upload Dockerfile
uses: actions/upload-artifact@a8a3f3ad30e3422c9c7b888a15615d19a852ae32
uses: actions/upload-artifact@0b7f8abb1508181956e8e162db84b466c27e18ce
with:
name: dockerfiles
path: dockerfiles

- name: Set up QEMU
uses: docker/setup-qemu-action@68827325e0b33c7199eb31dd4e31fbe9023e06e3
uses: docker/setup-qemu-action@e81a89b1732b9c48d79cd809d8d81d79c4647a18 # @v1

- name: Set up Docker Buildx
uses: docker/setup-buildx-action@f95db51fddba0c2d1ec667646a06c2ce06100226
uses: docker/setup-buildx-action@4b4e9c3e2d4531116a6f8ba8e71fc6e2cb6e6c8c # @v1

- name: Log in to GitHub Container Registry
uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d
uses: docker/login-action@f4ef78c080cd8ba55a85445d5b36e214a81df20a # @v1
with:
registry: ghcr.io
username: ${{ github.actor }}

@@ -107,18 +106,21 @@ jobs:

- name: Log in to DockerHub
if: github.event_name != 'pull_request'
uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d
uses: docker/login-action@f4ef78c080cd8ba55a85445d5b36e214a81df20a # @v1
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}

- name: Build & Deploy ${{ matrix.dockerfile[0] }}
uses: docker/build-push-action@4a13e500e55cf31b7a5d59a38ab2040ab0f42f56
uses: docker/build-push-action@3b5e8027fcad23fda98b2e3ac259d8d67585f671 # @v2
with:
context: dockerfiles/${{ matrix.dockerfile[0] }}
platforms: ${{ matrix.dockerfile[1] }}
push: ${{ github.event_name != 'pull_request' }}
cache-from: type=gha
cache-to: type=gha,mode=max
tags: ${{ steps.docker_meta.outputs.tags }}
labels: ${{ steps.docker_meta.outputs.labels }}
tags: |
spack/${{ env.container }}
spack/${{ env.versioned }}
ghcr.io/spack/${{ env.container }}
ghcr.io/spack/${{ env.versioned }}
2  .github/workflows/ci.yaml  vendored

@@ -35,7 +35,7 @@ jobs:
core: ${{ steps.filter.outputs.core }}
packages: ${{ steps.filter.outputs.packages }}
steps:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2
- uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
if: ${{ github.event_name == 'push' }}
with:
fetch-depth: 0
4  .github/workflows/nightly-win-builds.yml  vendored

@@ -14,10 +14,10 @@ jobs:
build-paraview-deps:
runs-on: windows-latest
steps:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
- uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
with:
fetch-depth: 0
- uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236
- uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0
with:
python-version: 3.9
- name: Install Python packages
7  .github/workflows/style/requirements.txt  vendored

@@ -1,7 +0,0 @@
black==23.11.0
clingo==5.6.2
flake8==6.1.0
isort==5.12.0
mypy==1.7.1
types-six==1.16.21.9
vermin==1.6.0
29  .github/workflows/unit_tests.yaml  vendored

@@ -15,7 +15,7 @@ jobs:
strategy:
matrix:
os: [ubuntu-latest]
python-version: ['3.7', '3.8', '3.9', '3.10', '3.11', '3.12']
python-version: ['3.7', '3.8', '3.9', '3.10', '3.11']
concretizer: ['clingo']
on_develop:
- ${{ github.ref == 'refs/heads/develop' }}

@@ -45,16 +45,12 @@ jobs:
os: ubuntu-latest
concretizer: 'clingo'
on_develop: false
- python-version: '3.11'
os: ubuntu-latest
concretizer: 'clingo'
on_develop: false

steps:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2
- uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
with:
fetch-depth: 0
- uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # @v2
- uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 # @v2
with:
python-version: ${{ matrix.python-version }}
- name: Install System packages

@@ -98,10 +94,10 @@ jobs:
shell:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2
- uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
with:
fetch-depth: 0
- uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # @v2
- uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 # @v2
with:
python-version: '3.11'
- name: Install System packages

@@ -137,7 +133,7 @@ jobs:
dnf install -y \
bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
make patch tcl unzip which xz
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2
- uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
- name: Setup repo and non-root user
run: |
git --version

@@ -156,10 +152,10 @@ jobs:
clingo-cffi:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2
- uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
with:
fetch-depth: 0
- uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # @v2
- uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 # @v2
with:
python-version: '3.11'
- name: Install System packages

@@ -169,7 +165,6 @@ jobs:
- name: Install Python packages
run: |
pip install --upgrade pip setuptools pytest coverage[toml] pytest-cov clingo pytest-xdist
pip install --upgrade flake8 "isort>=4.3.5" "mypy>=0.900" "click" "black"
- name: Setup git configuration
run: |
# Need this for the git tests to succeed.

@@ -189,12 +184,12 @@ jobs:
runs-on: macos-latest
strategy:
matrix:
python-version: ["3.11"]
python-version: ["3.10"]
steps:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2
- uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
with:
fetch-depth: 0
- uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # @v2
- uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 # @v2
with:
python-version: ${{ matrix.python-version }}
- name: Install Python packages

@@ -215,7 +210,7 @@ jobs:
$(which spack) bootstrap disable spack-install
$(which spack) solve zlib
common_args=(--dist loadfile --tx '4*popen//python=./bin/spack-tmpconfig python -u ./bin/spack python' -x)
$(which spack) unit-test --verbose --cov --cov-config=pyproject.toml --cov-report=xml:coverage.xml "${common_args[@]}"
$(which spack) unit-test --cov --cov-config=pyproject.toml --cov-report=xml:coverage.xml "${common_args[@]}"
- uses: codecov/codecov-action@eaaf4bedf32dbdc6b720b63067d99c4d77d6047d
with:
flags: unittests,macos
18  .github/workflows/valid-style.yml  vendored

@@ -18,15 +18,15 @@ jobs:
validate:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
- uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236
- uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
- uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 # @v2
with:
python-version: '3.11'
cache: 'pip'
- name: Install Python Packages
run: |
pip install --upgrade pip setuptools
pip install -r .github/workflows/style/requirements.txt
pip install --upgrade pip
pip install --upgrade vermin
- name: vermin (Spack's Core)
run: vermin --backport importlib --backport argparse --violations --backport typing -t=3.6- -vvv lib/spack/spack/ lib/spack/llnl/ bin/
- name: vermin (Repositories)

@@ -35,17 +35,16 @@ jobs:
style:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
- uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
with:
fetch-depth: 0
- uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236
- uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 # @v2
with:
python-version: '3.11'
cache: 'pip'
- name: Install Python packages
run: |
pip install --upgrade pip setuptools
pip install -r .github/workflows/style/requirements.txt
python3 -m pip install --upgrade pip setuptools types-six black==23.1.0 mypy isort clingo flake8
- name: Setup git configuration
run: |
# Need this for the git tests to succeed.

@@ -69,7 +68,7 @@ jobs:
dnf install -y \
bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
make patch tcl unzip which xz
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2
- uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
- name: Setup repo and non-root user
run: |
git --version

@@ -82,7 +81,6 @@ jobs:
shell: runuser -u spack-test -- bash {0}
run: |
source share/spack/setup-env.sh
spack debug report
spack -d bootstrap now --dev
spack style -t black
spack unit-test -V
15  .github/workflows/windows_python.yml  vendored

@@ -15,10 +15,10 @@ jobs:
unit-tests:
runs-on: windows-latest
steps:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
- uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
with:
fetch-depth: 0
- uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236
- uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0
with:
python-version: 3.9
- name: Install Python packages

@@ -39,10 +39,10 @@ jobs:
unit-tests-cmd:
runs-on: windows-latest
steps:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
- uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
with:
fetch-depth: 0
- uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236
- uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0
with:
python-version: 3.9
- name: Install Python packages

@@ -63,10 +63,10 @@ jobs:
build-abseil:
runs-on: windows-latest
steps:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
- uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
with:
fetch-depth: 0
- uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236
- uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0
with:
python-version: 3.9
- name: Install Python packages

@@ -75,5 +75,6 @@ jobs:
- name: Build Test
run: |
spack compiler find
spack -d external find cmake ninja
spack external find cmake
spack external find ninja
spack -d install abseil-cpp
@@ -1,16 +1,10 @@
version: 2

build:
os: "ubuntu-22.04"
apt_packages:
- graphviz
tools:
python: "3.11"

sphinx:
configuration: lib/spack/docs/conf.py
fail_on_warning: true

python:
version: 3.7
install:
- requirements: lib/spack/docs/requirements.txt
335  CHANGELOG.md

@@ -1,338 +1,3 @@
# v0.21.0 (2023-11-11)

`v0.21.0` is a major feature release.

## Features in this release

1. **Better error messages with condition chaining**

   In v0.18, we added better error messages that could tell you what problem happened,
   but they couldn't tell you *why* it happened. `0.21` adds *condition chaining* to the
   solver, and Spack can now trace back through the conditions that led to an error and
   build a tree of potential causes and where they came from. For example:

   ```console
   $ spack solve hdf5 ^cmake@3.0.1
   ==> Error: concretization failed for the following reasons:

   1. Cannot satisfy 'cmake@3.0.1'
   2. Cannot satisfy 'cmake@3.0.1'
      required because hdf5 ^cmake@3.0.1 requested from CLI
   3. Cannot satisfy 'cmake@3.18:' and 'cmake@3.0.1
      required because hdf5 ^cmake@3.0.1 requested from CLI
      required because hdf5 depends on cmake@3.18: when @1.13:
      required because hdf5 ^cmake@3.0.1 requested from CLI
   4. Cannot satisfy 'cmake@3.12:' and 'cmake@3.0.1
      required because hdf5 depends on cmake@3.12:
      required because hdf5 ^cmake@3.0.1 requested from CLI
      required because hdf5 ^cmake@3.0.1 requested from CLI
   ```

   More details in #40173.

2. **OCI build caches**

   You can now use an arbitrary [OCI](https://opencontainers.org) registry as a build
   cache:

   ```console
   $ spack mirror add my_registry oci://user/image # Dockerhub
   $ spack mirror add my_registry oci://ghcr.io/haampie/spack-test # GHCR
   $ spack mirror set --push --oci-username ... --oci-password ... my_registry # set login creds
   $ spack buildcache push my_registry [specs...]
   ```

   And you can optionally add a base image to get *runnable* images:

   ```console
   $ spack buildcache push --base-image ubuntu:23.04 my_registry python
   Pushed ... as [image]:python-3.11.2-65txfcpqbmpawclvtasuog4yzmxwaoia.spack

   $ docker run --rm -it [image]:python-3.11.2-65txfcpqbmpawclvtasuog4yzmxwaoia.spack
   ```

   This creates a container image from the Spack installations on the host system,
   without the need to run `spack install` from a `Dockerfile` or `sif` file. It also
   addresses the inconvenience of losing binaries of dependencies when `RUN spack
   install` fails inside `docker build`.

   Further, the container image layers and build cache tarballs are the same files. This
   means that `spack install` and `docker pull` use the exact same underlying binaries.
   If you previously used `spack install` inside of `docker build`, this feature helps
   you save storage by a factor of two.

   More details in #38358.

3. **Multiple versions of build dependencies**

   Increasingly, complex package builds require multiple versions of some build
   dependencies. For example, Python packages frequently require very specific versions
   of `setuptools`, `cython`, and sometimes different physics packages require different
   versions of Python to build. The concretizer enforced that every solve was *unified*,
   i.e., that there only be one version of every package. The concretizer now supports
   "duplicate" nodes for *build dependencies*, but enforces unification through
   transitive link and run dependencies. This will allow it to better resolve complex
   dependency graphs in ecosystems like Python, and it also gets us very close to
   modeling compilers as proper dependencies.

   This change required a major overhaul of the concretizer, as well as a number of
   performance optimizations. See #38447, #39621.
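   This behavior is controlled by the `duplicates` setting sketched below; the same
   block appears verbatim in the `concretizer.yaml` defaults hunk near the end of this
   diff:

   ```yaml
   concretizer:
     # Option to deal with possible duplicate nodes in the DAG.
     duplicates:
       # "none": allows a single node for any package in the DAG.
       # "minimal": allows the duplication of 'build-tools' nodes only (e.g. py-setuptools, cmake etc.)
       # "full" (experimental): allows separation of the entire build-tool stack
       strategy: minimal
   ```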
4. **Cherry-picking virtual dependencies**

   You can now select only a subset of virtual dependencies from a spec that may provide
   more. For example, if you want `mpich` to be your `mpi` provider, you can be explicit
   by writing:

   ```
   hdf5 ^[virtuals=mpi] mpich
   ```

   Or, if you want to use, e.g., `intel-parallel-studio` for `blas` along with an external
   `lapack` like `openblas`, you could write:

   ```
   strumpack ^[virtuals=mpi] intel-parallel-studio+mkl ^[virtuals=lapack] openblas
   ```

   The `virtuals=mpi` is an edge attribute, and dependency edges in Spack graphs now
   track which virtuals they satisfied. More details in #17229 and #35322.

   Note for packaging: in Spack 0.21 `spec.satisfies("^virtual")` is true if and only if
   the package specifies `depends_on("virtual")`. This is different from Spack 0.20,
   where depending on a provider implied depending on the virtual provided. See #41002
   for an example where `^mkl` was being used to test for several `mkl` providers in a
   package that did not depend on `mkl`.

5. **License directive**

   Spack packages can now have license metadata, with the new `license()` directive:

   ```python
   license("Apache-2.0")
   ```

   Licenses use [SPDX identifiers](https://spdx.org/licenses), and you can use SPDX
   expressions to combine them:

   ```python
   license("Apache-2.0 OR MIT")
   ```

   Like other directives in Spack, it's conditional, so you can handle complex cases like
   Spack itself:

   ```python
   license("LGPL-2.1", when="@:0.11")
   license("Apache-2.0 OR MIT", when="@0.12:")
   ```

   More details in #39346, #40598.

6. **`spack deconcretize` command**

   We are getting close to having a `spack update` command for environments, but we're
   not quite there yet. This is the next best thing. `spack deconcretize` gives you
   control over what you want to update in an already concrete environment. If you have
   an environment built with, say, `meson`, and you want to update your `meson` version,
   you can run:

   ```console
   spack deconcretize meson
   ```

   and have everything that depends on `meson` rebuilt the next time you run `spack
   concretize`. In a future Spack version, we'll handle all of this in a single command,
   but for now you can use this to drop bits of your lockfile and resolve your
   dependencies again. More in #38803.

7. **UI Improvements**

   The venerable `spack info` command was looking shabby compared to the rest of Spack's
   UI, so we reworked it to have a bit more flair. `spack info` now makes much better
   use of terminal space and shows variants, their values, and their descriptions much
   more clearly. Conditional variants are grouped separately so you can more easily
   understand how packages are structured. More in #40998.

   `spack checksum` now allows you to filter versions from your editor, or by version
   range. It also notifies you about potential download URL changes. See #40403.

8. **Environments can include definitions**

   Spack did not previously support using `include:` with the
   [definitions](https://spack.readthedocs.io/en/latest/environments.html#spec-list-references)
   section of an environment, but now it does. You can use this to curate lists of specs
   and more easily reuse them across environments, as in the sketch below. See #33960.
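   A hedged sketch of how an included definitions file might be used; the file names
   and spec lists here are illustrative, not taken from the release notes:

   ```yaml
   # definitions.yaml -- a curated, reusable list of specs
   definitions:
     - compilers: [gcc@12]
     - mpis: [mpich, openmpi]
   ```

   ```yaml
   # spack.yaml -- an environment that includes the definitions above
   spack:
     include:
       - definitions.yaml
     specs:
       - matrix:
           - [$mpis]
           - [$%compilers]
   ```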
9. **Aliases**

   You can now add aliases to Spack commands in `config.yaml`, e.g. this might enshrine
   your favorite args to `spack find` as `spack f`:

   ```yaml
   config:
     aliases:
       f: find -lv
   ```

   See #17229.

10. **Improved autoloading of modules**

    Spack 0.20 was the first release to enable autoloading of direct dependencies in
    module files.

    The downside of this was that `module avail` and `module load` tab completion would
    show users too many modules to choose from, and many users disabled generating
    modules for dependencies through `exclude_implicits: true`. Further, it was
    necessary to keep hashes in module names to avoid file name clashes.

    In this release, you can start using `hide_implicits: true` instead, which exposes
    only explicitly installed packages to the user, while still autoloading
    dependencies. On top of that, you can safely use `hash_length: 0`, as this config
    now only applies to the modules exposed to the user -- you don't have to worry about
    file name clashes for hidden dependencies.

    Note: for `tcl` this feature requires Modules 4.7 or higher. A sketch of the
    settings follows below.
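    A minimal sketch of these module settings as a `modules.yaml` fragment; the exact
    nesting under the default `tcl` section is an assumption, not taken from the
    release notes:

    ```yaml
    modules:
      default:
        tcl:
          hide_implicits: true  # expose only explicitly installed packages
          hash_length: 0        # applies only to user-visible module names
    ```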
11. **Updated container labeling**

    Nightly Docker images from the `develop` branch will now be tagged as `:develop` and
    `:nightly`. The `:latest` tag is no longer associated with `:develop`, but with the
    latest stable release. Releases will be tagged with `:{major}`, `:{major}.{minor}`
    and `:{major}.{minor}.{patch}`. `ubuntu:18.04` has also been removed from the list of
    generated Docker images, as it is no longer supported. See #40593. This tag scheme
    matches the `docker/metadata-action` rules shown below.
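    The corresponding tag rules appear verbatim in the `build-containers.yml` hunk
    earlier in this diff:

    ```yaml
    tags: |
      type=schedule,pattern=nightly
      type=schedule,pattern=develop
      type=semver,pattern={{version}}
      type=semver,pattern={{major}}.{{minor}}
      type=semver,pattern={{major}}
      type=ref,event=branch
      type=ref,event=pr
    ```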
## Other new commands and directives

* `spack env activate` without arguments now loads a `default` environment that you do
  not have to create (#40756).
* `spack find -H` / `--hashes`: a new shortcut for piping `spack find` output to
  other commands (#38663)
* Add `spack checksum --verify`, fix `--add` (#38458)
* New `default_args` context manager factors out common args for directives (#39964)
* `spack compiler find --[no]-mixed-toolchain` lets you easily mix `clang` and
  `gfortran` on Linux (#40902)

## Performance improvements

* `spack external find` execution is now much faster (#39843)
* `spack location -i` now much faster on success (#40898)
* Drop redundant rpaths post install (#38976)
* ASP-based solver: avoid cycles in clingo using hidden directive (#40720)
* Fix multiple quadratic complexity issues in environments (#38771)

## Other new features of note

* archspec: update to v0.2.2, support for Sapphire Rapids, Power10, Neoverse V2 (#40917)
* Propagate variants across nodes that don't have that variant (#38512)
* Implement fish completion (#29549)
* Can now distinguish between source/binary mirror; don't ping mirror.spack.io as much (#34523)
* Improve status reporting on install (add [n/total] display) (#37903)

## Windows

This release has the best Windows support of any Spack release yet, with numerous
improvements and much larger swaths of tests passing:

* MSVC and SDK improvements (#37711, #37930, #38500, #39823, #39180)
* Windows external finding: update default paths; treat .bat as executable on Windows (#39850)
* Windows decompression: fix removal of intermediate file (#38958)
* Windows: executable/path handling (#37762)
* Windows build systems: use ninja and enable tests (#33589)
* Windows testing (#36970, #36972, #36973, #36840, #36977, #36792, #36834, #34696, #36971)
* Windows PowerShell support (#39118, #37951)
* Windows symlinking and libraries (#39933, #38599, #34701, #38578, #34701)

## Notable refactors

* User-specified flags take precedence over others in Spack compiler wrappers (#37376)
* Improve setup of build, run, and test environments (#35737, #40916)
* `make` is no longer a required system dependency of Spack (#40380)
* Support Python 3.12 (#40404, #40155, #40153)
* docs: Replace package list with packages.spack.io (#40251)
* Drop Python 2 constructs in Spack (#38720, #38718, #38703)

## Binary cache and stack updates

* e4s arm stack: duplicate and target neoverse v1 (#40369)
* Add macOS ML CI stacks (#36586)
* E4S Cray CI Stack (#37837)
* e4s cray: expand spec list (#38947)
* e4s cray sles ci: expand spec list (#39081)

## Removals, deprecations, and syntax changes

* ASP: targets, compilers and providers soft-preferences are only global (#31261)
* Parser: fix ambiguity with whitespace in version ranges (#40344)
* Module file generation is disabled by default; you'll need to enable it to use it (#37258)
* Remove deprecated "extra_instructions" option for containers (#40365)
* Stand-alone test feature deprecation postponed to v0.22 (#40600)
* buildcache push: make `--allow-root` the default and deprecate the option (#38878)

## Notable Bugfixes

* Bugfix: propagation of multivalued variants (#39833)
* Allow `/` in git versions (#39398)
* Fetch & patch: actually acquire stage lock, and many more issues (#38903)
* Environment/depfile: better escaping of targets with Git versions (#37560)
* Prevent "spack external find" to error out on wrong permissions (#38755)
* lmod: allow core compiler to be specified with a version range (#37789)

## Spack community stats

* 7,469 total packages, 303 new since `v0.20.0`
  * 150 new Python packages
  * 34 new R packages
* 353 people contributed to this release
  * 336 committers to packages
  * 65 committers to core


# v0.20.3 (2023-10-31)

## Bugfixes

- Fix a bug where `spack mirror set-url` would drop configured connection info (reverts #34210)
- Fix a minor issue with package hash computation for Python 3.12 (#40328)


# v0.20.2 (2023-10-03)

## Features in this release

Spack now supports Python 3.12 (#40155)

## Bugfixes

- Improve escaping in Tcl module files (#38375)
- Make repo cache work on repositories with zero mtime (#39214)
- Ignore errors for newer, incompatible buildcache version (#40279)
- Print an error when git is required, but missing (#40254)
- Ensure missing build dependencies get installed when using `spack install --overwrite` (#40252)
- Fix an issue where Spack freezes when the build process unexpectedly exits (#39015)
- Fix a bug where installation failures cause an unrelated `NameError` to be thrown (#39017)
- Fix an issue where Spack package versions would be incorrectly derived from git tags (#39414)
- Fix a bug triggered when file locking fails internally (#39188)
- Prevent "spack external find" to error out when a directory cannot be accessed (#38755)
- Fix multiple performance regressions in environments (#38771)
- Add more ignored modules to `pyproject.toml` for `mypy` (#38769)


# v0.20.1 (2023-07-10)

## Spack Bugfixes

- Specs removed from an environment were not actually removed if `--force` was not given (#37877)
- Speed-up module file generation (#37739)
- Hotfix for a few recipes that treat CMake as a link dependency (#35816)
- Fix re-running stand-alone test a second time, which was getting a trailing spurious failure (#37840)
- Fixed reading JSON manifest on Cray, reporting non-concrete specs (#37909)
- Fixed a few bugs when generating Dockerfiles from Spack (#37766,#37769)
- Fixed a few long-standing bugs when generating module files (#36678,#38347,#38465,#38455)
- Fixed issues with building Python extensions using an external Python (#38186)
- Fixed compiler removal from command line (#38057)
- Show external status as [e] (#33792)
- Backported `archspec` fixes (#37793)
- Improved a few error messages (#37791)


# v0.20.0 (2023-05-21)

`v0.20.0` is a major feature release.
54  CITATION.cff

@@ -27,53 +27,12 @@
# And here's the CITATION.cff format:
#
cff-version: 1.2.0
type: software
message: "If you are referencing Spack in a publication, please cite the paper below."
title: "The Spack Package Manager: Bringing Order to HPC Software Chaos"
abstract: >-
Large HPC centers spend considerable time supporting software for thousands of users, but the complexity of HPC software is quickly outpacing the capabilities of existing software management tools.
Scientific applications require specific versions of compilers, MPI, and other dependency libraries, so using a single, standard software stack is infeasible.
However, managing many configurations is difficult because the configuration space is combinatorial in size.
We introduce Spack, a tool used at Lawrence Livermore National Laboratory to manage this complexity.
Spack provides a novel, recursive specification syntax to invoke parametric builds of packages and dependencies.
It allows any number of builds to coexist on the same system, and it ensures that installed packages can find their dependencies, regardless of the environment.
We show through real-world use cases that Spack supports diverse and demanding applications, bringing order to HPC software chaos.
preferred-citation:
title: "The Spack Package Manager: Bringing Order to HPC Software Chaos"
type: conference-paper
url: "https://tgamblin.github.io/pubs/spack-sc15.pdf"
doi: "10.1145/2807591.2807623"
url: "https://github.com/spack/spack"
authors:
- family-names: "Gamblin"
given-names: "Todd"
- family-names: "LeGendre"
given-names: "Matthew"
- family-names: "Collette"
given-names: "Michael R."
- family-names: "Lee"
given-names: "Gregory L."
- family-names: "Moody"
given-names: "Adam"
- family-names: "de Supinski"
given-names: "Bronis R."
- family-names: "Futral"
given-names: "Scott"
conference:
name: "Supercomputing 2015 (SC’15)"
city: "Austin"
region: "Texas"
country: "US"
date-start: 2015-11-15
date-end: 2015-11-20
month: 11
year: 2015
identifiers:
- description: "The concept DOI of the work."
type: doi
value: 10.1145/2807591.2807623
- description: "The DOE Document Release Number of the work"
type: other
value: "LLNL-CONF-669890"
authors:
- family-names: "Gamblin"
given-names: "Todd"
- family-names: "LeGendre"

@@ -88,3 +47,12 @@ authors:
given-names: "Bronis R."
- family-names: "Futral"
given-names: "Scott"
title: "The Spack Package Manager: Bringing Order to HPC Software Chaos"
conference:
name: "Supercomputing 2015 (SC’15)"
city: "Austin"
region: "Texas"
country: "USA"
month: November 15-20
year: 2015
notes: LLNL-CONF-669890
|
@@ -7,7 +7,6 @@
|
||||
[](https://spack.readthedocs.io)
|
||||
[](https://github.com/psf/black)
|
||||
[](https://slack.spack.io)
|
||||
[](https://matrix.to/#/#spack-space:matrix.org)
|
||||
|
||||
Spack is a multi-platform package manager that builds and installs
|
||||
multiple versions and configurations of software. It works on Linux,
|
||||
@@ -63,10 +62,7 @@ Resources:
|
||||
|
||||
* **Slack workspace**: [spackpm.slack.com](https://spackpm.slack.com).
|
||||
To get an invitation, visit [slack.spack.io](https://slack.spack.io).
|
||||
* **Matrix space**: [#spack-space:matrix.org](https://matrix.to/#/#spack-space:matrix.org):
|
||||
[bridged](https://github.com/matrix-org/matrix-appservice-slack#matrix-appservice-slack) to Slack.
|
||||
* [**Github Discussions**](https://github.com/spack/spack/discussions):
|
||||
not just for discussions, but also Q&A.
|
||||
* [**Github Discussions**](https://github.com/spack/spack/discussions): not just for discussions, also Q&A.
|
||||
* **Mailing list**: [groups.google.com/d/forum/spack](https://groups.google.com/d/forum/spack)
|
||||
* **Twitter**: [@spackpm](https://twitter.com/spackpm). Be sure to
|
||||
`@mention` us!
|
||||
|
32  SECURITY.md

@@ -2,26 +2,24 @@

## Supported Versions

We provide security updates for `develop` and for the last two
stable (`0.x`) release series of Spack. Security updates will be
made available as patch (`0.x.1`, `0.x.2`, etc.) releases.
We provide security updates for the following releases.
For more on Spack's release structure, see
[`README.md`](https://github.com/spack/spack#releases).

| Version | Supported          |
| ------- | ------------------ |
| develop | :white_check_mark: |
| 0.19.x  | :white_check_mark: |
| 0.18.x  | :white_check_mark: |

## Reporting a Vulnerability

You can report a vulnerability using GitHub's private reporting
feature:
To report a vulnerability or other security
issue, email maintainers@spack.io.

1. Go to [github.com/spack/spack/security](https://github.com/spack/spack/security).
2. Click "Report a vulnerability" in the upper right corner of that page.
3. Fill out the form and submit your draft security advisory.

More details are available in
[GitHub's docs](https://docs.github.com/en/code-security/security-advisories/guidance-on-reporting-and-writing/privately-reporting-a-security-vulnerability).

You can expect to hear back about security issues within two days.
If your security issue is accepted, we will do our best to release
a fix within a week. If fixing the issue will take longer than
this, we will discuss timeline options with you.
You can expect to hear back within two days.
If your security issue is accepted, we will do
our best to release a fix within a week. If
fixing the issue will take longer than this,
we will discuss timeline options with you.
@@ -25,6 +25,8 @@ exit 1
# Line above is a shell no-op, and ends a python multi-line comment.
# The code above runs this file with our preferred python interpreter.

from __future__ import print_function

import os
import os.path
import sys
@@ -14,7 +14,7 @@
::
@echo off

set spack="%SPACK_ROOT%"\bin\spack
set spack=%SPACK_ROOT%\bin\spack

::#######################################################################
:: This is a wrapper around the spack command that forwards calls to

@@ -51,43 +51,65 @@ setlocal enabledelayedexpansion
:: subcommands will never start with '-'
:: everything after the subcommand is an arg

:: we cannot allow batch "for" loop to directly process CL args
:: a number of batch reserved characters are commonly passed to
:: spack and allowing batch's "for" method to process the raw inputs
:: results in a large number of formatting issues
:: instead, treat the entire CLI as one string
:: and split by space manually
:: capture cl args in variable named cl_args
set cl_args=%*
:process_cl_args
rem Set first cl argument (denoted by %1) to be processed
set t=%1
rem shift moves all cl positional arguments left by one
rem meaning %2 is now %1, this allows us to iterate over each
rem argument
shift
rem assign next "first" cl argument to cl_args, will be null when
rem there are no further arguments to process
set cl_args=%1
if "!t:~0,1!" == "-" (
if defined _sp_subcommand (
rem We already have a subcommand, processing args now
rem tokens=1* returns the first processed token produced
rem by tokenizing the input string cl_args on spaces into
rem the named variable %%g
rem While this may look like a for loop, it only
rem executes a single time for each of the cl args
rem the actual iterative loop is performed by the
rem goto process_cl_args stanza
rem we are simply leveraging the "for" method's string
rem tokenization
for /f "tokens=1*" %%g in ("%cl_args%") do (
set t=%%~g
rem remainder of string is composed into %%h
rem these are the cl args yet to be processed
rem assign cl_args var to only the args to be processed
rem effectively discarding the current arg %%g
rem this will be nul when we have no further tokens to process
set cl_args=%%h
rem process the first space delineated cl arg
rem of this iteration
if "!t:~0,1!" == "-" (
if defined _sp_subcommand (
rem We already have a subcommand, processing args now
if not defined _sp_args (
set "_sp_args=!t!"
) else (
set "_sp_args=!_sp_args! !t!"
)
) else (
if not defined _sp_flags (
set "_sp_flags=!t!"
shift
) else (
set "_sp_flags=!_sp_flags! !t!"
shift
)
)
) else if not defined _sp_subcommand (
set "_sp_subcommand=!t!"
shift
) else (
if not defined _sp_args (
set "_sp_args=!t!"
shift
) else (
set "_sp_args=!_sp_args! !t!"
shift
)
) else (
if not defined _sp_flags (
set "_sp_flags=!t!"
) else (
set "_sp_flags=!_sp_flags! !t!"
)
)
) else if not defined _sp_subcommand (
set "_sp_subcommand=!t!"
) else (
if not defined _sp_args (
set "_sp_args=!t!"
) else (
set "_sp_args=!_sp_args! !t!"
)
)

rem if this is not nu;ll, we have more tokens to process
rem if this is not nil, we have more tokens to process
rem start above process again with remaining unprocessed cl args
if defined cl_args goto :process_cl_args

@@ -192,7 +214,7 @@ goto :end_switch
if defined _sp_args (
if NOT "%_sp_args%"=="%_sp_args:--help=%" (
goto :default_case
) else if NOT "%_sp_args%"=="%_sp_args:-h=%" (
) else if NOT "%_sp_args%"=="%_sp_args: -h=%" (
goto :default_case
) else if NOT "%_sp_args%"=="%_sp_args:--bat=%" (
goto :default_case
146
bin/spack.ps1
146
bin/spack.ps1
@@ -1,146 +0,0 @@
|
||||
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
# #######################################################################
|
||||
|
||||
function Compare-CommonArgs {
|
||||
$CMDArgs = $args[0]
|
||||
# These aruments take precedence and call for no futher parsing of arguments
|
||||
# invoke actual Spack entrypoint with that context and exit after
|
||||
"--help", "-h", "--version", "-V" | ForEach-Object {
|
||||
$arg_opt = $_
|
||||
if(($CMDArgs) -and ([bool]($CMDArgs.Where({$_ -eq $arg_opt})))) {
|
||||
return $true
|
||||
}
|
||||
}
|
||||
return $false
|
||||
}
|
||||
|
||||
function Read-SpackArgs {
|
||||
$SpackCMD_params = @()
|
||||
$SpackSubCommand = $NULL
|
||||
$SpackSubCommandArgs = @()
|
||||
$args_ = $args[0]
|
||||
$args_ | ForEach-Object {
|
||||
if (!$SpackSubCommand) {
|
||||
if($_.SubString(0,1) -eq "-")
|
||||
{
|
||||
$SpackCMD_params += $_
|
||||
}
|
||||
else{
|
||||
$SpackSubCommand = $_
|
||||
}
|
||||
}
|
||||
else{
|
||||
$SpackSubCommandArgs += $_
|
||||
}
|
||||
}
|
||||
return $SpackCMD_params, $SpackSubCommand, $SpackSubCommandArgs
|
||||
}
|
||||
|
||||
function Set-SpackEnv {
|
||||
# This method is responsible
|
||||
# for processing the return from $(spack <command>)
|
||||
# which are returned as System.Object[]'s containing
|
||||
# a list of env commands
|
||||
    # Invoke-Expression can only handle one command at a time
    # so we iterate over the list to invoke the env modification
    # expressions one at a time
    foreach($envop in $args[0]){
        Invoke-Expression $envop
    }
}

function Invoke-SpackCD {
    if (Compare-CommonArgs $SpackSubCommandArgs) {
        python "$Env:SPACK_ROOT/bin/spack" cd -h
    }
    else {
        $LOC = $(python "$Env:SPACK_ROOT/bin/spack" location $SpackSubCommandArgs)
        if (($NULL -ne $LOC)){
            if ( Test-Path -Path $LOC){
                Set-Location $LOC
            }
            else{
                exit 1
            }
        }
        else {
            exit 1
        }
    }
}

function Invoke-SpackEnv {
    if (Compare-CommonArgs $SpackSubCommandArgs[0]) {
        python "$Env:SPACK_ROOT/bin/spack" env -h
    }
    else {
        $SubCommandSubCommand = $SpackSubCommandArgs[0]
        $SubCommandSubCommandArgs = $SpackSubCommandArgs[1..$SpackSubCommandArgs.Count]
        switch ($SubCommandSubCommand) {
            "activate" {
                if (Compare-CommonArgs $SubCommandSubCommandArgs) {
                    python "$Env:SPACK_ROOT/bin/spack" env activate $SubCommandSubCommandArgs
                }
                elseif ([bool]($SubCommandSubCommandArgs.Where({$_ -eq "--pwsh"}))) {
                    python "$Env:SPACK_ROOT/bin/spack" env activate $SubCommandSubCommandArgs
                }
                elseif (!$SubCommandSubCommandArgs) {
                    python "$Env:SPACK_ROOT/bin/spack" env activate $SubCommandSubCommandArgs
                }
                else {
                    $SpackEnv = $(python "$Env:SPACK_ROOT/bin/spack" $SpackCMD_params env activate "--pwsh" $SubCommandSubCommandArgs)
                    Set-SpackEnv $SpackEnv
                }
            }
            "deactivate" {
                if ([bool]($SubCommandSubCommandArgs.Where({$_ -eq "--pwsh"}))) {
                    python "$Env:SPACK_ROOT/bin/spack" env deactivate $SubCommandSubCommandArgs
                }
                elseif($SubCommandSubCommandArgs) {
                    python "$Env:SPACK_ROOT/bin/spack" env deactivate -h
                }
                else {
                    $SpackEnv = $(python "$Env:SPACK_ROOT/bin/spack" $SpackCMD_params env deactivate "--pwsh")
                    Set-SpackEnv $SpackEnv
                }
            }
            default {python "$Env:SPACK_ROOT/bin/spack" $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs}
        }
    }
}

function Invoke-SpackLoad {
    if (Compare-CommonArgs $SpackSubCommandArgs) {
        python "$Env:SPACK_ROOT/bin/spack" $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs
    }
    elseif ([bool]($SpackSubCommandArgs.Where({($_ -eq "--pwsh") -or ($_ -eq "--list")}))) {
        python "$Env:SPACK_ROOT/bin/spack" $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs
    }
    else {
        $SpackEnv = $(python "$Env:SPACK_ROOT/bin/spack" $SpackCMD_params $SpackSubCommand "--pwsh" $SpackSubCommandArgs)
        Set-SpackEnv $SpackEnv
    }
}

$SpackCMD_params, $SpackSubCommand, $SpackSubCommandArgs = Read-SpackArgs $args

if (Compare-CommonArgs $SpackCMD_params) {
    python "$Env:SPACK_ROOT/bin/spack" $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs
    exit $LASTEXITCODE
}

# Process Spack commands with special conditions;
# all other commands are piped directly to Spack
switch($SpackSubCommand)
{
    "cd"     {Invoke-SpackCD}
    "env"    {Invoke-SpackEnv}
    "load"   {Invoke-SpackLoad}
    "unload" {Invoke-SpackLoad}
    default  {python "$Env:SPACK_ROOT/bin/spack" $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs}
}
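For context, a sketch of how this dispatcher behaves once the script has been dot-sourced into a PowerShell session (the package and environment names below are hypothetical):

    spack cd zlib            # routed through Invoke-SpackCD -> Set-Location
    spack env activate dev   # routed through Invoke-SpackEnv -> Set-SpackEnv
    spack load ninja         # routed through Invoke-SpackLoad
    spack install zlib       # no special handling; piped straight to the Python CLI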
@@ -9,15 +9,15 @@ bootstrap:
  # may not be able to bootstrap all the software that Spack needs,
  # depending on its type.
  sources:
  - name: 'github-actions-v0.5'
    metadata: $spack/share/spack/bootstrap/github-actions-v0.5
  - name: 'github-actions-v0.4'
    metadata: $spack/share/spack/bootstrap/github-actions-v0.4
  - name: 'github-actions-v0.3'
    metadata: $spack/share/spack/bootstrap/github-actions-v0.3
  - name: 'spack-install'
    metadata: $spack/share/spack/bootstrap/spack-install
  trusted:
    # By default we trust bootstrapping from sources and from binaries
    # produced on Github via the workflow
    github-actions-v0.5: true
    github-actions-v0.4: true
    github-actions-v0.3: true
    spack-install: true
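For reference, individual sources like these can also be toggled without editing this file; a sketch using the ``spack bootstrap`` subcommands:

    $ spack bootstrap disable github-actions-v0.4
    $ spack bootstrap status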
@@ -36,9 +36,3 @@ concretizer:
  # on each root spec, allowing different versions and variants of the same package in
  # an environment.
  unify: true
  # Option to deal with possible duplicate nodes (i.e. different nodes from the same package) in the DAG.
  duplicates:
    # "none": allows a single node for any package in the DAG.
    # "minimal": allows the duplication of 'build-tools' nodes only (e.g. py-setuptools, cmake etc.)
    # "full" (experimental): allows separation of the entire build-tool stack (e.g. the entire "cmake" subDAG)
    strategy: minimal
@@ -216,11 +216,10 @@ config:
  # manipulation by unprivileged user (e.g. AFS)
  allow_sgid: true

  # Whether to show status information during building and installing packages.
  # This gives information about Spack's current progress as well as the current
  # and total number of packages. Information is shown both in the terminal
  # title and inline.
  install_status: true

  # Whether to set the terminal title to display status information during
  # building and installing packages. This gives information about Spack's
  # current progress as well as the current and total number of packages.
  terminal_title: false

  # Number of seconds a buildcache's index.json is cached locally before probing
  # for updates, within a single Spack invocation. Defaults to 10 minutes.

@@ -229,11 +228,3 @@ config:
  flags:
    # Whether to keep -Werror flags active in package builds.
    keep_werror: 'none'

  # A mapping of aliases that can be used to define new commands. For instance,
  # `sp: spec -I` will define a new command `sp` that will execute `spec` with
  # the `-I` argument. Aliases cannot override existing commands.
  aliases:
    concretise: concretize
    containerise: containerize
    rm: remove
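As an illustration of the mechanism described in the comment above, the hypothetical ``sp`` alias would be declared like this, after which ``spack sp zlib`` runs ``spack spec -I zlib``:

  aliases:
    sp: spec -I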
@@ -50,4 +50,4 @@ packages:
    # Apple bundles libuuid in libsystem_c version 1353.100.2,
    # although the version number used here isn't critical
    - spec: apple-libuuid@1353.100.2
      prefix: /Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk
@@ -1,4 +1,2 @@
mirrors:
  spack-public: https://mirror.spack.io
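The same mirror can equivalently be registered from the command line instead of editing this file by hand; a sketch:

    $ spack mirror add spack-public https://mirror.spack.io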
@@ -49,7 +49,6 @@ packages:
    pbs: [openpbs, torque]
    pil: [py-pillow]
    pkgconfig: [pkgconf, pkg-config]
    qmake: [qt-base, qt]
    rpc: [libtirpc]
    scalapack: [netlib-scalapack, amdscalapack]
    sycl: [hipsycl]
@@ -60,7 +59,6 @@ packages:
    xxd: [xxd-standalone, vim]
    yacc: [bison, byacc]
    ziglang: [zig]
    zlib-api: [zlib-ng+compat, zlib]
  permissions:
    read: world
    write: user
1
lib/spack/docs/.gitignore
vendored
@@ -1,3 +1,4 @@
package_list.html
command_index.rst
spack*.rst
llnl*.rst
@@ -1,16 +0,0 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

# The name of the Pygments (syntax highlighting) style to use.
# We use our own extension of the default style with a few modifications
from pygments.styles.default import DefaultStyle
from pygments.token import Generic


class SpackStyle(DefaultStyle):
    styles = DefaultStyle.styles.copy()
    background_color = "#f4f4f8"
    styles[Generic.Output] = "#355"
    styles[Generic.Prompt] = "bold #346ec9"
@@ -45,8 +45,7 @@ Listing available packages

To install software with Spack, you need to know what software is
available. You can see a list of available package names at the
:ref:`package-list` webpage, or using the ``spack list`` command.

.. _cmd-spack-list:

@@ -61,7 +60,7 @@ can install:
   :ellipsis: 10

There are thousands of them, so we've truncated the output above, but you
can find a :ref:`full list here <package-list>`.
Packages are listed by name in alphabetical order.
A pattern to match with no wildcards, ``*`` or ``?``,
will be treated as though it started and ended with

@@ -1526,30 +1525,6 @@ any MPI implementation will do. If another package depends on
error. Likewise, if you try to plug in some package that doesn't
provide MPI, Spack will raise an error.

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Explicit binding of virtual dependencies
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

There are packages that provide more than just one virtual dependency. When interacting with them, users
might want to utilize just a subset of what they could provide, and use other providers for virtuals they
need.

It is possible to be more explicit and tell Spack which dependency should provide which virtual, using a
special syntax:

.. code-block:: console

   $ spack spec strumpack ^[virtuals=mpi] intel-parallel-studio+mkl ^[virtuals=lapack] openblas

Concretizing the spec above produces the following DAG:

.. figure:: images/strumpack_virtuals.svg
   :scale: 60 %
   :align: center

where ``intel-parallel-studio`` *could* provide ``mpi``, ``lapack``, and ``blas`` but is used only for the former. The ``lapack``
and ``blas`` dependencies are satisfied by ``openblas``.

^^^^^^^^^^^^^^^^^^^^^^^^
Specifying Specs by Hash
^^^^^^^^^^^^^^^^^^^^^^^^
@@ -48,10 +48,14 @@ Here is an example where a build cache is created in a local directory named

.. code-block:: console

   $ spack buildcache push --allow-root ./spack-cache ninja
   ==> Pushing binary packages to file:///home/spackuser/spack/spack-cache/build_cache

Note that ``ninja`` must be installed locally for this to work.

We're using the ``--allow-root`` flag to tell Spack that it is OK when any of
the binaries we're pushing contain references to the local Spack install
directory.

Once you have a build cache, you can add it as a mirror, discussed next.

@@ -143,7 +147,7 @@ and then install from it exclusively, you would do:

   $ spack mirror add E4S https://cache.e4s.io
   $ spack buildcache keys --install --trust
   $ spack install --use-buildcache only <package>

We use ``--install`` and ``--trust`` to say that we are installing keys to our
keyring, and trusting all downloaded keys.
@@ -155,183 +159,16 @@ List of popular build caches

* `Extreme-scale Scientific Software Stack (E4S) <https://e4s-project.github.io/>`_: `build cache <https://oaciss.uoregon.edu/e4s/inventory.html>`_

----------
Relocation
----------

When using buildcaches across different machines, it is likely that the install
root will be different from the one used to build the binaries.

To address this issue, Spack automatically relocates all paths encoded in binaries
and scripts to their new location upon install.

Note that there are some cases where this is not possible: if binaries are built in
a relatively short path, and then installed to a longer path, there may not be enough
space in the binary to encode the new path. In this case, Spack will fail to install
the package from the build cache, and a source build is required.

To reduce the likelihood of this happening, it is highly recommended to add padding to
the install root during the build, as specified in the :ref:`config <config-yaml>`
section of the configuration:

.. code-block:: yaml

   config:
     install_tree:
       root: /opt/spack
       padded_length: 128


.. _binary_caches_oci:

-----------------------------------------
OCI / Docker V2 registries as build cache
-----------------------------------------

Spack can also use OCI or Docker V2 registries such as Dockerhub, Quay.io,
Github Packages, GitLab Container Registry, JFrog Artifactory, and others
as build caches. This is a convenient way to share binaries using public
infrastructure, or to cache Spack built binaries in Github Actions and
GitLab CI.

To get started, configure an OCI mirror using ``oci://`` as the scheme,
and optionally specify a username and password (or personal access token):

.. code-block:: console

   $ spack mirror add --oci-username username --oci-password password my_registry oci://example.com/my_image

Spack follows the naming conventions of Docker, with Dockerhub as the default
registry. To use Dockerhub, you can omit the registry domain:

.. code-block:: console

   $ spack mirror add --oci-username username --oci-password password my_registry oci://username/my_image

From here, you can use the mirror as any other build cache:

.. code-block:: console

   $ spack buildcache push my_registry <specs...>  # push to the registry
   $ spack install <specs...>                      # install from the registry

A unique feature of buildcaches on top of OCI registries is that it's incredibly
easy to generate a runnable container image with the binaries installed. This
is a great way to make applications available to users without requiring them to
install Spack -- all you need is Docker, Podman or any other OCI-compatible container
runtime.

To produce container images, all you need to do is add the ``--base-image`` flag
when pushing to the build cache:

.. code-block:: console

   $ spack buildcache push --base-image ubuntu:20.04 my_registry ninja
   Pushed to example.com/my_image:ninja-1.11.1-yxferyhmrjkosgta5ei6b4lqf6bxbscz.spack

   $ docker run -it example.com/my_image:ninja-1.11.1-yxferyhmrjkosgta5ei6b4lqf6bxbscz.spack
   root@e4c2b6f6b3f4:/# ninja --version
   1.11.1

If ``--base-image`` is not specified, distroless images are produced. In practice,
you won't be able to run these as containers, since they don't come with libc and
other system dependencies. However, they are still compatible with tools like
``skopeo``, ``podman``, and ``docker`` for pulling and pushing.
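For example, such a distroless image can still be inspected without a container runtime; a sketch using ``skopeo`` (the image name is taken from the tag printed in the previous example):

.. code-block:: console

   $ skopeo inspect docker://example.com/my_image:ninja-1.11.1-yxferyhmrjkosgta5ei6b4lqf6bxbscz.spack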
.. note::

   The docker ``overlayfs2`` storage driver is limited to 128 layers, above which a
   ``max depth exceeded`` error may be produced when pulling the image. There
   are `alternative drivers <https://docs.docker.com/storage/storagedriver/>`_.

------------------------------------
Spack build cache for GitHub Actions
------------------------------------

To significantly speed up Spack in GitHub Actions, binaries can be cached in
GitHub Packages. This service is an OCI registry that can be linked to a GitHub
repository.

A typical workflow is to include a ``spack.yaml`` environment in your repository
that specifies the packages to install, the target architecture, and the build
cache to use under ``mirrors``:

.. code-block:: yaml

   spack:
     specs:
     - python@3.11
     config:
       install_tree:
         root: /opt/spack
         padded_length: 128
     packages:
       all:
         require: target=x86_64_v2
     mirrors:
       local-buildcache: oci://ghcr.io/<organization>/<repository>

A GitHub action can then be used to install the packages and push them to the
build cache:

.. code-block:: yaml

   name: Install Spack packages

   on: push

   env:
     SPACK_COLOR: always

   jobs:
     example:
       runs-on: ubuntu-22.04
       permissions:
         packages: write
       steps:
       - name: Checkout
         uses: actions/checkout@v3

       - name: Checkout Spack
         uses: actions/checkout@v3
         with:
           repository: spack/spack
           path: spack

       - name: Setup Spack
         run: echo "$PWD/spack/bin" >> "$GITHUB_PATH"

       - name: Concretize
         run: spack -e . concretize

       - name: Install
         run: spack -e . install --no-check-signature

       - name: Run tests
         run: ./my_view/bin/python3 -c 'print("hello world")'

       - name: Push to buildcache
         run: |
           spack -e . mirror set --oci-username ${{ github.actor }} --oci-password "${{ secrets.GITHUB_TOKEN }}" local-buildcache
           spack -e . buildcache push --base-image ubuntu:22.04 --unsigned --update-index local-buildcache
         if: ${{ !cancelled() }}

The first time this action runs, it will build the packages from source and
push them to the build cache. Subsequent runs will pull the binaries from the
build cache. The concretizer will ensure that prebuilt binaries are favored
over source builds.

The build cache entries appear in the GitHub Packages section of your repository,
and contain instructions for pulling and running them with ``docker`` or ``podman``.

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Using Spack's public build cache for GitHub Actions
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

Spack offers a public build cache for GitHub Actions with a set of common packages,
which lets you get started quickly. See the following resources for more information:

* `spack/github-actions-buildcache <https://github.com/spack/github-actions-buildcache>`_

Initial build and later installation do not necessarily happen at the same
location. Spack provides a relocation capability and corrects for RPATHs and
non-relocatable scripts. However, many packages compile paths into binary
artifacts directly. In such cases, the build instructions of this package would
need to be adjusted for better re-locatability.

.. _cmd-spack-buildcache:
@@ -32,14 +32,9 @@ can't be found. You can readily check if any prerequisite for using Spack is mis

Spack will take care of bootstrapping any missing dependency marked as [B]. Dependencies marked as [-] are instead required to be found on the system.

   % echo $?
   1

In the case of the output shown above Spack detected that both ``clingo`` and ``gnupg``
are missing and it's giving detailed information on why they are needed and whether
they can be bootstrapped. The return code of this command summarizes the results: if any
dependencies are missing the return code is ``1``, otherwise ``0``. Running a command that
concretizes a spec, like:

.. code-block:: console

@@ -49,7 +44,7 @@ concretizes a spec, like:
   ==> Installing "clingo-bootstrap@spack%apple-clang@12.0.0~docs~ipo+python build_type=Release arch=darwin-catalina-x86_64" from a buildcache
   [ ... ]

automatically triggers the bootstrapping of clingo from pre-built binaries as expected.

Users can also bootstrap all the dependencies needed by Spack in a single command, which
might be useful to set up containers or other similar environments:
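The single command referred to here is presumably ``spack bootstrap now``; a minimal sketch:

.. code-block:: console

   $ spack bootstrap now

after which ``spack bootstrap status`` should report no missing dependencies.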
@@ -3,23 +3,251 @@

SPDX-License-Identifier: (Apache-2.0 OR MIT)

.. _build-settings:

================================
Package Settings (packages.yaml)
================================

Spack allows you to customize how your software is built through the
``packages.yaml`` file. Using it, you can make Spack prefer particular
implementations of virtual dependencies (e.g., MPI or BLAS/LAPACK),
or you can make it prefer to build with particular compilers. You can
also tell Spack to use *external* software installations already
present on your system.

At a high level, the ``packages.yaml`` file is structured like this:

.. code-block:: yaml

   packages:
     package1:
       # settings for package1
     package2:
       # settings for package2
     # ...
     all:
       # settings that apply to all packages.

So you can either set build preferences specifically for *one* package,
or you can specify that certain settings should apply to *all* packages.
The types of settings you can customize are described in detail below.

Spack's build defaults are in the default
``etc/spack/defaults/packages.yaml`` file. You can override them in
``~/.spack/packages.yaml`` or ``etc/spack/packages.yaml``. For more
details on how this works, see :ref:`configuration-scopes`.

.. _sec-external-packages:

-----------------
External Packages
-----------------

Spack can be configured to use externally-installed
packages rather than building its own packages. This may be desirable
if machines ship with system packages, such as a customized MPI
that should be used instead of Spack building its own MPI.

External packages are configured through the ``packages.yaml`` file.
Here's an example of an external configuration:

.. code-block:: yaml

   packages:
     openmpi:
       externals:
       - spec: "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64"
         prefix: /opt/openmpi-1.4.3
       - spec: "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64+debug"
         prefix: /opt/openmpi-1.4.3-debug
       - spec: "openmpi@1.6.5%intel@10.1 arch=linux-debian7-x86_64"
         prefix: /opt/openmpi-1.6.5-intel

This example lists three installations of OpenMPI, one built with GCC,
one built with GCC and debug information, and another built with Intel.
If Spack is asked to build a package that uses one of these MPIs as a
dependency, it will use the pre-installed OpenMPI in
the given directory. Note that the specified path is the top-level
install prefix, not the ``bin`` subdirectory.

``packages.yaml`` can also be used to specify modules to load instead
of the installation prefixes. The following example says that module
``CMake/3.7.2`` provides cmake version 3.7.2.

.. code-block:: yaml

   cmake:
     externals:
     - spec: cmake@3.7.2
       modules:
       - CMake/3.7.2

Each ``packages.yaml`` begins with a ``packages:`` attribute, followed
by a list of package names. To specify externals, add an ``externals:``
attribute under the package name, which lists externals.
Each external should specify a ``spec:`` string that should be as
well-defined as reasonably possible. If a
package lacks a spec component, such as missing a compiler or
package version, then Spack will guess the missing component based
on its most-favored packages, and it may guess incorrectly.

Each package version and compiler listed in an external should
have entries in Spack's packages and compiler configuration, even
though the package and compiler may not ever be built.

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Prevent packages from being built from sources
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

Adding an external spec in ``packages.yaml`` allows Spack to use an external location,
but it does not prevent Spack from building packages from sources. In the above example,
Spack might choose for many valid reasons to start building and linking with the
latest version of OpenMPI rather than continue using the pre-installed OpenMPI versions.

To prevent this, the ``packages.yaml`` configuration also allows packages
to be flagged as non-buildable. The previous example could be modified to
be:

.. code-block:: yaml

   packages:
     openmpi:
       externals:
       - spec: "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64"
         prefix: /opt/openmpi-1.4.3
       - spec: "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64+debug"
         prefix: /opt/openmpi-1.4.3-debug
       - spec: "openmpi@1.6.5%intel@10.1 arch=linux-debian7-x86_64"
         prefix: /opt/openmpi-1.6.5-intel
       buildable: False

The addition of the ``buildable`` flag tells Spack that it should never build
its own version of OpenMPI from sources, and it will instead always rely on a pre-built
OpenMPI.

.. note::

   If ``concretizer:reuse`` is on (see :ref:`concretizer-options` for more information on that flag)
   pre-built specs include specs already available from a local store, an upstream store, a registered
   buildcache or specs marked as externals in ``packages.yaml``. If ``concretizer:reuse`` is off, only
   external specs in ``packages.yaml`` are included in the list of pre-built specs.

If an external module is specified as not buildable, then Spack will load the
external module into the build environment which can be used for linking.

The ``buildable`` flag does not need to be paired with external packages.
It could also be used alone to forbid packages that may be
buggy or otherwise undesirable.

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Non-buildable virtual packages
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

Virtual packages in Spack can also be specified as not buildable, and
external implementations can be provided. In the example above,
OpenMPI is configured as not buildable, but Spack will often prefer
other MPI implementations over the externally available OpenMPI. Spack
can be configured with every MPI provider not buildable individually,
but more conveniently:

.. code-block:: yaml

   packages:
     mpi:
       buildable: False
     openmpi:
       externals:
       - spec: "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64"
         prefix: /opt/openmpi-1.4.3
       - spec: "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64+debug"
         prefix: /opt/openmpi-1.4.3-debug
       - spec: "openmpi@1.6.5%intel@10.1 arch=linux-debian7-x86_64"
         prefix: /opt/openmpi-1.6.5-intel

Spack can then use any of the listed external implementations of MPI
to satisfy a dependency, and will choose depending on the compiler and
architecture.

In cases where the concretizer is configured to reuse specs, and other ``mpi`` providers
(available via stores or buildcaches) are not wanted, Spack can be configured to require
specs matching only the available externals:

.. code-block:: yaml

   packages:
     mpi:
       buildable: False
       require:
       - one_of: [
           "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64",
           "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64+debug",
           "openmpi@1.6.5%intel@10.1 arch=linux-debian7-x86_64"
         ]
     openmpi:
       externals:
       - spec: "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64"
         prefix: /opt/openmpi-1.4.3
       - spec: "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64+debug"
         prefix: /opt/openmpi-1.4.3-debug
       - spec: "openmpi@1.6.5%intel@10.1 arch=linux-debian7-x86_64"
         prefix: /opt/openmpi-1.6.5-intel

This configuration prevents any spec using MPI and originating from stores or buildcaches from being reused,
unless it matches the requirements under ``packages:mpi:require``. For more information on requirements see
:ref:`package-requirements`.

.. _cmd-spack-external-find:

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Automatically Find External Packages
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

You can run the :ref:`spack external find <spack-external-find>` command
to search for system-provided packages and add them to ``packages.yaml``.
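For instance, a sketch of detecting a system CMake (the exact output and detected version depend on the host):

.. code-block:: console

   $ spack external find cmake
   ==> The following specs have been detected on this system and added to /home/user/.spack/packages.yaml
   cmake@3.17.2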
After running this command your ``packages.yaml`` may include new entries:

.. code-block:: yaml

   packages:
     cmake:
       externals:
       - spec: cmake@3.17.2
         prefix: /usr

Generally this is useful for detecting a small set of commonly-used packages;
for now this is limited to finding build-only dependencies.
Specific limitations include:

* Packages are not discoverable by default: For a package to be
  discoverable with ``spack external find``, it needs to add special
  logic. See :ref:`here <make-package-findable>` for more details.
* The logic does not search through module files, it can only detect
  packages with executables defined in ``PATH``; you can help Spack locate
  externals which use module files by loading any associated modules for
  packages that you want Spack to know about before running
  ``spack external find``.
* Spack does not overwrite existing entries in the package configuration:
  If there is an external defined for a spec at any configuration scope,
  then Spack will not add a new external entry (``spack config blame packages``
  can help locate all external entries).

.. _concretizer-options:

----------------------
Concretizer options
----------------------

``packages.yaml`` gives the concretizer preferences for specific packages,
but you can also use ``concretizer.yaml`` to customize aspects of the
algorithm it uses to select the dependencies you install:

.. literalinclude:: _spack_root/etc/spack/defaults/concretizer.yaml
   :language: yaml

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Reuse already installed packages
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

The ``reuse`` attribute controls whether Spack will prefer to use installed packages (``true``), or
whether it will do a "fresh" installation and prefer the latest settings from
@@ -37,15 +265,11 @@ to enable reuse for a single installation, and you can use:

   spack install --fresh <spec>

to do a fresh install if ``reuse`` is enabled by default.
``reuse: true`` is the default.

.. seealso::

   FAQ: :ref:`Why does Spack pick particular versions and variants? <faq-concretizer-precedence>`

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Selection of the target microarchitectures
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

The options under the ``targets`` attribute control which targets are considered during a solve.
Currently the options in this section are only configurable from the ``concretizer.yaml`` file
@@ -78,28 +302,321 @@ microarchitectures considered during the solve are constrained to be compatible
host Spack is currently running on. For instance, if this option is set to ``true``, a
user cannot concretize for ``target=icelake`` while running on a Haswell node.

---------------
Duplicate nodes
---------------

The ``duplicates`` attribute controls whether the DAG can contain multiple configurations of
the same package. This is mainly relevant for build dependencies, which may have their version
pinned by some nodes, and thus be required at different versions by different nodes in the same
DAG.

The ``strategy`` option controls how the solver deals with duplicates. If the value is ``none``,
then a single configuration per package is allowed in the DAG. This means, for instance, that only
a single ``cmake`` or a single ``py-setuptools`` version is allowed. The result would be a slightly
faster concretization, at the expense of making a few specs unsolvable.

If the value is ``minimal`` Spack will allow packages tagged as ``build-tools`` to have duplicates.
This allows, for instance, to concretize specs whose nodes require different, and incompatible, ranges
of some build tool. For instance, in the figure below the latest ``py-shapely`` requires a newer ``py-setuptools``,
while ``py-numpy`` still needs an older version:

.. figure:: images/shapely_duplicates.svg
   :scale: 70 %
   :align: center

Up to Spack v0.20 ``duplicates:strategy:none`` was the default (and only) behavior. From Spack v0.21 the
default behavior is ``duplicates:strategy:minimal``.
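Expressed in ``concretizer.yaml``, reverting to the old behavior is a one-line change (a sketch, mirroring the defaults file shown earlier):

.. code-block:: yaml

   concretizer:
     duplicates:
       strategy: none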
.. _package-requirements:

--------------------
Package Requirements
--------------------

Spack can be configured to always use certain compilers, package
versions, and variants during concretization through package
requirements.

Package requirements are useful when you find yourself repeatedly
specifying the same constraints on the command line, and wish that
Spack respects these constraints whether you mention them explicitly
or not. Another use case is specifying constraints that should apply
to all root specs in an environment, without having to repeat the
constraint everywhere.

Apart from that, requirements config is more flexible than constraints
on the command line, because it can specify constraints on packages
*when they occur* as a dependency. In contrast, on the command line it
is not possible to specify constraints on dependencies while also keeping
those dependencies optional.

^^^^^^^^^^^^^^^^^^^
Requirements syntax
^^^^^^^^^^^^^^^^^^^
The package requirements configuration is specified in ``packages.yaml``,
keyed by package name and expressed using the Spec syntax. In the simplest
case you can specify attributes that you always want the package to have
by providing a single spec string to ``require``:

.. code-block:: yaml

   packages:
     libfabric:
       require: "@1.13.2"

In the above example, ``libfabric`` will always build with version 1.13.2. If you
need to compose multiple configuration scopes ``require`` accepts a list of
strings:

.. code-block:: yaml

   packages:
     libfabric:
       require:
       - "@1.13.2"
       - "%gcc"

In this case ``libfabric`` will always build with version 1.13.2 **and** using GCC
as a compiler.

For more complex use cases, ``require`` also accepts a list of objects. These objects
must have either an ``any_of`` or a ``one_of`` field, containing a list of spec strings,
and they can optionally have a ``when`` and a ``message`` attribute:

.. code-block:: yaml

   packages:
     openmpi:
       require:
       - any_of: ["@4.1.5", "%gcc"]
         message: "in this example only 4.1.5 can build with other compilers"

``any_of`` is a list of specs. One of those specs must be satisfied
and it is also allowed for the concretized spec to match more than one.
In the above example, that means you could build ``openmpi@4.1.5%gcc``,
``openmpi@4.1.5%clang`` or ``openmpi@3.9%gcc``, but
not ``openmpi@3.9%clang``.

If a custom message is provided, and the requirement is not satisfiable,
Spack will print the custom error message:

.. code-block:: console

   $ spack spec openmpi@3.9%clang
   ==> Error: in this example only 4.1.5 can build with other compilers

We could express a similar requirement using the ``when`` attribute:

.. code-block:: yaml

   packages:
     openmpi:
       require:
       - any_of: ["%gcc"]
         when: "@:4.1.4"
         message: "in this example only 4.1.5 can build with other compilers"

In the example above, if the version turns out to be 4.1.4 or less, we require the compiler to be GCC.
For readability, Spack also allows a ``spec`` key accepting a string when there is only a single
constraint:

.. code-block:: yaml

   packages:
     openmpi:
       require:
       - spec: "%gcc"
         when: "@:4.1.4"
         message: "in this example only 4.1.5 can build with other compilers"

This code snippet and the one before it are semantically equivalent.

Finally, instead of ``any_of`` you can use ``one_of`` which also takes a list of specs. The final
concretized spec must match one and only one of them:

.. code-block:: yaml

   packages:
     mpich:
       require:
       - one_of: ["+cuda", "+rocm"]

In the example above, that means you could build ``mpich+cuda`` or ``mpich+rocm`` but not ``mpich+cuda+rocm``.

.. note::

   For ``any_of`` and ``one_of``, the order of specs indicates a
   preference: items that appear earlier in the list are preferred
   (note that these preferences can be ignored in favor of others).

.. note::

   When using a conditional requirement, Spack is allowed to actively avoid the triggering
   condition (the ``when=...`` spec) if that leads to a concrete spec with better scores in
   the optimization criteria. To check the current optimization criteria and their
   priorities you can run ``spack solve zlib``.

^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Setting default requirements
^^^^^^^^^^^^^^^^^^^^^^^^^^^^

You can also set default requirements for all packages under ``all``
like this:

.. code-block:: yaml

   packages:
     all:
       require: '%clang'

which means every spec will be required to use ``clang`` as a compiler.

Note that in this case ``all`` represents a *default set of requirements* -
if there are specific package requirements, then the default requirements
under ``all`` are disregarded. For example, with a configuration like this:

.. code-block:: yaml

   packages:
     all:
       require: '%clang'
     cmake:
       require: '%gcc'

Spack requires ``cmake`` to use ``gcc`` and all other nodes (including ``cmake``
dependencies) to use ``clang``.

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Setting requirements on virtual specs
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

A requirement on a virtual spec applies whenever that virtual is present in the DAG.
This can be useful for fixing which virtual provider you want to use:

.. code-block:: yaml

   packages:
     mpi:
       require: 'mvapich2 %gcc'

With the configuration above the only allowed ``mpi`` provider is ``mvapich2 %gcc``.

Requirements on the virtual spec and on the specific provider are both applied, if
present. For instance with a configuration like:

.. code-block:: yaml

   packages:
     mpi:
       require: 'mvapich2 %gcc'
     mvapich2:
       require: '~cuda'

you will use ``mvapich2~cuda %gcc`` as an ``mpi`` provider.

.. _package-preferences:

-------------------
Package Preferences
-------------------

In some cases package requirements can be too strong, and package
preferences are the better option. Package preferences do not impose
constraints on packages for particular versions or variants values,
they rather only set defaults -- the concretizer is free to change
them if it must due to other constraints. Also note that package
preferences are of lower priority than reuse of already installed
packages.

Here's an example ``packages.yaml`` file that sets preferred packages:

.. code-block:: yaml

   packages:
     opencv:
       compiler: [gcc@4.9]
       variants: +debug
     gperftools:
       version: [2.2, 2.4, 2.3]
     all:
       compiler: [gcc@4.4.7, 'gcc@4.6:', intel, clang, pgi]
       target: [sandybridge]
       providers:
         mpi: [mvapich2, mpich, openmpi]

At a high level, this example is specifying how packages are preferably
concretized. The opencv package should prefer using GCC 4.9 and
be built with debug options. The gperftools package should prefer version
2.2 over 2.4. Every package on the system should prefer mvapich2 for
its MPI and GCC 4.4.7 (except for opencv, which overrides this by preferring GCC 4.9).
These options are used to fill in implicit defaults. Any of them can be overwritten
on the command line if explicitly requested.

Package preferences accept the following keys or components under
the specific package (or ``all``) section: ``compiler``, ``variants``,
``version``, ``providers``, and ``target``. Each component has an
ordered list of spec ``constraints``, with earlier entries in the
list being preferred over later entries.

Sometimes a package installation may have constraints that forbid
the first concretization rule, in which case Spack will use the first
legal concretization rule. Going back to the example, if a user
requests gperftools 2.3 or later, then Spack will install version 2.4
as the 2.4 version of gperftools is preferred over 2.3.

An explicit concretization rule in the preferred section will always
take preference over unlisted concretizations. In the above example,
xlc isn't listed in the compiler list. Every listed compiler from
gcc to pgi will thus be preferred over the xlc compiler.

The syntax for the ``provider`` section differs slightly from other
concretization rules. A provider lists a value that packages may
``depends_on`` (e.g., MPI) and a list of rules for fulfilling that
dependency.

.. _package_permissions:

-------------------
Package Permissions
-------------------

Spack can be configured to assign permissions to the files installed
by a package.

In the ``packages.yaml`` file under ``permissions``, the attributes
``read``, ``write``, and ``group`` control the package
permissions. These attributes can be set per-package, or for all
packages under ``all``. If permissions are set under ``all`` and for a
specific package, the package-specific settings take precedence.

The ``read`` and ``write`` attributes take one of ``user``, ``group``,
and ``world``.

.. code-block:: yaml

   packages:
     all:
       permissions:
         write: group
         group: spack
     my_app:
       permissions:
         read: group
         group: my_team

The permissions settings describe the broadest level of access to
installations of the specified packages. The execute permissions of
the file are set to the same level as read permissions for those files
that are executable. The default setting for ``read`` is ``world``,
and for ``write`` is ``user``. In the example above, installations of
``my_app`` will be installed with user and group permissions but no
world permissions, and owned by the group ``my_team``. All other
packages will be installed with user and group write privileges, and
world read privileges. Those packages will be owned by the group
``spack``.

The ``group`` attribute assigns a Unix-style group to a package. All
files installed by the package will be owned by the assigned group,
and the sticky group bit will be set on the install prefix and all
directories inside the install prefix. This will ensure that even
manually placed files within the install prefix are owned by the
assigned group. If no group is assigned, Spack will allow the OS
default behavior to go as expected.
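As a rough illustration (the path, owner, and timestamp here are hypothetical), the install prefix of ``my_app`` configured as above would end up group-owned with the setgid bit set and no world access:

.. code-block:: console

   $ ls -ld $(spack location -i my_app)
   drwxr-s--- 7 spackuser my_team 4096 Jan  1 00:00 /opt/spack/.../my_app-1.0-abcdef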
----------------------------
Assigning Package Attributes
----------------------------

You can assign class-level attributes in the configuration:

.. code-block:: yaml

   packages:
     mpileaks:
       # Override existing attributes
       url: http://www.somewhereelse.com/mpileaks-1.0.tar.gz
       # ... or add new ones
       x: 1

Attributes set this way will be accessible to any method executed
in the package.py file (e.g. the ``install()`` method). Values for these
attributes may be any value parseable by yaml.

These can only be applied to specific packages, not "all" or
virtual packages.
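A sketch of how these attributes surface inside the package class (the method body is hypothetical):

.. code-block:: python

   class Mpileaks(Package):
       def install(self, spec, prefix):
           # both values come from the packages.yaml entry above
           print(self.url)  # -> http://www.somewhereelse.com/mpileaks-1.0.tar.gz
           print(self.x)    # -> 1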
@@ -127,9 +127,9 @@ check out a commit from the ``master`` branch, you would want to add:

.. code-block:: python

   depends_on("autoconf", type="build", when="@master")
   depends_on("automake", type="build", when="@master")
   depends_on("libtool", type="build", when="@master")

It is typically redundant to list the ``m4`` macro processor package as a
dependency, since ``autoconf`` already depends on it.

@@ -145,7 +145,7 @@ example, the ``bash`` shell is used to run the ``autogen.sh`` script.

.. code-block:: python

   def autoreconf(self, spec, prefix):
       which("bash")("autogen.sh")

"""""""""""""""""""""""""""""""""""""""
patching configure or Makefile.in files
"""""""""""""""""""""""""""""""""""""""

@@ -186,9 +186,9 @@ To opt out of this feature, use the following setting:

To enable it conditionally on different architectures, define a property and
make the package depend on ``gnuconfig`` as a build dependency:

.. code-block:: python

   depends_on("gnuconfig", when="@1.0:")

   @property
   def patch_config_files(self):

@@ -230,7 +230,7 @@ version, this can be done like so:

   @property
   def force_autoreconf(self):
       return self.version == Version("1.2.3")

^^^^^^^^^^^^^^^^^^^^^^^
Finding configure flags
^^^^^^^^^^^^^^^^^^^^^^^

@@ -278,22 +278,13 @@ function like so:

   def configure_args(self):
       args = []

       if self.spec.satisfies("+mpi"):
           args.append("--enable-mpi")
       else:
           args.append("--disable-mpi")

       return args

Alternatively, you can use the :ref:`enable_or_disable <autotools_enable_or_disable>` helper:

.. code-block:: python

   def configure_args(self):
       return [self.enable_or_disable("mpi")]

Note that we are explicitly disabling MPI support if it is not
requested. This is important, as many Autotools packages will enable
options by default if the dependencies are found, and disable them

@@ -304,11 +295,9 @@ and `here <https://wiki.gentoo.org/wiki/Project:Quality_Assurance/Automagic_depe
for a rationale as to why these so-called "automagic" dependencies
are a problem.

.. note::

   By default, Autotools installs packages to ``/usr``. We don't want this,
   so Spack automatically adds ``--prefix=/path/to/installation/prefix``
   to your list of ``configure_args``. You don't need to add this yourself.

^^^^^^^^^^^^^^^^
Helper functions
^^^^^^^^^^^^^^^^

@@ -319,8 +308,6 @@ You may have noticed that most of the Autotools flags are of the form
``--without-baz``. Since these flags are so common, Spack provides a
couple of helper functions to make your life easier.

.. _autotools_enable_or_disable:

"""""""""""""""""
enable_or_disable
"""""""""""""""""

@@ -332,11 +319,11 @@ typically used to enable or disable some feature within the package.

.. code-block:: python

   variant(
       "memchecker",
       default=False,
       description="Memchecker support for debugging [degrades performance]"
   )
   config_args.extend(self.enable_or_disable("memchecker"))

In this example, specifying the variant ``+memchecker`` will generate
the following configuration options:

@@ -356,15 +343,15 @@ the ``with_or_without`` method.

.. code-block:: python

   variant(
       "schedulers",
       values=disjoint_sets(
           ("auto",), ("alps", "lsf", "tm", "slurm", "sge", "loadleveler")
       ).with_non_feature_values("auto", "none"),
       description="List of schedulers for which support is enabled; "
                   "'auto' lets openmpi determine",
   )
   if not spec.satisfies("schedulers=auto"):
       config_args.extend(self.with_or_without("schedulers"))

In this example, specifying the variant ``schedulers=slurm,sge`` will
generate the following configuration options:

@@ -389,16 +376,16 @@ generated, using the ``activation_value`` argument to

.. code-block:: python

   variant(
       "fabrics",
       values=disjoint_sets(
           ("auto",), ("psm", "psm2", "verbs", "mxm", "ucx", "libfabric")
       ).with_non_feature_values("auto", "none"),
       description="List of fabrics that are enabled; "
                   "'auto' lets openmpi determine",
   )
   if not spec.satisfies("fabrics=auto"):
       config_args.extend(self.with_or_without("fabrics",
                                               activation_value="prefix"))

``activation_value`` accepts a callable that generates the configure
parameter value given the variant value; but the special value

@@ -422,16 +409,16 @@ When Spack variants and configure flags do not correspond one-to-one, the

.. code-block:: python

   variant("debug_tools", default=False)
   config_args += self.enable_or_disable("debug-tools", variant="debug_tools")

Or when one variant controls multiple flags:

.. code-block:: python

   variant("debug_tools", default=False)
   config_args += self.with_or_without("memchecker", variant="debug_tools")
   config_args += self.with_or_without("profiler", variant="debug_tools")

""""""""""""""""""""

@@ -445,8 +432,8 @@ For example:

.. code-block:: python

   variant("profiler", when="@2.0:")
   config_args += self.with_or_without("profiler")

will neither add ``--with-profiler`` nor ``--without-profiler`` when the version is
below ``2.0``.

@@ -465,10 +452,10 @@ the variant values require atypical behavior.

   def with_or_without_verbs(self, activated):
       # Up through version 1.6, this option was named --with-openib.
       # In version 1.7, it was renamed to be --with-verbs.
       opt = "verbs" if self.spec.satisfies("@1.7:") else "openib"
       if not activated:
           return f"--without-{opt}"
       return f"--with-{opt}={self.spec['rdma-core'].prefix}"

Defining ``with_or_without_verbs`` overrides the behavior of a
``fabrics=verbs`` variant, changing the configure-time option to

@@ -492,7 +479,7 @@ do this like so:

.. code-block:: python

   configure_directory = "src"

^^^^^^^^^^^^^^^^^^^^^^
Building out of source
^^^^^^^^^^^^^^^^^^^^^^

@@ -504,7 +491,7 @@ This can be done using the ``build_directory`` variable:

.. code-block:: python

   build_directory = "spack-build"

By default, Spack will build the package in the same directory that
contains the ``configure`` script

@@ -527,8 +514,8 @@ library or build the documentation, you can add these like so:

.. code-block:: python

   build_targets = ["all", "docs"]
   install_targets = ["install", "docs"]

^^^^^^^
Testing
^^^^^^^
@@ -9,32 +9,9 @@
Bundle
------

``BundlePackage`` represents a set of packages that are expected to work
well together, such as a collection of commonly used software libraries.
The associated software is specified as dependencies.

If it makes sense, variants, conflicts, and requirements can be added to
the package. :ref:`Variants <variants>` ensure that common build options
are consistent across the packages supporting them. :ref:`Conflicts
and requirements <packaging_conflicts>` prevent attempts to build with known
bugs or limitations.

For example, if ``MyBundlePackage`` is known to only build on ``linux``,
it could use the ``require`` directive as follows:

.. code-block:: python

   require("platform=linux", msg="MyBundlePackage only builds on linux")

Spack has a number of built-in bundle packages, such as:

* `AmdAocl <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/amd-aocl/package.py>`_
* `EcpProxyApps <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/ecp-proxy-apps/package.py>`_
* `Libc <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/libc/package.py>`_
* `Xsdk <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/xsdk/package.py>`_

where ``Xsdk`` also inherits from ``CudaPackage`` and ``RocmPackage`` and
``Libc`` is a virtual bundle package for the C standard library.
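A minimal sketch of a custom bundle package following the conventions above (the names and versions are hypothetical):

.. code-block:: python

   class MyBundlePackage(BundlePackage):
       """Libraries that are commonly used together."""

       version("1.0")

       depends_on("zlib")
       depends_on("openssl")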
^^^^^^^^
|
||||
|
@@ -87,7 +87,7 @@ A typical usage of these methods may look something like this:

.. code-block:: python

   def initconfig_mpi_entries(self):
   def initconfig_mpi_entries(self)
       # Get existing MPI configurations
       entries = super(Foo, self).initconfig_mpi_entries()

@@ -95,25 +95,25 @@ A typical usage of these methods may look something like this:
       # This spec has an MPI variant, and we need to enable MPI when it is on.
       # This hypothetical package controls MPI with the ``FOO_MPI`` option to
       # cmake.
       if self.spec.satisfies("+mpi"):
           entries.append(cmake_cache_option("FOO_MPI", True, "enable mpi"))
       if '+mpi' in self.spec:
           entries.append(cmake_cache_option('FOO_MPI', True, "enable mpi"))
       else:
           entries.append(cmake_cache_option("FOO_MPI", False, "disable mpi"))
           entries.append(cmake_cache_option('FOO_MPI', False, "disable mpi"))

   def initconfig_package_entries(self):
       # Package specific options
       entries = []

       entries.append("#Entries for build options")
       entries.append('#Entries for build options')

       bar_on = self.spec.satisfies("+bar")
       entries.append(cmake_cache_option("FOO_BAR", bar_on, "toggle bar"))
       bar_on = '+bar' in self.spec
       entries.append(cmake_cache_option('FOO_BAR', bar_on, 'toggle bar'))

       entries.append("#Entries for dependencies")
       entries.append('#Entries for dependencies')

       if self.spec["blas"].name == "baz":  # baz is our blas provider
           entries.append(cmake_cache_string("FOO_BLAS", "baz", "Use baz"))
           entries.append(cmake_cache_path("BAZ_PREFIX", self.spec["baz"].prefix))
       if self.spec['blas'].name == 'baz':  # baz is our blas provider
           entries.append(cmake_cache_string('FOO_BLAS', 'baz', 'Use baz'))
           entries.append(cmake_cache_path('BAZ_PREFIX', self.spec['baz'].prefix))

^^^^^^^^^^^^^^^^^^^^^^
External documentation
@@ -82,7 +82,7 @@ class already contains:

.. code-block:: python

   depends_on("cmake", type="build")
   depends_on('cmake', type='build')


If you need to specify a particular version requirement, you can
@@ -90,7 +90,7 @@ override this in your package:

.. code-block:: python

   depends_on("cmake@2.8.12:", type="build")
   depends_on('cmake@2.8.12:', type='build')


^^^^^^^^^^^^^^^^^^^
@@ -137,10 +137,10 @@ and without the :meth:`~spack.build_systems.cmake.CMakeBuilder.define` and

   def cmake_args(self):
       args = [
           "-DWHATEVER:STRING=somevalue",
           self.define("ENABLE_BROKEN_FEATURE", False),
           self.define_from_variant("DETECT_HDF5", "hdf5"),
           self.define_from_variant("THREADS"),  # True if +threads
           '-DWHATEVER:STRING=somevalue',
           self.define('ENABLE_BROKEN_FEATURE', False),
           self.define_from_variant('DETECT_HDF5', 'hdf5'),
           self.define_from_variant('THREADS'),  # True if +threads
       ]

       return args
@@ -151,10 +151,10 @@ and CMake simply ignores the empty command line argument. For example the follow

.. code-block:: python

   variant("example", default=True, when="@2.0:")
   variant('example', default=True, when='@2.0:')

   def cmake_args(self):
       return [self.define_from_variant("EXAMPLE", "example")]
       return [self.define_from_variant('EXAMPLE', 'example')]

will generate ``'cmake' '-DEXAMPLE=ON' ...`` when ``@2.0: +example`` is met, but will
result in ``'cmake' '' ...`` when the spec version is below ``2.0``.
@@ -193,9 +193,9 @@ a variant to control this:

.. code-block:: python

   variant("build_type", default="RelWithDebInfo",
           description="CMake build type",
           values=("Debug", "Release", "RelWithDebInfo", "MinSizeRel"))
   variant('build_type', default='RelWithDebInfo',
           description='CMake build type',
           values=('Debug', 'Release', 'RelWithDebInfo', 'MinSizeRel'))

However, not every CMake package accepts all four of these options.
Grep the ``CMakeLists.txt`` file to see if the default values are
@@ -205,9 +205,9 @@ package overrides the default variant with:

.. code-block:: python

   variant("build_type", default="DebugRelease",
           description="The build type to build",
           values=("Debug", "Release", "DebugRelease"))
   variant('build_type', default='DebugRelease',
           description='The build type to build',
           values=('Debug', 'Release', 'DebugRelease'))

For more information on ``CMAKE_BUILD_TYPE``, see:
https://cmake.org/cmake/help/latest/variable/CMAKE_BUILD_TYPE.html
@@ -250,7 +250,7 @@ generator is Ninja. To switch to the Ninja generator, simply add:

.. code-block:: python

   generator = "Ninja"
   generator = 'Ninja'


``CMakePackage`` defaults to "Unix Makefiles". If you switch to the
@@ -258,7 +258,7 @@ Ninja generator, make sure to add:

.. code-block:: python

   depends_on("ninja", type="build")
   depends_on('ninja', type='build')

to the package as well. Aside from that, you shouldn't need to do
anything else. Spack will automatically detect that you are using
@@ -288,7 +288,7 @@ like so:

.. code-block:: python

   root_cmakelists_dir = "src"
   root_cmakelists_dir = 'src'


Note that this path is relative to the root of the extracted tarball,
@@ -304,7 +304,7 @@ different sub-directory, simply override ``build_directory`` like so:

.. code-block:: python

   build_directory = "my-build"
   build_directory = 'my-build'

^^^^^^^^^^^^^^^^^^^^^^^^^
Build and install targets
@@ -324,8 +324,8 @@ library or build the documentation, you can add these like so:

.. code-block:: python

   build_targets = ["all", "docs"]
   install_targets = ["install", "docs"]
   build_targets = ['all', 'docs']
   install_targets = ['install', 'docs']

^^^^^^^
Testing
@@ -54,8 +54,8 @@ to terminate such build attempts with a suitable message:

.. code-block:: python

   conflicts("cuda_arch=none", when="+cuda",
             msg="CUDA architecture is required")
   conflicts('cuda_arch=none', when='+cuda',
             msg='CUDA architecture is required')

Similarly, if your software does not support all versions of the property,
you could add ``conflicts`` to your package for those versions. For example,
@@ -66,13 +66,13 @@ custom message should a user attempt such a build:
.. code-block:: python

   unsupported_cuda_archs = [
       "10", "11", "12", "13",
       "20", "21",
       "30", "32", "35", "37"
       '10', '11', '12', '13',
       '20', '21',
       '30', '32', '35', '37'
   ]
   for value in unsupported_cuda_archs:
       conflicts(f"cuda_arch={value}", when="+cuda",
                 msg=f"CUDA architecture {value} is not supported")
       conflicts('cuda_arch={0}'.format(value), when='+cuda',
                 msg='CUDA architecture {0} is not supported'.format(value))

^^^^^^^
Methods
@@ -107,16 +107,16 @@ class of your package. For example, you can add it to your
       spec = self.spec
       args = []
       ...
       if spec.satisfies("+cuda"):
       if '+cuda' in spec:
           # Set up the cuda macros needed by the build
           args.append("-DWITH_CUDA=ON")
           cuda_arch_list = spec.variants["cuda_arch"].value
           args.append('-DWITH_CUDA=ON')
           cuda_arch_list = spec.variants['cuda_arch'].value
           cuda_arch = cuda_arch_list[0]
           if cuda_arch != "none":
               args.append(f"-DCUDA_FLAGS=-arch=sm_{cuda_arch}")
           if cuda_arch != 'none':
               args.append('-DCUDA_FLAGS=-arch=sm_{0}'.format(cuda_arch))
       else:
           # Ensure build with cuda is disabled
           args.append("-DWITH_CUDA=OFF")
           args.append('-DWITH_CUDA=OFF')
       ...
       return args

@@ -125,7 +125,7 @@ You will need to customize options as needed for your build.

This example also illustrates how to check for the ``cuda`` variant using
``self.spec`` and how to retrieve the ``cuda_arch`` variant's value, which
is a list, using ``self.spec.variants["cuda_arch"].value``.
is a list, using ``self.spec.variants['cuda_arch'].value``.

With over 70 packages using ``CudaPackage`` as of January 2021, there are
lots of examples to choose from to get more ideas for using this package.
@@ -57,13 +57,13 @@ If you look at the ``perl`` package, you'll see:

.. code-block:: python

   phases = ["configure", "build", "install"]
   phases = ['configure', 'build', 'install']

Similarly, ``cmake`` defines:

.. code-block:: python

   phases = ["bootstrap", "build", "install"]
   phases = ['bootstrap', 'build', 'install']

If we look at the ``cmake`` example, this tells Spack's ``PackageBase``
class to run the ``bootstrap``, ``build``, and ``install`` functions
@@ -78,7 +78,7 @@ If we look at ``perl``, we see that it defines a ``configure`` method:
.. code-block:: python

   def configure(self, spec, prefix):
       configure = Executable("./Configure")
       configure = Executable('./Configure')
       configure(*self.configure_args())

There is also a corresponding ``configure_args`` function that handles
@@ -92,7 +92,7 @@ phases are pretty simple:
       make()

   def install(self, spec, prefix):
       make("install")
       make('install')

The ``cmake`` package looks very similar, but with a ``bootstrap``
function instead of ``configure``:
@@ -100,14 +100,14 @@ function instead of ``configure``:
.. code-block:: python

   def bootstrap(self, spec, prefix):
       bootstrap = Executable("./bootstrap")
       bootstrap = Executable('./bootstrap')
       bootstrap(*self.bootstrap_args())

   def build(self, spec, prefix):
       make()

   def install(self, spec, prefix):
       make("install")
       make('install')

Again, there is a ``bootstrap_args`` function that determines the
correct bootstrap flags to use.
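
For illustration, such a helper might look like the following sketch (the
flags are hypothetical; real packages compute them from the spec):

.. code-block:: python

   def bootstrap_args(self):
       # Hypothetical flags passed through to ./bootstrap
       return [f"--prefix={self.prefix}", "--parallel=8"]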

@@ -128,16 +128,16 @@ before or after a particular phase. For example, in ``perl``, we see:

.. code-block:: python

   @run_after("install")
   @run_after('install')
   def install_cpanm(self):
       spec = self.spec

       if spec.satisfies("+cpanm"):
           with working_dir(join_path("cpanm", "cpanm")):
               perl = spec["perl"].command
               perl("Makefile.PL")
       if '+cpanm' in spec:
           with working_dir(join_path('cpanm', 'cpanm')):
               perl = spec['perl'].command
               perl('Makefile.PL')
               make()
               make("install")
               make('install')

This extra step automatically installs ``cpanm`` in addition to the
base Perl installation.
@@ -174,10 +174,10 @@ In the ``perl`` package, we can see:

.. code-block:: python

   @run_after("build")
   @run_after('build')
   @on_package_attributes(run_tests=True)
   def test(self):
       make("test")
       make('test')

As you can guess, this runs ``make test`` *after* building the package,
if and only if testing is requested. Again, this is not specific to
@@ -189,7 +189,7 @@ custom build systems, it can be added to existing build systems as well.

.. code-block:: python

   @run_after("install")
   @run_after('install')
   @on_package_attributes(run_tests=True)

works as expected. However, if you reverse the ordering:
@@ -197,7 +197,7 @@ custom build systems, it can be added to existing build systems as well.
.. code-block:: python

   @on_package_attributes(run_tests=True)
   @run_after("install")
   @run_after('install')

the tests will always be run regardless of whether or not
``--test=root`` is requested. See https://github.com/spack/spack/issues/3833
@@ -25,8 +25,8 @@ use Spack to build packages with the tools.
The Spack Python class ``IntelOneapiPackage`` is a base class that is
used by ``IntelOneapiCompilers``, ``IntelOneapiMkl``,
``IntelOneapiTbb`` and other classes to implement the oneAPI
packages. Search for ``oneAPI`` at `<packages.spack.io>`_ for the full
list of available oneAPI packages, or use::
packages. See the :ref:`package-list` for the full list of available
oneAPI packages or use::

   spack list -d oneAPI

@@ -53,24 +53,18 @@ Install the oneAPI compilers::

Add the compilers to your ``compilers.yaml`` so Spack can use them::

   spack compiler add `spack location -i intel-oneapi-compilers`/compiler/latest/bin
   spack compiler add `spack location -i intel-oneapi-compilers`/compiler/latest/linux/bin/intel64
   spack compiler add `spack location -i intel-oneapi-compilers`/compiler/latest/linux/bin

Verify that the compilers are available::

   spack compiler list

Note that 2024 and later releases do not include ``icc``. Before 2024,
the package layout was different::

   spack compiler add `spack location -i intel-oneapi-compilers`/compiler/latest/linux/bin/intel64
   spack compiler add `spack location -i intel-oneapi-compilers`/compiler/latest/linux/bin

The ``intel-oneapi-compilers`` package includes 2 families of
compilers:

* ``intel``: ``icc``, ``icpc``, ``ifort``. Intel's *classic*
  compilers. 2024 and later releases contain ``ifort``, but not
  ``icc`` and ``icpc``.
  compilers.
* ``oneapi``: ``icx``, ``icpx``, ``ifx``. Intel's new generation of
  compilers based on LLVM.
@@ -82,55 +76,6 @@ To build with ``icx``, do ::

   spack install patchelf%oneapi


Using oneAPI Spack environment
-------------------------------

In this example, we build lammps with ``icx`` using a Spack environment for oneAPI packages created by Intel. The
compilers are installed with Spack as in the example above.

Install the oneAPI compilers::

   spack install intel-oneapi-compilers

Add the compilers to your ``compilers.yaml`` so Spack can use them::

   spack compiler add `spack location -i intel-oneapi-compilers`/compiler/latest/bin
   spack compiler add `spack location -i intel-oneapi-compilers`/compiler/latest/bin

Verify that the compilers are available::

   spack compiler list

Clone the `spack-configs <https://github.com/spack/spack-configs>`_ repo and activate the Intel oneAPI CPU environment::

   git clone https://github.com/spack/spack-configs
   spack env activate spack-configs/INTEL/CPU
   spack concretize -f

The `Intel oneAPI CPU environment <https://github.com/spack/spack-configs/blob/main/INTEL/CPU/spack.yaml>`_ contains applications tested and validated by Intel; this list is constantly extended, and it currently supports:

- `Devito <https://www.devitoproject.org/>`_
- `GROMACS <https://www.gromacs.org/>`_
- `HPCG <https://www.hpcg-benchmark.org/>`_
- `HPL <https://netlib.org/benchmark/hpl/>`_
- `LAMMPS <https://www.lammps.org/#gsc.tab=0>`_
- `OpenFOAM <https://www.openfoam.com/>`_
- `Quantum Espresso <https://www.quantum-espresso.org/>`_
- `STREAM <https://www.cs.virginia.edu/stream/>`_
- `WRF <https://github.com/wrf-model/WRF>`_

To build lammps with the oneAPI compiler from this environment, just run::

   spack install lammps

Compiled binaries can be found using::

   spack cd -i lammps

You can do the same for all other applications from this environment.


Using oneAPI MPI to Satisfy a Virtual Dependence
------------------------------------------------------
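
As a quick sketch (the client package is illustrative), requesting oneAPI MPI
as the provider of the virtual ``mpi`` dependency is done on the spec::

   spack install hdf5 +mpi ^intel-oneapi-mpi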
@@ -152,7 +97,8 @@ Compilers
To use the compilers, add some information about the installation to
``compilers.yaml``. For most users, it is sufficient to do::

   spack compiler add /opt/intel/oneapi/compiler/latest/bin
   spack compiler add /opt/intel/oneapi/compiler/latest/linux/bin/intel64
   spack compiler add /opt/intel/oneapi/compiler/latest/linux/bin

Adapt the paths above if you did not install the tools in the default
location. After adding the compilers, using them is the same
@@ -161,12 +107,6 @@ Another option is to manually add the configuration to
``compilers.yaml`` as described in :ref:`Compiler configuration
<compiler-config>`.

Before 2024, the directory structure was different::

   spack compiler add /opt/intel/oneapi/compiler/latest/linux/bin/intel64
   spack compiler add /opt/intel/oneapi/compiler/latest/linux/bin


Libraries
---------
@@ -392,7 +392,7 @@ See section
:ref:`Configuration Scopes <configuration-scopes>`
for an explanation about the different files
and section
:ref:`Build customization <packages-config>`
:ref:`Build customization <build-settings>`
for specifics and examples for ``packages.yaml`` files.

.. If your system administrator did not provide modules for pre-installed Intel
@@ -934,9 +934,9 @@ a *virtual* ``mkl`` package is declared in Spack.
.. code-block:: python

   # Examples for absolute and conditional dependencies:
   depends_on("mkl")
   depends_on("mkl", when="+mkl")
   depends_on("mkl", when="fftw=mkl")
   depends_on('mkl')
   depends_on('mkl', when='+mkl')
   depends_on('mkl', when='fftw=mkl')

The ``MKLROOT`` environment variable (part of the documented API) will be set
during all stages of client package installation, and is available to both
@@ -972,8 +972,8 @@ a *virtual* ``mkl`` package is declared in Spack.
   def configure_args(self):
       args = []
       ...
       args.append("--with-blas=%s" % self.spec["blas"].libs.ld_flags)
       args.append("--with-lapack=%s" % self.spec["lapack"].libs.ld_flags)
       args.append('--with-blas=%s' % self.spec['blas'].libs.ld_flags)
       args.append('--with-lapack=%s' % self.spec['lapack'].libs.ld_flags)
       ...

.. tip::
@@ -989,13 +989,13 @@ a *virtual* ``mkl`` package is declared in Spack.

.. code-block:: python

   self.spec["blas"].headers.include_flags
   self.spec['blas'].headers.include_flags

and to generate linker options (``-L<dir> -llibname ...``), use the same as above,

.. code-block:: python

   self.spec["blas"].libs.ld_flags
   self.spec['blas'].libs.ld_flags

See
:ref:`MakefilePackage <makefilepackage>`
@@ -88,7 +88,7 @@ override the ``luarocks_args`` method like so:
.. code-block:: python

   def luarocks_args(self):
       return ["flag1", "flag2"]
       return ['flag1', 'flag2']

One common use of this is to override warnings or flags for newer compilers, as in:

@@ -59,7 +59,7 @@ using GNU Make, you should add a dependency on ``gmake``:

.. code-block:: python

   depends_on("gmake", type="build")
   depends_on('gmake', type='build')


^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -93,8 +93,8 @@ there are any other variables you need to set, you can do this in the
.. code-block:: python

   def edit(self, spec, prefix):
       env["PREFIX"] = prefix
       env["BLASLIB"] = spec["blas"].libs.ld_flags
       env['PREFIX'] = prefix
       env['BLASLIB'] = spec['blas'].libs.ld_flags


`cbench <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/cbench/package.py>`_
@@ -113,7 +113,7 @@ you can do this like so:

.. code-block:: python

   build_targets = ["CC=cc"]
   build_targets = ['CC=cc']


If you do need access to the spec, you can create a property like so:
@@ -125,8 +125,8 @@ If you do need access to the spec, you can create a property like so:
       spec = self.spec

       return [
           "CC=cc",
           f"BLASLIB={spec['blas'].libs.ld_flags}",
           'CC=cc',
           'BLASLIB={0}'.format(spec['blas'].libs.ld_flags),
       ]

@@ -145,12 +145,12 @@ and a ``filter_file`` method to help with this. For example:
.. code-block:: python

   def edit(self, spec, prefix):
       makefile = FileFilter("Makefile")
       makefile = FileFilter('Makefile')

       makefile.filter(r"^\s*CC\s*=.*", f"CC = {spack_cc}")
       makefile.filter(r"^\s*CXX\s*=.*", f"CXX = {spack_cxx}")
       makefile.filter(r"^\s*F77\s*=.*", f"F77 = {spack_f77}")
       makefile.filter(r"^\s*FC\s*=.*", f"FC = {spack_fc}")
       makefile.filter(r'^\s*CC\s*=.*', 'CC = ' + spack_cc)
       makefile.filter(r'^\s*CXX\s*=.*', 'CXX = ' + spack_cxx)
       makefile.filter(r'^\s*F77\s*=.*', 'F77 = ' + spack_f77)
       makefile.filter(r'^\s*FC\s*=.*', 'FC = ' + spack_fc)


`stream <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/stream/package.py>`_
@@ -181,16 +181,16 @@ well for storing variables:

   def edit(self, spec, prefix):
       config = {
           "CC": "cc",
           "MAKE": "make",
           'CC': 'cc',
           'MAKE': 'make',
       }

       if spec.satisfies("+blas"):
           config["BLAS_LIBS"] = spec["blas"].libs.joined()
       if '+blas' in spec:
           config['BLAS_LIBS'] = spec['blas'].libs.joined()

       with open("make.inc", "w") as inc:
       with open('make.inc', 'w') as inc:
           for key in config:
               inc.write(f"{key} = {config[key]}\n")
               inc.write('{0} = {1}\n'.format(key, config[key]))


`elk <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/elk/package.py>`_
@@ -204,14 +204,14 @@ them in a list:

   def edit(self, spec, prefix):
       config = [
           f"INSTALL_DIR = {prefix}",
           "INCLUDE_DIR = $(INSTALL_DIR)/include",
           "LIBRARY_DIR = $(INSTALL_DIR)/lib",
           'INSTALL_DIR = {0}'.format(prefix),
           'INCLUDE_DIR = $(INSTALL_DIR)/include',
           'LIBRARY_DIR = $(INSTALL_DIR)/lib',
       ]

       with open("make.inc", "w") as inc:
       with open('make.inc', 'w') as inc:
           for var in config:
               inc.write(f"{var}\n")
               inc.write('{0}\n'.format(var))


`hpl <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/hpl/package.py>`_
@@ -284,7 +284,7 @@ can tell Spack where to locate it like so:

.. code-block:: python

   build_directory = "src"
   build_directory = 'src'


^^^^^^^^^^^^^^^^^^^
@@ -299,8 +299,8 @@ install the package:

   def install(self, spec, prefix):
       mkdir(prefix.bin)
       install("foo", prefix.bin)
       install_tree("lib", prefix.lib)
       install('foo', prefix.bin)
       install_tree('lib', prefix.lib)


^^^^^^^^^^^^^^^^^^^^^^

@@ -48,8 +48,8 @@ class automatically adds the following dependencies:

.. code-block:: python

   depends_on("java", type=("build", "run"))
   depends_on("maven", type="build")
   depends_on('java', type=('build', 'run'))
   depends_on('maven', type='build')


In the ``pom.xml`` file, you may see sections like:
@@ -72,8 +72,8 @@ should add:

.. code-block:: python

   depends_on("java@7:", type="build")
   depends_on("maven@3.5.4:", type="build")
   depends_on('java@7:', type='build')
   depends_on('maven@3.5.4:', type='build')


^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -88,9 +88,9 @@ the build phase. For example:

   def build_args(self):
       return [
           "-Pdist,native",
           "-Dtar",
           "-Dmaven.javadoc.skip=true"
           '-Pdist,native',
           '-Dtar',
           '-Dmaven.javadoc.skip=true'
       ]

@@ -86,8 +86,8 @@ the ``MesonPackage`` base class already contains:

.. code-block:: python

   depends_on("meson", type="build")
   depends_on("ninja", type="build")
   depends_on('meson', type='build')
   depends_on('ninja', type='build')


If you need to specify a particular version requirement, you can
@@ -95,8 +95,8 @@ override this in your package:

.. code-block:: python

   depends_on("meson@0.43.0:", type="build")
   depends_on("ninja", type="build")
   depends_on('meson@0.43.0:', type='build')
   depends_on('ninja', type='build')


^^^^^^^^^^^^^^^^^^^
@@ -121,7 +121,7 @@ override the ``meson_args`` method like so:
.. code-block:: python

   def meson_args(self):
       return ["--warnlevel=3"]
       return ['--warnlevel=3']


This method can be used to pass flags as well as variables.
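
For instance, a sketch that passes a Meson project option (a ``-D`` variable)
alongside a plain flag; the option shown is one of Meson's built-ins:

.. code-block:: python

   def meson_args(self):
       # -D settings define Meson options; --warnlevel is an ordinary flag
       return ["--warnlevel=3", "-Ddefault_library=shared"]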

@@ -118,7 +118,7 @@ so ``PerlPackage`` contains:

.. code-block:: python

   extends("perl")
   extends('perl')


If your package requires a specific version of Perl, you should
@@ -132,14 +132,14 @@ properly. If your package uses ``Makefile.PL`` to build, add:

.. code-block:: python

   depends_on("perl-extutils-makemaker", type="build")
   depends_on('perl-extutils-makemaker', type='build')


If your package uses ``Build.PL`` to build, add:

.. code-block:: python

   depends_on("perl-module-build", type="build")
   depends_on('perl-module-build', type='build')


^^^^^^^^^^^^^^^^^
@@ -165,11 +165,11 @@ arguments to ``Makefile.PL`` or ``Build.PL`` by overriding
.. code-block:: python

   def configure_args(self):
       expat = self.spec["expat"].prefix
       expat = self.spec['expat'].prefix

       return [
           "EXPATLIBPATH={0}".format(expat.lib),
           "EXPATINCPATH={0}".format(expat.include),
           'EXPATLIBPATH={0}'.format(expat.lib),
           'EXPATINCPATH={0}'.format(expat.include),
       ]

|
@@ -152,16 +152,16 @@ set. Once set, ``pypi`` will be used to define the ``homepage``,
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
homepage = "https://pypi.org/project/setuptools/"
|
||||
url = "https://pypi.org/packages/source/s/setuptools/setuptools-49.2.0.zip"
|
||||
list_url = "https://pypi.org/simple/setuptools/"
|
||||
homepage = 'https://pypi.org/project/setuptools/'
|
||||
url = 'https://pypi.org/packages/source/s/setuptools/setuptools-49.2.0.zip'
|
||||
list_url = 'https://pypi.org/simple/setuptools/'
|
||||
|
||||
|
||||
is equivalent to:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
pypi = "setuptools/setuptools-49.2.0.zip"
|
||||
pypi = 'setuptools/setuptools-49.2.0.zip'
|
||||
|
||||
|
||||
If a package has a different homepage listed on PyPI, you can
|
||||
@@ -208,7 +208,7 @@ dependencies to your package:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
depends_on("py-setuptools@42:", type="build")
|
||||
depends_on('py-setuptools@42:', type='build')
|
||||
|
||||
|
||||
Note that ``py-wheel`` is already listed as a build dependency in the
|
||||
@@ -232,7 +232,7 @@ Look for dependencies under the following keys:
|
||||
* ``dependencies`` under ``[project]``
|
||||
|
||||
These packages are required for building and installation. You can
|
||||
add them with ``type=("build", "run")``.
|
||||
add them with ``type=('build', 'run')``.
|
||||
|
||||
* ``[project.optional-dependencies]``
|
||||
|
||||
@@ -279,12 +279,12 @@ distutils library, and has almost the exact same API. In addition to
|
||||
* ``setup_requires``
|
||||
|
||||
These packages are usually only needed at build-time, so you can
|
||||
add them with ``type="build"``.
|
||||
add them with ``type='build'``.
|
||||
|
||||
* ``install_requires``
|
||||
|
||||
These packages are required for building and installation. You can
|
||||
add them with ``type=("build", "run")``.
|
||||
add them with ``type=('build', 'run')``.
|
||||
|
||||
* ``extras_require``
|
||||
|
||||
@@ -296,7 +296,7 @@ distutils library, and has almost the exact same API. In addition to
|
||||
|
||||
These are packages that are required to run the unit tests for the
|
||||
package. These dependencies can be specified using the
|
||||
``type="test"`` dependency type. However, the PyPI tarballs rarely
|
||||
``type='test'`` dependency type. However, the PyPI tarballs rarely
|
||||
contain unit tests, so there is usually no reason to add these.
|
||||
|
||||
See https://setuptools.pypa.io/en/latest/userguide/dependency_management.html
|
||||
@@ -321,7 +321,7 @@ older versions of flit may use the following keys:
|
||||
* ``requires`` under ``[tool.flit.metadata]``
|
||||
|
||||
These packages are required for building and installation. You can
|
||||
add them with ``type=("build", "run")``.
|
||||
add them with ``type=('build', 'run')``.
|
||||
|
||||
* ``[tool.flit.metadata.requires-extra]``
|
||||
|
||||
@@ -434,12 +434,12 @@ the BLAS/LAPACK library you want pkg-config to search for:

.. code-block:: python

   depends_on("py-pip@22.1:", type="build")
   depends_on('py-pip@22.1:', type='build')

   def config_settings(self, spec, prefix):
       return {
           "blas": spec["blas"].libs.names[0],
           "lapack": spec["lapack"].libs.names[0],
           'blas': spec['blas'].libs.names[0],
           'lapack': spec['lapack'].libs.names[0],
       }

@@ -463,10 +463,10 @@ has an optional dependency on ``libyaml`` that can be enabled like so:

   def global_options(self, spec, prefix):
       options = []
       if spec.satisfies("+libyaml"):
           options.append("--with-libyaml")
       if '+libyaml' in spec:
           options.append('--with-libyaml')
       else:
           options.append("--without-libyaml")
           options.append('--without-libyaml')
       return options

@@ -492,10 +492,10 @@ allows you to specify the directories to search for ``libyaml``:

   def install_options(self, spec, prefix):
       options = []
       if spec.satisfies("+libyaml"):
       if '+libyaml' in spec:
           options.extend([
               spec["libyaml"].libs.search_flags,
               spec["libyaml"].headers.include_flags,
               spec['libyaml'].libs.search_flags,
               spec['libyaml'].headers.include_flags,
           ])
       return options

@@ -556,7 +556,7 @@ detected are wrong, you can provide the names yourself by overriding

.. code-block:: python

   import_modules = ["six"]
   import_modules = ['six']


Sometimes the list of module names to import depends on how the
@@ -571,9 +571,9 @@ This can be expressed like so:

   @property
   def import_modules(self):
       modules = ["yaml"]
       if self.spec.satisfies("+libyaml"):
           modules.append("yaml.cyaml")
       modules = ['yaml']
       if '+libyaml' in self.spec:
           modules.append('yaml.cyaml')
       return modules

@@ -586,14 +586,14 @@ Instead of defining the ``import_modules`` explicitly, only the subset
of module names to be skipped can be defined by using ``skip_modules``.
If a defined module has submodules, they are skipped as well, e.g.,
in case the ``plotting`` modules should be excluded from the
automatically detected ``import_modules`` ``["nilearn", "nilearn.surface",
"nilearn.plotting", "nilearn.plotting.data"]`` set:
automatically detected ``import_modules`` ``['nilearn', 'nilearn.surface',
'nilearn.plotting', 'nilearn.plotting.data']`` set:

.. code-block:: python

   skip_modules = ["nilearn.plotting"]
   skip_modules = ['nilearn.plotting']

This will set ``import_modules`` to ``["nilearn", "nilearn.surface"]``
This will set ``import_modules`` to ``['nilearn', 'nilearn.surface']``

Import tests can be run during the installation using ``spack install
--test=root`` or at any time after the installation using
@@ -612,11 +612,11 @@ after the ``install`` phase:

.. code-block:: python

   @run_after("install")
   @run_after('install')
   @on_package_attributes(run_tests=True)
   def install_test(self):
       with working_dir("spack-test", create=True):
           python("-c", "import numpy; numpy.test('full', verbose=2)")
       with working_dir('spack-test', create=True):
           python('-c', 'import numpy; numpy.test("full", verbose=2)')

when testing is enabled during the installation (i.e., ``spack install
@@ -638,7 +638,7 @@ provides Python bindings in a ``python`` directory, you can use:

.. code-block:: python

   build_directory = "python"
   build_directory = 'python'


^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -83,7 +83,7 @@ base class already contains:

.. code-block:: python

   depends_on("qt", type="build")
   depends_on('qt', type='build')


If you want to specify a particular version requirement, or need to
@@ -91,7 +91,7 @@ link to the ``qt`` libraries, you can override this in your package:

.. code-block:: python

   depends_on("qt@5.6.0:")
   depends_on('qt@5.6.0:')

^^^^^^^^^^^^^^^^^^^^^^^^^^
Passing arguments to qmake
@@ -103,7 +103,7 @@ override the ``qmake_args`` method like so:
.. code-block:: python

   def qmake_args(self):
       return ["-recursive"]
       return ['-recursive']


This method can be used to pass flags as well as variables.
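
As a sketch, a ``qmake`` variable can be assigned on the command line next to
a flag (the variable shown is illustrative):

.. code-block:: python

   def qmake_args(self):
       # qmake accepts VAR=value assignments as arguments
       return ["-recursive", f"PREFIX={self.prefix}"]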

@@ -118,7 +118,7 @@ sub-directory by adding the following to the package:

.. code-block:: python

   build_directory = "src"
   build_directory = 'src'

^^^^^^^^^^^^^^^^^^^^^^

@@ -81,27 +81,28 @@ class of your package. For example, you can add it to your
   class MyRocmPackage(CMakePackage, ROCmPackage):
       ...
       # Ensure +rocm and amdgpu_targets are passed to dependencies
       depends_on("mydeppackage", when="+rocm")
       depends_on('mydeppackage', when='+rocm')
       for val in ROCmPackage.amdgpu_targets:
           depends_on(f"mydeppackage amdgpu_target={val}",
                      when=f"amdgpu_target={val}")
           depends_on('mydeppackage amdgpu_target={0}'.format(val),
                      when='amdgpu_target={0}'.format(val))
       ...

       def cmake_args(self):
           spec = self.spec
           args = []
           ...
           if spec.satisfies("+rocm"):
           if '+rocm' in spec:
               # Set up the hip macros needed by the build
               args.extend([
                   "-DENABLE_HIP=ON",
                   f"-DHIP_ROOT_DIR={spec['hip'].prefix}"])
               rocm_archs = spec.variants["amdgpu_target"].value
               if "none" not in rocm_archs:
                   args.append(f"-DHIP_HIPCC_FLAGS=--amdgpu-target={','.join(rocm_archs)}")
                   '-DENABLE_HIP=ON',
                   '-DHIP_ROOT_DIR={0}'.format(spec['hip'].prefix)])
               rocm_archs = spec.variants['amdgpu_target'].value
               if 'none' not in rocm_archs:
                   args.append('-DHIP_HIPCC_FLAGS=--amdgpu-target={0}'
                               .format(",".join(rocm_archs)))
           else:
               # Ensure build with hip is disabled
               args.append("-DENABLE_HIP=OFF")
               args.append('-DENABLE_HIP=OFF')
           ...
           return args
       ...
@@ -113,7 +114,7 @@ build.

This example also illustrates how to check for the ``rocm`` variant using
``self.spec`` and how to retrieve the ``amdgpu_target`` variant's value
using ``self.spec.variants["amdgpu_target"].value``.
using ``self.spec.variants['amdgpu_target'].value``.

All five packages using ``ROCmPackage`` as of January 2021 also use the
:ref:`CudaPackage <cudapackage>`. So it is worth looking at those packages
@@ -163,28 +163,28 @@ attributes that can be used to set ``homepage``, ``url``, ``list_url``, and

.. code-block:: python

   cran = "caret"
   cran = 'caret'

is equivalent to:

.. code-block:: python

   homepage = "https://cloud.r-project.org/package=caret"
   url = "https://cloud.r-project.org/src/contrib/caret_6.0-86.tar.gz"
   list_url = "https://cloud.r-project.org/src/contrib/Archive/caret"
   homepage = 'https://cloud.r-project.org/package=caret'
   url = 'https://cloud.r-project.org/src/contrib/caret_6.0-86.tar.gz'
   list_url = 'https://cloud.r-project.org/src/contrib/Archive/caret'

Likewise, the following ``bioc`` attribute:

.. code-block:: python

   bioc = "BiocVersion"
   bioc = 'BiocVersion'

is equivalent to:

.. code-block:: python

   homepage = "https://bioconductor.org/packages/BiocVersion/"
   git = "https://git.bioconductor.org/packages/BiocVersion"
   homepage = 'https://bioconductor.org/packages/BiocVersion/'
   git = 'https://git.bioconductor.org/packages/BiocVersion'


^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -200,7 +200,7 @@ base class contains:

.. code-block:: python

   extends("r")
   extends('r')


Take a close look at the homepage for ``caret``. If you look at the
@@ -209,7 +209,7 @@ You should add this to your package like so:

.. code-block:: python

   depends_on("r@3.2.0:", type=("build", "run"))
   depends_on('r@3.2.0:', type=('build', 'run'))


^^^^^^^^^^^^^^
@@ -227,7 +227,7 @@ and list all of their dependencies in the following sections:
* LinkingTo

As far as Spack is concerned, all 3 of these dependency types
correspond to ``type=("build", "run")``, so you don't have to worry
correspond to ``type=('build', 'run')``, so you don't have to worry
about the details. If you are curious what they mean,
https://github.com/spack/spack/issues/2951 has a pretty good summary:
@@ -330,7 +330,7 @@ the dependency:

.. code-block:: python

   depends_on("r-lattice@0.20:", type=("build", "run"))
   depends_on('r-lattice@0.20:', type=('build', 'run'))


^^^^^^^^^^^^^^^^^^
@@ -361,20 +361,20 @@ like so:
.. code-block:: python

   def configure_args(self):
       mpi_name = self.spec["mpi"].name
       mpi_name = self.spec['mpi'].name

       # The type of MPI. Supported values are:
       # OPENMPI, LAM, MPICH, MPICH2, or CRAY
       if mpi_name == "openmpi":
           Rmpi_type = "OPENMPI"
       elif mpi_name == "mpich":
           Rmpi_type = "MPICH2"
       if mpi_name == 'openmpi':
           Rmpi_type = 'OPENMPI'
       elif mpi_name == 'mpich':
           Rmpi_type = 'MPICH2'
       else:
           raise InstallError("Unsupported MPI type")
           raise InstallError('Unsupported MPI type')

       return [
           "--with-Rmpi-type={0}".format(Rmpi_type),
           "--with-mpi={0}".format(spec["mpi"].prefix),
           '--with-Rmpi-type={0}'.format(Rmpi_type),
           '--with-mpi={0}'.format(spec['mpi'].prefix),
       ]

@@ -84,8 +84,8 @@ The ``*.gemspec`` file may contain something like:

.. code-block:: ruby

   summary = "An implementation of the AsciiDoc text processor and publishing toolchain"
   description = "A fast, open source text processor and publishing toolchain for converting AsciiDoc content to HTML 5, DocBook 5, and other formats."
   summary = 'An implementation of the AsciiDoc text processor and publishing toolchain'
   description = 'A fast, open source text processor and publishing toolchain for converting AsciiDoc content to HTML 5, DocBook 5, and other formats.'


Either of these can be used for the description of the Spack package.
@@ -98,7 +98,7 @@ The ``*.gemspec`` file may contain something like:

.. code-block:: ruby

   homepage = "https://asciidoctor.org"
   homepage = 'https://asciidoctor.org'


This should be used as the official homepage of the Spack package.
@@ -112,21 +112,21 @@ the base class contains:

.. code-block:: python

   extends("ruby")
   extends('ruby')


The ``*.gemspec`` file may contain something like:

.. code-block:: ruby

   required_ruby_version = ">= 2.3.0"
   required_ruby_version = '>= 2.3.0'


This can be added to the Spack package using:

.. code-block:: python

   depends_on("ruby@2.3.0:", type=("build", "run"))
   depends_on('ruby@2.3.0:', type=('build', 'run'))


^^^^^^^^^^^^^^^^^
@@ -57,7 +57,7 @@ overridden like so:
.. code-block:: python

   def test(self):
       scons("check")
       scons('check')


^^^^^^^^^^^^^^^
@@ -88,7 +88,7 @@ base class already contains:

.. code-block:: python

   depends_on("scons", type="build")
   depends_on('scons', type='build')


If you want to specify a particular version requirement, you can override
@@ -96,7 +96,7 @@ this in your package:

.. code-block:: python

   depends_on("scons@2.3.0:", type="build")
   depends_on('scons@2.3.0:', type='build')


^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -238,14 +238,14 @@ the package build phase. This is done by overriding ``build_args`` like so:

   def build_args(self, spec, prefix):
       args = [
           f"PREFIX={prefix}",
           f"ZLIB={spec['zlib'].prefix}",
           'PREFIX={0}'.format(prefix),
           'ZLIB={0}'.format(spec['zlib'].prefix),
       ]

       if spec.satisfies("+debug"):
           args.append("DEBUG=yes")
       if '+debug' in spec:
           args.append('DEBUG=yes')
       else:
           args.append("DEBUG=no")
           args.append('DEBUG=no')

       return args

@@ -275,8 +275,8 @@ environment variables. For example, cantera has the following option:
   * env_vars: [ string ]
       Environment variables to propagate through to SCons. Either the
       string "all" or a comma separated list of variable names, e.g.
       "LD_LIBRARY_PATH,HOME".
       - default: "LD_LIBRARY_PATH,PYTHONPATH"
       'LD_LIBRARY_PATH,HOME'.
       - default: 'LD_LIBRARY_PATH,PYTHONPATH'


In the case of cantera, using ``env_vars=all`` allows us to use
@@ -32,7 +32,7 @@ By default, these phases run:

.. code-block:: console

   $ sip-build --verbose --target-dir ...
   $ python configure.py --bindir ... --destdir ...
   $ make
   $ make install

@@ -41,30 +41,30 @@ By default, these phases run:
Important files
^^^^^^^^^^^^^^^

Each SIP package comes with a custom configuration file written in Python.
For newer packages, this is called ``project.py``, while in older packages,
it may be called ``configure.py``. This script contains instructions to build
the project.
Each SIP package comes with a custom ``configure.py`` build script,
written in Python. This script contains instructions to build the project.

^^^^^^^^^^^^^^^^^^^^^^^^^
Build system dependencies
^^^^^^^^^^^^^^^^^^^^^^^^^

``SIPPackage`` requires several dependencies. Python and SIP are needed at build-time
to run the aforementioned configure script. Python is also needed at run-time to
actually use the installed Python library. And as we are building Python bindings
for C/C++ libraries, Python is also needed as a link dependency. All of these
dependencies are automatically added via the base class.
``SIPPackage`` requires several dependencies. Python is needed to run
the ``configure.py`` build script, and to run the resulting Python
libraries. Qt is needed to provide the ``qmake`` command. SIP is also
needed to build the package. All of these dependencies are automatically
added via the base class.

.. code-block:: python

   extends("python", type=("build", "link", "run"))
   depends_on("py-sip", type="build")
   extends('python')

   depends_on('qt', type='build')

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Passing arguments to ``sip-build``
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
   depends_on('py-sip', type='build')

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Passing arguments to ``configure.py``
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

Each phase comes with a ``<phase_args>`` function that can be used to pass
arguments to that particular phase. For example, if you need to pass
@@ -72,11 +72,11 @@ arguments to the configure phase, you can use:

.. code-block:: python

   def configure_args(self):
       return ["--no-python-dbus"]
   def configure_args(self, spec, prefix):
       return ['--no-python-dbus']


A list of valid options can be found by running ``sip-build --help``.
A list of valid options can be found by running ``python configure.py --help``.

^^^^^^^
Testing
@@ -124,7 +124,7 @@ are wrong, you can provide the names yourself by overriding

.. code-block:: python

   import_modules = ["PyQt5"]
   import_modules = ['PyQt5']


These tests often catch missing dependencies and non-RPATHed
@@ -63,8 +63,8 @@ run package-specific unit tests.
.. code-block:: python

   def installtest(self):
       with working_dir("test"):
           pytest = which("py.test")
       with working_dir('test'):
           pytest = which('py.test')
           pytest()


@@ -93,7 +93,7 @@ the following dependency automatically:

.. code-block:: python

   depends_on("python@2.5:", type="build")
   depends_on('python@2.5:', type='build')


Waf only supports Python 2.5 and up.
@@ -113,7 +113,7 @@ phase, you can use:
       args = []

       if self.run_tests:
           args.append("--test")
           args.append('--test')

       return args

@@ -48,6 +48,9 @@
os.environ["COLIFY_SIZE"] = "25x120"
os.environ["COLUMNS"] = "120"

# Generate full package list if needed
subprocess.call(["spack", "list", "--format=html", "--update=package_list.html"])

# Generate a command index if an update is needed
subprocess.call(
    [
@@ -94,7 +97,9 @@ class PatchedPythonDomain(PythonDomain):
    def resolve_xref(self, env, fromdocname, builder, typ, target, node, contnode):
        if "refspecific" in node:
            del node["refspecific"]
        return super().resolve_xref(env, fromdocname, builder, typ, target, node, contnode)
        return super(PatchedPythonDomain, self).resolve_xref(
            env, fromdocname, builder, typ, target, node, contnode
        )


#
@@ -144,6 +149,7 @@ def setup(sphinx):
# Get nice vector graphics
graphviz_output_format = "svg"


# Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"]

@@ -204,7 +210,6 @@ def setup(sphinx):
    ("py:class", "clingo.Control"),
    ("py:class", "six.moves.urllib.parse.ParseResult"),
    ("py:class", "TextIO"),
    ("py:class", "hashlib._Hash"),
    # Spack classes that are private and we don't want to expose
    ("py:class", "spack.provider_index._IndexBase"),
    ("py:class", "spack.repo._PrependFileLoader"),
@@ -212,8 +217,6 @@ def setup(sphinx):
    # Spack classes that intersphinx is unable to resolve
    ("py:class", "spack.version.StandardVersion"),
    ("py:class", "spack.spec.DependencySpec"),
    ("py:class", "spack.spec.InstallStatus"),
    ("py:class", "spack.spec.SpecfileReaderBase"),
    ("py:class", "spack.install_test.Pb"),
]

@@ -230,8 +233,30 @@ def setup(sphinx):
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
# show_authors = False
sys.path.append("./_pygments")
pygments_style = "style.SpackStyle"

# The name of the Pygments (syntax highlighting) style to use.
# We use our own extension of the default style with a few modifications
from pygments.style import Style
from pygments.styles.default import DefaultStyle
from pygments.token import Comment, Generic, Text


class SpackStyle(DefaultStyle):
    styles = DefaultStyle.styles.copy()
    background_color = "#f4f4f8"
    styles[Generic.Output] = "#355"
    styles[Generic.Prompt] = "bold #346ec9"


import pkg_resources

dist = pkg_resources.Distribution(__file__)
sys.path.append(".")  # make 'conf' module findable
ep = pkg_resources.EntryPoint.parse("spack = conf:SpackStyle", dist=dist)
dist._ep_map = {"pygments.styles": {"plugin1": ep}}
pkg_resources.working_set.add(dist)

pygments_style = "spack"

# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
@@ -316,15 +341,16 @@ def setup(sphinx):
# Output file base name for HTML help builder.
htmlhelp_basename = "Spackdoc"


# -- Options for LaTeX output --------------------------------------------------

latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    # 'papersize': 'letterpaper',
    #'papersize': 'letterpaper',
    # The font size ('10pt', '11pt' or '12pt').
    # 'pointsize': '10pt',
    #'pointsize': '10pt',
    # Additional stuff for the LaTeX preamble.
    # 'preamble': '',
    #'preamble': '',
}

# Grouping the document tree into LaTeX files. List of tuples
@@ -292,29 +292,14 @@ It is also worth noting that:

   non_bindable_shared_objects = ["libinterface.so"]

----------------------
``install_status``
``terminal_title``
----------------------

When set to ``true``, Spack will show information about its current progress
as well as the current and total package numbers. Progress is shown both
in the terminal title and inline. Setting it to ``false`` will not show any
progress information.
When this option is set to ``true``, Spack updates the terminal's title to
provide information about its current progress as well as the current and
total package numbers.

To work properly, this requires your terminal to reset its title after
Spack has finished its work, otherwise Spack's status information will
remain in the terminal's title indefinitely. Most terminals should already
be set up this way and clear Spack's status information.
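
For reference, a minimal sketch of enabling this option, assuming it lives
under the top-level ``config`` section of ``config.yaml``:

.. code-block:: yaml

   config:
     terminal_title: true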

-----------
``aliases``
-----------

Aliases can be used to define new Spack commands. They can either be shortcuts
for longer commands or include specific arguments for convenience. For instance,
if users want to use ``spack install``'s ``-v`` argument all the time, they can
create a new alias called ``inst`` that will always call ``install -v``:

.. code-block:: yaml

   aliases:
     inst: install -v
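
With that alias in place, the two commands below are equivalent::

   spack inst zlib
   spack install -v zlib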

@@ -17,7 +17,7 @@ case you want to skip directly to specific docs:
* :ref:`config.yaml <config-yaml>`
* :ref:`mirrors.yaml <mirrors>`
* :ref:`modules.yaml <modules>`
* :ref:`packages.yaml <packages-config>`
* :ref:`packages.yaml <build-settings>`
* :ref:`repos.yaml <repositories>`

You can also add any of these as inline configuration in the YAML
@@ -243,11 +243,9 @@ lower-precedence settings. Completely ignoring higher-level configuration
options is supported with the ``::`` notation for keys (see
:ref:`config-overrides` below).

There are also special notations for string concatenation and precedence override:

* ``+:`` will force *prepending* strings or lists. For lists, this is the default behavior.
* ``-:`` works similarly, but for *appending* values.

There are also special notations for string concatenation and precedence override.
Using the ``+:`` notation can be used to force *prepending* strings or lists. For lists, this is identical
to the default behavior. Using the ``-:`` works similarly, but for *appending* values.
:ref:`config-prepend-append`
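
As a sketch of the idea (the key is illustrative; see the linked section for
the authoritative syntax), the notation is written as a suffix on a key in a
higher-precedence scope:

.. code-block:: yaml

   config:
     # force-prepend an entry to the list inherited from lower scopes
     build_stage+: [~/my-custom-stage]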

^^^^^^^^^^^
@@ -24,16 +24,6 @@ image, or to set up a proper entrypoint to run the image. These tasks are
usually both necessary and repetitive, so Spack comes with a command
to generate recipes for container images starting from a ``spack.yaml``.

.. seealso::

   This page is a reference for generating recipes to build container images.
   It means that your environment is built from scratch inside the container
   runtime.

   Since v0.21, Spack can also create container images from existing package installations
   on your host system. See :ref:`binary_caches_oci` for more information on
   that topic.

--------------------
A Quick Introduction
--------------------
@@ -222,12 +212,18 @@ under the ``container`` attribute of environments:
     final:
       - libgomp

   # Extra instructions
   extra_instructions:
     final: |
       RUN echo 'export PS1="\[$(tput bold)\]\[$(tput setaf 1)\][gromacs]\[$(tput setaf 2)\]\u\[$(tput sgr0)\]:\w $ "' >> ~/.bashrc

   # Labels for the image
   labels:
     app: "gromacs"
     mpi: "mpich"

A detailed description of the options available can be found in the :ref:`container_config_options` section.
A detailed description of the options available can be found in the
:ref:`container_config_options` section.

-------------------
Setting Base Images
@@ -529,13 +525,6 @@ the example below:
   COPY data /share/myapp/data
   {% endblock %}

The Dockerfile is generated by running:

.. code-block:: console

   $ spack -e /opt/environment containerize

Note that the environment must be active for Spack to read the template.
The recipe that gets generated contains the two extra instructions that we added in our template extension:

.. code-block:: Dockerfile
@@ -647,7 +636,7 @@ to customize the generation of container recipes:
     - No
   * - ``os_packages:command``
     - Tool used to manage system packages
     - ``apt``, ``yum``, ``dnf``, ``dnf_epel``, ``zypper``, ``apk``, ``yum_amazon``
     - ``apt``, ``yum``, ``zypper``, ``apk``, ``yum_amazon``
     - Only with custom base images
   * - ``os_packages:update``
     - Whether or not to update the list of available packages
@@ -310,11 +310,53 @@ Once all of the dependencies are installed, you can try building the documentati
   $ make clean
   $ make

If you see any warning or error messages, you will have to correct those before your PR
is accepted. If you are editing the documentation, you should be running the
documentation tests to make sure there are no errors. Documentation changes can result
in some obfuscated warning messages. If you don't understand what they mean, feel free
to ask when you submit your PR.
If you see any warning or error messages, you will have to correct those before
your PR is accepted.

If you are editing the documentation, you should obviously be running the
documentation tests. But even if you are simply adding a new package, your
changes could cause the documentation tests to fail:

.. code-block:: console

   package_list.rst:8745: WARNING: Block quote ends without a blank line; unexpected unindent.

At first, this error message will mean nothing to you, since you didn't edit
that file. That is, until you look at line 8745 of the file in question:

.. code-block:: rst

   Description:
      NetCDF is a set of software libraries and self-describing, machine-
      independent data formats that support the creation, access, and sharing
      of array-oriented scientific data.

Our documentation includes :ref:`a list of all Spack packages <package-list>`.
If you add a new package, its docstring is added to this page. The problem in
this case was that the docstring looked like:

.. code-block:: python

   class Netcdf(Package):
       """
       NetCDF is a set of software libraries and self-describing,
       machine-independent data formats that support the creation,
       access, and sharing of array-oriented scientific data.
       """

Docstrings cannot start with a newline character, or else Sphinx will complain.
Instead, they should look like:

.. code-block:: python

   class Netcdf(Package):
       """NetCDF is a set of software libraries and self-describing,
       machine-independent data formats that support the creation,
       access, and sharing of array-oriented scientific data."""

Documentation changes can result in much more obfuscated warning messages.
If you don't understand what they mean, feel free to ask when you submit
your PR.

--------
Coverage
@@ -916,9 +916,9 @@ function, as shown in the example below:

.. code-block:: yaml

   projections:
     zlib: "{name}-{version}"
     ^mpi: "{name}-{version}/{^mpi.name}-{^mpi.version}-{compiler.name}-{compiler.version}"
     all: "{name}-{version}/{compiler.name}-{compiler.version}"
     zlib: {name}-{version}
     ^mpi: {name}-{version}/{^mpi.name}-{^mpi.version}-{compiler.name}-{compiler.version}
     all: {name}-{version}/{compiler.name}-{compiler.version}

The entries in the projections configuration file must all be either
specs or the keyword ``all``. For each spec, the projection used will
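
As a hedged illustration of the quoted form above (the specs shown are
hypothetical), these projections would map installs to prefixes such as:

.. code-block:: text

   zlib@1.2.13%gcc@9.4.0                ->  zlib-1.2.13
   hdf5@1.12.2%gcc@9.4.0 ^mpich@4.0.2   ->  hdf5-1.12.2/mpich-4.0.2-gcc-9.4.0
   bzip2@1.0.8%gcc@9.4.0 (no MPI)       ->  bzip2-1.0.8/gcc-9.4.0

The most specific matching entry applies: ``zlib`` matches its own projection,
specs depending on MPI match ``^mpi``, and everything else falls back to
``all``.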
@@ -1132,11 +1132,11 @@ index once every package is pushed. Note how this target uses the generated

   example/push/%: example/install/%
   	@mkdir -p $(dir $@)
   	$(info About to push $(SPEC) to a buildcache)
   	$(SPACK) -e . buildcache push --allow-root --only=package $(BUILDCACHE_DIR) /$(HASH)
   	$(SPACK) -e . buildcache create --allow-root --only=package --directory $(BUILDCACHE_DIR) /$(HASH)
   	@touch $@

   push: $(addprefix example/push/,$(example/SPACK_PACKAGE_IDS))
   	$(info Updating the buildcache index)
   	$(SPACK) -e . buildcache update-index $(BUILDCACHE_DIR)
   	$(SPACK) -e . buildcache update-index --directory $(BUILDCACHE_DIR)
   	$(info Done!)
   	@touch $@
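
A hedged usage sketch, assuming the ``example`` environment layout used in
this section:

.. code-block:: console

   $ make -j8 push

Because each ``example/push/%`` target depends on the matching
``example/install/%`` target, a single ``make push`` installs every package,
pushes it to the buildcache, and updates the index once at the end.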
@@ -1,77 +0,0 @@
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
   Spack Project Developers. See the top-level COPYRIGHT file for details.

   SPDX-License-Identifier: (Apache-2.0 OR MIT)

==========================
Frequently Asked Questions
==========================

This page contains answers to frequently asked questions about Spack.
If you have questions that are not answered here, feel free to ask on
`Slack <https://slack.spack.io>`_ or `GitHub Discussions
<https://github.com/spack/spack/discussions>`_. If you've learned the
answer to a question that you think should be here, please consider
contributing to this page.

.. _faq-concretizer-precedence:

-----------------------------------------------------
Why does Spack pick particular versions and variants?
-----------------------------------------------------

This question comes up in a variety of forms:

1. Why does Spack seem to ignore my package preferences from ``packages.yaml`` config?
2. Why does Spack toggle a variant instead of using the default from the ``package.py`` file?

The short answer is that Spack always picks an optimal configuration
based on a complex set of criteria\ [#f1]_. These criteria are more nuanced
than always choosing the latest versions or default variants.

.. note::

   As a rule of thumb: requirements + constraints > reuse > preferences > defaults.

The following set of criteria (from lowest to highest precedence) explains
common cases where concretization output may seem surprising at first.

1. :ref:`Package preferences <package-preferences>` configured in ``packages.yaml``
   override variant defaults from ``package.py`` files, and influence the optimal
   ordering of versions. Preferences are specified as follows:

   .. code-block:: yaml

      packages:
        foo:
          version: [1.0, 1.1]
          variants: ~mpi

2. :ref:`Reuse concretization <concretizer-options>` configured in ``concretizer.yaml``
   overrides preferences, since it's typically faster to reuse an existing spec than to
   build a preferred one from sources. When build caches are enabled, specs may be reused
   from a remote location too. Reuse concretization is configured as follows:

   .. code-block:: yaml

      concretizer:
        reuse: dependencies  # other options are 'true' and 'false'

3. :ref:`Package requirements <package-requirements>` configured in ``packages.yaml``,
   and constraints from the command line as well as ``package.py`` files override all
   of the above. Requirements are specified as follows:

   .. code-block:: yaml

      packages:
        foo:
          require:
          - "@1.2: +mpi"

Requirements and constraints restrict the set of possible solutions, while reuse
behavior and preferences influence what an optimal solution looks like.
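
As a hedged illustration with the hypothetical ``foo`` package used above:
even with the preference ``version: [1.0, 1.1]`` in place, adding the
requirement ``@1.2: +mpi`` forces concretization to a 1.2-or-newer,
MPI-enabled ``foo``, since requirements take precedence over preferences:

.. code-block:: console

   $ spack spec foo   # concretizes to foo@1.2 (or newer) with +mpi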
.. rubric:: Footnotes

.. [#f1] The exact list of criteria can be retrieved with the ``spack solve`` command
@@ -1,113 +0,0 @@
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
   Spack Project Developers. See the top-level COPYRIGHT file for details.

   SPDX-License-Identifier: (Apache-2.0 OR MIT)

==========================
Using External GPU Support
==========================

Many packages come with a ``+cuda`` or ``+rocm`` variant. With no added
configuration Spack will download and install the needed components.
It may be preferable to use existing system support: the following sections
help with using a system installation of GPU libraries.

-----------------------------------
Using an External ROCm Installation
-----------------------------------

Spack breaks down ROCm into many separate component packages. The following
is an example ``packages.yaml`` that organizes a consistent set of ROCm
components for use by dependent packages:

.. code-block:: yaml

   packages:
     all:
       compiler: [rocmcc@=5.3.0]
       variants: amdgpu_target=gfx90a
     hip:
       buildable: false
       externals:
       - spec: hip@5.3.0
         prefix: /opt/rocm-5.3.0/hip
     hsa-rocr-dev:
       buildable: false
       externals:
       - spec: hsa-rocr-dev@5.3.0
         prefix: /opt/rocm-5.3.0/
     llvm-amdgpu:
       buildable: false
       externals:
       - spec: llvm-amdgpu@5.3.0
         prefix: /opt/rocm-5.3.0/llvm/
     comgr:
       buildable: false
       externals:
       - spec: comgr@5.3.0
         prefix: /opt/rocm-5.3.0/
     hipsparse:
       buildable: false
       externals:
       - spec: hipsparse@5.3.0
         prefix: /opt/rocm-5.3.0/
     hipblas:
       buildable: false
       externals:
       - spec: hipblas@5.3.0
         prefix: /opt/rocm-5.3.0/
     rocblas:
       buildable: false
       externals:
       - spec: rocblas@5.3.0
         prefix: /opt/rocm-5.3.0/
     rocprim:
       buildable: false
       externals:
       - spec: rocprim@5.3.0
         prefix: /opt/rocm-5.3.0/rocprim/

This is in combination with the following compiler definition:

.. code-block:: yaml

   compilers:
   - compiler:
       spec: rocmcc@=5.3.0
       paths:
         cc: /opt/rocm-5.3.0/bin/amdclang
         cxx: /opt/rocm-5.3.0/bin/amdclang++
         f77: null
         fc: /opt/rocm-5.3.0/bin/amdflang
       operating_system: rhel8
       target: x86_64

This includes the following considerations:

- Each of the listed externals specifies ``buildable: false`` to force Spack
  to use only the externals we defined.
- ``spack external find`` can automatically locate some of the ``hip``/``rocm``
  packages, but not all of them, and furthermore not in a manner that
  guarantees a complementary set if multiple ROCm installations are available.
- The ``prefix`` is the same for several components, but note that others
  require listing one of the subdirectories as a prefix.
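
A hedged way to sanity-check this configuration is to concretize one of the
ROCm components and confirm it resolves to the external rather than to a
fresh build; ``spack spec -I`` marks externals with ``[e]``:

.. code-block:: console

   $ spack spec -I hipblas@5.3.0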
-----------------------------------
Using an External CUDA Installation
-----------------------------------

CUDA is split into fewer components and is simpler to specify:

.. code-block:: yaml

   packages:
     all:
       variants:
       - cuda_arch=70
     cuda:
       buildable: false
       externals:
       - spec: cuda@11.0.2
         prefix: /opt/cuda/cuda-11.0.2/

where ``/opt/cuda/cuda-11.0.2/lib/`` contains ``libcudart.so``.
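
With this in place, a hedged usage sketch (the package name is only an
example of one exposing a ``+cuda`` variant) would be:

.. code-block:: console

   $ spack install hypre +cuda

Since ``cuda_arch=70`` is set as a preference for ``all`` packages in the
``packages.yaml`` above, it does not need to be repeated on the command line.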
Binary file not shown. (image, 296 KiB)
File diff suppressed because it is too large. (image, 108 KiB)
@@ -1,534 +0,0 @@
(Deleted file: a 534-line Graphviz-generated SVG rendering a Spack dependency
graph; nodes include strumpack@7.0.1%gcc@9.4.0 and its dependencies, e.g.
netlib-scalapack@2.2.0, openblas@0.3.21, intel-parallel-studio@cluster.2020.4,
butterflypack@2.2.2, slate@2022.07.00, parmetis@4.0.3, metis@5.1.0, and
cmake@3.25.1, with edges labeled by virtuals such as mpi, blas/lapack, and
scalapack.)
||||
<path fill="none" stroke="#dc143c" stroke-width="2" d="M1118.3686,-1452.5644C1412.6186,-1424.4374 1902.8153,-1376.0432 1985.2814,-1358.1773 2202.963,-1310.7526 2328.6812,-1378.8889 2462.8734,-1198.9948 2494.3641,-1156.0498 2490.7395,-1094.0005 2482.3343,-1049.6791"/>
|
||||
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="2484.7438,-1048.9818 2479.3189,-1039.8812 2477.8845,-1050.3784 2484.7438,-1048.9818"/>
|
||||
<text text-anchor="middle" x="1820.4407" y="-1379.7188" font-family="Times,serif" font-size="14.00" fill="#000000">virtuals=mpi</text>
|
||||
</g>
|
||||
<!-- 4bu62kyfuh4ikdkuyxfxjxanf7e7qopu->gguve5icmo5e4cw5o3hvvfsxremc46if -->
|
||||
<g id="edge32" class="edge">
|
||||
<title>4bu62kyfuh4ikdkuyxfxjxanf7e7qopu->gguve5icmo5e4cw5o3hvvfsxremc46if</title>
|
||||
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M947.2173,-1428.5496C968.7089,-1408.5917 992.2747,-1383.3345 1008.2117,-1356.6861 1067.0588,-1259.8646 1008.3745,-1197.6371 1084.3226,-1110.9351 1110.3076,-1081.7965 1144.7149,-1059.7578 1180.1804,-1043.0531"/>
|
||||
<path fill="none" stroke="#daa520" stroke-width="2" d="M948.5783,-1430.0151C970.1712,-1409.9561 993.737,-1384.6989 1009.9275,-1357.7139 1068.5139,-1258.4924 1009.8295,-1196.2649 1085.8166,-1112.2649 1111.3864,-1083.4807 1145.7936,-1061.442 1181.0322,-1044.8626"/>
|
||||
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="1182.4567,-1046.9607 1190.1008,-1039.6246 1179.5503,-1040.5926 1182.4567,-1046.9607"/>
|
||||
</g>
|
||||
<!-- 5xerf6imlgo4xlubacr4mljacc3edexo -->
|
||||
<g id="node17" class="node">
|
||||
<title>5xerf6imlgo4xlubacr4mljacc3edexo</title>
|
||||
<path fill="#add8e6" stroke="#000000" stroke-width="4" d="M1822.3657,-880.7002C1822.3657,-880.7002 1437.7735,-880.7002 1437.7735,-880.7002 1431.7735,-880.7002 1425.7735,-874.7002 1425.7735,-868.7002 1425.7735,-868.7002 1425.7735,-806.0998 1425.7735,-806.0998 1425.7735,-800.0998 1431.7735,-794.0998 1437.7735,-794.0998 1437.7735,-794.0998 1822.3657,-794.0998 1822.3657,-794.0998 1828.3657,-794.0998 1834.3657,-800.0998 1834.3657,-806.0998 1834.3657,-806.0998 1834.3657,-868.7002 1834.3657,-868.7002 1834.3657,-874.7002 1828.3657,-880.7002 1822.3657,-880.7002"/>
|
||||
<text text-anchor="middle" x="1630.0696" y="-830.2" font-family="Monaco" font-size="24.00" fill="#000000">openssl@1.1.1s%gcc@9.4.0/5xerf6i</text>
|
||||
</g>
|
||||
<!-- 5xerf6imlgo4xlubacr4mljacc3edexo->ywrpvv2hgooeepdke33exkqrtdpd5gkl -->
|
||||
<g id="edge22" class="edge">
|
||||
<title>5xerf6imlgo4xlubacr4mljacc3edexo->ywrpvv2hgooeepdke33exkqrtdpd5gkl</title>
|
||||
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M1425.7129,-803.7711C1262.7545,-776.9548 1035.5151,-739.5603 871.9084,-712.6373"/>
|
||||
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="872.1525,-709.1305 861.7169,-710.9602 871.0158,-716.0376 872.1525,-709.1305"/>
|
||||
</g>
|
||||
<!-- 5xerf6imlgo4xlubacr4mljacc3edexo->4vsmjofkhntilgzh4zebluqak5mdsu3x -->
|
||||
<g id="edge48" class="edge">
|
||||
<title>5xerf6imlgo4xlubacr4mljacc3edexo->4vsmjofkhntilgzh4zebluqak5mdsu3x</title>
|
||||
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M1644.2788,-794.0072C1650.5843,-774.7513 1658.0636,-751.9107 1664.6976,-731.6514"/>
|
||||
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="1668.0917,-732.533 1667.8776,-721.9403 1661.4393,-730.3546 1668.0917,-732.533"/>
|
||||
</g>
|
||||
<!-- 5xerf6imlgo4xlubacr4mljacc3edexo->nizxi5u5bbrzhzwfy2qb7hatlhuswlrz -->
|
||||
<g id="edge41" class="edge">
|
||||
<title>5xerf6imlgo4xlubacr4mljacc3edexo->nizxi5u5bbrzhzwfy2qb7hatlhuswlrz</title>
|
||||
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M1834.3289,-793.5645C1906.6817,-774.1673 1975.9199,-749.2273 1998.2925,-721.3707 2031.5218,-680.681 2032.1636,-617.9031 2027.044,-573.3921"/>
|
||||
<path fill="none" stroke="#dc143c" stroke-width="2" d="M1834.8468,-795.4962C1907.3595,-776.0489 1976.5977,-751.1089 1999.8467,-722.6293 2033.5217,-680.7015 2034.1635,-617.9235 2029.0309,-573.1639"/>
|
||||
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="2031.4885,-572.6712 2026.7474,-563.1964 2024.5451,-573.5598 2031.4885,-572.6712"/>
|
||||
</g>
|
||||
<!-- v32wejd4d5lc6uka4qlrogwh5xae2h3r -->
|
||||
<g id="node26" class="node">
|
||||
<title>v32wejd4d5lc6uka4qlrogwh5xae2h3r</title>
|
||||
<path fill="#ff7f50" stroke="#000000" stroke-width="4" d="M1306.1776,-404.3002C1306.1776,-404.3002 929.9616,-404.3002 929.9616,-404.3002 923.9616,-404.3002 917.9616,-398.3002 917.9616,-392.3002 917.9616,-392.3002 917.9616,-329.6998 917.9616,-329.6998 917.9616,-323.6998 923.9616,-317.6998 929.9616,-317.6998 929.9616,-317.6998 1306.1776,-317.6998 1306.1776,-317.6998 1312.1776,-317.6998 1318.1776,-323.6998 1318.1776,-329.6998 1318.1776,-329.6998 1318.1776,-392.3002 1318.1776,-392.3002 1318.1776,-398.3002 1312.1776,-404.3002 1306.1776,-404.3002"/>
|
||||
<text text-anchor="middle" x="1118.0696" y="-353.8" font-family="Monaco" font-size="24.00" fill="#000000">readline@8.2%gcc@9.4.0/v32wejd</text>
|
||||
</g>
|
||||
<!-- uabgssx6lsgrevwbttslldnr5nzguprj->v32wejd4d5lc6uka4qlrogwh5xae2h3r -->
|
||||
<g id="edge7" class="edge">
|
||||
<title>uabgssx6lsgrevwbttslldnr5nzguprj->v32wejd4d5lc6uka4qlrogwh5xae2h3r</title>
|
||||
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M1117.0696,-476.4072C1117.0696,-457.3263 1117.0696,-434.7257 1117.0696,-414.6046"/>
|
||||
<path fill="none" stroke="#dc143c" stroke-width="2" d="M1119.0696,-476.4072C1119.0696,-457.3263 1119.0696,-434.7257 1119.0696,-414.6046"/>
|
||||
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="1121.5697,-414.3403 1118.0696,-404.3403 1114.5697,-414.3404 1121.5697,-414.3403"/>
|
||||
</g>
|
||||
<!-- lfh3aovn65e66cs24qiehq3nd2ddojef->o524gebsxavobkte3k5fglgwnedfkadf -->
|
||||
<g id="edge14" class="edge">
|
||||
<title>lfh3aovn65e66cs24qiehq3nd2ddojef->o524gebsxavobkte3k5fglgwnedfkadf</title>
|
||||
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M1167.6711,-1112.5788C1078.9073,-1090.9596 971.5916,-1064.822 881.5513,-1042.892"/>
|
||||
<path fill="none" stroke="#dc143c" stroke-width="2" d="M1168.1444,-1110.6356C1079.3806,-1089.0165 972.0649,-1062.8788 882.0246,-1040.9488"/>
|
||||
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="882.5603,-1038.5062 872.016,-1039.5403 880.9038,-1045.3074 882.5603,-1038.5062"/>
|
||||
<text text-anchor="middle" x="963.904" y="-1079.817" font-family="Times,serif" font-size="14.00" fill="#000000">virtuals=blas,lapack</text>
|
||||
</g>
|
||||
<!-- lfh3aovn65e66cs24qiehq3nd2ddojef->2w3nq3n3hcj2tqlvcpewsryamltlu5tw -->
|
||||
<g id="edge31" class="edge">
|
||||
<title>lfh3aovn65e66cs24qiehq3nd2ddojef->2w3nq3n3hcj2tqlvcpewsryamltlu5tw</title>
|
||||
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M1559.7922,-1112.1043C1562.8511,-1111.5975 1565.8904,-1111.1002 1568.9103,-1110.6128 1759.2182,-1079.8992 1973.2397,-1052.1328 2144.6143,-1031.5343"/>
|
||||
<path fill="none" stroke="#dc143c" stroke-width="2" d="M1560.1191,-1114.0774C1563.1741,-1113.5712 1566.2134,-1113.0739 1569.2289,-1112.5872 1759.4755,-1081.8826 1973.497,-1054.1161 2144.8529,-1033.52"/>
|
||||
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="2145.1529,-1036.002 2154.6648,-1031.3357 2144.3191,-1029.0518 2145.1529,-1036.002"/>
|
||||
<text text-anchor="middle" x="1828.178" y="-1072.4692" font-family="Times,serif" font-size="14.00" fill="#000000">virtuals=mpi</text>
|
||||
</g>
|
||||
<!-- lfh3aovn65e66cs24qiehq3nd2ddojef->gguve5icmo5e4cw5o3hvvfsxremc46if -->
|
||||
<g id="edge21" class="edge">
|
||||
<title>lfh3aovn65e66cs24qiehq3nd2ddojef->gguve5icmo5e4cw5o3hvvfsxremc46if</title>
|
||||
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M1346.0696,-1111.6072C1346.0696,-1092.5263 1346.0696,-1069.9257 1346.0696,-1049.8046"/>
|
||||
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="1349.5697,-1049.5403 1346.0696,-1039.5403 1342.5697,-1049.5404 1349.5697,-1049.5403"/>
|
||||
</g>
|
||||
<!-- 2w3nq3n3hcj2tqlvcpewsryamltlu5tw->htzjns66gmq6pjofohp26djmjnpbegho -->
|
||||
<g id="edge30" class="edge">
|
||||
<title>2w3nq3n3hcj2tqlvcpewsryamltlu5tw->htzjns66gmq6pjofohp26djmjnpbegho</title>
|
||||
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M2467.0696,-952.8072C2467.0696,-933.7263 2467.0696,-911.1257 2467.0696,-891.0046"/>
|
||||
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="2470.5697,-890.7403 2467.0696,-880.7403 2463.5697,-890.7404 2470.5697,-890.7403"/>
|
||||
</g>
|
||||
<!-- 7rzbmgoxhmm2jhellkgcjmn62uklf22x->gguve5icmo5e4cw5o3hvvfsxremc46if -->
|
||||
<g id="edge2" class="edge">
|
||||
<title>7rzbmgoxhmm2jhellkgcjmn62uklf22x->gguve5icmo5e4cw5o3hvvfsxremc46if</title>
|
||||
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M1422.351,-1429.2133C1312.2528,-1388.8872 1171.1589,-1316.8265 1103.0696,-1198.4 1083.8409,-1164.956 1082.4563,-1144.2088 1103.0696,-1111.6 1121.4102,-1082.5864 1149.2483,-1060.7204 1179.6189,-1044.2895"/>
|
||||
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="1181.4205,-1047.2977 1188.6801,-1039.5809 1178.1927,-1041.0863 1181.4205,-1047.2977"/>
|
||||
</g>
|
||||
<!-- v32wejd4d5lc6uka4qlrogwh5xae2h3r->j5rupoqliu7kasm6xndl7ui32wgawkru -->
|
||||
<g id="edge39" class="edge">
|
||||
<title>v32wejd4d5lc6uka4qlrogwh5xae2h3r->j5rupoqliu7kasm6xndl7ui32wgawkru</title>
|
||||
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M1179.8001,-316.7866C1209.2065,-296.3053 1244.4355,-271.7686 1274.8343,-250.5961"/>
|
||||
<path fill="none" stroke="#dc143c" stroke-width="2" d="M1180.9431,-318.4278C1210.3495,-297.9465 1245.5785,-273.4098 1275.9774,-252.2373"/>
|
||||
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="1277.6375,-254.1277 1283.8429,-245.5403 1273.6367,-248.3836 1277.6375,-254.1277"/>
|
||||
</g>
|
||||
<!-- gguve5icmo5e4cw5o3hvvfsxremc46if->j5rupoqliu7kasm6xndl7ui32wgawkru -->
|
||||
<g id="edge18" class="edge">
|
||||
<title>gguve5icmo5e4cw5o3hvvfsxremc46if->j5rupoqliu7kasm6xndl7ui32wgawkru</title>
|
||||
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M1345.0696,-952.7909C1345.0696,-891.6316 1345.0696,-776.6094 1345.0696,-678.6 1345.0696,-678.6 1345.0696,-678.6 1345.0696,-519.8 1345.0696,-426.9591 1345.0696,-318.8523 1345.0696,-255.7237"/>
|
||||
<path fill="none" stroke="#dc143c" stroke-width="2" d="M1347.0696,-952.7909C1347.0696,-891.6316 1347.0696,-776.6094 1347.0696,-678.6 1347.0696,-678.6 1347.0696,-678.6 1347.0696,-519.8 1347.0696,-426.9591 1347.0696,-318.8523 1347.0696,-255.7237"/>
|
||||
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="1349.5697,-255.6091 1346.0696,-245.6091 1342.5697,-255.6092 1349.5697,-255.6091"/>
|
||||
</g>
|
||||
<!-- gguve5icmo5e4cw5o3hvvfsxremc46if->5xerf6imlgo4xlubacr4mljacc3edexo -->
|
||||
<g id="edge40" class="edge">
|
||||
<title>gguve5icmo5e4cw5o3hvvfsxremc46if->5xerf6imlgo4xlubacr4mljacc3edexo</title>
|
||||
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M1423.1858,-951.9344C1460.2844,-931.1905 1504.8229,-906.2866 1543.0151,-884.9312"/>
|
||||
<path fill="none" stroke="#dc143c" stroke-width="2" d="M1424.1619,-953.68C1461.2605,-932.9361 1505.799,-908.0322 1543.9912,-886.6769"/>
|
||||
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="1545.5391,-888.6757 1552.5592,-880.7403 1542.1228,-882.5659 1545.5391,-888.6757"/>
|
||||
</g>
|
||||
</g>
|
||||
</svg>
|
Before Width: | Height: | Size: 58 KiB |
@@ -54,16 +54,8 @@ or refer to the full manual below.
   features
   getting_started
   basic_usage
   Tutorial: Spack 101 <https://spack-tutorial.readthedocs.io>
   replace_conda_homebrew
   frequently_asked_questions

.. toctree::
   :maxdepth: 2
   :caption: Links

   Tutorial (spack-tutorial.rtfd.io) <https://spack-tutorial.readthedocs.io>
   Packages (packages.spack.io) <https://packages.spack.io>
   Binaries (binaries.spack.io) <https://cache.spack.io>

.. toctree::
   :maxdepth: 2
@@ -71,7 +63,7 @@ or refer to the full manual below.

   configuration
   config_yaml
   packages_yaml
   bootstrapping
   build_settings
   environments
   containers
@@ -79,13 +71,11 @@ or refer to the full manual below.
   module_file_support
   repositories
   binary_caches
   bootstrapping
   command_index
   package_list
   chain
   extensions
   pipelines
   signing
   gpu_configuration

.. toctree::
   :maxdepth: 2
@@ -275,12 +275,10 @@ of the installed software. For instance, in the snippet below:
          set:
            BAR: 'bar'
      # This anonymous spec selects any package that
      # depends on mpi. The double colon at the
      # depends on openmpi. The double colon at the
      # end clears the set of rules that matched so far.
      ^mpi::
      ^openmpi::
        environment:
          prepend_path:
            PATH: '{^mpi.prefix}/bin'
          set:
            BAR: 'baz'
      # Selects any zlib package
@@ -295,9 +293,7 @@ of the installed software. For instance, in the snippet below:
          - FOOBAR

you are instructing Spack to set the environment variable ``BAR=bar`` for every module,
unless the associated spec satisfies the abstract dependency ``^mpi`` in which case
``BAR=baz``, and the directory containing the respective MPI executables is prepended
to the ``PATH`` variable.
unless the associated spec satisfies ``^openmpi`` in which case ``BAR=baz``.
In addition, in any spec that satisfies ``zlib`` the value ``foo`` will be
prepended to ``LD_LIBRARY_PATH`` and in any spec that satisfies ``zlib%gcc@4.8``
the variable ``FOOBAR`` will be unset.
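Pieced together, a complete ``modules.yaml`` along the lines of the snippet
above might look like this (a sketch; the surrounding ``default`` and ``tcl``
keys are assumed from the usual layout of this file):

.. code-block:: yaml

   modules:
     default:
       tcl:
         all:
           environment:
             set:
               BAR: 'bar'
         ^openmpi::
           environment:
             set:
               BAR: 'baz'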
@@ -400,30 +396,28 @@ that are already in the Lmod hierarchy.

.. note::

   Tcl and Lua modules also allow for explicit conflicts between modulefiles.
   Tcl modules
   Tcl modules also allow for explicit conflicts between modulefiles.

   .. code-block:: yaml
   .. code-block:: yaml

      modules:
        default:
          enable:
            - tcl
          tcl:
            projections:
              all: '{name}/{version}-{compiler.name}-{compiler.version}'
            all:
              conflict:
                - '{name}'
                - 'intel/14.0.1'
      modules:
        default:
          enable:
            - tcl
          tcl:
            projections:
              all: '{name}/{version}-{compiler.name}-{compiler.version}'
            all:
              conflict:
                - '{name}'
                - 'intel/14.0.1'

   will create module files that will conflict with ``intel/14.0.1`` and with the
   base directory of the same module, effectively preventing the possibility to
   load two or more versions of the same software at the same time. The tokens
   that are available for use in this directive are the same understood by the
   :meth:`~spack.spec.Spec.format` method.

   For Lmod and Environment Modules versions prior to 4.2, it is important to
   express the conflict on both modulefiles conflicting with each other.
   will create module files that will conflict with ``intel/14.0.1`` and with the
   base directory of the same module, effectively preventing the possibility to
   load two or more versions of the same software at the same time. The tokens
   that are available for use in this directive are the same understood by
   the :meth:`~spack.spec.Spec.format` method.


.. note::
@@ -519,11 +513,11 @@ inspections and customize them per-module-set.

   modules:
     prefix_inspections:
       ./bin:
       bin:
         - PATH
       ./man:
       man:
         - MANPATH
       ./:
       '':
         - CMAKE_PREFIX_PATH

Prefix inspections are only applied if the relative path inside the
@@ -579,7 +573,7 @@ the view.
     view_relative_modules:
       use_view: my_view
     prefix_inspections:
       ./bin:
       bin:
         - PATH
   view:
     my_view:
17
lib/spack/docs/package_list.rst
Normal file
@@ -0,0 +1,17 @@
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
   Spack Project Developers. See the top-level COPYRIGHT file for details.

   SPDX-License-Identifier: (Apache-2.0 OR MIT)

.. _package-list:

============
Package List
============

This is a list of things you can install using Spack. It is
automatically generated based on the packages in this Spack
version.

.. raw:: html
   :file: package_list.html
@@ -1,591 +0,0 @@
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
   Spack Project Developers. See the top-level COPYRIGHT file for details.

   SPDX-License-Identifier: (Apache-2.0 OR MIT)


.. _packages-config:

================================
Package Settings (packages.yaml)
================================

Spack allows you to customize how your software is built through the
``packages.yaml`` file. Using it, you can make Spack prefer particular
implementations of virtual dependencies (e.g., MPI or BLAS/LAPACK),
or you can make it prefer to build with particular compilers. You can
also tell Spack to use *external* software installations already
present on your system.

At a high level, the ``packages.yaml`` file is structured like this:

.. code-block:: yaml

   packages:
     package1:
       # settings for package1
     package2:
       # settings for package2
     # ...
     all:
       # settings that apply to all packages.

So you can either set build preferences specifically for *one* package,
or you can specify that certain settings should apply to *all* packages.
The types of settings you can customize are described in detail below.

Spack's build defaults are in the default
``etc/spack/defaults/packages.yaml`` file. You can override them in
``~/.spack/packages.yaml`` or ``etc/spack/packages.yaml``. For more
details on how this works, see :ref:`configuration-scopes`.

.. _sec-external-packages:

-----------------
External Packages
-----------------

Spack can be configured to use externally-installed
packages rather than building its own packages. This may be desirable
if machines ship with system packages, such as a customized MPI
that should be used instead of Spack building its own MPI.

External packages are configured through the ``packages.yaml`` file.
Here's an example of an external configuration:

.. code-block:: yaml

   packages:
     openmpi:
       externals:
       - spec: "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64"
         prefix: /opt/openmpi-1.4.3
       - spec: "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64+debug"
         prefix: /opt/openmpi-1.4.3-debug
       - spec: "openmpi@1.6.5%intel@10.1 arch=linux-debian7-x86_64"
         prefix: /opt/openmpi-1.6.5-intel

This example lists three installations of OpenMPI, one built with GCC,
one built with GCC and debug information, and another built with Intel.
If Spack is asked to build a package that uses one of these MPIs as a
dependency, it will use the pre-installed OpenMPI in
the given directory. Note that the specified path is the top-level
install prefix, not the ``bin`` subdirectory.

``packages.yaml`` can also be used to specify modules to load instead
of the installation prefixes. The following example says that module
``CMake/3.7.2`` provides cmake version 3.7.2.

.. code-block:: yaml

   cmake:
     externals:
     - spec: cmake@3.7.2
       modules:
       - CMake/3.7.2

Each ``packages.yaml`` begins with a ``packages:`` attribute, followed
by a list of package names. To specify externals, add an ``externals:``
attribute under the package name, which lists externals.
Each external should specify a ``spec:`` string that should be as
well-defined as reasonably possible. If a
package lacks a spec component, such as missing a compiler or
package version, then Spack will guess the missing component based
on its most-favored packages, and it may guess incorrectly.

Each package version and compiler listed in an external should
have entries in Spack's packages and compiler configuration, even
though the package and compiler may not ever be built.
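For instance, a compiler entry matching the ``%gcc@4.4.7`` used by the externals
above could be declared in ``compilers.yaml`` (a minimal sketch; the paths,
operating system and target shown here are assumptions about the machine):

.. code-block:: yaml

   compilers:
   - compiler:
       spec: gcc@4.4.7
       operating_system: debian7
       target: x86_64
       modules: []
       paths:
         cc: /usr/bin/gcc
         cxx: /usr/bin/g++
         f77: /usr/bin/gfortran
         fc: /usr/bin/gfortran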

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Prevent packages from being built from sources
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

Adding an external spec in ``packages.yaml`` allows Spack to use an external location,
but it does not prevent Spack from building packages from sources. In the above example,
Spack might choose for many valid reasons to start building and linking with the
latest version of OpenMPI rather than continue using the pre-installed OpenMPI versions.

To prevent this, the ``packages.yaml`` configuration also allows packages
to be flagged as non-buildable. The previous example could be modified to
be:

.. code-block:: yaml

   packages:
     openmpi:
       externals:
       - spec: "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64"
         prefix: /opt/openmpi-1.4.3
       - spec: "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64+debug"
         prefix: /opt/openmpi-1.4.3-debug
       - spec: "openmpi@1.6.5%intel@10.1 arch=linux-debian7-x86_64"
         prefix: /opt/openmpi-1.6.5-intel
       buildable: False

The addition of the ``buildable`` flag tells Spack that it should never build
its own version of OpenMPI from sources, and it will instead always rely on a pre-built
OpenMPI.

.. note::

   If ``concretizer:reuse`` is on (see :ref:`concretizer-options` for more information on that flag)
   pre-built specs include specs already available from a local store, an upstream store, a registered
   buildcache or specs marked as externals in ``packages.yaml``. If ``concretizer:reuse`` is off, only
   external specs in ``packages.yaml`` are included in the list of pre-built specs.

If an external module is specified as not buildable, then Spack will load the
external module into the build environment which can be used for linking.

The ``buildable`` flag does not need to be paired with external packages.
It could also be used alone to forbid packages that may be
buggy or otherwise undesirable.
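For example, a sketch that forbids building a package entirely, without
declaring any externals (the package name is illustrative):

.. code-block:: yaml

   packages:
     my-buggy-package:
       buildable: False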

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Non-buildable virtual packages
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

Virtual packages in Spack can also be specified as not buildable, and
external implementations can be provided. In the example above,
OpenMPI is configured as not buildable, but Spack will often prefer
other MPI implementations over the externally available OpenMPI. Spack
can be configured with every MPI provider not buildable individually,
but more conveniently:

.. code-block:: yaml

   packages:
     mpi:
       buildable: False
     openmpi:
       externals:
       - spec: "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64"
         prefix: /opt/openmpi-1.4.3
       - spec: "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64+debug"
         prefix: /opt/openmpi-1.4.3-debug
       - spec: "openmpi@1.6.5%intel@10.1 arch=linux-debian7-x86_64"
         prefix: /opt/openmpi-1.6.5-intel

Spack can then use any of the listed external implementations of MPI
to satisfy a dependency, and will choose depending on the compiler and
architecture.

In cases where the concretizer is configured to reuse specs, and other ``mpi`` providers
(available via stores or buildcaches) are not wanted, Spack can be configured to require
specs matching only the available externals:

.. code-block:: yaml

   packages:
     mpi:
       buildable: False
       require:
       - one_of: [
           "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64",
           "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64+debug",
           "openmpi@1.6.5%intel@10.1 arch=linux-debian7-x86_64"
         ]
     openmpi:
       externals:
       - spec: "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64"
         prefix: /opt/openmpi-1.4.3
       - spec: "openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64+debug"
         prefix: /opt/openmpi-1.4.3-debug
       - spec: "openmpi@1.6.5%intel@10.1 arch=linux-debian7-x86_64"
         prefix: /opt/openmpi-1.6.5-intel

This configuration prevents any spec using MPI and originating from stores or buildcaches from being reused,
unless it matches the requirements under ``packages:mpi:require``. For more information on requirements see
:ref:`package-requirements`.

.. _cmd-spack-external-find:

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Automatically Find External Packages
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

You can run the :ref:`spack external find <spack-external-find>` command
to search for system-provided packages and add them to ``packages.yaml``.
After running this command your ``packages.yaml`` may include new entries:

.. code-block:: yaml

   packages:
     cmake:
       externals:
       - spec: cmake@3.17.2
         prefix: /usr
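The invocation producing such an entry might look like this (the detected
version and the reported path are illustrative):

.. code-block:: console

   $ spack external find cmake
   ==> The following specs have been detected on this system and added to /home/user/.spack/packages.yaml
   cmake@3.17.2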

Generally this is useful for detecting a small set of commonly-used packages;
for now this is generally limited to finding build-only dependencies.
Specific limitations include:

* Packages are not discoverable by default: For a package to be
  discoverable with ``spack external find``, it needs to add special
  logic. See :ref:`here <make-package-findable>` for more details.
* The logic does not search through module files, it can only detect
  packages with executables defined in ``PATH``; you can help Spack locate
  externals which use module files by loading any associated modules for
  packages that you want Spack to know about before running
  ``spack external find``.
* Spack does not overwrite existing entries in the package configuration:
  If there is an external defined for a spec at any configuration scope,
  then Spack will not add a new external entry (``spack config blame packages``
  can help locate all external entries).

.. _package-requirements:

--------------------
Package Requirements
--------------------

Spack can be configured to always use certain compilers, package
versions, and variants during concretization through package
requirements.

Package requirements are useful when you find yourself repeatedly
specifying the same constraints on the command line, and wish that
Spack respects these constraints whether you mention them explicitly
or not. Another use case is specifying constraints that should apply
to all root specs in an environment, without having to repeat the
constraint everywhere.

Apart from that, requirements config is more flexible than constraints
on the command line, because it can specify constraints on packages
*when they occur* as a dependency. In contrast, on the command line it
is not possible to specify constraints on dependencies while also keeping
those dependencies optional.

.. seealso::

   FAQ: :ref:`Why does Spack pick particular versions and variants? <faq-concretizer-precedence>`


^^^^^^^^^^^^^^^^^^^
Requirements syntax
^^^^^^^^^^^^^^^^^^^

The package requirements configuration is specified in ``packages.yaml``,
keyed by package name and expressed using the Spec syntax. In the simplest
case you can specify attributes that you always want the package to have
by providing a single spec string to ``require``:

.. code-block:: yaml

   packages:
     libfabric:
       require: "@1.13.2"

In the above example, ``libfabric`` will always build with version 1.13.2. If you
need to compose multiple configuration scopes ``require`` accepts a list of
strings:

.. code-block:: yaml

   packages:
     libfabric:
       require:
       - "@1.13.2"
       - "%gcc"

In this case ``libfabric`` will always build with version 1.13.2 **and** using GCC
as a compiler.

For more complex use cases, ``require`` also accepts a list of objects. These objects
must have either an ``any_of`` or a ``one_of`` field, containing a list of spec strings,
and they can optionally have a ``when`` and a ``message`` attribute:

.. code-block:: yaml

   packages:
     openmpi:
       require:
       - any_of: ["@4.1.5", "%gcc"]
         message: "in this example only 4.1.5 can build with other compilers"

``any_of`` is a list of specs. One of those specs must be satisfied
and it is also allowed for the concretized spec to match more than one.
In the above example, that means you could build ``openmpi@4.1.5%gcc``,
``openmpi@4.1.5%clang`` or ``openmpi@3.9%gcc``, but
not ``openmpi@3.9%clang``.

If a custom message is provided, and the requirement is not satisfiable,
Spack will print the custom error message:

.. code-block:: console

   $ spack spec openmpi@3.9%clang
   ==> Error: in this example only 4.1.5 can build with other compilers

We could express a similar requirement using the ``when`` attribute:

.. code-block:: yaml

   packages:
     openmpi:
       require:
       - any_of: ["%gcc"]
         when: "@:4.1.4"
         message: "in this example only 4.1.5 can build with other compilers"

In the example above, if the version turns out to be 4.1.4 or less, we require the compiler to be GCC.
For readability, Spack also allows a ``spec`` key accepting a string when there is only a single
constraint:

.. code-block:: yaml

   packages:
     openmpi:
       require:
       - spec: "%gcc"
         when: "@:4.1.4"
         message: "in this example only 4.1.5 can build with other compilers"

This code snippet and the one before it are semantically equivalent.

Finally, instead of ``any_of`` you can use ``one_of`` which also takes a list of specs. The final
concretized spec must match one and only one of them:

.. code-block:: yaml

   packages:
     mpich:
       require:
       - one_of: ["+cuda", "+rocm"]

In the example above, that means you could build ``mpich+cuda`` or ``mpich+rocm`` but not ``mpich+cuda+rocm``.

.. note::

   For ``any_of`` and ``one_of``, the order of specs indicates a
   preference: items that appear earlier in the list are preferred
   (note that these preferences can be ignored in favor of others).

.. note::

   When using a conditional requirement, Spack is allowed to actively avoid the triggering
   condition (the ``when=...`` spec) if that leads to a concrete spec with better scores in
   the optimization criteria. To check the current optimization criteria and their
   priorities you can run ``spack solve zlib``.
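For example (the command itself is enough; its output, not reproduced here,
lists each criterion with its priority before the optimal spec):

.. code-block:: console

   $ spack solve zlib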

^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Setting default requirements
^^^^^^^^^^^^^^^^^^^^^^^^^^^^

You can also set default requirements for all packages under ``all``
like this:

.. code-block:: yaml

   packages:
     all:
       require: '%clang'

which means every spec will be required to use ``clang`` as a compiler.

Requirements on variants for all packages are possible too, but note that they
are only enforced for those packages that define these variants, otherwise they
are disregarded. For example:

.. code-block:: yaml

   packages:
     all:
       require:
       - "+shared"
       - "+cuda"

will just enforce ``+shared`` on ``zlib``, which has a boolean ``shared`` variant but
no ``cuda`` variant.

Constraints in a single spec literal are always considered as a whole, so in a case like:

.. code-block:: yaml

   packages:
     all:
       require: "+shared +cuda"

the default requirement will be enforced only if a package has both a ``cuda`` and
a ``shared`` variant, and will never be partially enforced.

Finally, ``all`` represents a *default set of requirements* -
if there are specific package requirements, then the default requirements
under ``all`` are disregarded. For example, with a configuration like this:

.. code-block:: yaml

   packages:
     all:
       require:
       - 'build_type=Debug'
       - '%clang'
     cmake:
       require:
       - 'build_type=Debug'
       - '%gcc'

Spack requires ``cmake`` to use ``gcc`` and all other nodes (including ``cmake``
dependencies) to use ``clang``. If enforcing ``build_type=Debug`` is needed also
on ``cmake``, it must be repeated in the specific ``cmake`` requirements.


^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Setting requirements on virtual specs
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

A requirement on a virtual spec applies whenever that virtual is present in the DAG.
This can be useful for fixing which virtual provider you want to use:

.. code-block:: yaml

   packages:
     mpi:
       require: 'mvapich2 %gcc'

With the configuration above the only allowed ``mpi`` provider is ``mvapich2 %gcc``.

Requirements on the virtual spec and on the specific provider are both applied, if
present. For instance with a configuration like:

.. code-block:: yaml

   packages:
     mpi:
       require: 'mvapich2 %gcc'
     mvapich2:
       require: '~cuda'

you will use ``mvapich2~cuda %gcc`` as an ``mpi`` provider.

.. _package-preferences:

-------------------
Package Preferences
-------------------

In some cases package requirements can be too strong, and package
preferences are the better option. Package preferences do not impose
constraints on packages for particular versions or variants values,
they rather only set defaults. The concretizer is free to change
them if it must, due to other constraints, and also prefers reusing
installed packages over building new ones that are a better match for
preferences.

.. seealso::

   FAQ: :ref:`Why does Spack pick particular versions and variants? <faq-concretizer-precedence>`


Most package preferences (``compilers``, ``target`` and ``providers``)
can only be set globally under the ``all`` section of ``packages.yaml``:

.. code-block:: yaml

   packages:
     all:
       compiler: [gcc@12.2.0, clang@12:, oneapi@2023:]
       target: [x86_64_v3]
       providers:
         mpi: [mvapich2, mpich, openmpi]

These preferences override Spack's default and effectively reorder priorities
when looking for the best compiler, target or virtual package provider. Each
preference takes an ordered list of spec constraints, with earlier entries in
the list being preferred over later entries.

In the example above all packages prefer to be compiled with ``gcc@12.2.0``,
to target the ``x86_64_v3`` microarchitecture and to use ``mvapich2`` if they
depend on ``mpi``.

The ``variants`` and ``version`` preferences can be set under
package specific sections of the ``packages.yaml`` file:

.. code-block:: yaml

   packages:
     opencv:
       variants: +debug
     gperftools:
       version: [2.2, 2.4, 2.3]

In this case, the preference for ``opencv`` is to build with debug options, while
``gperftools`` prefers version 2.2 over 2.4.

Any preference can be overwritten on the command line if explicitly requested.

Preferences cannot overcome explicit constraints, as they only set a preferred
ordering among homogeneous attribute values. Going back to the example, if
``gperftools@2.3:`` was requested, then Spack will install version 2.4
since the most preferred version 2.2 is prohibited by the version constraint.
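As a sketch, with the preference list above one would expect:

.. code-block:: console

   $ spack spec gperftools@2.3:   # 2.2 is ruled out by the range, so 2.4 is picked over 2.3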

.. _package_permissions:

-------------------
Package Permissions
-------------------

Spack can be configured to assign permissions to the files installed
by a package.

In the ``packages.yaml`` file under ``permissions``, the attributes
``read``, ``write``, and ``group`` control the package
permissions. These attributes can be set per-package, or for all
packages under ``all``. If permissions are set under ``all`` and for a
specific package, the package-specific settings take precedence.

The ``read`` and ``write`` attributes take one of ``user``, ``group``,
and ``world``.

.. code-block:: yaml

   packages:
     all:
       permissions:
         write: group
         group: spack
     my_app:
       permissions:
         read: group
         group: my_team

The permissions settings describe the broadest level of access to
installations of the specified packages. The execute permissions of
the file are set to the same level as read permissions for those files
that are executable. The default setting for ``read`` is ``world``,
and for ``write`` is ``user``. In the example above, installations of
``my_app`` will be installed with user and group permissions but no
world permissions, and owned by the group ``my_team``. All other
packages will be installed with user and group write privileges, and
world read privileges. Those packages will be owned by the group
``spack``.

The ``group`` attribute assigns a Unix-style group to a package. All
files installed by the package will be owned by the assigned group,
and the sticky group bit will be set on the install prefix and all
directories inside the install prefix. This will ensure that even
manually placed files within the install prefix are owned by the
assigned group. If no group is assigned, Spack will allow the OS
default behavior to go as expected.
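The effect can be inspected on an installed prefix. With the configuration above,
a listing along these lines would be expected (path and listing are illustrative;
``spack location -i`` resolves the install prefix):

.. code-block:: console

   $ ls -ld $(spack location -i my_app)
   drwxr-s--- 7 builder my_team 4096 Jan  1 12:00 /opt/spack/.../my_app-1.0-abcdef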

----------------------------
Assigning Package Attributes
----------------------------

You can assign class-level attributes in the configuration:

.. code-block:: yaml

   packages:
     mpileaks:
       # Override existing attributes
       url: http://www.somewhereelse.com/mpileaks-1.0.tar.gz
       # ... or add new ones
       x: 1

Attributes set this way will be accessible to any method executed
in the package.py file (e.g. the ``install()`` method). Values for these
attributes may be any value parseable by yaml.

These can only be applied to specific packages, not "all" or
virtual packages.
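Inside the recipe these attributes behave like ordinary class attributes. A
minimal sketch (the package body is made up; ``x`` is the added attribute from
the snippet above):

.. code-block:: python

   from spack.package import *

   class Mpileaks(Package):
       """Sketch: reads attributes assigned through packages.yaml."""

       def install(self, spec, prefix):
           # Both the overridden url and the added x are visible on self
           print(self.url, self.x)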
@@ -121,7 +121,7 @@ Since v0.19, Spack supports two ways of writing a package recipe. The most comm

   def url_for_version(self, version):
       if version >= Version("2.1.1"):
           return super().url_for_version(version)
           return super(Openjpeg, self).url_for_version(version)
       url_fmt = "https://github.com/uclouvain/openjpeg/archive/version.{0}.tar.gz"
       return url_fmt.format(version)

@@ -155,7 +155,7 @@ builder class explicitly. Using the same example as above, this reads:

   def url_for_version(self, version):
       if version >= Version("2.1.1"):
           return super().url_for_version(version)
           return super(Openjpeg, self).url_for_version(version)
       url_fmt = "https://github.com/uclouvain/openjpeg/archive/version.{0}.tar.gz"
       return url_fmt.format(version)
@@ -363,42 +363,6 @@ one of these::
If Spack finds none of these variables set, it will look for ``vim``, ``vi``, ``emacs``,
``nano``, and ``notepad``, in that order.

^^^^^^^^^^^^^^^^^
Bundling software
^^^^^^^^^^^^^^^^^

If you have a collection of software expected to work well together with
no source code of its own, you can create a :ref:`BundlePackage <bundlepackage>`.
Examples where bundle packages can be useful include defining suites of
applications (e.g., `EcpProxyApps
<https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/ecp-proxy-apps/package.py>`_), commonly used libraries
(e.g., `AmdAocl <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/amd-aocl/package.py>`_),
and software development kits (e.g., `EcpDataVisSdk <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/ecp-data-vis-sdk/package.py>`_).

These versioned packages primarily consist of dependencies on the associated
software packages. They can include :ref:`variants <variants>` to ensure
common build options are consistently applied to dependencies. Known build
failures, such as not building on a platform or when certain compilers or
variants are used, can be flagged with :ref:`conflicts <packaging_conflicts>`.
Build requirements, such as only building with specific compilers, can similarly
be flagged with :ref:`requires <packaging_conflicts>`.

The ``spack create --template bundle`` command will create a skeleton
``BundlePackage`` ``package.py`` for you:

.. code-block:: console

   $ spack create --template bundle --name coolsdk

Now you can fill in the basic package documentation, version(s), and software
package dependencies along with any other relevant customizations.

.. note::

   Remember that bundle packages have no software of their own so there
   is nothing to download.
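A filled-in recipe might end up looking like this (a sketch of the hypothetical
``coolsdk`` bundle created above, not a real package):

.. code-block:: python

   from spack.package import *

   class Coolsdk(BundlePackage):
       """Hypothetical SDK that only aggregates other packages."""

       homepage = "https://example.com/coolsdk"

       version("1.0")

       variant("mpi", default=True, description="Include MPI-enabled components")

       depends_on("cmake")
       depends_on("mpi", when="+mpi")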

^^^^^^^^^^^^^^^^^^^^^^^^^
Non-downloadable software
^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -646,16 +610,7 @@ add a line like this in the package class:
   version("8.2.0", md5="1c9f62f0778697a09d36121ead88e08e")
   version("8.1.2", md5="d47dd09ed7ae6e7fd6f9a816d7f5fdf6")

.. note::

   By convention, we list versions in descending order, from newest to oldest.

.. note::

   :ref:`Bundle packages <bundlepackage>` do not have source code so
   there is nothing to fetch. Consequently, their version directives
   consist solely of the version name (e.g., ``version("202309")``).

   Versions should be listed in descending order, from newest to oldest.

^^^^^^^^^^^^^
Date Versions
@@ -1549,7 +1504,7 @@ its value:

   def configure_args(self):
       ...
       if self.spec.satisfies("+shared"):
       if "+shared" in self.spec:
           extra_args.append("--enable-shared")
       else:
           extra_args.append("--disable-shared")
@@ -1636,7 +1591,7 @@ Within a package recipe a multi-valued variant is tested using a ``key=value`` s

.. code-block:: python

   if spec.satisfies("languages=jit"):
   if "languages=jit" in spec:
       options.append("--enable-host-shared")

"""""""""""""""""""""""""""""""""""""""""""
@@ -2288,7 +2243,7 @@ looks like this:
   url = "http://www.openssl.org/source/openssl-1.0.1h.tar.gz"

   version("1.0.1h", md5="8d6d684a9430d5cc98a62a5d8fbda8cf")
   depends_on("zlib-api")
   depends_on("zlib")

   parallel = False

@@ -2337,7 +2292,7 @@ window while a batch job is running ``spack install`` on the same or
overlapping dependencies without any process trying to re-do the work of
another.

For example, if you are using Slurm, you could launch an installation
For example, if you are using SLURM, you could launch an installation
of ``mpich`` using the following command:

.. code-block:: console
@@ -2352,7 +2307,7 @@ the following at the command line of a bash shell:

.. code-block:: console

   $ for i in {1..12}; do nohup spack install -j 4 mpich@3.3.2 >> mpich_install.txt 2>&1 & done
   $ for i in {1..12}; do nohup spack install -j 4 mpich@3.3.2 >> mpich_install.txt 2>&1 &; done

.. note::

@@ -2557,10 +2512,9 @@ Conditional dependencies
^^^^^^^^^^^^^^^^^^^^^^^^

You may have a package that only requires a dependency under certain
conditions. For example, you may have a package with optional MPI support.
You would then provide a variant to reflect that the feature is optional
and specify the MPI dependency only applies when MPI support is enabled.
In that case, you could say something like:
conditions. For example, you may have a package that has optional MPI support,
- MPI is only a dependency when you want to enable MPI support for the
package. In that case, you could say something like:

.. code-block:: python

@@ -2568,39 +2522,13 @@ In that case, you could say something like:

   depends_on("mpi", when="+mpi")

``when`` can include constraints on the variant, version, compiler, etc. and
the :mod:`syntax<spack.spec>` is the same as for Specs written on the command
line.

Suppose the above package also has, since version 3, optional `Trilinos`
support and you want them both to build either with or without MPI. Further
suppose you require a version of `Trilinos` no older than 12.6. In that case,
the `trilinos` variant and dependency directives would be:

.. code-block:: python

   variant("trilinos", default=False, description="Enable Trilinos support")

   depends_on("trilinos@12.6:", when="@3: +trilinos")
   depends_on("trilinos@12.6: +mpi", when="@3: +trilinos +mpi")


Alternatively, you could use the `when` context manager to equivalently specify
the `trilinos` variant dependencies as follows:

.. code-block:: python

   with when("@3: +trilinos"):
       depends_on("trilinos@12.6:")
       depends_on("trilinos +mpi", when="+mpi")


The argument to ``when`` in either case can include any Spec constraints that
are supported on the command line using the same :ref:`syntax <sec-specs>`.

.. note::

   If a dependency isn't typically used, you can save time by making it
   conditional since Spack will not build the dependency unless it is
   required for the Spec.

   If a dependency/feature of a package isn't typically used, you can save time
   by making it conditional (since Spack will not build the dependency unless it
   is required for the Spec).

.. _dependency_dependency_patching:

@@ -2688,6 +2616,60 @@ appear in the package file (or in this case, in the list).
right version. If two packages depend on ``binutils`` patched *the
same* way, they can both use a single installation of ``binutils``.

.. _setup-dependent-environment:

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Influence how dependents are built or run
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

Spack provides a mechanism for dependencies to influence the
environment of their dependents by overriding the
:meth:`setup_dependent_run_environment <spack.package_base.PackageBase.setup_dependent_run_environment>`
or the
:meth:`setup_dependent_build_environment <spack.builder.Builder.setup_dependent_build_environment>`
methods.
The Qt package, for instance, uses this call:

.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/qt/package.py
   :pyobject: Qt.setup_dependent_build_environment
   :linenos:

to set the ``QTDIR`` environment variable so that packages
that depend on a particular Qt installation will find it.
Another good example of how a dependency can influence
the build environment of dependents is the Python package:

.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/python/package.py
   :pyobject: Python.setup_dependent_build_environment
   :linenos:

In the method above it is ensured that any package that depends on Python
will have the ``PYTHONPATH``, ``PYTHONHOME`` and ``PATH`` environment
variables set appropriately before starting the installation. To make things
even simpler the ``python setup.py`` command is also inserted into the module
scope of dependents by overriding a third method called
:meth:`setup_dependent_package <spack.package_base.PackageBase.setup_dependent_package>`:

.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/python/package.py
   :pyobject: Python.setup_dependent_package
   :linenos:

This allows most python packages to have a very simple install procedure,
like the following:

.. code-block:: python

   def install(self, spec, prefix):
       setup_py("install", "--prefix={0}".format(prefix))

Finally the Python package also takes care of the modifications to ``PYTHONPATH``
to allow dependencies to run correctly:

.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/python/package.py
   :pyobject: Python.setup_dependent_run_environment
   :linenos:


.. _packaging_conflicts:

@@ -2696,7 +2678,7 @@ Conflicts and requirements
--------------------------

Sometimes packages have known bugs, or limitations, that would prevent them
from building e.g. against other dependencies or with certain compilers. Spack
to build e.g. against other dependencies or with certain compilers. Spack
makes it possible to express such constraints with the ``conflicts`` directive.

Adding the following to a package:
@@ -2832,70 +2814,6 @@ variant(s) are selected. This may be accomplished with conditional
   extends("python", when="+python")
   ...

.. _setup-environment:

--------------------------------------------
Runtime and build time environment variables
--------------------------------------------

Spack provides a few methods to help package authors set up the required environment variables for
their package. Environment variables typically depend on how the package is used: variables that
make sense during the build phase may not be needed at runtime, and vice versa. Further, sometimes
it makes sense to let a dependency set the environment variables for its dependents. To allow all
this, Spack provides four different methods that can be overridden in a package:

1. :meth:`setup_build_environment <spack.builder.Builder.setup_build_environment>`
2. :meth:`setup_run_environment <spack.package_base.PackageBase.setup_run_environment>`
3. :meth:`setup_dependent_build_environment <spack.builder.Builder.setup_dependent_build_environment>`
4. :meth:`setup_dependent_run_environment <spack.package_base.PackageBase.setup_dependent_run_environment>`

The Qt package, for instance, uses this call:

.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/qt/package.py
   :pyobject: Qt.setup_dependent_build_environment
   :linenos:

to set the ``QTDIR`` environment variable so that packages that depend on a particular Qt
installation will find it.

The following diagram will give you an idea of when each of these methods is called in a build
context:

.. image:: images/setup_env.png
   :align: center

Notice that ``setup_dependent_run_environment`` can be called multiple times, once for each
dependent package, whereas ``setup_run_environment`` is called only once for the package itself.
This means that the former should only be used if the environment variables depend on the dependent
package, whereas the latter should be used if the environment variables depend only on the package
itself.
|
||||
--------------------------------
|
||||
Setting package module variables
|
||||
--------------------------------
|
||||
|
||||
Apart from modifying environment variables of the dependent package, you can also define Python
|
||||
variables to be used by the dependent. This is done by implementing
|
||||
:meth:`setup_dependent_package <spack.package_base.PackageBase.setup_dependent_package>`. An
|
||||
example of this can be found in the ``Python`` package:
|
||||
|
||||
.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/python/package.py
|
||||
:pyobject: Python.setup_dependent_package
|
||||
:linenos:
|
||||
|
||||
This allows Python packages to directly use these variables:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
def install(self, spec, prefix):
|
||||
...
|
||||
install("script.py", python_platlib)
|
||||
|
||||
.. note::
|
||||
|
||||
We recommend using ``setup_dependent_package`` sparingly, as it is not always clear where
|
||||
global variables are coming from when editing a ``package.py`` file.
|
||||
|
||||
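For reference, a minimal, hypothetical sketch of such a method (the real ``Python`` package sets many more variables, including the ``python_platlib`` used above) could look like:

.. code-block:: python

   def setup_dependent_package(self, module, dependent_spec):
       # Hypothetical: expose a convenience variable in the module
       # scope of the dependent package being built
       module.python_platlib = join_path(dependent_spec.prefix, "lib", "python")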
-----
Views
-----

@@ -2974,33 +2892,6 @@ The ``provides("mpi")`` call tells Spack that the ``mpich`` package
can be used to satisfy the dependency of any package that
``depends_on("mpi")``.

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Providing multiple virtuals simultaneously
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

Packages can provide more than one virtual dependency. Sometimes, due to implementation details,
there are subsets of those virtuals that need to be provided together by the same package.

A well-known example is ``openblas``, which provides both the ``lapack`` and ``blas`` APIs in a single ``libopenblas``
library. A package that needs ``lapack`` and ``blas`` must either use ``openblas`` to provide both, or not use
``openblas`` at all. It cannot pick one or the other.

To express this constraint in a package, the two virtual dependencies must be listed in the same ``provides`` directive:

.. code-block:: python

   provides("blas", "lapack")

This makes it impossible to select ``openblas`` as a provider for one of the two
virtual dependencies and not for the other. If you try to, Spack will report an error:

.. code-block:: console

   $ spack spec netlib-scalapack ^[virtuals=lapack] openblas ^[virtuals=blas] atlas
   ==> Error: concretization failed for the following reasons:

      1. Package 'openblas' needs to provide both 'lapack' and 'blas' together, but provides only 'lapack'
^^^^^^^^^^^^^^^^^^^^
Versioned Interfaces
^^^^^^^^^^^^^^^^^^^^

@@ -3180,7 +3071,7 @@ follows:

       # The library provided by the bar virtual package
       @property
       def bar_libs(self):
           return find_libraries("libFooBar", root=self.home, recursive=True)
           return find_libraries("libFooBar", root=sef.home, recursive=True)

       # The baz virtual package home
       @property

@@ -3503,56 +3394,6 @@ is equivalent to:

Constraints from nested context managers are also combined together, but they are rarely
needed or recommended.
.. _default_args:

------------------------
Common default arguments
------------------------

Similarly, if directives have a common set of default arguments, you can
group them together in a ``with default_args()`` block:

.. code-block:: python

   class PyExample(PythonPackage):

       with default_args(type=("build", "run")):
           depends_on("py-foo")
           depends_on("py-foo@2:", when="@2:")
           depends_on("py-bar")
           depends_on("py-bz")

The above is short for:

.. code-block:: python

   class PyExample(PythonPackage):

       depends_on("py-foo", type=("build", "run"))
       depends_on("py-foo@2:", when="@2:", type=("build", "run"))
       depends_on("py-bar", type=("build", "run"))
       depends_on("py-bz", type=("build", "run"))

.. note::

   The ``with when()`` context manager is composable, while ``with default_args()``
   merely overrides the default. For example:

   .. code-block:: python

      with default_args(when="+feature"):
          depends_on("foo")
          depends_on("bar")
          depends_on("baz", when="+baz")

   is equivalent to:

   .. code-block:: python

      depends_on("foo", when="+feature")
      depends_on("bar", when="+feature")
      depends_on("baz", when="+baz")  # Note: not when="+feature+baz"
.. _install-method:

------------------
@@ -3615,7 +3456,7 @@ need to override methods like ``configure_args``:

   def configure_args(self):
       args = ["--enable-cxx"] + self.enable_or_disable("libs")
       if self.spec.satisfies("libs=static"):
       if "libs=static" in self.spec:
           args.append("--with-pic")
       return args

@@ -3749,8 +3590,7 @@ regardless of the build system. The arguments for the phase are:

The arguments ``spec`` and ``prefix`` are passed only for convenience, as they always
correspond to ``self.spec`` and ``self.spec.prefix`` respectively.

If the ``package.py`` has build instructions in a separate
:ref:`builder class <multiple_build_systems>`, the signature for a phase changes slightly:
If the ``package.py`` encodes builders explicitly, the signature for a phase changes slightly:

.. code-block:: python

@@ -3760,6 +3600,56 @@ If the ``package.py`` has build instructions in a separate

In this case the package is passed as the second argument, and ``self`` is the builder instance.
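The body of the code block above was cut by the diff context; assuming the conventional Spack builder phase arguments, a sketch of such a phase is:

.. code-block:: python

   class AutotoolsBuilder(spack.build_systems.autotools.AutotoolsBuilder):
       def install(self, pkg, spec, prefix):
           # self is the builder; pkg is the package being built
           make("install")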
.. _multiple_build_systems:

^^^^^^^^^^^^^^^^^^^^^^
Multiple build systems
^^^^^^^^^^^^^^^^^^^^^^

There are cases where a piece of software actively supports two build systems, or changes build systems
as it evolves, or needs different build systems on different platforms. Spack allows dealing with
these cases natively, if a recipe is written using builders explicitly.

For instance, software that supports two build systems unconditionally should derive from
both ``*Package`` base classes, and declare the possible use of multiple build systems using
a directive:

.. code-block:: python

   class ArpackNg(CMakePackage, AutotoolsPackage):

       build_system("cmake", "autotools", default="cmake")

In this case the software can be built with both ``autotools`` and ``cmake``. Since the package
supports multiple build systems, it is necessary to declare which one is the default. The ``package.py``
will likely contain some overriding of default builder methods:

.. code-block:: python

   class CMakeBuilder(spack.build_systems.cmake.CMakeBuilder):
       def cmake_args(self):
           pass

   class AutotoolsBuilder(spack.build_systems.autotools.AutotoolsBuilder):
       def configure_args(self):
           pass

In more complex cases it might happen that the build system changes according to certain conditions,
for instance across versions. That can be expressed with conditional variant values:

.. code-block:: python

   class ArpackNg(CMakePackage, AutotoolsPackage):

       build_system(
           conditional("cmake", when="@0.64:"),
           conditional("autotools", when="@:0.63"),
           default="cmake",
       )

In the example the directive imposes a change from ``Autotools`` to ``CMake`` going
from ``v0.63`` to ``v0.64``.

^^^^^^^^^^^^^^^^^^
Mixin base classes
^^^^^^^^^^^^^^^^^^

@@ -3806,106 +3696,6 @@ for instance:

In the example above ``Cp2k`` inherits all the conflicts and variants that ``CudaPackage`` defines.
.. _multiple_build_systems:

----------------------
Multiple build systems
----------------------

There are cases where a package actively supports two build systems, or changes build systems
as it evolves, or needs different build systems on different platforms. Spack allows dealing with
these cases by splitting the build instructions into separate builder classes.

For instance, software that supports two build systems unconditionally should derive from
both ``*Package`` base classes, and declare the possible use of multiple build systems using
a directive:

.. code-block:: python

   class Example(CMakePackage, AutotoolsPackage):

       variant("my_feature", default=True)

       build_system("cmake", "autotools", default="cmake")

In this case the software can be built with both ``autotools`` and ``cmake``. Since the package
supports multiple build systems, it is necessary to declare which one is the default.

Additional build instructions are split into separate builder classes:

.. code-block:: python

   class CMakeBuilder(spack.build_systems.cmake.CMakeBuilder):
       def cmake_args(self):
           return [
               self.define_from_variant("MY_FEATURE", "my_feature")
           ]

   class AutotoolsBuilder(spack.build_systems.autotools.AutotoolsBuilder):
       def configure_args(self):
           return self.with_or_without("my-feature", variant="my_feature")

In this example, ``spack install example +my_feature build_system=cmake`` will
pick the ``CMakeBuilder`` and invoke ``cmake -DMY_FEATURE:BOOL=ON``.

Similarly, ``spack install example +my_feature build_system=autotools`` will pick
the ``AutotoolsBuilder`` and invoke ``./configure --with-my-feature``.

Dependencies are always specified in the package class. When some dependencies
depend on the choice of the build system, it is possible to use ``when`` conditions as
usual:

.. code-block:: python

   class Example(CMakePackage, AutotoolsPackage):

       build_system("cmake", "autotools", default="cmake")

       # Runtime dependencies
       depends_on("ncurses")
       depends_on("libxml2")

       # Lower bounds for cmake only apply when using cmake as the build system
       with when("build_system=cmake"):
           depends_on("cmake@3.18:", when="@2.0:", type="build")
           depends_on("cmake@3:", type="build")

       # Specify extra build dependencies used only in the configure script
       with when("build_system=autotools"):
           depends_on("perl", type="build")
           depends_on("pkgconfig", type="build")

Very often projects switch from one build system to another, or add support
for a new build system from a certain version, which means that the choice
of the build system typically depends on a version range. Those situations can
be handled by using conditional values in the ``build_system`` directive:

.. code-block:: python

   class Example(CMakePackage, AutotoolsPackage):

       build_system(
           conditional("cmake", when="@0.64:"),
           conditional("autotools", when="@:0.63"),
           default="cmake",
       )

In the example the directive imposes a change from ``Autotools`` to ``CMake`` going
from ``v0.63`` to ``v0.64``.

The ``build_system`` can be used as an ordinary variant, which also means that it can
be used in ``depends_on`` statements. This can be useful when a package *requires* that
its dependency has a CMake config file, meaning that the dependent can only build when the
dependency is built with CMake, and not Autotools. In that case, you can force the choice
of the build system in the dependent:

.. code-block:: python

   class Dependent(CMakePackage):

       depends_on("example build_system=cmake")

.. _install-environment:

-----------------------
@@ -4478,7 +4268,7 @@ for supported features, for instance:

.. code-block:: python

   if spec.satisfies("target=avx512"):
   if "avx512" in spec.target:
       args.append("--with-avx512")

The snippet above will append the ``--with-avx512`` item to a list of arguments only if the corresponding
@@ -4983,17 +4773,17 @@ For example, running:

results in spack checking that the installation created the following **file**:

* ``self.prefix.bin.reframe``
* ``self.prefix/bin/reframe``

and the following **directories**:

* ``self.prefix.bin``
* ``self.prefix.config``
* ``self.prefix.docs``
* ``self.prefix.reframe``
* ``self.prefix.tutorials``
* ``self.prefix.unittests``
* ``self.prefix.cscs-checks``
* ``self.prefix/bin``
* ``self.prefix/config``
* ``self.prefix/docs``
* ``self.prefix/reframe``
* ``self.prefix/tutorials``
* ``self.prefix/unittests``
* ``self.prefix/cscs-checks``

If **any** of these paths are missing, then Spack considers the installation
to have failed.
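For context, checks like those listed above are declared through class attributes in the ``package.py``; a sketch mirroring the ReFrame layout shown (the exact lists live in the real package) would be:

.. code-block:: python

   class Reframe(Package):
       # Sketch: paths, relative to the prefix, checked after installation
       sanity_check_is_file = [join_path("bin", "reframe")]
       sanity_check_is_dir = ["bin", "config", "docs", "reframe",
                              "tutorials", "unittests", "cscs-checks"]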
@@ -5137,7 +4927,7 @@ installed executable. The check is implemented as follows:

   @on_package_attributes(run_tests=True)
   def check_list(self):
       with working_dir(self.stage.source_path):
           reframe = Executable(self.prefix.bin.reframe)
           reframe = Executable(join_path(self.prefix, "bin", "reframe"))
           reframe("-l")

.. warning::

@@ -5357,8 +5147,8 @@ embedded test parts.

   for example in ["ex1", "ex2"]:
       with test_part(
           self,
           f"test_example_{example}",
           purpose=f"run installed {example}",
           "test_example_{0}".format(example),
           purpose="run installed {0}".format(example),
       ):
           exe = which(join_path(self.prefix.bin, example))
           exe()
@@ -5436,10 +5226,11 @@ Below illustrates using this feature to compile an example.

   ...
   cxx = which(os.environ["CXX"])
   cxx(
       f"-L{self.prefix.lib}",
       f"-I{self.prefix.include}",
       f"{exe}.cpp",
       "-o", exe
       "-L{0}".format(self.prefix.lib),
       "-I{0}".format(self.prefix.include),
       "{0}.cpp".format(exe),
       "-o",
       exe
   )
   cxx_example = which(exe)
   cxx_example()

@@ -5456,14 +5247,14 @@ Saving build-time files

We highly recommend re-using build-time test sources and pared down
input files for testing installed software. These files are easier
to keep synchronized with software capabilities since they reside
within the software's repository.

If that is not possible, you can add test-related files to the package
repository (see :ref:`adding custom files <cache_custom_files>`). It
will be important to maintain them so they work across listed or supported
versions of the package.

You can use the ``cache_extra_test_sources`` helper to copy directories
You can use the ``cache_extra_test_sources`` method to copy directories
and/or files from the source build stage directory to the package's
installation directory.
@@ -5471,15 +5262,10 @@ The signature for ``cache_extra_test_sources`` is:

.. code-block:: python

   def cache_extra_test_sources(pkg, srcs):

where each argument has the following meaning:

* ``pkg`` is an instance of the package for the spec under test.

* ``srcs`` is a string *or* a list of strings corresponding to the
  paths of subdirectories and/or files needed for stand-alone testing.
   def cache_extra_test_sources(self, srcs):

where ``srcs`` is a string *or* a list of strings corresponding to the
paths of subdirectories and/or files needed for stand-alone testing.
The paths must be relative to the staged source directory. Contents of
subdirectories and files are copied to a special test cache subdirectory
of the installation prefix. They are automatically copied to the appropriate

@@ -5500,18 +5286,21 @@ and using ``foo.c`` in a test method is illustrated below.

      srcs = ["tests",
              join_path("examples", "foo.c"),
              join_path("examples", "bar.c")]
      cache_extra_test_sources(self, srcs)
      self.cache_extra_test_sources(srcs)

   def test_foo(self):
       exe = "foo"
       src_dir = self.test_suite.current_test_cache_dir.examples
       src_dir = join_path(
           self.test_suite.current_test_cache_dir, "examples"
       )
       with working_dir(src_dir):
           cc = which(os.environ["CC"])
           cc(
               f"-L{self.prefix.lib}",
               f"-I{self.prefix.include}",
               f"{exe}.c",
               "-o", exe
               "-L{0}".format(self.prefix.lib),
               "-I{0}".format(self.prefix.include),
               "{0}.c".format(exe),
               "-o",
               exe
           )
           foo = which(exe)
           foo()
@@ -5537,9 +5326,9 @@ the files using the ``self.test_suite.current_test_cache_dir`` property.

In our example above, test methods can use the following paths to reference
the copy of each entry listed in ``srcs``, respectively:

* ``self.test_suite.current_test_cache_dir.tests``
* ``join_path(self.test_suite.current_test_cache_dir.examples, "foo.c")``
* ``join_path(self.test_suite.current_test_cache_dir.examples, "bar.c")``
* ``join_path(self.test_suite.current_test_cache_dir, "tests")``
* ``join_path(self.test_suite.current_test_cache_dir, "examples", "foo.c")``
* ``join_path(self.test_suite.current_test_cache_dir, "examples", "bar.c")``

.. admonition:: Library packages should build stand-alone tests

@@ -5558,7 +5347,7 @@ the copy of each entry listed in ``srcs``, respectively:

   If one or more of the copied files needs to be modified to reference
   the installed software, it is recommended that those changes be made
   to the cached files **once** in the ``copy_test_sources`` method and
   **after** the call to ``cache_extra_test_sources()``. This will
   **after** the call to ``self.cache_extra_test_sources()``. This will
   reduce the amount of unnecessary work in the test method **and** avoid
   problems testing in shared instances and facility deployments.
@@ -5605,7 +5394,7 @@ property as shown below.

       """build and run custom-example"""
       data_dir = self.test_suite.current_test_data_dir
       exe = "custom-example"
       src = data_dir.join(f"{exe}.cpp")
       src = data_dir.join("{0}.cpp".format(exe))
       ...
       # TODO: Build custom-example using src and exe
       ...

@@ -5621,7 +5410,7 @@ Reading expected output from a file

The helper function ``get_escaped_text_output`` is available for packages
to retrieve and properly format the text from a file that contains the
expected output from running an executable that may contain special
characters.

The signature for ``get_escaped_text_output`` is:
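The code block carrying the signature was cut by the diff context here; judging from its usage in the example below, a hedged sketch is a helper that takes the path of the file and returns its lines with special characters escaped:

.. code-block:: python

   # Sketch only: the file name is hypothetical, and the helper itself
   # lives in Spack's test utilities
   expected = get_escaped_text_output("expected_output.txt")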
@@ -5655,7 +5444,7 @@ added to the package's ``test`` subdirectory.

       db_filename, ".dump", output=str.split, error=str.split
   )
   for exp in expected:
       assert re.search(exp, out), f"Expected '{exp}' in output"
       assert re.search(exp, out), "Expected '{0}' in output".format(exp)

If the file was instead copied from the ``tests`` subdirectory of the staged
source code, the path would be obtained as shown below.

@@ -5668,7 +5457,7 @@ source code, the path would be obtained as shown below.

   db_filename = test_cache_dir.join("packages.db")

Alternatively, if the file was copied to the ``share/tests`` subdirectory
as part of the installation process, the test could access the path as
follows:

.. code-block:: python
@@ -5705,12 +5494,9 @@ Invoking the method is the equivalent of:

.. code-block:: python

   errors = []
   for check in expected:
       if not re.search(check, actual):
           errors.append(f"Expected '{check}' in output '{actual}'")
   if errors:
       raise RuntimeError("\n ".join(errors))
           raise RuntimeError("Expected '{0}' in output '{1}'".format(check, actual))


.. _accessing-files:
@@ -5750,7 +5536,7 @@ repository, and installation.

     - ``self.test_suite.test_dir_for_spec(self.spec)``
   * - Current Spec's Build-time Files
     - ``self.test_suite.current_test_cache_dir``
     - ``join_path(self.test_suite.current_test_cache_dir.examples, "foo.c")``
     - ``join_path(self.test_suite.current_test_cache_dir, "examples", "foo.c")``
   * - Current Spec's Custom Test Files
     - ``self.test_suite.current_test_data_dir``
     - ``join_path(self.test_suite.current_test_data_dir, "hello.f90")``

@@ -5765,7 +5551,7 @@ Inheriting stand-alone tests

Stand-alone tests defined in parent (e.g., :ref:`build-systems`) and
virtual (e.g., :ref:`virtual-dependencies`) packages are executed by
packages that inherit from or provide interface implementations for those
packages, respectively.

The table below summarizes the stand-alone tests that will be executed along
with those implemented in the package itself.

@@ -5835,7 +5621,7 @@ for ``openmpi``:

   SKIPPED: test_version_oshcc: oshcc is not installed
   ...
   ==> [2023-03-10-16:04:02.215227] Completed testing
   ==> [2023-03-10-16:04:02.215597]
   ======================== SUMMARY: openmpi-4.1.4-ubmrigj ========================
   Openmpi::test_bin_mpirun .. PASSED
   Openmpi::test_bin_ompi_info .. PASSED
@@ -6285,7 +6071,7 @@ in the extra attributes can implement this method like this:

   @classmethod
   def validate_detected_spec(cls, spec, extra_attributes):
       """Check that "compilers" is in the extra attributes."""
       msg = ("the extra attribute 'compilers' must be set for "
       msg = ("the extra attribute "compilers" must be set for "
              "the detected spec '{0}'".format(spec))
       assert "compilers" in extra_attributes, msg
@@ -6361,100 +6147,7 @@ follows:

       "foo-package@{0}".format(version_str)
   )

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Add detection tests to packages
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

To ensure that software is detected correctly for multiple configurations
and on different systems, users can write a ``detection_test.yaml`` file and
put it in the package directory alongside the ``package.py`` file.
This YAML file contains enough information for Spack to mock an environment
and try to check if the detection logic yields the results that are expected.

As a general rule, attributes at the top level of ``detection_test.yaml``
represent search mechanisms, and each maps to a list of tests that should confirm
the validity of the package's detection logic.

The detection tests can be run with the following command:

.. code-block:: console

   $ spack audit externals

Errors that have been detected are reported to screen.
""""""""""""""""""""""""""
|
||||
Tests for PATH inspections
|
||||
""""""""""""""""""""""""""
|
||||
|
||||
Detection tests insisting on ``PATH`` inspections are listed under
|
||||
the ``paths`` attribute:
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
paths:
|
||||
- layout:
|
||||
- executables:
|
||||
- "bin/clang-3.9"
|
||||
- "bin/clang++-3.9"
|
||||
script: |
|
||||
echo "clang version 3.9.1-19ubuntu1 (tags/RELEASE_391/rc2)"
|
||||
echo "Target: x86_64-pc-linux-gnu"
|
||||
echo "Thread model: posix"
|
||||
echo "InstalledDir: /usr/bin"
|
||||
results:
|
||||
- spec: 'llvm@3.9.1 +clang~lld~lldb'
|
||||
|
||||
Each test is performed by first creating a temporary directory structure as
|
||||
specified in the corresponding ``layout`` and by then running
|
||||
package detection and checking that the outcome matches the expected
|
||||
``results``. The exact details on how to specify both the ``layout`` and the
|
||||
``results`` are reported in the table below:
|
||||
|
||||
.. list-table:: Test based on PATH inspections
   :header-rows: 1

   * - Option Name
     - Description
     - Allowed Values
     - Required Field
   * - ``layout``
     - Specifies the filesystem tree used for the test
     - List of objects
     - Yes
   * - ``layout:[0]:executables``
     - Relative paths for the mock executables to be created
     - List of strings
     - Yes
   * - ``layout:[0]:script``
     - Mock logic for the executable
     - Any valid shell script
     - Yes
   * - ``results``
     - List of expected results
     - List of objects (empty if no result is expected)
     - Yes
   * - ``results:[0]:spec``
     - A spec that is expected from detection
     - Any valid spec
     - Yes
"""""""""""""""""""""""""""""""
|
||||
Reuse tests from other packages
|
||||
"""""""""""""""""""""""""""""""
|
||||
|
||||
When using a custom repository, it is possible to customize a package that already exists in ``builtin``
|
||||
and reuse its external tests. To do so, just write a ``detection_tests.yaml`` alongside the customized
|
||||
``package.py`` with an ``includes`` attribute. For instance the ``detection_tests.yaml`` for
|
||||
``myrepo.llvm`` might look like:
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
includes:
|
||||
- "builtin.llvm"
|
||||
|
||||
This YAML file instructs Spack to run the detection tests defined in ``builtin.llvm`` in addition to
|
||||
those locally defined in the file.
|
||||
.. _package-lifecycle:
|
||||
|
||||
-----------------------------
Style guidelines for packages
@@ -6913,63 +6606,3 @@ To achieve backward compatibility with the single-class format Spack creates in

Overall the role of the adapter is to route access to attributes or methods first through the ``*Package``
hierarchy, and then back to the base class builder. This is schematically shown in the diagram above, where
the adapter's role is to "emulate" a method resolution order like the one represented by the red arrows.
------------------------------
Specifying License Information
------------------------------

Most of the software in Spack is open source, and most open source software is released
under one or more `common open source licenses <https://opensource.org/licenses/>`_.
Specifying the license that a package is released under in a project's
``package.py`` is good practice. To specify a license, find the `SPDX identifier
<https://spdx.org/licenses/>`_ for a project and then add it using the license
directive:

.. code-block:: python

   license("<SPDX Identifier HERE>")

For example, the SPDX ID for the Apache Software License, version 2.0 is ``Apache-2.0``,
so you'd write:

.. code-block:: python

   license("Apache-2.0")

Or, for a dual-licensed package like Spack, you would use an `SPDX Expression
<https://spdx.github.io/spdx-spec/v2-draft/SPDX-license-expressions/>`_ with both of its
licenses:

.. code-block:: python

   license("Apache-2.0 OR MIT")

Note that specifying a license without a ``when=`` clause makes it apply to all
versions and variants of the package, which might not actually be the case.
For example, a project might have switched licenses at some point or have
certain build configurations that include files that are licensed differently.
Spack itself used to be under the ``LGPL-2.1`` license, until it was relicensed
in version ``0.12`` in 2018.

You can specify when a ``license()`` directive applies using a ``when=``
clause, just like other directives. For example, to specify that a specific
license identifier should only apply to versions up to ``0.11``, but another
license should apply for later versions, you could write:

.. code-block:: python

   license("LGPL-2.1", when="@:0.11")
   license("Apache-2.0 OR MIT", when="@0.12:")

Note that unlike for most other directives, the ``when=`` constraints in the
``license()`` directive can't intersect. Spack needs to be able to resolve
exactly one license identifier expression for any given version. To specify
*multiple* licenses, use SPDX expressions and operators as above. The operators
you probably care most about are:

* ``OR``: the user chooses one license to adhere to; and
* ``AND``: the user has to adhere to all the licenses.

You may also care about `license exceptions
<https://spdx.org/licenses/exceptions-index.html>`_ that use the ``WITH`` operator,
e.g. ``Apache-2.0 WITH LLVM-exception``.
@@ -213,16 +213,6 @@ pipeline jobs.

``spack ci generate``
^^^^^^^^^^^^^^^^^^^^^

Throughout this documentation, references to the "mirror" mean the target
mirror which is checked for the presence of up-to-date specs, and where
any scheduled jobs should push built binary packages. In the past, this
defaulted to the mirror at index 0 in the mirror configs, and could be
overridden using the ``--buildcache-destination`` argument. Starting with
Spack 0.23, ``spack ci generate`` will require you to identify this mirror
by the name "buildcache-destination". While you can configure any number
of mirrors as sources for your pipelines, you will need to identify the
destination mirror by name.

Concretizes the specs in the active environment, stages them (as described in
:ref:`staging_algorithm`), and writes the resulting ``.gitlab-ci.yml`` to disk.
During concretization of the environment, ``spack ci generate`` also writes a
@@ -4,7 +4,7 @@

   SPDX-License-Identifier: (Apache-2.0 OR MIT)

=====================================
Spack for Homebrew/Conda Users
Using Spack to Replace Homebrew/Conda
=====================================

Spack is an incredibly powerful package manager, designed for supercomputers
@@ -191,18 +191,18 @@ The ``--fresh`` flag tells Spack to use the latest version of every package
where possible instead of trying to optimize for reuse of existing installed
packages.

The ``--force`` flag in addition tells Spack to overwrite its previous
concretization decisions, allowing you to choose a new version of Python.
If any of the new packages like Bash are already installed, ``spack install``
won't re-install them, it will keep the symlinks in place.

-----------------------------------
Updating & Cleaning Up Old Packages
-----------------------------------

If you're looking to mimic the behavior of Homebrew, you may also want to
clean up out-of-date packages from your environment after an upgrade. To
upgrade your entire software stack within an environment and clean up old
package versions, simply run the following commands:

.. code-block:: console

@@ -212,9 +212,9 @@ package versions, simply run the following commands:

   $ spack concretize --fresh --force
   $ spack install
   $ spack gc

Running ``spack mark -i --all`` tells Spack to mark all of the existing
packages within an environment as "implicitly" installed. This tells
spack's garbage collection system that these packages should be cleaned up.

Don't worry, however; this will not remove your entire environment.
@@ -223,8 +223,8 @@ a fresh concretization and will re-mark any packages that should remain
installed as "explicitly" installed.

**Note:** if you use multiple spack environments you should re-run ``spack install``
in each of your environments prior to running ``spack gc`` to prevent spack
from uninstalling any shared packages that are no longer required by the
environment you just upgraded.

--------------
@@ -1,13 +1,13 @@
sphinx==7.2.6
sphinxcontrib-programoutput==0.17
sphinx_design==0.5.0
sphinx-rtd-theme==2.0.0
python-levenshtein==0.23.0
docutils==0.20.1
pygments==2.17.2
urllib3==2.1.0
pytest==7.4.3
isort==5.12.0
black==23.11.0
flake8==6.1.0
mypy==1.7.1
# These dependencies should be installed using pip in order
# to build the documentation.

sphinx>=3.4,!=4.1.2,!=5.1.0
sphinxcontrib-programoutput
sphinx-design
sphinx-rtd-theme
python-levenshtein
# Restrict to docutils <0.17 to workaround a list rendering issue in sphinx.
# https://stackoverflow.com/questions/67542699
docutils <0.17
pygments <2.13
urllib3 <2
@@ -1,478 +0,0 @@
.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
   Spack Project Developers. See the top-level COPYRIGHT file for details.

   SPDX-License-Identifier: (Apache-2.0 OR MIT)

.. _signing:

=====================
Spack Package Signing
=====================

The goal of package signing in Spack is to provide data integrity
assurances around official packages produced by the automated Spack CI
pipelines. These assurances directly address the security of Spack's
software supply chain by explaining why a security-conscious user can
be reasonably justified in the belief that packages installed via Spack
have an uninterrupted auditable trail back to change management
decisions judged to be appropriate by the Spack maintainers. This is
achieved through cryptographic signing of packages built by Spack CI
pipelines based on code that has been transparently reviewed and
approved on GitHub. This document describes the signing process for
interested users.

.. _risks:

------------------------------
Risks, Impact and Threat Model
------------------------------

This document addresses the approach taken to safeguard Spack's
reputation with regard to the integrity of the package data produced by
Spack's CI pipelines. It does not address issues of data confidentiality
(Spack is intended to be largely open source) or availability (efforts
are described elsewhere). With that said, the main reputational risk can
be broadly categorized as a loss of faith in the data integrity due to a
breach of the private key used to sign packages. Remediation of a
private key breach would require republishing the public key with a
revocation certificate, generating a new signing key, an assessment and
potential rebuild/resigning of all packages since the key was breached,
and finally direct intervention by every Spack user to update their copy
of Spack's public keys used for local verification.

The primary threat model used in mitigating the risks of these stated
impacts is one of individual error, not malicious intent or insider
threat. The primary objective is to avoid the above impacts by making a
private key breach nearly impossible due to oversight or configuration
error. Obvious and straightforward measures are taken to mitigate issues
of malicious interference in data integrity and insider threats, but
these attack vectors are not systematically addressed. It should be hard
to exfiltrate the private key intentionally, and almost impossible to
leak the key by accident.
.. _overview:

-----------------
Pipeline Overview
-----------------

Spack pipelines build software through progressive stages where packages
in later stages nominally depend on packages built in earlier stages.
For both technical and design reasons these dependencies are not
implemented through the default GitLab artifacts mechanism; instead
built packages are uploaded to AWS S3 mirrors (buckets) where they are
retrieved by subsequent stages in the pipeline. Two broad categories of
pipelines exist: Pull Request (PR) pipelines and Develop/Release
pipelines.

- PR pipelines are launched in response to pull requests made by
  trusted and untrusted users. Packages built on these pipelines upload
  code to quarantined AWS S3 locations which cache the built packages
  for the purposes of review and iteration on the changes proposed in
  the pull request. Packages built on PR pipelines can come from
  untrusted users, so signing of these pipelines is not implemented.
  Jobs in these pipelines are executed via normal GitLab runners both
  within the AWS GitLab infrastructure and at affiliated institutions.
- Develop and Release pipelines **sign** the packages they produce and carry
  strong integrity assurances that trace back to auditable change management
  decisions. These pipelines only run after members from a trusted group of
  reviewers verify that the proposed changes in a pull request are appropriate.
  Once the PR is merged, or a release is cut, a pipeline is run on protected
  GitLab runners which provide access to the required signing keys within the
  job. Intermediary keys are used to sign packages in each stage of the
  pipeline as they are built, and a final job officially signs each package
  external to any specific package's build environment. An intermediate key
  exists in the AWS infrastructure and for each affiliated institution that
  maintains protected runners. The runners that execute these pipelines
  exclusively accept jobs from protected branches, meaning the intermediate keys
  are never exposed to unreviewed code and the official keys are never exposed
  to any specific build environment.

.. _key_architecture:

----------------
Key Architecture
----------------

Spack's CI process uses public-key infrastructure (PKI) based on GNU Privacy
Guard (gpg) keypairs to sign public releases of Spack package metadata, also
called specs. Two classes of GPG keys are involved in the process to reduce the
impact of an individual private key compromise; these key classes are the
*Intermediate CI Key* and the *Reputational Key*. Each of these keys has signing
sub-keys that are used exclusively for signing packages. This can be confusing,
so for the purpose of this explanation we'll refer to Root and Signing keys.
Each key has a private and a public component, as well as one or more identities
and zero or more signatures.
-------------------
Intermediate CI Key
-------------------

The Intermediate key class is used to sign and verify packages between stages
within a develop or release pipeline. An intermediate key exists for the AWS
infrastructure as well as for each affiliated institution that maintains protected
runners. These intermediate keys are made available to the GitLab execution
environment building the package so that the package's dependencies may be
verified by the Signing Intermediate CI Public Key and the final package may be
signed by the Signing Intermediate CI Private Key.


+---------------------------------------------------------------------------------------------------------+
| **Intermediate CI Key (GPG)**                                                                             |
+==================================================+======================================================+
| Root Intermediate CI Private Key (RSA 4096)#     | Root Intermediate CI Public Key (RSA 4096)           |
+--------------------------------------------------+------------------------------------------------------+
| Signing Intermediate CI Private Key (RSA 4096)   | Signing Intermediate CI Public Key (RSA 4096)        |
+--------------------------------------------------+------------------------------------------------------+
| Identity: "Intermediate CI Key <maintainers@spack.io>"                                                    |
+---------------------------------------------------------------------------------------------------------+
| Signatures: None                                                                                          |
+---------------------------------------------------------------------------------------------------------+


The *Root Intermediate CI Private Key* is stripped out of the GPG key and
stored offline, completely separate from Spack's infrastructure. This allows the
core development team to append revocation certificates to the GPG key and
issue new sub-keys for use in the pipeline. It is our expectation that this
will happen on a semi-regular basis. A corollary of this is that *this key
should not be used to verify package integrity outside the internal CI process.*
----------------
Reputational Key
----------------

The Reputational Key is the public-facing key used to sign complete groups of
development and release packages. Only one key pair exists in this class of
keys. In contrast to the Intermediate CI Key, the Reputational Key *should* be
used to verify package integrity. At the end of a develop or release pipeline, a
final pipeline job pulls down all signed package metadata built by the pipeline,
verifies they were signed with an Intermediate CI Key, then strips the
Intermediate CI Key signature from the package and re-signs them with the
Signing Reputational Private Key. The officially signed packages are then
uploaded back to the AWS S3 mirror. Please note that separating use of the
reputational key into this final job is done to prevent leakage of the key in a
spack package. Because the Signing Reputational Private Key is never exposed to
a build job, it cannot accidentally end up in any built package.


+---------------------------------------------------------------------------------------------------------+
| **Reputational Key (GPG)**                                                                                |
+==================================================+======================================================+
| Root Reputational Private Key (RSA 4096)#        | Root Reputational Public Key (RSA 4096)              |
+--------------------------------------------------+------------------------------------------------------+
| Signing Reputational Private Key (RSA 4096)      | Signing Reputational Public Key (RSA 4096)           |
+--------------------------------------------------+------------------------------------------------------+
| Identity: "Spack Project <maintainers@spack.io>"                                                          |
+---------------------------------------------------------------------------------------------------------+
| Signatures: Signed by core development team [#f1]_                                                       |
+---------------------------------------------------------------------------------------------------------+

The Root Reputational Private Key is stripped out of the GPG key and stored
offline, completely separate from Spack's infrastructure. This allows the core
development team to append revocation certificates to the GPG key in the
unlikely event that the Signing Reputational Private Key is compromised. In
general it is the expectation that rotating this key will happen infrequently, if
at all. This should allow relatively transparent verification for the end-user
community without needing deep familiarity with GnuPG or Public Key
Infrastructure.

.. _build_cache_format:

------------------
Build Cache Format
------------------

A binary package consists of a metadata file unambiguously defining the
built package (and including other details such as how to relocate it)
and the installation directory of the package stored as a compressed
archive file. The metadata files can either be unsigned, in which case
the contents are simply the json-serialized concrete spec plus metadata,
or they can be signed, in which case the json-serialized concrete spec
plus metadata is wrapped in a gpg cleartext signature. Built package
metadata files are named to indicate the operating system and
architecture for which the package was built as well as the compiler
used to build it and the package's name and version. For example::

   linux-ubuntu18.04-haswell-gcc-7.5.0-zlib-1.2.12-llv2ysfdxnppzjrt5ldybb5c52qbmoow.spec.json.sig

would contain the concrete spec and binary metadata for a binary package
of ``zlib@1.2.12``, built for the ``ubuntu`` operating system and ``haswell``
architecture. The id of the built package exists in the name of the file
as well (after the package name and version) and in this case begins
with ``llv2ys``. The id distinguishes a particular built package from all
other built packages with the same os/arch, compiler, name, and version.
Below is an example of a signed binary package metadata file. Such a
file would live in the ``build_cache`` directory of a binary mirror::

   -----BEGIN PGP SIGNED MESSAGE-----
   Hash: SHA512

   {
       "spec": {
           <concrete-spec-contents-omitted>
       },

       "buildcache_layout_version": 1,
       "binary_cache_checksum": {
           "hash_algorithm": "sha256",
           "hash": "4f1e46452c35a5e61bcacca205bae1bfcd60a83a399af201a29c95b7cc3e1423"
       }
   }

   -----BEGIN PGP SIGNATURE-----

   iQGzBAEBCgAdFiEETZn0sLle8jIrdAPLx/P+voVcifMFAmKAGvwACgkQx/P+voVc
   ifNoVgv/VrhA+wurVs5GB9PhmMA1m5U/AfXZb4BElDRwpT8ZcTPIv5X8xtv60eyn
   4EOneGVbZoMThVxgev/NKARorGmhFXRqhWf+jknJZ1dicpqn/qpv34rELKUpgXU+
   QDQ4d1P64AIdTczXe2GI9ZvhOo6+bPvK7LIsTkBbtWmopkomVxF0LcMuxAVIbA6b
   887yBvVO0VGlqRnkDW7nXx49r3AG2+wDcoU1f8ep8QtjOcMNaPTPJ0UnjD0VQGW6
   4ZFaGZWzdo45MY6tF3o5mqM7zJkVobpoW3iUz6J5tjz7H/nMlGgMkUwY9Kxp2PVH
   qoj6Zip3LWplnl2OZyAY+vflPFdFh12Xpk4FG7Sxm/ux0r+l8tCAPvtw+G38a5P7
   QEk2JBr8qMGKASmnRlJUkm1vwz0a95IF3S9YDfTAA2vz6HH3PtsNLFhtorfx8eBi
   Wn5aPJAGEPOawEOvXGGbsH4cDEKPeN0n6cy1k92uPEmBLDVsdnur8q42jk5c2Qyx
   j3DXty57
   =3gvm
   -----END PGP SIGNATURE-----
If a user has trusted the public key associated with the private key
used to sign the above spec file, the signature can be verified with
gpg, as follows::

   $ gpg --verify linux-ubuntu18.04-haswell-gcc-7.5.0-zlib-1.2.12-llv2ysfdxnppzjrt5ldybb5c52qbmoow.spec.json.sig

The metadata (regardless of whether it is signed or unsigned) contains the checksum
of the ``.spack`` file containing the actual installation. The checksum should
be compared to a checksum computed locally on the ``.spack`` file to ensure the
contents have not changed since the binary spec plus metadata were signed. The
``.spack`` files are actually tarballs containing the compressed archive of the
install tree. These files, along with the metadata files, live within the
``build_cache`` directory of the mirror, and together are organized as follows::

   build_cache/
      # unsigned metadata (for indexing, contains sha256 of .spack file)
      <arch>-<compiler>-<name>-<ver>-24zvipcqgg2wyjpvdq2ajy5jnm564hen.spec.json
      # clearsigned metadata (same as above, but signed)
      <arch>-<compiler>-<name>-<ver>-24zvipcqgg2wyjpvdq2ajy5jnm564hen.spec.json.sig
      <arch>/
         <compiler>/
            <name>-<ver>/
               # tar.gz-compressed prefix (may support more compression formats later)
               <arch>-<compiler>-<name>-<ver>-24zvipcqgg2wyjpvdq2ajy5jnm564hen.spack

Uncompressing and extracting the ``.spack`` file results in the install tree.
This is in contrast to previous versions of spack, where the ``.spack`` file
contained a (duplicated) metadata file, a signature file and a nested tarball
containing the install tree.
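As an illustration of the checksum comparison described above, a small Python sketch (field names taken from the example metadata above; this is not part of Spack's CLI) might read:

.. code-block:: python

   import hashlib
   import json

   def checksum_matches(spec_json_path, spack_file_path):
       """Compare the digest recorded in the (unsigned) metadata against
       a locally computed digest of the .spack archive."""
       with open(spec_json_path) as f:
           meta = json.load(f)
       algorithm = meta["binary_cache_checksum"]["hash_algorithm"]
       expected = meta["binary_cache_checksum"]["hash"]
       digest = hashlib.new(algorithm)
       with open(spack_file_path, "rb") as f:
           # Hash the archive in 1 MiB chunks to bound memory use
           for chunk in iter(lambda: f.read(1 << 20), b""):
               digest.update(chunk)
       return digest.hexdigest() == expected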
.. _internal_implementation:

-----------------------
Internal Implementation
-----------------------

The technical implementation of the pipeline signing process includes components
defined in Amazon Web Services, the Kubernetes cluster, at affiliated
institutions, and the GitLab/GitLab Runner deployment. We present the technical
implementation in two interdependent sections. The first addresses how secrets
are managed through the lifecycle of a develop or release pipeline. The second
section describes how GitLab Runner and pipelines are configured and managed to
support secure automated signing.

Secrets Management
^^^^^^^^^^^^^^^^^^

As stated above, the Root Private Keys (intermediate and reputational)
are stripped from the GPG keys and stored outside Spack's
infrastructure.

.. warning::
   **TODO**

   - Explanation here about where and how access is handled for these keys.
   - Both Root private keys are protected with strong passwords
   - Who has access to these and how?
**Intermediate CI Key**
-----------------------

Multiple intermediate CI signing keys exist, one Intermediate CI Key for jobs
run in AWS, and one key for each affiliated institution (e.g. University of
Oregon). Here we describe how the Intermediate CI Key is managed in AWS:

The Intermediate CI Key (including the Signing Intermediate CI Private Key) is
exported as an ASCII armored file and stored in a Kubernetes secret called
``spack-intermediate-ci-signing-key``. For convenience's sake, this same secret
contains an ASCII-armored export of just the *public* components of the
Reputational Key. This secret also contains the *public* components of each of
the affiliated institutions' Intermediate CI Key. These are potentially needed
to verify dependent packages which may have been found in the public mirror or
built by a protected job running on an affiliated institution's infrastructure
in an earlier stage of the pipeline.

Procedurally the ``spack-intermediate-ci-signing-key`` secret is used in
the following way:

1. A ``large-arm-prot`` or ``large-x86-prot`` protected runner picks up
   a job tagged ``protected`` from a protected GitLab branch. (See
   `Protected Runners and Reserved Tags <#_8bawjmgykv0b>`__.)
2. Based on its configuration, the runner creates a job Pod in the
   pipeline namespace and mounts the ``spack-intermediate-ci-signing-key``
   Kubernetes secret into the build container.
3. The Intermediate CI Key, affiliated institutions' public key and the
   Reputational Public Key are imported into a keyring by the ``spack gpg ...``
   sub-command. This is initiated by the job's build script, which is created by
   the generate job at the beginning of the pipeline.
4. Assuming the package has dependencies, those specs are verified using
   the keyring.
5. The package is built and the spec.json is generated.
6. The spec.json is signed by the keyring and uploaded to the mirror's
   build cache.
**Reputational Key**
--------------------

Because of the increased impact to end users in the case of a private
key breach, the Reputational Key is managed separately from the
Intermediate CI Keys and has additional controls. First, the Reputational
Key was generated outside of Spack's infrastructure and has been signed
by the core development team. The Reputational Key (along with the
Signing Reputational Private Key) was then ASCII armor exported to a
file. Unlike the Intermediate CI Key, this exported file is not stored as
a base64 encoded secret in Kubernetes. Instead, *the key file
itself* is encrypted and stored in Kubernetes as the
``spack-signing-key-encrypted`` secret in the pipeline namespace.

The encryption of the exported Reputational Key (including the Signing
Reputational Private Key) is handled by `AWS Key Management Store (KMS) data
keys
<https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#data-keys>`__.
The private key material is decrypted and imported at the time of signing into a
memory mounted temporary directory holding the keychain. The signing job uses
the `AWS Encryption SDK
<https://docs.aws.amazon.com/encryption-sdk/latest/developer-guide/crypto-cli.html>`__
(i.e. ``aws-encryption-cli``) to decrypt the Reputational Key. Permission to
decrypt the key is granted to the job Pod through a Kubernetes service account
specifically used for this, and only this, function. Finally, for convenience's
sake, this same secret contains an ASCII-armored export of the *public*
components of the Intermediate CI Keys and the Reputational Key. This allows the
signing script to verify that packages were built by the pipeline (both on AWS
or at affiliated institutions), or signed previously as a part of a different
pipeline. This is done *before* decrypting and importing the
Signing Reputational Private Key material and officially signing the packages.

Procedurally the ``spack-signing-key-encrypted`` secret is used in the
following way:
1. The ``spack-package-signing-gitlab-runner`` protected runner picks
|
||||
up a job tagged ``notary`` from a protected GitLab branch (See
|
||||
`Protected Runners and Reserved Tags <#_8bawjmgykv0b>`__).
|
||||
2. Based on its configuration, the runner creates a job pod in the
|
||||
pipeline namespace. The job is run in a stripped down purpose-built
|
||||
image ``ghcr.io/spack/notary:latest`` Docker image. The runner is
|
||||
configured to only allow running jobs with this image.
|
||||
3. The runner also mounts the ``spack-signing-key-encrypted`` secret to
|
||||
a path on disk. Note that this becomes several files on disk, the
|
||||
public components of the Intermediate CI Keys, the public components
|
||||
of the Reputational CI, and an AWS KMS encrypted file containing the
|
||||
Singing Reputational Private Key.
|
||||
4. In addition to the secret, the runner creates a tmpfs memory mounted
|
||||
directory where the GnuPG keyring will be created to verify, and
|
||||
then resign the package specs.
|
||||
5. The job script syncs all spec.json.sig files from the build cache to
|
||||
a working directory in the job’s execution environment.
|
||||
6. The job script then runs the ``sign.sh`` script built into the
|
||||
notary Docker image.
|
||||
7. The ``sign.sh`` script imports the public components of the
|
||||
Reputational and Intermediate CI Keys and uses them to verify good
|
||||
signatures on the spec.json.sig files. If any signed spec does not
|
||||
verify the job immediately fails.
|
||||
8. Assuming all specs are verified, the ``sign.sh`` script then unpacks
|
||||
the spec json data from the signed file in preparation for being
|
||||
re-signed with the Reputational Key.
|
||||
9. The private components of the Reputational Key are decrypted to
|
||||
standard out using ``aws-encryption-cli`` directly into a ``gpg
|
||||
–import …`` statement which imports the key into the
|
||||
keyring mounted in-memory.
|
||||
10. The private key is then used to sign each of the json specs and the
|
||||
keyring is removed from disk.
|
||||
11. The re-signed json specs are resynced to the AWS S3 Mirror and the
|
||||
public signing of the packages for the develop or release pipeline
|
||||
that created them is complete.
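
Steps 7, 8, and 10 amount to a verify/unpack/re-sign cycle against the keyring
in the tmpfs directory. The sketch below is not the actual ``sign.sh``; the
spec file name and GnuPG home directory are hypothetical.

.. code-block:: console

   # Verify the existing signature against the imported public keys (step 7).
   $ gpg --homedir "$GNUPGHOME" --verify foo.spec.json.sig

   # Unpack the embedded spec.json from the clearsigned file (step 8).
   $ gpg --homedir "$GNUPGHOME" --output foo.spec.json --decrypt foo.spec.json.sig

   # Re-sign the spec with the (now imported) Reputational Key (step 10).
   $ gpg --homedir "$GNUPGHOME" --clearsign foo.spec.json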

Non-service-account access to the private components of the Reputational
Key is managed through access to the symmetric key in KMS used to encrypt
the data key (which in turn is used to encrypt the GnuPG key file; see the
`Encryption SDK
documentation <https://docs.aws.amazon.com/encryption-sdk/latest/developer-guide/crypto-cli-examples.html#cli-example-encrypt-file>`__).
A small, trusted subset of the core development team are the only
individuals with access to this symmetric key.

.. _protected_runners:

Protected Runners and Reserved Tags
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

Spack has a large number of GitLab runners operating in its build farm.
These include runners deployed in the AWS Kubernetes cluster as well as
runners deployed at affiliated institutions. The majority of runners are
shared runners that operate across projects in gitlab.spack.io. These
runners pick up jobs primarily from the spack/spack project and execute
them in PR pipelines.

A small number of runners operating on AWS and at affiliated institutions are
registered as specific *protected* runners on the spack/spack project. In
addition to protected runners there are protected branches on the spack/spack
project. These are the ``develop`` branch, any release branch (i.e. managed with
the ``releases/v*`` wildcard), and any tag branch (managed with the ``v*``
wildcard). Finally, Spack’s pipeline generation code reserves certain tags to make
sure jobs are routed to the correct runners; these tags are ``public``,
``protected``, and ``notary``. Understanding how all this works together to
protect secrets and provide integrity assurances can be a little confusing, so
let’s break these down:

- **Protected Branches** - Protected branches in Spack prevent anyone
  other than Maintainers in GitLab from pushing code. In the case of
  Spack the only Maintainer-level entity pushing code to protected
  branches is Spack bot. Protecting branches also marks them in such a
  way that Protected Runners will only run jobs from those branches.
- **Protected Runners** - Protected Runners only run jobs from protected
  branches. Because protected runners have access to secrets, it's critical
  that they not run jobs from untrusted code (i.e. PR branches). If they did, it
  would be possible for a PR branch to tag a job in such a way that a protected
  runner executed that job and mounted secrets into a code execution
  environment that had not been reviewed by Spack maintainers. Note, however,
  that in the absence of tagging used to route jobs, public runners *could* run
  jobs from protected branches. No secrets would be at risk of being breached
  because non-protected runners do not have access to those secrets; lack of
  secrets would, however, cause the jobs to fail.
- **Reserved Tags** - To mitigate the issue of public runners picking up
  protected jobs, Spack uses a small set of “reserved” job tags (note that these
  are *job* tags, not git tags). These tags are ``public``, ``protected``, and
  ``notary``. The majority of jobs executed in Spack’s GitLab instance are
  executed via a ``generate`` job. The generate job code systematically ensures
  that no user-defined configuration sets these tags. Instead, the ``generate``
  job sets these tags based on rules related to the branch where the pipeline
  originated. If the job is a part of a pipeline on a PR branch, it sets the
  ``public`` tag. If the job is part of a pipeline on a protected branch, it
  sets the ``protected`` tag. Finally, if the job is the package signing job and
  it is running in a pipeline that is part of a protected branch, then it sets
  the ``notary`` tag.

Protected Runners are configured to only run jobs from protected branches. Only
jobs running in pipelines on protected branches are tagged with the ``protected`` or
``notary`` tags. This tightly couples jobs on protected branches to protected
runners that provide access to the secrets required to sign the built packages.
The secrets can **only** be accessed by:

1. Runners under direct control of the core development team.
2. Runners under direct control of trusted maintainers at affiliated institutions.
3. Code running the automated pipeline that has been reviewed by the
   Spack maintainers and judged to be appropriate.

Other attempts (whether through malicious intent or incompetence) can at
worst grab jobs intended for protected runners, which will cause those
jobs to fail, alerting both Spack maintainers and the core development
team.

.. [#f1]
   The Reputational Key has also cross-signed core development team
   keys.

@@ -1,7 +1,9 @@
Name, Supported Versions, Notes, Requirement Reason
Python, 3.6--3.12, , Interpreter for Spack
Python, 3.6--3.11, , Interpreter for Spack
C/C++ Compilers, , , Building software
make, , , Build software
patch, , , Build software
bash, , , Compiler wrappers
tar, , , Extract/create archives
gzip, , , Compress/Decompress archives
unzip, , , Compress/Decompress archives

428
lib/spack/env/cc
vendored
@@ -416,14 +416,30 @@ input_command="$*"

# The lists are all bell-separated to be as flexible as possible, as their
# contents may come from the command line, from ' '-separated lists,
# ':'-separated lists, etc.
include_dirs_list=""
lib_dirs_list=""
rpath_dirs_list=""
system_include_dirs_list=""
system_lib_dirs_list=""
system_rpath_dirs_list=""
isystem_system_include_dirs_list=""
isystem_include_dirs_list=""
libs_list=""
other_args_list=""

# Global state for keeping track of -Wl,-rpath -Wl,/path
wl_expect_rpath=no

# Same, but for -Xlinker -rpath -Xlinker /path
xlinker_expect_rpath=no

parse_Wl() {
while [ $# -ne 0 ]; do
if [ "$wl_expect_rpath" = yes ]; then
if system_dir "$1"; then
append return_system_rpath_dirs_list "$1"
append system_rpath_dirs_list "$1"
else
append return_rpath_dirs_list "$1"
append rpath_dirs_list "$1"
fi
wl_expect_rpath=no
else
@@ -433,9 +449,9 @@ parse_Wl() {
if [ -z "$arg" ]; then
shift; continue
elif system_dir "$arg"; then
append return_system_rpath_dirs_list "$arg"
append system_rpath_dirs_list "$arg"
else
append return_rpath_dirs_list "$arg"
append rpath_dirs_list "$arg"
fi
;;
--rpath=*)
@@ -443,9 +459,9 @@ parse_Wl() {
if [ -z "$arg" ]; then
shift; continue
elif system_dir "$arg"; then
append return_system_rpath_dirs_list "$arg"
append system_rpath_dirs_list "$arg"
else
append return_rpath_dirs_list "$arg"
append rpath_dirs_list "$arg"
fi
;;
-rpath|--rpath)
@@ -459,7 +475,7 @@ parse_Wl() {
return 1
;;
*)
append return_other_args_list "-Wl,$1"
append other_args_list "-Wl,$1"
;;
esac
fi
@@ -467,210 +483,177 @@ parse_Wl() {
done
}

categorize_arguments() {

unset IFS
while [ $# -ne 0 ]; do

return_other_args_list=""
return_isystem_was_used=""
return_isystem_system_include_dirs_list=""
return_isystem_include_dirs_list=""
return_system_include_dirs_list=""
return_include_dirs_list=""
return_system_lib_dirs_list=""
return_lib_dirs_list=""
return_system_rpath_dirs_list=""
return_rpath_dirs_list=""
# an RPATH to be added after the case statement.
rp=""

# Global state for keeping track of -Wl,-rpath -Wl,/path
wl_expect_rpath=no
# Multiple consecutive spaces in the command line can
# result in blank arguments
if [ -z "$1" ]; then
shift
continue
fi

# Same, but for -Xlinker -rpath -Xlinker /path
xlinker_expect_rpath=no

while [ $# -ne 0 ]; do

# an RPATH to be added after the case statement.
rp=""

# Multiple consecutive spaces in the command line can
# result in blank arguments
if [ -z "$1" ]; then
shift
continue
fi

if [ -n "${SPACK_COMPILER_FLAGS_KEEP}" ] ; then
# NOTE: the eval is required to allow `|` alternatives inside the variable
eval "\
case \"\$1\" in
$SPACK_COMPILER_FLAGS_KEEP)
append return_other_args_list \"\$1\"
shift
continue
;;
esac
"
fi
# the replace list is a space-separated list of pipe-separated pairs,
# the first in each pair is the original prefix to be matched, the
# second is the replacement prefix
if [ -n "${SPACK_COMPILER_FLAGS_REPLACE}" ] ; then
for rep in ${SPACK_COMPILER_FLAGS_REPLACE} ; do
before=${rep%|*}
after=${rep#*|}
eval "\
stripped=\"\${1##$before}\"
"
if [ "$stripped" = "$1" ] ; then
continue
fi

replaced="$after$stripped"

# it matched, remove it
if [ -n "${SPACK_COMPILER_FLAGS_KEEP}" ] ; then
# NOTE: the eval is required to allow `|` alternatives inside the variable
eval "\
case \"\$1\" in
$SPACK_COMPILER_FLAGS_KEEP)
append other_args_list \"\$1\"
shift

if [ -z "$replaced" ] ; then
# completely removed, continue OUTER loop
continue 2
fi

# re-build argument list with replacement
set -- "$replaced" "$@"
done
fi

case "$1" in
-isystem*)
arg="${1#-isystem}"
return_isystem_was_used=true
if [ -z "$arg" ]; then shift; arg="$1"; fi
if system_dir "$arg"; then
append return_isystem_system_include_dirs_list "$arg"
else
append return_isystem_include_dirs_list "$arg"
fi
;;
-I*)
arg="${1#-I}"
if [ -z "$arg" ]; then shift; arg="$1"; fi
if system_dir "$arg"; then
append return_system_include_dirs_list "$arg"
else
append return_include_dirs_list "$arg"
fi
;;
-L*)
arg="${1#-L}"
if [ -z "$arg" ]; then shift; arg="$1"; fi
if system_dir "$arg"; then
append return_system_lib_dirs_list "$arg"
else
append return_lib_dirs_list "$arg"
fi
;;
-l*)
# -loopopt=0 is generated erroneously in autoconf <= 2.69,
# and passed by ifx to the linker, which confuses it with a
# library. Filter it out.
# TODO: generalize filtering of args with an env var, so that
# TODO: we do not have to special case this here.
if { [ "$mode" = "ccld" ] || [ $mode = "ld" ]; } \
&& [ "$1" != "${1#-loopopt}" ]; then
shift
continue
fi
arg="${1#-l}"
if [ -z "$arg" ]; then shift; arg="$1"; fi
append return_other_args_list "-l$arg"
;;
-Wl,*)
IFS=,
if ! parse_Wl ${1#-Wl,}; then
append return_other_args_list "$1"
fi
unset IFS
;;
-Xlinker)
shift
if [ $# -eq 0 ]; then
# -Xlinker without value: let the compiler error about it.
append return_other_args_list -Xlinker
xlinker_expect_rpath=no
break
elif [ "$xlinker_expect_rpath" = yes ]; then
# Register the path of -Xlinker -rpath <other args> -Xlinker <path>
if system_dir "$1"; then
append return_system_rpath_dirs_list "$1"
else
append return_rpath_dirs_list "$1"
fi
xlinker_expect_rpath=no
else
case "$1" in
-rpath=*)
arg="${1#-rpath=}"
if system_dir "$arg"; then
append return_system_rpath_dirs_list "$arg"
else
append return_rpath_dirs_list "$arg"
fi
;;
--rpath=*)
arg="${1#--rpath=}"
if system_dir "$arg"; then
append return_system_rpath_dirs_list "$arg"
else
append return_rpath_dirs_list "$arg"
fi
;;
-rpath|--rpath)
xlinker_expect_rpath=yes
;;
"$dtags_to_strip")
;;
*)
append return_other_args_list -Xlinker
append return_other_args_list "$1"
;;
esac
fi
;;
"$dtags_to_strip")
;;
*)
append return_other_args_list "$1"
continue
;;
esac
shift
done
"
fi
# the replace list is a space-separated list of pipe-separated pairs,
# the first in each pair is the original prefix to be matched, the
# second is the replacement prefix
if [ -n "${SPACK_COMPILER_FLAGS_REPLACE}" ] ; then
for rep in ${SPACK_COMPILER_FLAGS_REPLACE} ; do
before=${rep%|*}
after=${rep#*|}
eval "\
stripped=\"\${1##$before}\"
"
if [ "$stripped" = "$1" ] ; then
continue
fi

# We found `-Xlinker -rpath` but no matching value `-Xlinker /path`. Just append
# `-Xlinker -rpath` again and let the compiler or linker handle the error during arg
# parsing.
if [ "$xlinker_expect_rpath" = yes ]; then
append return_other_args_list -Xlinker
append return_other_args_list -rpath
replaced="$after$stripped"

# it matched, remove it
shift

if [ -z "$replaced" ] ; then
# completely removed, continue OUTER loop
continue 2
fi

# re-build argument list with replacement
set -- "$replaced" "$@"
done
fi

# Same, but for -Wl flags.
if [ "$wl_expect_rpath" = yes ]; then
append return_other_args_list -Wl,-rpath
fi
}
case "$1" in
-isystem*)
arg="${1#-isystem}"
isystem_was_used=true
if [ -z "$arg" ]; then shift; arg="$1"; fi
if system_dir "$arg"; then
append isystem_system_include_dirs_list "$arg"
else
append isystem_include_dirs_list "$arg"
fi
;;
-I*)
arg="${1#-I}"
if [ -z "$arg" ]; then shift; arg="$1"; fi
if system_dir "$arg"; then
append system_include_dirs_list "$arg"
else
append include_dirs_list "$arg"
fi
;;
-L*)
arg="${1#-L}"
if [ -z "$arg" ]; then shift; arg="$1"; fi
if system_dir "$arg"; then
append system_lib_dirs_list "$arg"
else
append lib_dirs_list "$arg"
fi
;;
-l*)
# -loopopt=0 is generated erroneously in autoconf <= 2.69,
# and passed by ifx to the linker, which confuses it with a
# library. Filter it out.
# TODO: generalize filtering of args with an env var, so that
# TODO: we do not have to special case this here.
if { [ "$mode" = "ccld" ] || [ $mode = "ld" ]; } \
&& [ "$1" != "${1#-loopopt}" ]; then
shift
continue
fi
arg="${1#-l}"
if [ -z "$arg" ]; then shift; arg="$1"; fi
append other_args_list "-l$arg"
;;
-Wl,*)
IFS=,
if ! parse_Wl ${1#-Wl,}; then
append other_args_list "$1"
fi
unset IFS
;;
-Xlinker)
shift
if [ $# -eq 0 ]; then
# -Xlinker without value: let the compiler error about it.
append other_args_list -Xlinker
xlinker_expect_rpath=no
break
elif [ "$xlinker_expect_rpath" = yes ]; then
# Register the path of -Xlinker -rpath <other args> -Xlinker <path>
if system_dir "$1"; then
append system_rpath_dirs_list "$1"
else
append rpath_dirs_list "$1"
fi
xlinker_expect_rpath=no
else
case "$1" in
-rpath=*)
arg="${1#-rpath=}"
if system_dir "$arg"; then
append system_rpath_dirs_list "$arg"
else
append rpath_dirs_list "$arg"
fi
;;
--rpath=*)
arg="${1#--rpath=}"
if system_dir "$arg"; then
append system_rpath_dirs_list "$arg"
else
append rpath_dirs_list "$arg"
fi
;;
-rpath|--rpath)
xlinker_expect_rpath=yes
;;
"$dtags_to_strip")
;;
*)
append other_args_list -Xlinker
append other_args_list "$1"
;;
esac
fi
;;
"$dtags_to_strip")
;;
*)
append other_args_list "$1"
;;
esac
shift
done

categorize_arguments "$@"
include_dirs_list="$return_include_dirs_list"
lib_dirs_list="$return_lib_dirs_list"
rpath_dirs_list="$return_rpath_dirs_list"
system_include_dirs_list="$return_system_include_dirs_list"
system_lib_dirs_list="$return_system_lib_dirs_list"
system_rpath_dirs_list="$return_system_rpath_dirs_list"
isystem_was_used="$return_isystem_was_used"
isystem_system_include_dirs_list="$return_isystem_system_include_dirs_list"
isystem_include_dirs_list="$return_isystem_include_dirs_list"
other_args_list="$return_other_args_list"
# We found `-Xlinker -rpath` but no matching value `-Xlinker /path`. Just append
# `-Xlinker -rpath` again and let the compiler or linker handle the error during arg
# parsing.
if [ "$xlinker_expect_rpath" = yes ]; then
append other_args_list -Xlinker
append other_args_list -rpath
fi

# Same, but for -Wl flags.
if [ "$wl_expect_rpath" = yes ]; then
append other_args_list -Wl,-rpath
fi

#
# Add flags from Spack's cppflags, cflags, cxxflags, fcflags, fflags, and
@@ -690,14 +673,12 @@ elif [ "$SPACK_ADD_DEBUG_FLAGS" = "custom" ]; then
extend flags_list SPACK_DEBUG_FLAGS
fi

spack_flags_list=""

# Fortran flags come before CPPFLAGS
case "$mode" in
cc|ccld)
case $lang_flags in
F)
extend spack_flags_list SPACK_FFLAGS
extend flags_list SPACK_FFLAGS
;;
esac
;;
@@ -706,7 +687,7 @@ esac
# C preprocessor flags come before any C/CXX flags
case "$mode" in
cpp|as|cc|ccld)
extend spack_flags_list SPACK_CPPFLAGS
extend flags_list SPACK_CPPFLAGS
;;
esac

@@ -716,10 +697,10 @@ case "$mode" in
cc|ccld)
case $lang_flags in
C)
extend spack_flags_list SPACK_CFLAGS
extend flags_list SPACK_CFLAGS
;;
CXX)
extend spack_flags_list SPACK_CXXFLAGS
extend flags_list SPACK_CXXFLAGS
;;
esac

@@ -731,25 +712,10 @@ esac
# Linker flags
case "$mode" in
ld|ccld)
extend spack_flags_list SPACK_LDFLAGS
extend flags_list SPACK_LDFLAGS
;;
esac

IFS="$lsep"
categorize_arguments $spack_flags_list
unset IFS
spack_flags_include_dirs_list="$return_include_dirs_list"
spack_flags_lib_dirs_list="$return_lib_dirs_list"
spack_flags_rpath_dirs_list="$return_rpath_dirs_list"
spack_flags_system_include_dirs_list="$return_system_include_dirs_list"
spack_flags_system_lib_dirs_list="$return_system_lib_dirs_list"
spack_flags_system_rpath_dirs_list="$return_system_rpath_dirs_list"
spack_flags_isystem_was_used="$return_isystem_was_used"
spack_flags_isystem_system_include_dirs_list="$return_isystem_system_include_dirs_list"
spack_flags_isystem_include_dirs_list="$return_isystem_include_dirs_list"
spack_flags_other_args_list="$return_other_args_list"


# On macOS insert headerpad_max_install_names linker flag
if [ "$mode" = ld ] || [ "$mode" = ccld ]; then
if [ "${SPACK_SHORT_SPEC#*darwin}" != "${SPACK_SHORT_SPEC}" ]; then
@@ -775,8 +741,6 @@ if [ "$mode" = ccld ] || [ "$mode" = ld ]; then
extend lib_dirs_list SPACK_LINK_DIRS
fi

libs_list=""

# add RPATHs if we're in in any linking mode
case "$mode" in
ld|ccld)
@@ -805,16 +769,12 @@ args_list="$flags_list"

# Insert include directories just prior to any system include directories
# NOTE: adding ${lsep} to the prefix here turns every added element into two
extend args_list spack_flags_include_dirs_list "-I"
extend args_list include_dirs_list "-I"
extend args_list spack_flags_isystem_include_dirs_list "-isystem${lsep}"
extend args_list isystem_include_dirs_list "-isystem${lsep}"

case "$mode" in
cpp|cc|as|ccld)
if [ "$spack_flags_isystem_was_used" = "true" ]; then
extend args_list SPACK_INCLUDE_DIRS "-isystem${lsep}"
elif [ "$isystem_was_used" = "true" ]; then
if [ "$isystem_was_used" = "true" ]; then
extend args_list SPACK_INCLUDE_DIRS "-isystem${lsep}"
else
extend args_list SPACK_INCLUDE_DIRS "-I"
@@ -822,15 +782,11 @@ case "$mode" in
;;
esac

extend args_list spack_flags_system_include_dirs_list -I
extend args_list system_include_dirs_list -I
extend args_list spack_flags_isystem_system_include_dirs_list "-isystem${lsep}"
extend args_list isystem_system_include_dirs_list "-isystem${lsep}"

# Library search paths
extend args_list spack_flags_lib_dirs_list "-L"
extend args_list lib_dirs_list "-L"
extend args_list spack_flags_system_lib_dirs_list "-L"
extend args_list system_lib_dirs_list "-L"

# RPATHs arguments
@@ -839,25 +795,20 @@ case "$mode" in
if [ -n "$dtags_to_add" ] ; then
append args_list "$linker_arg$dtags_to_add"
fi
extend args_list spack_flags_rpath_dirs_list "$rpath"
extend args_list rpath_dirs_list "$rpath"
extend args_list spack_flags_system_rpath_dirs_list "$rpath"
extend args_list system_rpath_dirs_list "$rpath"
;;
ld)
if [ -n "$dtags_to_add" ] ; then
append args_list "$dtags_to_add"
fi
extend args_list spack_flags_rpath_dirs_list "-rpath${lsep}"
extend args_list rpath_dirs_list "-rpath${lsep}"
extend args_list spack_flags_system_rpath_dirs_list "-rpath${lsep}"
extend args_list system_rpath_dirs_list "-rpath${lsep}"
;;
esac

# Other arguments from the input command
extend args_list other_args_list
extend args_list spack_flags_other_args_list

# Inject SPACK_LDLIBS, if supplied
extend args_list libs_list "-l"
@@ -913,4 +864,3 @@ fi
# Execute the full command, preserving spaces with IFS set
# to the alarm bell separator.
IFS="$lsep"; exec $full_command_list
2
lib/spack/external/__init__.py
vendored
@@ -18,7 +18,7 @@

* Homepage: https://pypi.python.org/pypi/archspec
* Usage: Labeling, comparison and detection of microarchitectures
* Version: 0.2.2 (commit 1dc58a5776dd77e6fc6e4ba5626af5b1fb24996e)
* Version: 0.2.1 (commit 9e1117bd8a2f0581bced161f2a2e8d6294d0300b)

astunparse
----------------
2
lib/spack/external/archspec/__init__.py
vendored
@@ -1,2 +1,2 @@
"""Init file to avoid namespace packages"""
__version__ = "0.2.2"
__version__ = "0.2.0"

@@ -79,18 +79,14 @@ def __init__(self, name, parents, vendor, features, compilers, generation=0):
self.features = features
self.compilers = compilers
self.generation = generation
# Cache the ancestor computation
self._ancestors = None

@property
def ancestors(self):
"""All the ancestors of this microarchitecture."""
if self._ancestors is None:
value = self.parents[:]
for parent in self.parents:
value.extend(a for a in parent.ancestors if a not in value)
self._ancestors = value
return self._ancestors
value = self.parents[:]
for parent in self.parents:
value.extend(a for a in parent.ancestors if a not in value)
return value

def _to_set(self):
"""Returns a set of the nodes in this microarchitecture DAG."""

@@ -145,13 +145,6 @@
"flags": "-march={name} -mtune=generic -mcx16 -msahf -mpopcnt -msse3 -msse4.1 -msse4.2 -mssse3"
}
],
"intel": [
{
"versions": "16.0:",
"name": "corei7",
"flags": "-march={name} -mtune=generic -mpopcnt"
}
],
"oneapi": [
{
"versions": "2021.2.0:",
@@ -224,13 +217,6 @@
"flags": "-march={name} -mtune=generic -mcx16 -msahf -mpopcnt -msse3 -msse4.1 -msse4.2 -mssse3 -mavx -mavx2 -mbmi -mbmi2 -mf16c -mfma -mlzcnt -mmovbe -mxsave"
}
],
"intel": [
{
"versions": "16.0:",
"name": "core-avx2",
"flags": "-march={name} -mtune={name} -fma -mf16c"
}
],
"oneapi": [
{
"versions": "2021.2.0:",
@@ -314,13 +300,6 @@
"flags": "-march={name} -mtune=generic -mcx16 -msahf -mpopcnt -msse3 -msse4.1 -msse4.2 -mssse3 -mavx -mavx2 -mbmi -mbmi2 -mf16c -mfma -mlzcnt -mmovbe -mxsave -mavx512f -mavx512bw -mavx512cd -mavx512dq -mavx512vl"
}
],
"intel": [
{
"versions": "16.0:",
"name": "skylake-avx512",
"flags": "-march={name} -mtune={name}"
}
],
"oneapi": [
{
"versions": "2021.2.0:",
@@ -1433,92 +1412,6 @@
]
}
},
"sapphirerapids": {
"from": [
"icelake"
],
"vendor": "GenuineIntel",
"features": [
"mmx",
"sse",
"sse2",
"ssse3",
"sse4_1",
"sse4_2",
"popcnt",
"aes",
"pclmulqdq",
"avx",
"rdrand",
"f16c",
"movbe",
"fma",
"avx2",
"bmi1",
"bmi2",
"rdseed",
"adx",
"clflushopt",
"xsavec",
"xsaveopt",
"avx512f",
"avx512vl",
"avx512bw",
"avx512dq",
"avx512cd",
"avx512vbmi",
"avx512ifma",
"sha_ni",
"clwb",
"rdpid",
"gfni",
"avx512_vbmi2",
"avx512_vpopcntdq",
"avx512_bitalg",
"avx512_vnni",
"vpclmulqdq",
"vaes",
"avx512_bf16",
"cldemote",
"movdir64b",
"movdiri",
"pdcm",
"serialize",
"waitpkg"
],
"compilers": {
"gcc": [
{
"versions": "11.0:",
"flags": "-march={name} -mtune={name}"
}
],
"clang": [
{
"versions": "12.0:",
"flags": "-march={name} -mtune={name}"
}
],
"intel": [
{
"versions": "2021.2:",
"flags": "-march={name} -mtune={name}"
}
],
"oneapi": [
{
"versions": "2021.2:",
"flags": "-march={name} -mtune={name}"
}
],
"dpcpp": [
{
"versions": "2021.2:",
"flags": "-march={name} -mtune={name}"
}
]
}
},
"k10": {
"from": ["x86_64"],
"vendor": "AuthenticAMD",
@@ -2172,6 +2065,8 @@
"pku",
"gfni",
"flush_l1d",
"erms",
"avic",
"avx512f",
"avx512dq",
"avx512ifma",
@@ -2188,12 +2083,12 @@
"compilers": {
"gcc": [
{
"versions": "10.3:12.2",
"versions": "10.3:13.0",
"name": "znver3",
"flags": "-march={name} -mtune={name} -mavx512f -mavx512dq -mavx512ifma -mavx512cd -mavx512bw -mavx512vl -mavx512vbmi -mavx512vbmi2 -mavx512vnni -mavx512bitalg"
},
{
"versions": "12.3:",
"versions": "13.1:",
"name": "znver4",
"flags": "-march={name} -mtune={name}"
}
@@ -2318,26 +2213,6 @@
]
}
},
"power10": {
"from": ["power9"],
"vendor": "IBM",
"generation": 10,
"features": [],
"compilers": {
"gcc": [
{
"versions": "11.1:",
"flags": "-mcpu={name} -mtune={name}"
}
],
"clang": [
{
"versions": "11.0:",
"flags": "-mcpu={name} -mtune={name}"
}
]
}
},
"ppc64le": {
"from": [],
"vendor": "generic",
@@ -2425,29 +2300,6 @@
]
}
},
"power10le": {
"from": ["power9le"],
"vendor": "IBM",
"generation": 10,
"features": [],
"compilers": {
"gcc": [
{
"name": "power10",
"versions": "11.1:",
"flags": "-mcpu={name} -mtune={name}"
}
],
"clang": [
{
"versions": "11.0:",
"family": "ppc64le",
"name": "power10",
"flags": "-mcpu={name} -mtune={name}"
}
]
}
},
"aarch64": {
"from": [],
"vendor": "generic",
@@ -2635,37 +2487,6 @@
]
}
},
"armv9.0a": {
"from": ["armv8.5a"],
"vendor": "generic",
"features": [],
"compilers": {
"gcc": [
{
"versions": "12:",
"flags": "-march=armv9-a -mtune=generic"
}
],
"clang": [
{
"versions": "14:",
"flags": "-march=armv9-a -mtune=generic"
}
],
"apple-clang": [
{
"versions": ":",
"flags": "-march=armv9-a -mtune=generic"
}
],
"arm": [
{
"versions": ":",
"flags": "-march=armv9-a -mtune=generic"
}
]
}
},
"thunderx2": {
"from": ["armv8.1a"],
"vendor": "Cavium",
@@ -2887,12 +2708,8 @@
],
"arm" : [
{
"versions": "20:21.9",
"versions": "20:",
"flags" : "-march=armv8.2-a+fp16+rcpc+dotprod+crypto"
},
{
"versions": "22:",
"flags" : "-mcpu=neoverse-n1"
}
],
"nvhpc" : [
@@ -3020,7 +2837,7 @@
},
{
"versions": "22:",
"flags" : "-mcpu=neoverse-v1"
"flags" : "-march=armv8.4-a+sve+ssbs+fp16+bf16+crypto+i8mm+rng"
}
],
"nvhpc" : [
@@ -3032,126 +2849,6 @@
]
}
},
"neoverse_v2": {
"from": ["neoverse_n1", "armv9.0a"],
"vendor": "ARM",
"features": [
"fp",
"asimd",
"evtstrm",
"aes",
"pmull",
"sha1",
"sha2",
"crc32",
"atomics",
"fphp",
"asimdhp",
"cpuid",
"asimdrdm",
"jscvt",
"fcma",
"lrcpc",
"dcpop",
"sha3",
"sm3",
"sm4",
"asimddp",
"sha512",
"sve",
"asimdfhm",
"dit",
"uscat",
"ilrcpc",
"flagm",
"ssbs",
"sb",
"paca",
"pacg",
"dcpodp",
"sve2",
"sveaes",
"svepmull",
"svebitperm",
"svesha3",
"svesm4",
"flagm2",
"frint",
"svei8mm",
"svebf16",
"i8mm",
"bf16",
"dgh",
"bti"
],
"compilers" : {
"gcc": [
{
"versions": "4.8:5.99",
"flags": "-march=armv8-a"
},
{
"versions": "6:6.99",
"flags" : "-march=armv8.1-a"
},
{
"versions": "7.0:7.99",
"flags" : "-march=armv8.2-a -mtune=cortex-a72"
},
{
"versions": "8.0:8.99",
"flags" : "-march=armv8.4-a+sve -mtune=cortex-a72"
},
{
"versions": "9.0:9.99",
"flags" : "-march=armv8.5-a+sve -mtune=cortex-a76"
},
{
"versions": "10.0:11.99",
"flags" : "-march=armv8.5-a+sve+sve2+i8mm+bf16 -mtune=cortex-a77"
},
{
"versions": "12.0:12.99",
"flags" : "-march=armv9-a+i8mm+bf16 -mtune=cortex-a710"
},
{
"versions": "13.0:",
"flags" : "-mcpu=neoverse-v2"
}
],
"clang" : [
{
"versions": "9.0:10.99",
"flags" : "-march=armv8.5-a+sve"
},
{
"versions": "11.0:13.99",
"flags" : "-march=armv8.5-a+sve+sve2+i8mm+bf16"
},
{
"versions": "14.0:15.99",
"flags" : "-march=armv9-a+i8mm+bf16"
},
{
"versions": "16.0:",
"flags" : "-mcpu=neoverse-v2"
}
],
"arm" : [
{
"versions": "23.04.0:",
"flags" : "-mcpu=neoverse-v2"
}
],
"nvhpc" : [
{
"versions": "23.3:",
"name": "neoverse-v2",
"flags": "-tp {name}"
}
]
}
},
"m1": {
"from": ["armv8.4a"],
"vendor": "Apple",
7
lib/spack/external/ctest_log_parser.py
vendored
@@ -65,6 +65,9 @@
up to date with CTest, just make sure the ``*_matches`` and
``*_exceptions`` lists are kept up to date with CTest's build handler.
"""
from __future__ import print_function
from __future__ import division

import re
import math
import multiprocessing
@@ -208,7 +211,7 @@
]


class LogEvent:
class LogEvent(object):
"""Class representing interesting events (e.g., errors) in a build log."""
def __init__(self, text, line_no,
source_file=None, source_line_no=None,
@@ -345,7 +348,7 @@ def _parse_unpack(args):
return _parse(*args)


class CTestLogParser:
class CTestLogParser(object):
"""Log file parser that extracts errors and warnings."""
def __init__(self, profile=False):
# whether to record timing information
@@ -1,105 +0,0 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Path primitives that just require Python standard library."""
import functools
import sys
from typing import List, Optional
from urllib.parse import urlparse


class Path:
    """Enum to identify the path-style."""

    unix: int = 0
    windows: int = 1
    platform_path: int = windows if sys.platform == "win32" else unix


def format_os_path(path: str, mode: int = Path.unix) -> str:
    """Formats the input path to use consistent, platform specific separators.

    Absolute paths are converted between drive letters and a prepended '/' as per platform
    requirement.

    Parameters:
        path: the path to be normalized, must be a string or expose the replace method.
        mode: the path file separator style to normalize the passed path to.
            Default is unix style, i.e. '/'
    """
    if not path:
        return path
    if mode == Path.windows:
        path = path.replace("/", "\\")
    else:
        path = path.replace("\\", "/")
    return path


def convert_to_posix_path(path: str) -> str:
    """Converts the input path to POSIX style."""
    return format_os_path(path, mode=Path.unix)


def convert_to_windows_path(path: str) -> str:
    """Converts the input path to Windows style."""
    return format_os_path(path, mode=Path.windows)


def convert_to_platform_path(path: str) -> str:
    """Converts the input path to the current platform's native style."""
    return format_os_path(path, mode=Path.platform_path)


def path_to_os_path(*parameters: str) -> List[str]:
    """Takes an arbitrary number of positional parameters, converts each argument of type
    string to use a normalized filepath separator, and returns a list of all values.
    """

    def _is_url(path_or_url: str) -> bool:
        if "\\" in path_or_url:
            return False
        url_tuple = urlparse(path_or_url)
        return bool(url_tuple.scheme) and len(url_tuple.scheme) > 1

    result = []
    for item in parameters:
        if isinstance(item, str) and not _is_url(item):
            item = convert_to_platform_path(item)
        result.append(item)
    return result


def system_path_filter(_func=None, arg_slice: Optional[slice] = None):
    """Filters function arguments to account for platform path separators.
    Optional slicing range can be specified to select specific arguments

    This decorator takes all (or a slice) of a method's positional arguments
    and normalizes usage of filepath separators on a per platform basis.

    Note: `**kwargs`, urls, and any type that is not a string are ignored
    so in such cases where path normalization is required, that should be
    handled by calling path_to_os_path directly as needed.

    Parameters:
        arg_slice: a slice object specifying the slice of arguments
            in the decorated method over which filepath separators are
            normalized
    """

    def holder_func(func):
        @functools.wraps(func)
        def path_filter_caller(*args, **kwargs):
            args = list(args)
            if arg_slice:
                args[arg_slice] = path_to_os_path(*args[arg_slice])
            else:
                args = path_to_os_path(*args)
            return func(*args, **kwargs)

        return path_filter_caller

    if _func:
        return holder_func(_func)
    return holder_func
@@ -1,67 +0,0 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""String manipulation functions that do not have other dependencies than Python
standard library
"""
from typing import List, Optional


def comma_list(sequence: List[str], article: str = "") -> str:
    if type(sequence) is not list:
        sequence = list(sequence)

    if not sequence:
        return ""
    if len(sequence) == 1:
        return sequence[0]

    out = ", ".join(str(s) for s in sequence[:-1])
    if len(sequence) != 2:
        out += ","  # oxford comma
    out += " "
    if article:
        out += article + " "
    out += str(sequence[-1])
    return out


def comma_or(sequence: List[str]) -> str:
    """Return a string with all the elements of the input joined by comma, but the last
    one (which is joined by 'or').
    """
    return comma_list(sequence, "or")


def comma_and(sequence: List[str]) -> str:
    """Return a string with all the elements of the input joined by comma, but the last
    one (which is joined by 'and').
    """
    return comma_list(sequence, "and")


def quote(sequence: List[str], q: str = "'") -> List[str]:
    """Quotes each item in the input list with the quote character passed as second argument."""
    return [f"{q}{e}{q}" for e in sequence]


def plural(n: int, singular: str, plural: Optional[str] = None, show_n: bool = True) -> str:
    """Pluralize <singular> word by adding an s if n != 1.

    Arguments:
        n: number of things there are
        singular: singular form of word
        plural: optional plural form, for when it's not just singular + 's'
        show_n: whether to include n in the result string (default True)

    Returns:
        "1 thing" if n == 1 or "n things" if n != 1
    """
    number = f"{n} " if show_n else ""
    if n == 1:
        return f"{number}{singular}"
    elif plural is not None:
        return f"{number}{plural}"
    else:
        return f"{number}{singular}s"
@@ -1,459 +0,0 @@
|
||||
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
"""URL primitives that just require Python standard library."""
|
||||
import itertools
|
||||
import os.path
|
||||
import re
|
||||
from typing import Optional, Set, Tuple
|
||||
from urllib.parse import urlsplit, urlunsplit
|
||||
|
||||
# Archive extensions allowed in Spack
|
||||
PREFIX_EXTENSIONS = ("tar", "TAR")
|
||||
EXTENSIONS = ("gz", "bz2", "xz", "Z")
|
||||
NO_TAR_EXTENSIONS = ("zip", "tgz", "tbz2", "tbz", "txz")
|
||||
|
||||
# Add PREFIX_EXTENSIONS and EXTENSIONS last so that .tar.gz is matched *before* .tar or .gz
|
||||
ALLOWED_ARCHIVE_TYPES = (
|
||||
tuple(".".join(ext) for ext in itertools.product(PREFIX_EXTENSIONS, EXTENSIONS))
|
||||
+ PREFIX_EXTENSIONS
|
||||
+ EXTENSIONS
|
||||
+ NO_TAR_EXTENSIONS
|
||||
)
|
||||
CONTRACTION_MAP = {"tgz": "tar.gz", "txz": "tar.xz", "tbz": "tar.bz2", "tbz2": "tar.bz2"}
|
||||
|
||||
|
||||
def find_list_urls(url: str) -> Set[str]:
|
||||
r"""Find good list URLs for the supplied URL.
|
||||
|
||||
By default, returns the dirname of the archive path.
|
||||
|
||||
Provides special treatment for the following websites, which have a
|
||||
unique list URL different from the dirname of the download URL:
|
||||
|
||||
========= =======================================================
|
||||
GitHub https://github.com/<repo>/<name>/releases
|
||||
GitLab https://gitlab.\*/<repo>/<name>/tags
|
||||
BitBucket https://bitbucket.org/<repo>/<name>/downloads/?tab=tags
|
||||
CRAN https://\*.r-project.org/src/contrib/Archive/<name>
|
||||
PyPI https://pypi.org/simple/<name>/
|
||||
LuaRocks https://luarocks.org/modules/<repo>/<name>
|
||||
========= =======================================================
|
||||
|
||||
Note: this function is called by `spack versions`, `spack checksum`,
|
||||
and `spack create`, but not by `spack fetch` or `spack install`.
|
||||
|
||||
Parameters:
|
||||
url (str): The download URL for the package
|
||||
|
||||
Returns:
|
||||
set: One or more list URLs for the package
|
||||
"""
|
||||
|
||||
url_types = [
|
||||
# GitHub
|
||||
# e.g. https://github.com/llnl/callpath/archive/v1.0.1.tar.gz
|
||||
(r"(.*github\.com/[^/]+/[^/]+)", lambda m: m.group(1) + "/releases"),
|
||||
# GitLab API endpoint
|
||||
# e.g. https://gitlab.dkrz.de/api/v4/projects/k202009%2Flibaec/repository/archive.tar.gz?sha=v1.0.2
|
||||
(
|
||||
r"(.*gitlab[^/]+)/api/v4/projects/([^/]+)%2F([^/]+)",
|
||||
lambda m: m.group(1) + "/" + m.group(2) + "/" + m.group(3) + "/tags",
|
||||
),
|
||||
# GitLab non-API endpoint
|
||||
# e.g. https://gitlab.dkrz.de/k202009/libaec/uploads/631e85bcf877c2dcaca9b2e6d6526339/libaec-1.0.0.tar.gz
|
||||
(r"(.*gitlab[^/]+/(?!api/v4/projects)[^/]+/[^/]+)", lambda m: m.group(1) + "/tags"),
|
||||
# BitBucket
|
||||
# e.g. https://bitbucket.org/eigen/eigen/get/3.3.3.tar.bz2
|
||||
(r"(.*bitbucket.org/[^/]+/[^/]+)", lambda m: m.group(1) + "/downloads/?tab=tags"),
|
||||
# CRAN
|
||||
# e.g. https://cran.r-project.org/src/contrib/Rcpp_0.12.9.tar.gz
|
||||
# e.g. https://cloud.r-project.org/src/contrib/rgl_0.98.1.tar.gz
|
||||
(
|
||||
r"(.*\.r-project\.org/src/contrib)/([^_]+)",
|
||||
lambda m: m.group(1) + "/Archive/" + m.group(2),
|
||||
),
|
||||
# PyPI
|
||||
# e.g. https://pypi.io/packages/source/n/numpy/numpy-1.19.4.zip
|
||||
# e.g. https://www.pypi.io/packages/source/n/numpy/numpy-1.19.4.zip
|
||||
# e.g. https://pypi.org/packages/source/n/numpy/numpy-1.19.4.zip
|
||||
# e.g. https://pypi.python.org/packages/source/n/numpy/numpy-1.19.4.zip
|
||||
# e.g. https://files.pythonhosted.org/packages/source/n/numpy/numpy-1.19.4.zip
|
||||
# e.g. https://pypi.io/packages/py2.py3/o/opencensus-context/opencensus_context-0.1.1-py2.py3-none-any.whl
|
||||
(
|
||||
r"(?:pypi|pythonhosted)[^/]+/packages/[^/]+/./([^/]+)",
|
||||
lambda m: "https://pypi.org/simple/" + m.group(1) + "/",
|
||||
),
|
||||
# LuaRocks
|
||||
# e.g. https://luarocks.org/manifests/gvvaughan/lpeg-1.0.2-1.src.rock
|
||||
# e.g. https://luarocks.org/manifests/openresty/lua-cjson-2.1.0-1.src.rock
|
||||
(
|
||||
r"luarocks[^/]+/(?:modules|manifests)/(?P<org>[^/]+)/"
|
||||
+ r"(?P<name>.+?)-[0-9.-]*\.src\.rock",
|
||||
lambda m: "https://luarocks.org/modules/"
|
||||
+ m.group("org")
|
||||
+ "/"
|
||||
+ m.group("name")
|
||||
+ "/",
|
||||
),
|
||||
]
|
||||
|
||||
list_urls = {os.path.dirname(url)}
|
||||
|
||||
for pattern, fun in url_types:
|
||||
match = re.search(pattern, url)
|
||||
if match:
|
||||
list_urls.add(fun(match))
|
||||
|
||||
return list_urls
|
||||
|
||||
|
||||
def strip_query_and_fragment(url: str) -> Tuple[str, str]:
|
||||
"""Strips query and fragment from a url, then returns the base url and the suffix.
|
||||
|
||||
Args:
|
||||
url: URL to be stripped
|
||||
|
||||
Raises:
|
||||
ValueError: when there is any error parsing the URL
|
||||
"""
|
||||
components = urlsplit(url)
|
||||
stripped = components[:3] + (None, None)
|
||||
|
||||
query, frag = components[3:5]
|
||||
suffix = ""
|
||||
if query:
|
||||
suffix += "?" + query
|
||||
if frag:
|
||||
suffix += "#" + frag
|
||||
|
||||
return urlunsplit(stripped), suffix
|
||||
|
||||
|
||||
SOURCEFORGE_RE = re.compile(r"(.*(?:sourceforge\.net|sf\.net)/.*)(/download)$")
|
||||
|
||||
|
||||
def split_url_on_sourceforge_suffix(url: str) -> Tuple[str, ...]:
|
||||
"""If the input is a sourceforge URL, returns base URL and "/download" suffix. Otherwise,
|
||||
returns the input URL and an empty string.
|
||||
"""
|
||||
match = SOURCEFORGE_RE.search(url)
|
||||
if match is not None:
|
||||
return match.groups()
|
||||
return url, ""
|
||||
|
||||
|
||||
def has_extension(path_or_url: str, ext: str) -> bool:
|
||||
"""Returns true if the extension in input is present in path, false otherwise."""
|
||||
prefix, _ = split_url_on_sourceforge_suffix(path_or_url)
|
||||
if not ext.startswith(r"\."):
|
||||
ext = rf"\.{ext}$"
|
||||
|
||||
if re.search(ext, prefix):
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
def extension_from_path(path_or_url: Optional[str]) -> Optional[str]:
|
||||
"""Tries to match an allowed archive extension to the input. Returns the first match,
|
||||
or None if no match was found.
|
||||
|
||||
Raises:
|
||||
ValueError: if the input is None
|
||||
"""
|
||||
if path_or_url is None:
|
||||
raise ValueError("Can't call extension() on None")
|
||||
|
||||
for t in ALLOWED_ARCHIVE_TYPES:
|
||||
if has_extension(path_or_url, t):
|
||||
return t
|
||||
return None
|
||||
|
||||
|
||||
def remove_extension(path_or_url: str, *, extension: str) -> str:
|
||||
"""Returns the input with the extension removed"""
|
||||
suffix = rf"\.{extension}$"
|
||||
return re.sub(suffix, "", path_or_url)
|
||||
|
||||
|
||||
def check_and_remove_ext(path: str, *, extension: str) -> str:
|
||||
"""Returns the input path with the extension removed, if the extension is present in path.
|
||||
Otherwise, returns the input unchanged.
|
||||
"""
|
||||
if not has_extension(path, extension):
|
||||
return path
|
||||
path, _ = split_url_on_sourceforge_suffix(path)
|
||||
return remove_extension(path, extension=extension)
|
||||
|
||||
|
||||
def strip_extension(path_or_url: str, *, extension: Optional[str] = None) -> str:
|
||||
"""If a path contains the extension in input, returns the path stripped of the extension.
|
||||
Otherwise, returns the input path.
|
||||
|
||||
If extension is None, attempts to strip any allowed extension from path.
|
||||
"""
|
||||
if extension is None:
|
||||
for t in ALLOWED_ARCHIVE_TYPES:
|
||||
if has_extension(path_or_url, ext=t):
|
||||
extension = t
|
||||
break
|
||||
else:
|
||||
return path_or_url
|
||||
|
||||
return check_and_remove_ext(path_or_url, extension=extension)
|
||||
|
||||
|
||||
def split_url_extension(url: str) -> Tuple[str, ...]:
|
||||
"""Some URLs have a query string, e.g.:
|
||||
|
||||
1. https://github.com/losalamos/CLAMR/blob/packages/PowerParser_v2.0.7.tgz?raw=true
|
||||
2. http://www.apache.org/dyn/closer.cgi?path=/cassandra/1.2.0/apache-cassandra-1.2.0-rc2-bin.tar.gz
|
||||
3. https://gitlab.kitware.com/vtk/vtk/repository/archive.tar.bz2?ref=v7.0.0
|
||||
|
||||
In (1), the query string needs to be stripped to get at the
|
||||
extension, but in (2) & (3), the filename is IN a single final query
|
||||
argument.
|
||||
|
||||
This strips the URL into three pieces: ``prefix``, ``ext``, and ``suffix``.
|
||||
The suffix contains anything that was stripped off the URL to
|
||||
get at the file extension. In (1), it will be ``'?raw=true'``, but
|
||||
in (2), it will be empty. In (3) the suffix is a parameter that follows
|
||||
after the file extension, e.g.:
|
||||
|
||||
1. ``('https://github.com/losalamos/CLAMR/blob/packages/PowerParser_v2.0.7', '.tgz', '?raw=true')``
|
||||
2. ``('http://www.apache.org/dyn/closer.cgi?path=/cassandra/1.2.0/apache-cassandra-1.2.0-rc2-bin', '.tar.gz', None)``
|
||||
3. ``('https://gitlab.kitware.com/vtk/vtk/repository/archive', '.tar.bz2', '?ref=v7.0.0')``
|
||||
"""
|
||||
# Strip off sourceforge download suffix.
|
||||
# e.g. https://sourceforge.net/projects/glew/files/glew/2.0.0/glew-2.0.0.tgz/download
|
||||
prefix, suffix = split_url_on_sourceforge_suffix(url)
|
||||
|
||||
ext = extension_from_path(prefix)
|
||||
if ext is not None:
|
||||
prefix = strip_extension(prefix)
|
||||
return prefix, ext, suffix
|
||||
|
||||
try:
|
||||
prefix, suf = strip_query_and_fragment(prefix)
|
||||
except ValueError:
|
||||
# FIXME: tty.debug("Got error parsing path %s" % path)
|
||||
# Ignore URL parse errors here
|
||||
return url, ""
|
||||
|
||||
ext = extension_from_path(prefix)
|
||||
prefix = strip_extension(prefix)
|
||||
suffix = suf + suffix
|
||||
if ext is None:
|
||||
ext = ""
|
||||
|
||||
return prefix, ext, suffix
|
||||
|
||||
|
||||
def strip_version_suffixes(path_or_url: str) -> str:
|
||||
"""Some tarballs contain extraneous information after the version:
|
||||
|
||||
* ``bowtie2-2.2.5-source``
|
||||
* ``libevent-2.0.21-stable``
|
||||
* ``cuda_8.0.44_linux.run``
|
||||
|
||||
These strings are not part of the version number and should be ignored.
|
||||
This function strips those suffixes off and returns the remaining string.
|
||||
The goal is that the version is always the last thing in ``path``:
|
||||
|
||||
* ``bowtie2-2.2.5``
|
||||
* ``libevent-2.0.21``
|
||||
* ``cuda_8.0.44``
|
||||
|
||||
Args:
|
||||
path_or_url: The filename or URL for the package
|
||||
|
||||
Returns:
|
||||
The ``path`` with any extraneous suffixes removed
|
||||
"""
|
||||
# NOTE: This could be done with complicated regexes in parse_version_offset
|
||||
# NOTE: The problem is that we would have to add these regexes to the end
|
||||
# NOTE: of every single version regex. Easier to just strip them off
|
||||
# NOTE: permanently
|
||||
|
||||
suffix_regexes = [
|
||||
# Download type
|
||||
r"[Ii]nstall",
|
||||
r"all",
|
||||
r"code",
|
||||
r"[Ss]ources?",
|
||||
r"file",
|
||||
r"full",
|
||||
r"single",
|
||||
r"with[a-zA-Z_-]+",
|
||||
r"rock",
|
||||
r"src(_0)?",
|
||||
r"public",
|
||||
r"bin",
|
||||
r"binary",
|
||||
r"run",
|
||||
r"[Uu]niversal",
|
||||
r"jar",
|
||||
r"complete",
|
||||
r"dynamic",
|
||||
r"oss",
|
||||
r"gem",
|
||||
r"tar",
|
||||
r"sh",
|
||||
# Download version
|
||||
r"release",
|
||||
r"bin",
|
||||
r"stable",
|
||||
r"[Ff]inal",
|
||||
r"rel",
|
||||
r"orig",
|
||||
r"dist",
|
||||
r"\+",
|
||||
# License
|
||||
r"gpl",
|
||||
# Arch
|
||||
# Needs to come before and after OS, appears in both orders
|
||||
r"ia32",
|
||||
r"intel",
|
||||
r"amd64",
|
||||
r"linux64",
|
||||
r"x64",
|
||||
r"64bit",
|
||||
r"x86[_-]64",
|
||||
r"i586_64",
|
||||
r"x86",
|
||||
r"i[36]86",
|
||||
r"ppc64(le)?",
|
||||
r"armv?(7l|6l|64)",
|
||||
# Other
|
||||
r"cpp",
|
||||
r"gtk",
|
||||
r"incubating",
|
||||
# OS
|
||||
r"[Ll]inux(_64)?",
|
||||
r"LINUX",
|
||||
r"[Uu]ni?x",
|
||||
r"[Ss]un[Oo][Ss]",
|
||||
r"[Mm]ac[Oo][Ss][Xx]?",
|
||||
r"[Oo][Ss][Xx]",
|
||||
r"[Dd]arwin(64)?",
|
||||
r"[Aa]pple",
|
||||
r"[Ww]indows",
|
||||
r"[Ww]in(64|32)?",
|
||||
r"[Cc]ygwin(64|32)?",
|
||||
r"[Mm]ingw",
|
||||
r"centos",
|
||||
# Arch
|
||||
# Needs to come before and after OS, appears in both orders
|
||||
r"ia32",
|
||||
r"intel",
|
||||
r"amd64",
|
||||
r"linux64",
|
||||
r"x64",
|
||||
r"64bit",
|
||||
r"x86[_-]64",
|
||||
r"i586_64",
|
||||
r"x86",
|
||||
r"i[36]86",
|
||||
r"ppc64(le)?",
|
||||
r"armv?(7l|6l|64)?",
|
||||
# PyPI
|
||||
r"[._-]py[23].*\.whl",
|
||||
r"[._-]cp[23].*\.whl",
|
||||
r"[._-]win.*\.exe",
|
||||
]
|
||||
|
||||
for regex in suffix_regexes:
|
||||
# Remove the suffix from the end of the path
|
||||
# This may be done multiple times
|
||||
path_or_url = re.sub(r"[._-]?" + regex + "$", "", path_or_url)
|
||||
|
||||
return path_or_url
|
||||
|
||||
|
||||
def expand_contracted_extension(extension: str) -> str:
    """Returns the expanded version of a known contracted extension.

    This function maps extensions like ".tgz" to ".tar.gz". On unknown extensions,
    return the input unmodified.
    """
    extension = extension.strip(".")
    return CONTRACTION_MAP.get(extension, extension)


def expand_contracted_extension_in_path(
    path_or_url: str, *, extension: Optional[str] = None
) -> str:
    """Returns the input path or URL with any contraction extension expanded.

    Args:
        path_or_url: path or URL to be expanded
        extension: if specified, only attempt to expand that extension
    """
    extension = extension or extension_from_path(path_or_url)
    if extension is None:
        return path_or_url

    expanded = expand_contracted_extension(extension)
    if expanded != extension:
        return re.sub(rf"{extension}", rf"{expanded}", path_or_url)
    return path_or_url

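The contraction lookup is a plain dict access; ``CONTRACTION_MAP`` below is a hypothetical stand-in with the obvious entries (the real table lives elsewhere in this module):

```python
from typing import Dict

# Hypothetical stand-in for the module-level CONTRACTION_MAP used above.
CONTRACTION_MAP: Dict[str, str] = {"tgz": "tar.gz", "txz": "tar.xz", "tbz2": "tar.bz2"}

def expand(extension: str) -> str:
    # Same logic as expand_contracted_extension: strip dots, then look up.
    extension = extension.strip(".")
    return CONTRACTION_MAP.get(extension, extension)

print(expand(".tgz"))  # -> tar.gz
print(expand("zip"))   # -> zip (unknown extensions pass through)
```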
def compression_ext_from_compressed_archive(extension: str) -> Optional[str]:
    """Returns compression extension for a compressed archive"""
    extension = expand_contracted_extension(extension)
    for ext in [*EXTENSIONS]:
        if ext in extension:
            return ext
    return None


def strip_compression_extension(path_or_url: str, ext: Optional[str] = None) -> str:
    """Strips the compression extension from the input, and returns it. For instance,
    "foo.tgz" becomes "foo.tar".

    If no extension is given, try a default list of extensions.

    Args:
        path_or_url: input to be stripped
        ext: if given, extension to be stripped
    """
    if not extension_from_path(path_or_url):
        return path_or_url

    expanded_path = expand_contracted_extension_in_path(path_or_url)
    candidates = [ext] if ext is not None else EXTENSIONS
    for current_extension in candidates:
        modified_path = check_and_remove_ext(expanded_path, extension=current_extension)
        if modified_path != expanded_path:
            return modified_path
    return expanded_path


def allowed_archive(path_or_url: str) -> bool:
    """Returns true if the input is a valid archive, False otherwise."""
    return (
        False if not path_or_url else any(path_or_url.endswith(t) for t in ALLOWED_ARCHIVE_TYPES)
    )


def determine_url_file_extension(path: str) -> str:
    """This returns the type of archive a URL refers to. This is
    sometimes confusing because of URLs like:

        (1) https://github.com/petdance/ack/tarball/1.93_02

    Where the URL doesn't actually contain the filename. We need
    to know what type it is so that we can appropriately name files
    in mirrors.
    """
    match = re.search(r"github.com/.+/(zip|tar)ball/", path)
    if match:
        if match.group(1) == "zip":
            return "zip"
        elif match.group(1) == "tar":
            return "tar.gz"

    prefix, ext, suffix = split_url_extension(path)
    return ext
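The GitHub tarball/zipball special case is self-contained enough to exercise on its own; a short sketch using the same regex:

```python
import re

def url_archive_type(path: str) -> str:
    # Same pattern determine_url_file_extension uses for GitHub ball URLs.
    match = re.search(r"github.com/.+/(zip|tar)ball/", path)
    if match:
        return "zip" if match.group(1) == "zip" else "tar.gz"
    return ""  # otherwise fall through to ordinary extension splitting

print(url_archive_type("https://github.com/petdance/ack/tarball/1.93_02"))  # -> tar.gz
```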
@@ -3,42 +3,33 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import abc
from __future__ import print_function

import argparse
import errno
import io
import re
import sys
from argparse import ArgumentParser
from typing import IO, Any, Iterable, List, Optional, Sequence, Tuple, Union


class Command:
class Command(object):
    """Parsed representation of a command from argparse.

    This is a single command from an argparse parser. ``ArgparseWriter`` creates these and returns
    them from ``parse()``, and it passes one of these to each call to ``format()`` so that we can
    take an action for a single command.
    This is a single command from an argparse parser. ``ArgparseWriter``
    creates these and returns them from ``parse()``, and it passes one of
    these to each call to ``format()`` so that we can take an action for
    a single command.

    Parts of a Command:
      - prog: command name (str)
      - description: command description (str)
      - usage: command usage (str)
      - positionals: list of positional arguments (list)
      - optionals: list of optional arguments (list)
      - subcommands: list of subcommand parsers (list)
    """

    def __init__(
        self,
        prog: str,
        description: Optional[str],
        usage: str,
        positionals: List[Tuple[str, Optional[Iterable[Any]], Union[int, str, None], str]],
        optionals: List[Tuple[Sequence[str], List[str], str, Union[int, str, None], str]],
        subcommands: List[Tuple[ArgumentParser, str, str]],
    ) -> None:
        """Initialize a new Command instance.

        Args:
            prog: Program name.
            description: Command description.
            usage: Command usage.
            positionals: List of positional arguments.
            optionals: List of optional arguments.
            subcommands: List of subcommand parsers.
        """
    def __init__(self, prog, description, usage, positionals, optionals, subcommands):
        self.prog = prog
        self.description = description
        self.usage = usage
@@ -47,34 +38,35 @@ def __init__(
        self.subcommands = subcommands


# NOTE: The only reason we subclass argparse.HelpFormatter is to get access to self._expand_help(),
# ArgparseWriter is not intended to be used as a formatter_class.
class ArgparseWriter(argparse.HelpFormatter, abc.ABC):
    """Analyze an argparse ArgumentParser for easy generation of help."""
# NOTE: The only reason we subclass argparse.HelpFormatter is to get access
# to self._expand_help(), ArgparseWriter is not intended to be used as a
# formatter_class.
class ArgparseWriter(argparse.HelpFormatter):
    """Analyzes an argparse ArgumentParser for easy generation of help."""

    def __init__(self, prog: str, out: IO = sys.stdout, aliases: bool = False) -> None:
        """Initialize a new ArgparseWriter instance.
    def __init__(self, prog, out=None, aliases=False):
        """Initializes a new ArgparseWriter instance.

        Args:
            prog: Program name.
            out: File object to write to.
            aliases: Whether or not to include subparsers for aliases.
        Parameters:
            prog (str): the program name
            out (file object): the file to write to (default sys.stdout)
            aliases (bool): whether or not to include subparsers for aliases
        """
        super().__init__(prog)
        super(ArgparseWriter, self).__init__(prog)
        self.level = 0
        self.prog = prog
        self.out = out
        self.out = sys.stdout if out is None else out
        self.aliases = aliases

    def parse(self, parser: ArgumentParser, prog: str) -> Command:
        """Parse the parser object and return the relavent components.
    def parse(self, parser, prog):
        """Parses the parser object and returns the relavent components.

        Args:
            parser: Command parser.
            prog: Program name.
        Parameters:
            parser (argparse.ArgumentParser): the parser
            prog (str): the command name

        Returns:
            Information about the command from the parser.
            (Command) information about the command from the parser
        """
        self.parser = parser

@@ -88,7 +80,8 @@ def parse(self, parser: ArgumentParser, prog: str) -> Command:
        groups = parser._mutually_exclusive_groups
        usage = fmt._format_usage(None, actions, groups, "").strip()

        # Go through actions and split them into optionals, positionals, and subcommands
        # Go through actions and split them into optionals, positionals,
        # and subcommands
        optionals = []
        positionals = []
        subcommands = []
@@ -96,97 +89,74 @@ def parse(self, parser: ArgumentParser, prog: str) -> Command:
            if action.option_strings:
                flags = action.option_strings
                dest_flags = fmt._format_action_invocation(action)
                nargs = action.nargs
                help = (
                    self._expand_help(action)
                    if action.help and action.help != argparse.SUPPRESS
                    else ""
                )
                help = help.split("\n")[0]

                if action.choices is not None:
                    dest = [str(choice) for choice in action.choices]
                else:
                    dest = [action.dest]

                optionals.append((flags, dest, dest_flags, nargs, help))
                help = self._expand_help(action) if action.help else ""
                help = help.replace("\n", " ")
                optionals.append((flags, dest_flags, help))
            elif isinstance(action, argparse._SubParsersAction):
                for subaction in action._choices_actions:
                    subparser = action._name_parser_map[subaction.dest]
                    help = (
                        self._expand_help(subaction)
                        if subaction.help and action.help != argparse.SUPPRESS
                        else ""
                    )
                    help = help.split("\n")[0]
                    subcommands.append((subparser, subaction.dest, help))
                    subcommands.append((subparser, subaction.dest))

                    # Look for aliases of the form 'name (alias, ...)'
                    if self.aliases and isinstance(subaction.metavar, str):
                    if self.aliases:
                        match = re.match(r"(.*) \((.*)\)", subaction.metavar)
                        if match:
                            aliases = match.group(2).split(", ")
                            for alias in aliases:
                                subparser = action._name_parser_map[alias]
                                help = (
                                    self._expand_help(subaction)
                                    if subaction.help and action.help != argparse.SUPPRESS
                                    else ""
                                )
                                help = help.split("\n")[0]
                                subcommands.append((subparser, alias, help))
                                subcommands.append((subparser, alias))
            else:
                args = fmt._format_action_invocation(action)
                help = (
                    self._expand_help(action)
                    if action.help and action.help != argparse.SUPPRESS
                    else ""
                )
                help = help.split("\n")[0]
                positionals.append((args, action.choices, action.nargs, help))
                help = self._expand_help(action) if action.help else ""
                help = help.replace("\n", " ")
                positionals.append((args, help))

        return Command(prog, description, usage, positionals, optionals, subcommands)

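``parse()`` leans on private argparse internals (``_actions``, ``_SubParsersAction``, ``_name_parser_map``). A self-contained sketch of that traversal against the stock library:

```python
import argparse

parser = argparse.ArgumentParser(prog="demo")
parser.add_argument("--verbose", action="store_true", help="print more output")
sub = parser.add_subparsers(metavar="COMMAND")
sub.add_parser("build", help="build things")

# _actions holds every argument; _SubParsersAction maps names to subparsers.
for action in parser._actions:
    if action.option_strings:
        print("optional:", action.option_strings)
    elif isinstance(action, argparse._SubParsersAction):
        print("subcommands:", list(action._name_parser_map))
```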
    @abc.abstractmethod
    def format(self, cmd: Command) -> str:
        """Return the string representation of a single node in the parser tree.
    def format(self, cmd):
        """Returns the string representation of a single node in the
        parser tree.

        Override this in subclasses to define how each subcommand should be displayed.
        Override this in subclasses to define how each subcommand
        should be displayed.

        Args:
            cmd: Parsed information about a command or subcommand.
        Parameters:
            (Command): parsed information about a command or subcommand

        Returns:
            String representation of this subcommand.
            str: the string representation of this subcommand
        """
        raise NotImplementedError

    def _write(self, parser: ArgumentParser, prog: str, level: int = 0) -> None:
        """Recursively write a parser.
    def _write(self, parser, prog, level=0):
        """Recursively writes a parser.

        Args:
            parser: Command parser.
            prog: Program name.
            level: Current level.
        Parameters:
            parser (argparse.ArgumentParser): the parser
            prog (str): the command name
            level (int): the current level
        """
        self.level = level

        cmd = self.parse(parser, prog)
        self.out.write(self.format(cmd))

        for subparser, prog, help in cmd.subcommands:
        for subparser, prog in cmd.subcommands:
            self._write(subparser, prog, level=level + 1)

    def write(self, parser: ArgumentParser) -> None:
    def write(self, parser):
        """Write out details about an ArgumentParser.

        Args:
            parser: Command parser.
            parser (argparse.ArgumentParser): the parser
        """
        try:
            self._write(parser, self.prog)
        except BrokenPipeError:
        except IOError as e:
            # Swallow pipe errors
            pass
            # Raises IOError in Python 2 and BrokenPipeError in Python 3
            if e.errno != errno.EPIPE:
                raise


_rst_levels = ["=", "-", "^", "~", ":", "`"]
@@ -195,33 +165,21 @@ def write(self, parser: ArgumentParser) -> None:
class ArgparseRstWriter(ArgparseWriter):
    """Write argparse output as rst sections."""

    def __init__(
        self,
        prog: str,
        out: IO = sys.stdout,
        aliases: bool = False,
        rst_levels: Sequence[str] = _rst_levels,
    ) -> None:
        """Initialize a new ArgparseRstWriter instance.
    def __init__(self, prog, out=None, aliases=False, rst_levels=_rst_levels):
        """Create a new ArgparseRstWriter.

        Args:
            prog: Program name.
            out: File object to write to.
            aliases: Whether or not to include subparsers for aliases.
            rst_levels: List of characters for rst section headings.
        Parameters:
            prog (str): program name
            out (file object): file to write to
            aliases (bool): whether or not to include subparsers for aliases
            rst_levels (list of str): list of characters
                for rst section headings
        """
        super().__init__(prog, out, aliases)
        out = sys.stdout if out is None else out
        super(ArgparseRstWriter, self).__init__(prog, out, aliases)
        self.rst_levels = rst_levels

    def format(self, cmd: Command) -> str:
        """Return the string representation of a single node in the parser tree.

        Args:
            cmd: Parsed information about a command or subcommand.

        Returns:
            String representation of a node.
        """
    def format(self, cmd):
        string = io.StringIO()
        string.write(self.begin_command(cmd.prog))

@@ -232,13 +190,13 @@ def format(self, cmd: Command) -> str:

        if cmd.positionals:
            string.write(self.begin_positionals())
            for args, choices, nargs, help in cmd.positionals:
            for args, help in cmd.positionals:
                string.write(self.positional(args, help))
            string.write(self.end_positionals())

        if cmd.optionals:
            string.write(self.begin_optionals())
            for flags, dest, dest_flags, nargs, help in cmd.optionals:
            for flags, dest_flags, help in cmd.optionals:
                string.write(self.optional(dest_flags, help))
            string.write(self.end_optionals())

@@ -247,15 +205,7 @@ def format(self, cmd: Command) -> str:

        return string.getvalue()

    def begin_command(self, prog: str) -> str:
        """Text to print before a command.

        Args:
            prog: Program name.

        Returns:
            Text before a command.
        """
    def begin_command(self, prog):
        return """
----

@@ -268,26 +218,10 @@ def begin_command(self, prog: str) -> str:
            prog.replace(" ", "-"), prog, self.rst_levels[self.level] * len(prog)
        )

    def description(self, description: str) -> str:
        """Description of a command.

        Args:
            description: Command description.

        Returns:
            Description of a command.
        """
    def description(self, description):
        return description + "\n\n"

    def usage(self, usage: str) -> str:
        """Example usage of a command.

        Args:
            usage: Command usage.

        Returns:
            Usage of a command.
        """
    def usage(self, usage):
        return """\
.. code-block:: console

@@ -297,24 +231,10 @@ def usage(self, usage: str) -> str:
            usage
        )

    def begin_positionals(self) -> str:
        """Text to print before positional arguments.

        Returns:
            Positional arguments header.
        """
    def begin_positionals(self):
        return "\n**Positional arguments**\n\n"

    def positional(self, name: str, help: str) -> str:
        """Description of a positional argument.

        Args:
            name: Argument name.
            help: Help text.

        Returns:
            Positional argument description.
        """
    def positional(self, name, help):
        return """\
{0}
  {1}

@@ -323,32 +243,13 @@ def positional(self, name: str, help: str) -> str:
            name, help
        )

    def end_positionals(self) -> str:
        """Text to print after positional arguments.

        Returns:
            Positional arguments footer.
        """
    def end_positionals(self):
        return ""

    def begin_optionals(self) -> str:
        """Text to print before optional arguments.

        Returns:
            Optional arguments header.
        """
    def begin_optionals(self):
        return "\n**Optional arguments**\n\n"

    def optional(self, opts: str, help: str) -> str:
        """Description of an optional argument.

        Args:
            opts: Optional argument.
            help: Help text.

        Returns:
            Optional argument description.
        """
    def optional(self, opts, help):
        return """\
``{0}``
  {1}

@@ -357,23 +258,10 @@ def optional(self, opts: str, help: str) -> str:
            opts, help
        )

    def end_optionals(self) -> str:
        """Text to print after optional arguments.

        Returns:
            Optional arguments footer.
        """
    def end_optionals(self):
        return ""

    def begin_subcommands(self, subcommands: List[Tuple[ArgumentParser, str, str]]) -> str:
        """Table with links to other subcommands.

        Arguments:
            subcommands: List of subcommands.

        Returns:
            Subcommand linking text.
        """
    def begin_subcommands(self, subcommands):
        string = """
**Subcommands**

@@ -382,8 +270,116 @@ def begin_subcommands(self, subcommands: List[Tuple[ArgumentParser, str, str]])

"""

        for cmd, _, _ in subcommands:
        for cmd, _ in subcommands:
            prog = re.sub(r"^[^ ]* ", "", cmd.prog)
            string += "   * :ref:`{0} <{1}>`\n".format(prog, cmd.prog.replace(" ", "-"))

        return string + "\n"

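Driving the rst writer is a two-liner; a usage sketch (assumes it runs in the same module as the classes above):

```python
import argparse
import io

parser = argparse.ArgumentParser(prog="demo", description="demo tool")
parser.add_argument("--jobs", help="number of parallel jobs")

out = io.StringIO()
writer = ArgparseRstWriter("demo", out)  # class defined above
writer.write(parser)
print(out.getvalue())  # rst heading, usage code-block, and argument sections
```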
class ArgparseCompletionWriter(ArgparseWriter):
    """Write argparse output as shell programmable tab completion functions."""

    def format(self, cmd):
        """Returns the string representation of a single node in the
        parser tree.

        Override this in subclasses to define how each subcommand
        should be displayed.

        Parameters:
            (Command): parsed information about a command or subcommand

        Returns:
            str: the string representation of this subcommand
        """

        assert cmd.optionals  # we should always at least have -h, --help
        assert not (cmd.positionals and cmd.subcommands)  # one or the other

        # We only care about the arguments/flags, not the help messages
        positionals = []
        if cmd.positionals:
            positionals, _ = zip(*cmd.positionals)
        optionals, _, _ = zip(*cmd.optionals)
        subcommands = []
        if cmd.subcommands:
            _, subcommands = zip(*cmd.subcommands)

        # Flatten lists of lists
        optionals = [x for xx in optionals for x in xx]

        return (
            self.start_function(cmd.prog)
            + self.body(positionals, optionals, subcommands)
            + self.end_function(cmd.prog)
        )

    def start_function(self, prog):
        """Returns the syntax needed to begin a function definition.

        Parameters:
            prog (str): the command name

        Returns:
            str: the function definition beginning
        """
        name = prog.replace("-", "_").replace(" ", "_")
        return "\n_{0}() {{".format(name)

    def end_function(self, prog=None):
        """Returns the syntax needed to end a function definition.

        Parameters:
            prog (str or None): the command name

        Returns:
            str: the function definition ending
        """
        return "}\n"

    def body(self, positionals, optionals, subcommands):
        """Returns the body of the function.

        Parameters:
            positionals (list): list of positional arguments
            optionals (list): list of optional arguments
            subcommands (list): list of subcommand parsers

        Returns:
            str: the function body
        """
        return ""

    def positionals(self, positionals):
        """Returns the syntax for reporting positional arguments.

        Parameters:
            positionals (list): list of positional arguments

        Returns:
            str: the syntax for positional arguments
        """
        return ""

    def optionals(self, optionals):
        """Returns the syntax for reporting optional flags.

        Parameters:
            optionals (list): list of optional arguments

        Returns:
            str: the syntax for optional flags
        """
        return ""

    def subcommands(self, subcommands):
        """Returns the syntax for reporting subcommands.

        Parameters:
            subcommands (list): list of subcommand parsers

        Returns:
            str: the syntax for subcommand parsers
        """
        return ""

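All the hooks above intentionally return empty strings; a concrete writer overrides them. A hedged sketch of a bash-flavored subclass (illustrative only, not Spack's actual BashCompletionWriter):

```python
class SketchBashCompletionWriter(ArgparseCompletionWriter):
    """Illustrative: complete subcommand names, else flags, via compgen."""

    def body(self, positionals, optionals, subcommands):
        words = " ".join(subcommands if subcommands else optionals)
        return '\n    compgen -W "{0}" -- "$cur"\n'.format(words)
```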
@@ -11,7 +11,6 @@
import itertools
import numbers
import os
import pathlib
import posixpath
import re
import shutil
@@ -19,17 +18,14 @@
import sys
import tempfile
from contextlib import contextmanager
from itertools import accumulate
from typing import Callable, Iterable, List, Match, Optional, Tuple, Union

import llnl.util.symlink
from llnl.util import tty
from llnl.util.lang import dedupe, memoized
from llnl.util.symlink import islink, readlink, resolve_link_target_relative_to_the_link, symlink
from llnl.util.symlink import islink, symlink

from spack.util.executable import Executable, which

from ..path import path_to_os_path, system_path_filter
from spack.util.path import path_to_os_path, system_path_filter

if sys.platform != "win32":
    import grp
@@ -105,7 +101,7 @@ def _nop(args, ns=None, follow_symlinks=None):
        pass

    # follow symlinks (aka don't not follow symlinks)
    follow = follow_symlinks or not (islink(src) and islink(dst))
    follow = follow_symlinks or not (os.path.islink(src) and os.path.islink(dst))
    if follow:
        # use the real function if it exists
        def lookup(name):
@@ -156,37 +152,6 @@ def lookup(name):
    shutil.copystat = copystat


def polite_path(components: Iterable[str]):
    """
    Given a list of strings which are intended to be path components,
    generate a path, and format each component to avoid generating extra
    path entries.

    For example all "/", "\", and ":" characters will be replaced with
    "_". Other characters like "=" will also be replaced.
    """
    return os.path.join(*[polite_filename(x) for x in components])


@memoized
def _polite_antipattern():
    # A regex of all the characters we don't want in a filename
    return re.compile(r"[^A-Za-z0-9_.-]")


def polite_filename(filename: str) -> str:
    """
    Replace generally problematic filename characters with underscores.

    This differs from sanitize_filename in that it is more aggressive in
    changing characters in the name. For example it removes "=" which can
    confuse path parsing in external tools.
    """
    # This character set applies for both Windows and Linux. It does not
    # account for reserved filenames in Windows.
    return _polite_antipattern().sub("_", filename)

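Because the antipattern is a single character class, the behavior is easy to check in isolation; a small sketch:

```python
import os
import re

# Same character class as _polite_antipattern above.
_antipattern = re.compile(r"[^A-Za-z0-9_.-]")

def polite(name: str) -> str:
    return _antipattern.sub("_", name)

print(polite("gcc@12.2=debug"))                          # -> gcc_12.2_debug
print(os.path.join(*(polite(c) for c in ["a:b", "c"])))  # -> a_b/c
```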
def getuid():
    if sys.platform == "win32":
        import ctypes

@@ -204,7 +169,7 @@ def rename(src, dst):
    if sys.platform == "win32":
        # Windows path existence checks will sometimes fail on junctions/links/symlinks
        # so check for that case
        if os.path.exists(dst) or islink(dst):
        if os.path.exists(dst) or os.path.islink(dst):
            os.remove(dst)
    os.rename(src, dst)

@@ -368,7 +333,8 @@ def groupid_to_group(x):

    if string:
        regex = re.escape(regex)
    for filename in path_to_os_path(*filenames):
    filenames = path_to_os_path(*filenames)
    for filename in filenames:
        msg = 'FILTER FILE: {0} [replacing "{1}"]'
        tty.debug(msg.format(filename, regex))

@@ -436,7 +402,7 @@ def groupid_to_group(x):
            os.remove(backup_filename)


class FileFilter:
class FileFilter(object):
    """Convenience class for calling ``filter_file`` a lot."""

    def __init__(self, *filenames):
@@ -600,7 +566,7 @@ def set_install_permissions(path):
    # If this points to a file maintained in a Spack prefix, it is assumed that
    # this function will be invoked on the target. If the file is outside a
    # Spack-maintained prefix, the permissions should not be modified.
    if islink(path):
    if os.path.islink(path):
        return
    if os.path.isdir(path):
        os.chmod(path, 0o755)
@@ -644,8 +610,6 @@ def chgrp(path, group, follow_symlinks=True):
        gid = grp.getgrnam(group).gr_gid
    else:
        gid = group
    if os.stat(path).st_gid == gid:
        return
    if follow_symlinks:
        os.chown(path, -1, gid)
    else:
@@ -669,7 +633,7 @@ def chmod_x(entry, perms):
@system_path_filter
def copy_mode(src, dest):
    """Set the mode of dest to that of src unless it is a link."""
    if islink(dest):
    if os.path.islink(dest):
        return
    src_mode = os.stat(src).st_mode
    dest_mode = os.stat(dest).st_mode
@@ -755,12 +719,26 @@ def install(src, dest):
    copy(src, dest, _permissions=True)


@system_path_filter
def resolve_link_target_relative_to_the_link(link):
    """
    os.path.isdir uses os.path.exists, which for links will check
    the existence of the link target. If the link target is relative to
    the link, we need to construct a pathname that is valid from
    our cwd (which may not be the same as the link's directory)
    """
    target = os.readlink(link)
    if os.path.isabs(target):
        return target
    link_dir = os.path.dirname(os.path.abspath(link))
    return os.path.join(link_dir, target)

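The subtle case is a relative link target, which must be anchored at the link's directory rather than the caller's cwd; a sketch of the computation:

```python
import os

def resolve_relative(link: str) -> str:
    target = os.readlink(link)
    if os.path.isabs(target):
        return target
    # Anchor relative targets at the link's own directory, not the cwd.
    return os.path.join(os.path.dirname(os.path.abspath(link)), target)

# e.g. /opt/pkg/bin/tool -> ../libexec/tool resolves to
# /opt/pkg/bin/../libexec/tool no matter where the caller runs from.
```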
@system_path_filter
def copy_tree(
    src: str,
    dest: str,
    symlinks: bool = True,
    allow_broken_symlinks: bool = sys.platform != "win32",
    ignore: Optional[Callable[[str], bool]] = None,
    _permissions: bool = False,
):
@@ -783,8 +761,6 @@ def copy_tree(
        src (str): the directory to copy
        dest (str): the destination directory
        symlinks (bool): whether or not to preserve symlinks
        allow_broken_symlinks (bool): whether or not to allow broken (dangling) symlinks,
            On Windows, setting this to True will raise an exception. Defaults to true on unix.
        ignore (typing.Callable): function indicating which files to ignore
        _permissions (bool): for internal use only

@@ -792,8 +768,6 @@ def copy_tree(
        IOError: if *src* does not match any files or directories
        ValueError: if *src* is a parent directory of *dest*
    """
    if allow_broken_symlinks and sys.platform == "win32":
        raise llnl.util.symlink.SymlinkError("Cannot allow broken symlinks on Windows!")
    if _permissions:
        tty.debug("Installing {0} to {1}".format(src, dest))
    else:
@@ -807,11 +781,6 @@ def copy_tree(
    if not files:
        raise IOError("No such file or directory: '{0}'".format(src))

    # For Windows hard-links and junctions, the source path must exist to make a symlink. Add
    # all symlinks to this list while traversing the tree, then when finished, make all
    # symlinks at the end.
    links = []

    for src in files:
        abs_src = os.path.abspath(src)
        if not abs_src.endswith(os.path.sep):
@@ -834,7 +803,7 @@ def copy_tree(
            ignore=ignore,
            follow_nonexisting=True,
        ):
            if islink(s):
            if os.path.islink(s):
                link_target = resolve_link_target_relative_to_the_link(s)
                if symlinks:
                    target = os.readlink(s)
@@ -848,9 +817,7 @@ def escaped_path(path):
                        tty.debug("Redirecting link {0} to {1}".format(target, new_target))
                        target = new_target

                    links.append((target, d, s))
                    continue

                    symlink(target, d)
                elif os.path.isdir(link_target):
                    mkdirp(d)
                else:
@@ -865,17 +832,9 @@ def escaped_path(path):
                set_install_permissions(d)
                copy_mode(s, d)

    for target, d, s in links:
        symlink(target, d, allow_broken_symlinks=allow_broken_symlinks)
        if _permissions:
            set_install_permissions(d)
            copy_mode(s, d)


@system_path_filter
def install_tree(
    src, dest, symlinks=True, ignore=None, allow_broken_symlinks=sys.platform != "win32"
):
def install_tree(src, dest, symlinks=True, ignore=None):
    """Recursively install an entire directory tree rooted at *src*.

    Same as :py:func:`copy_tree` with the addition of setting proper
@@ -886,21 +845,12 @@ def install_tree(
        dest (str): the destination directory
        symlinks (bool): whether or not to preserve symlinks
        ignore (typing.Callable): function indicating which files to ignore
        allow_broken_symlinks (bool): whether or not to allow broken (dangling) symlinks,
            On Windows, setting this to True will raise an exception.

    Raises:
        IOError: if *src* does not match any files or directories
        ValueError: if *src* is a parent directory of *dest*
    """
    copy_tree(
        src,
        dest,
        symlinks=symlinks,
        allow_broken_symlinks=allow_broken_symlinks,
        ignore=ignore,
        _permissions=True,
    )
    copy_tree(src, dest, symlinks=symlinks, ignore=ignore, _permissions=True)

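A typical call site, sketched with illustrative paths (``ignore`` receives a path and returns whether to skip it):

```python
# Stage build output into an install prefix, preserving symlinks and
# setting install permissions along the way; paths are illustrative.
install_tree(
    "build/output",
    "/opt/spack/prefix/mypkg",
    symlinks=True,
    ignore=lambda p: p.endswith(".pyc"),
)
```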
@system_path_filter
@@ -1304,12 +1254,7 @@ def traverse_tree(
    Keyword Arguments:
        order (str): Whether to do pre- or post-order traversal. Accepted
            values are 'pre' and 'post'
        ignore (typing.Callable): function indicating which files to ignore. This will also
            ignore symlinks if they point to an ignored file (regardless of whether the symlink
            is explicitly ignored); note this only supports one layer of indirection (i.e. if
            you have x -> y -> z, and z is ignored but x/y are not, then y would be ignored
            but not x). To avoid this, make sure the ignore function also ignores the symlink
            paths too.
        ignore (typing.Callable): function indicating which files to ignore
        follow_nonexisting (bool): Whether to descend into directories in
            ``src`` that do not exit in ``dest``. Default is True
        follow_links (bool): Whether to descend into symlinks in ``src``
@@ -1336,24 +1281,11 @@ def traverse_tree(
        dest_child = os.path.join(dest_path, f)
        rel_child = os.path.join(rel_path, f)

        # If the source path is a link and the link's source is ignored, then ignore the link too,
        # but only do this if the ignore is defined.
        if ignore is not None:
            if islink(source_child) and not follow_links:
                target = readlink(source_child)
                all_parents = accumulate(target.split(os.sep), lambda x, y: os.path.join(x, y))
                if any(map(ignore, all_parents)):
                    tty.warn(
                        f"Skipping {source_path} because the source or a part of the source's "
                        f"path is included in the ignores."
                    )
                    continue

        # Treat as a directory
        # TODO: for symlinks, os.path.isdir looks for the link target. If the
        # target is relative to the link, then that may not resolve properly
        # relative to our cwd - see resolve_link_target_relative_to_the_link
        if os.path.isdir(source_child) and (follow_links or not islink(source_child)):
        if os.path.isdir(source_child) and (follow_links or not os.path.islink(source_child)):
            # When follow_nonexisting isn't set, don't descend into dirs
            # in source that do not exist in dest
            if follow_nonexisting or os.path.exists(dest_child):
@@ -1379,11 +1311,7 @@ def traverse_tree(

def lexists_islink_isdir(path):
    """Computes the tuple (lexists(path), islink(path), isdir(path)) in a minimal
    number of stat calls on unix. Use os.path and symlink.islink methods for windows."""
    if sys.platform == "win32":
        if not os.path.lexists(path):
            return False, False, False
        return os.path.lexists(path), islink(path), os.path.isdir(path)
    number of stat calls."""
    # First try to lstat, so we know if it's a link or not.
    try:
        lst = os.lstat(path)
@@ -1408,7 +1336,7 @@ def lexists_islink_isdir(path):
    return True, is_link, is_dir

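The unix fast path hinges on a single ``lstat`` answering both existence and link-ness, with a second following ``stat`` only for links; a sketch of the full pattern:

```python
import os
import stat

def lexists_islink_isdir(path):
    try:
        lst = os.lstat(path)  # one syscall: exists? is a symlink?
    except OSError:
        return False, False, False
    is_link = stat.S_ISLNK(lst.st_mode)
    if is_link:
        try:
            # Links need one more (following) stat to classify the target.
            is_dir = stat.S_ISDIR(os.stat(path).st_mode)
        except OSError:
            is_dir = False  # dangling link
    else:
        is_dir = stat.S_ISDIR(lst.st_mode)
    return True, is_link, is_dir
```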
class BaseDirectoryVisitor:
class BaseDirectoryVisitor(object):
    """Base class and interface for :py:func:`visit_directory_tree`."""

    def visit_file(self, root, rel_path, depth):
@@ -1598,7 +1526,7 @@ def remove_if_dead_link(path):
    Parameters:
        path (str): The potential dead link
    """
    if islink(path) and not os.path.exists(path):
    if os.path.islink(path) and not os.path.exists(path):
        os.unlink(path)


@@ -1657,7 +1585,7 @@ def remove_linked_tree(path):
        kwargs["onerror"] = readonly_file_handler(ignore_errors=True)

    if os.path.exists(path):
        if islink(path):
        if os.path.islink(path):
            shutil.rmtree(os.path.realpath(path), **kwargs)
            os.unlink(path)
        else:
@@ -1824,14 +1752,9 @@ def find(root, files, recursive=True):
        files = [files]

    if recursive:
        tty.debug(f"Find (recursive): {root} {str(files)}")
        result = _find_recursive(root, files)
        return _find_recursive(root, files)
    else:
        tty.debug(f"Find (not recursive): {root} {str(files)}")
        result = _find_non_recursive(root, files)

    tty.debug(f"Find complete: {root} {str(files)}")
    return result
    return _find_non_recursive(root, files)

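``find`` accepts a single pattern or a list of fnmatch-style patterns; a hedged usage sketch with illustrative paths:

```python
# Recursive and non-recursive searches; patterns are fnmatch-style.
headers = find("/opt/spack/prefix/mypkg", ["*.h", "*.hpp"])
libs = find("/opt/spack/prefix/mypkg/lib", "libz*", recursive=False)
```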
@system_path_filter
@@ -1967,7 +1890,7 @@ class HeaderList(FileList):
    include_regex = re.compile(r"(.*?)(\binclude\b)(.*)")

    def __init__(self, files):
        super().__init__(files)
        super(HeaderList, self).__init__(files)

        self._macro_definitions = []
        self._directories = None
@@ -1993,7 +1916,7 @@ def _default_directories(self):
        """Default computation of directories based on the list of
        header files.
        """
        dir_list = super().directories
        dir_list = super(HeaderList, self).directories
        values = []
        for d in dir_list:
            # If the path contains a subdirectory named 'include' then stop
@@ -2429,7 +2352,7 @@ def find_all_libraries(root, recursive=False):
    )


class WindowsSimulatedRPath:
class WindowsSimulatedRPath(object):
    """Class representing Windows filesystem rpath analog

    One instance of this class is associated with a package (only on Windows)
@@ -2458,7 +2381,7 @@ def library_dependents(self):
        """
        Set of directories where package binaries/libraries are located.
        """
        return set([pathlib.Path(self.pkg.prefix.bin)]) | self._additional_library_dependents
        return set([self.pkg.prefix.bin]) | self._additional_library_dependents

    def add_library_dependent(self, *dest):
        """
@@ -2471,9 +2394,9 @@ def add_library_dependent(self, *dest):
        """
        for pth in dest:
            if os.path.isfile(pth):
                self._additional_library_dependents.add(pathlib.Path(pth).parent)
                self._additional_library_dependents.add(os.path.dirname)
            else:
                self._additional_library_dependents.add(pathlib.Path(pth))
                self._additional_library_dependents.add(pth)

    @property
    def rpaths(self):
@@ -2486,7 +2409,7 @@ def rpaths(self):
            dependent_libs.extend(list(find_all_shared_libraries(path, recursive=True)))
        for extra_path in self._addl_rpaths:
            dependent_libs.extend(list(find_all_shared_libraries(extra_path, recursive=True)))
        return set([pathlib.Path(x) for x in dependent_libs])
        return set(dependent_libs)

    def add_rpath(self, *paths):
        """
@@ -2502,7 +2425,7 @@ def add_rpath(self, *paths):
        """
        self._addl_rpaths = self._addl_rpaths | set(paths)

    def _link(self, path: pathlib.Path, dest_dir: pathlib.Path):
    def _link(self, path, dest_dir):
        """Perform link step of simulated rpathing, installing
        simlinks of file in path to the dest_dir
        location. This method deliberately prevents
@@ -2510,35 +2433,27 @@ def _link(self, path: pathlib.Path, dest_dir: pathlib.Path):
        This is because it is both meaningless from an rpath
        perspective, and will cause an error when Developer
        mode is not enabled"""

        def report_already_linked():
            # We have either already symlinked or we are encoutering a naming clash
            # either way, we don't want to overwrite existing libraries
            already_linked = islink(str(dest_file))
            tty.debug(
                "Linking library %s to %s failed, " % (str(path), str(dest_file))
                + "already linked."
                if already_linked
                else "library with name %s already exists at location %s."
                % (str(file_name), str(dest_dir))
            )

        file_name = path.name
        dest_file = dest_dir / file_name
        if not dest_file.exists() and dest_dir.exists() and not dest_file == path:
        file_name = os.path.basename(path)
        dest_file = os.path.join(dest_dir, file_name)
        if os.path.exists(dest_dir) and not dest_file == path:
            try:
                symlink(str(path), str(dest_file))
                symlink(path, dest_file)
            # For py2 compatibility, we have to catch the specific Windows error code
            # associate with trying to create a file that already exists (winerror 183)
            # Catch OSErrors missed by the SymlinkError checks
            except OSError as e:
                if sys.platform == "win32" and (e.winerror == 183 or e.errno == errno.EEXIST):
                    report_already_linked()
                    # We have either already symlinked or we are encoutering a naming clash
                    # either way, we don't want to overwrite existing libraries
                    already_linked = islink(dest_file)
                    tty.debug(
                        "Linking library %s to %s failed, " % (path, dest_file) + "already linked."
                        if already_linked
                        else "library with name %s already exists at location %s."
                        % (file_name, dest_dir)
                    )
                    pass
                else:
                    raise e
            # catch errors we raise ourselves from Spack
            except llnl.util.symlink.AlreadyExistsError:
                report_already_linked()

    def establish_link(self):
        """
@@ -2771,7 +2686,7 @@ def remove_directory_contents(dir):
    """Remove all contents of a directory."""
    if os.path.exists(dir):
        for entry in [os.path.join(dir, entry) for entry in os.listdir(dir)]:
            if os.path.isfile(entry) or islink(entry):
            if os.path.isfile(entry) or os.path.islink(entry):
                os.unlink(entry)
            else:
                shutil.rmtree(entry)
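The simulated-rpath idea reduces to one symlink per dependency library, placed next to the binaries that need it; a hedged sketch of that core step (helper name is illustrative):

```python
import os

def link_library_into(dest_dir: str, lib_path: str) -> None:
    # Happy path of WindowsSimulatedRPath._link: never overwrite an
    # existing file, never link a library onto itself.
    dest_file = os.path.join(dest_dir, os.path.basename(lib_path))
    if os.path.isdir(dest_dir) and not os.path.exists(dest_file):
        os.symlink(lib_path, dest_file)
```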
@@ -3,6 +3,8 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from __future__ import division

import collections.abc
import contextlib
import functools
@@ -766,10 +768,10 @@ def pretty_seconds(seconds):

class RequiredAttributeError(ValueError):
    def __init__(self, message):
        super().__init__(message)
        super(RequiredAttributeError, self).__init__(message)


class ObjectWrapper:
class ObjectWrapper(object):
    """Base class that wraps an object. Derived classes can add new behavior
    while staying undercover.

@@ -796,7 +798,7 @@ def __init__(self, wrapped_object):
        self.__dict__ = wrapped_object.__dict__


class Singleton:
class Singleton(object):
    """Simple wrapper for lazily initialized singleton objects."""

    def __init__(self, factory):
@@ -821,7 +823,7 @@ def __getattr__(self, name):
        # 'instance'/'_instance' to be defined or it will enter an infinite
        # loop, so protect against that here.
        if name in ["_instance", "instance"]:
            raise AttributeError(f"cannot create {name}")
            raise AttributeError()
        return getattr(self.instance, name)

    def __getitem__(self, name):
@@ -843,6 +845,27 @@ def __repr__(self):
        return repr(self.instance)


class LazyReference(object):
    """Lazily evaluated reference to part of a singleton."""

    def __init__(self, ref_function):
        self.ref_function = ref_function

    def __getattr__(self, name):
        if name == "ref_function":
            raise AttributeError()
        return getattr(self.ref_function(), name)

    def __getitem__(self, name):
        return self.ref_function()[name]

    def __str__(self):
        return str(self.ref_function())

    def __repr__(self):
        return repr(self.ref_function())


def load_module_from_file(module_name, module_path):
    """Loads a python module from the path of the corresponding file.

@@ -920,7 +943,7 @@ def _wrapper(args):
    return _wrapper


class Devnull:
class Devnull(object):
    """Null stream with less overhead than ``os.devnull``.

    See https://stackoverflow.com/a/2929954.
@@ -1037,7 +1060,7 @@ def __str__(self):
        return str(self.data)


class GroupedExceptionHandler:
class GroupedExceptionHandler(object):
    """A generic mechanism to coalesce multiple exceptions and preserve tracebacks."""

    def __init__(self):
@@ -1068,7 +1091,7 @@ def grouped_message(self, with_tracebacks: bool = True) -> str:
        return "due to the following failures:\n{0}".format("\n".join(each_exception_message))


class GroupedExceptionForwarder:
class GroupedExceptionForwarder(object):
    """A contextmanager to capture exceptions and forward them to a
    GroupedExceptionHandler."""

@@ -1088,7 +1111,7 @@ def __exit__(self, exc_type, exc_value, tb):
        return True


class classproperty:
class classproperty(object):
    """Non-data descriptor to evaluate a class-level property. The function that performs
    the evaluation is injected at creation time and take an instance (could be None) and
    an owner (i.e. the class that originated the instance)
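``LazyReference`` defers the underlying call until an attribute is actually touched (and re-evaluates ``ref_function`` on every access); a self-contained sketch of the same pattern:

```python
import re

class LazyRef:
    """Forward attribute access to the result of ref_function()."""

    def __init__(self, ref_function):
        self.ref_function = ref_function

    def __getattr__(self, name):
        if name == "ref_function":
            raise AttributeError(name)
        # Called on every access, just like the class above.
        return getattr(self.ref_function(), name)

pattern = LazyRef(lambda: re.compile(r"\d+"))
print(pattern.match("123").group())  # compile happens on first use -> 123
```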
@@ -5,6 +5,8 @@

"""LinkTree class for setting up trees of symbolic links."""

from __future__ import print_function

import filecmp
import os
import shutil
@@ -285,7 +287,7 @@ def visit_symlinked_file(self, root, rel_path, depth):
        self.visit_file(root, rel_path, depth)


class LinkTree:
class LinkTree(object):
    """Class to create trees of symbolic links from a source directory.

    LinkTree objects are constructed with a source root. Their
@@ -430,12 +432,12 @@ class MergeConflictError(Exception):

class ConflictingSpecsError(MergeConflictError):
    def __init__(self, spec_1, spec_2):
        super().__init__(spec_1, spec_2)
        super(MergeConflictError, self).__init__(spec_1, spec_2)


class SingleMergeConflictError(MergeConflictError):
    def __init__(self, path):
        super().__init__("Package merge blocked by file: %s" % path)
        super(MergeConflictError, self).__init__("Package merge blocked by file: %s" % path)


class MergeConflictSummary(MergeConflictError):
@@ -450,4 +452,4 @@ def __init__(self, conflicts):
        msg += "\n  `{0}` and `{1}` both project to `{2}`".format(
            conflict.src_a, conflict.src_b, conflict.dst
        )
        super().__init__(msg)
        super(MergeConflictSummary, self).__init__(msg)
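``LinkTree``'s public surface is small; a hedged usage sketch with illustrative paths (``merge``/``unmerge`` are its entry points in Spack):

```python
tree = LinkTree("/opt/spack/prefix/mypkg")  # source root, illustrative
tree.merge("/opt/view")    # populate the view with symlinks into the prefix
tree.unmerge("/opt/view")  # later: remove exactly the links merge() created
```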
@@ -9,12 +9,11 @@
import sys
import time
from datetime import datetime
from types import TracebackType
from typing import IO, Any, Callable, ContextManager, Dict, Generator, Optional, Tuple, Type, Union

from llnl.util import lang, tty
import llnl.util.tty as tty
from llnl.util.lang import pretty_seconds

from ..string import plural
import spack.util.string

if sys.platform != "win32":
    import fcntl
@@ -35,15 +34,12 @@
]


ReleaseFnType = Optional[Callable[[], bool]]
#: A useful replacement for functions that should return True when not provided
#: for example.
true_fn = lambda: True


def true_fn() -> bool:
    """A function that always returns True."""
    return True


class OpenFile:
class OpenFile(object):
    """Record for keeping track of open lockfiles (with reference counting).

    There's really only one ``OpenFile`` per inode, per process, but we record the
@@ -52,12 +48,12 @@ class OpenFile:
    file descriptors as well in the future.
    """

    def __init__(self, fh: IO) -> None:
    def __init__(self, fh):
        self.fh = fh
        self.refs = 0


class OpenFileTracker:
class OpenFileTracker(object):
    """Track open lockfiles, to minimize number of open file descriptors.

    The ``fcntl`` locks that Spack uses are associated with an inode and a process.
@@ -82,11 +78,11 @@ class OpenFileTracker:
    work in Python and assume the GIL.
    """

    def __init__(self) -> None:
    def __init__(self):
        """Create a new ``OpenFileTracker``."""
        self._descriptors: Dict[Any, OpenFile] = {}
        self._descriptors = {}

    def get_fh(self, path: str) -> IO:
    def get_fh(self, path):
        """Get a filehandle for a lockfile.

        This routine will open writable files for read/write even if you're asking
@@ -94,7 +90,7 @@ def get_fh(self, path: str) -> IO:
        (write) lock later if requested.

        Arguments:
            path: path to lock file we want a filehandle for
            path (str): path to lock file we want a filehandle for
        """
        # Open writable files as 'r+' so we can upgrade to write later
        os_mode, fh_mode = (os.O_RDWR | os.O_CREAT), "r+"
@@ -143,7 +139,7 @@ def get_fh(self, path: str) -> IO:
    def release_by_stat(self, stat):
        key = (stat.st_dev, stat.st_ino, os.getpid())
        open_file = self._descriptors.get(key)
        assert open_file, "Attempted to close non-existing inode: %s" % stat.st_ino
        assert open_file, "Attempted to close non-existing inode: %s" % stat.st_inode

        open_file.refs -= 1
        if not open_file.refs:
@@ -161,7 +157,7 @@ def purge(self):

#: Open file descriptors for locks in this process. Used to prevent one process
#: from opening the sam file many times for different byte range locks
FILE_TRACKER = OpenFileTracker()
file_tracker = OpenFileTracker()

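The tracker's deduplication key is what lets two ``Lock`` objects on the same file share one descriptor; a sketch of the identity it uses:

```python
import os

def descriptor_key(path: str) -> tuple:
    # One tracked handle per (device, inode, process), as in release_by_stat.
    st = os.stat(path)
    return (st.st_dev, st.st_ino, os.getpid())
```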
def _attempts_str(wait_time, nattempts):
@@ -169,11 +165,11 @@ def _attempts_str(wait_time, nattempts):
    if nattempts <= 1:
        return ""

    attempts = plural(nattempts, "attempt")
    return " after {} and {}".format(lang.pretty_seconds(wait_time), attempts)
    attempts = spack.util.string.plural(nattempts, "attempt")
    return " after {} and {}".format(pretty_seconds(wait_time), attempts)


class LockType:
class LockType(object):
    READ = 0
    WRITE = 1

@@ -192,11 +188,11 @@ def to_module(tid):
        return lock

    @staticmethod
    def is_valid(op: int) -> bool:
    def is_valid(op):
        return op == LockType.READ or op == LockType.WRITE

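Everything in the ``Lock`` class below bottoms out in nonblocking ``fcntl.lockf`` calls over a byte range; a standalone POSIX sketch:

```python
import fcntl
import os

fd = os.open("/tmp/demo.lock", os.O_RDWR | os.O_CREAT)
try:
    # Nonblocking exclusive lock on byte 0; raises OSError if already held.
    fcntl.lockf(fd, fcntl.LOCK_EX | fcntl.LOCK_NB, 1, 0, os.SEEK_SET)
    print("lock acquired")
finally:
    fcntl.lockf(fd, fcntl.LOCK_UN, 1, 0, os.SEEK_SET)
    os.close(fd)
```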
class Lock:
|
||||
class Lock(object):
|
||||
"""This is an implementation of a filesystem lock using Python's lockf.
|
||||
|
||||
In Python, ``lockf`` actually calls ``fcntl``, so this should work with
|
||||
@@ -211,16 +207,7 @@ class Lock:
|
||||
overlapping byte ranges in the same file).
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
path: str,
|
||||
*,
|
||||
start: int = 0,
|
||||
length: int = 0,
|
||||
default_timeout: Optional[float] = None,
|
||||
debug: bool = False,
|
||||
desc: str = "",
|
||||
) -> None:
|
||||
def __init__(self, path, start=0, length=0, default_timeout=None, debug=False, desc=""):
|
||||
"""Construct a new lock on the file at ``path``.
|
||||
|
||||
By default, the lock applies to the whole file. Optionally,
|
||||
@@ -233,17 +220,17 @@ def __init__(
|
||||
beginning of the file.
|
||||
|
||||
Args:
|
||||
path: path to the lock
|
||||
start: optional byte offset at which the lock starts
|
||||
length: optional number of bytes to lock
|
||||
default_timeout: seconds to wait for lock attempts,
|
||||
path (str): path to the lock
|
||||
start (int): optional byte offset at which the lock starts
|
||||
length (int): optional number of bytes to lock
|
||||
default_timeout (int): number of seconds to wait for lock attempts,
|
||||
where None means to wait indefinitely
|
||||
debug: debug mode specific to locking
|
||||
desc: optional debug message lock description, which is
|
||||
debug (bool): debug mode specific to locking
|
||||
desc (str): optional debug message lock description, which is
|
||||
helpful for distinguishing between different Spack locks.
|
||||
"""
|
||||
self.path = path
|
||||
self._file: Optional[IO] = None
|
||||
self._file = None
|
||||
self._reads = 0
|
||||
self._writes = 0
|
||||
|
||||
@@ -255,7 +242,7 @@ def __init__(
|
||||
self.debug = debug
|
||||
|
||||
# optional debug description
|
||||
self.desc = f" ({desc})" if desc else ""
|
||||
self.desc = " ({0})".format(desc) if desc else ""
|
||||
|
||||
# If the user doesn't set a default timeout, or if they choose
|
||||
# None, 0, etc. then lock attempts will not time out (unless the
|
||||
@@ -263,15 +250,11 @@ def __init__(
|
||||
self.default_timeout = default_timeout or None
|
||||
|
||||
# PID and host of lock holder (only used in debug mode)
|
||||
self.pid: Optional[int] = None
|
||||
self.old_pid: Optional[int] = None
|
||||
self.host: Optional[str] = None
|
||||
self.old_host: Optional[str] = None
|
||||
self.pid = self.old_pid = None
|
||||
self.host = self.old_host = None
|
||||
|
||||
@staticmethod
|
||||
def _poll_interval_generator(
|
||||
_wait_times: Optional[Tuple[float, float, float]] = None
|
||||
) -> Generator[float, None, None]:
|
||||
def _poll_interval_generator(_wait_times=None):
|
||||
"""This implements a backoff scheme for polling a contended resource
|
||||
by suggesting a succession of wait times between polls.
|
||||
|
||||
@@ -294,21 +277,21 @@ def _poll_interval_generator(
|
||||
num_requests += 1
|
||||
yield wait_time
|
||||
|
||||
def __repr__(self) -> str:
|
||||
def __repr__(self):
|
||||
"""Formal representation of the lock."""
|
||||
rep = "{0}(".format(self.__class__.__name__)
|
||||
for attr, value in self.__dict__.items():
|
||||
rep += "{0}={1}, ".format(attr, value.__repr__())
|
||||
return "{0})".format(rep.strip(", "))
|
||||
|
||||
def __str__(self) -> str:
|
||||
def __str__(self):
|
||||
"""Readable string (with key fields) of the lock."""
|
||||
location = "{0}[{1}:{2}]".format(self.path, self._start, self._length)
|
||||
timeout = "timeout={0}".format(self.default_timeout)
|
||||
activity = "#reads={0}, #writes={1}".format(self._reads, self._writes)
|
||||
return "({0}, {1}, {2})".format(location, timeout, activity)
|
||||
|
||||
def _lock(self, op: int, timeout: Optional[float] = None) -> Tuple[float, int]:
|
||||
def _lock(self, op, timeout=None):
|
||||
"""This takes a lock using POSIX locks (``fcntl.lockf``).
|
||||
|
||||
The lock is implemented as a spin lock using a nonblocking call
|
||||
@@ -327,7 +310,7 @@ def _lock(self, op: int, timeout: Optional[float] = None) -> Tuple[float, int]:
|
||||
# Create file and parent directories if they don't exist.
|
||||
if self._file is None:
|
||||
self._ensure_parent_directory()
|
||||
self._file = FILE_TRACKER.get_fh(self.path)
|
||||
self._file = file_tracker.get_fh(self.path)
|
||||
|
||||
if LockType.to_module(op) == fcntl.LOCK_EX and self._file.mode == "r":
|
||||
# Attempt to upgrade to write lock w/a read-only file.
|
||||
@@ -336,7 +319,7 @@ def _lock(self, op: int, timeout: Optional[float] = None) -> Tuple[float, int]:
|
||||
|
||||
self._log_debug(
|
||||
"{} locking [{}:{}]: timeout {}".format(
|
||||
op_str.lower(), self._start, self._length, lang.pretty_seconds(timeout or 0)
|
||||
op_str.lower(), self._start, self._length, pretty_seconds(timeout or 0)
|
||||
)
|
||||
)
|
||||
|
||||
@@ -360,20 +343,15 @@ def _lock(self, op: int, timeout: Optional[float] = None) -> Tuple[float, int]:
|
||||
total_wait_time = time.time() - start_time
|
||||
raise LockTimeoutError(op_str.lower(), self.path, total_wait_time, num_attempts)
|
||||
|
||||
def _poll_lock(self, op: int) -> bool:
|
||||
def _poll_lock(self, op):
|
||||
"""Attempt to acquire the lock in a non-blocking manner. Return whether
|
||||
the locking attempt succeeds
|
||||
"""
|
||||
assert self._file is not None, "cannot poll a lock without the file being set"
|
||||
module_op = LockType.to_module(op)
|
||||
try:
|
||||
# Try to get the lock (will raise if not available.)
|
||||
fcntl.lockf(
|
||||
self._file.fileno(),
|
||||
module_op | fcntl.LOCK_NB,
|
||||
self._length,
|
||||
self._start,
|
||||
os.SEEK_SET,
|
||||
self._file, module_op | fcntl.LOCK_NB, self._length, self._start, os.SEEK_SET
|
||||
)
|
||||
|
||||
# help for debugging distributed locking
|
||||
@@ -399,7 +377,7 @@ def _poll_lock(self, op: int) -> bool:
|
||||
|
||||
return False
|
||||
|
||||
def _ensure_parent_directory(self) -> str:
|
||||
def _ensure_parent_directory(self):
|
||||
parent = os.path.dirname(self.path)
|
||||
|
||||
# relative paths to lockfiles in the current directory have no parent
|
||||
@@ -418,22 +396,20 @@ def _ensure_parent_directory(self) -> str:
|
||||
raise
|
||||
return parent
|
||||
|
||||
def _read_log_debug_data(self) -> None:
|
||||
def _read_log_debug_data(self):
|
||||
"""Read PID and host data out of the file if it is there."""
|
||||
assert self._file is not None, "cannot read debug log without the file being set"
|
||||
self.old_pid = self.pid
|
||||
self.old_host = self.host
|
||||
|
||||
line = self._file.read()
|
||||
if line:
|
||||
pid, host = line.strip().split(",")
|
||||
_, _, pid = pid.rpartition("=")
|
||||
_, _, self.pid = pid.rpartition("=")
|
||||
_, _, self.host = host.rpartition("=")
|
||||
self.pid = int(pid)
|
||||
self.pid = int(self.pid)
|
||||
|
||||
def _write_log_debug_data(self) -> None:
|
||||
def _write_log_debug_data(self):
|
||||
"""Write PID and host data to the file, recording old values."""
|
||||
assert self._file is not None, "cannot write debug log without the file being set"
|
||||
self.old_pid = self.pid
|
||||
self.old_host = self.host
|
||||
|
||||
@@ -447,21 +423,20 @@ def _write_log_debug_data(self) -> None:
|
||||
self._file.flush()
|
||||
os.fsync(self._file.fileno())
|
||||
|
||||
def _unlock(self) -> None:
|
||||
def _unlock(self):
|
||||
"""Releases a lock using POSIX locks (``fcntl.lockf``)
|
||||
|
||||
Releases the lock regardless of mode. Note that read locks may
|
||||
be masquerading as write locks, but this removes either.
|
||||
|
||||
"""
|
||||
assert self._file is not None, "cannot unlock without the file being set"
|
||||
fcntl.lockf(self._file.fileno(), fcntl.LOCK_UN, self._length, self._start, os.SEEK_SET)
|
||||
FILE_TRACKER.release_by_fh(self._file)
|
||||
fcntl.lockf(self._file, fcntl.LOCK_UN, self._length, self._start, os.SEEK_SET)
|
||||
file_tracker.release_by_fh(self._file)
|
||||
self._file = None
|
||||
self._reads = 0
|
||||
self._writes = 0
|
||||
|
||||
    def acquire_read(self, timeout: Optional[float] = None) -> bool:
    def acquire_read(self, timeout=None):
        """Acquires a recursive, shared lock for reading.

        Read and write locks can be acquired and released in arbitrary
@@ -486,7 +461,7 @@ def acquire_read(self, timeout: Optional[float] = None) -> bool:
            self._reads += 1
            return False

    def acquire_write(self, timeout: Optional[float] = None) -> bool:
    def acquire_write(self, timeout=None):
        """Acquires a recursive, exclusive lock for writing.

        Read and write locks can be acquired and released in arbitrary
@@ -516,7 +491,7 @@ def acquire_write(self, timeout: Optional[float] = None) -> bool:
            self._writes += 1
            return False

    def is_write_locked(self) -> bool:
    def is_write_locked(self):
        """Check if the file is write locked

        Return:
@@ -533,7 +508,7 @@ def is_write_locked(self) -> bool:

        return False

    def downgrade_write_to_read(self, timeout: Optional[float] = None) -> None:
    def downgrade_write_to_read(self, timeout=None):
        """
        Downgrade from an exclusive write lock to a shared read.

@@ -552,7 +527,7 @@ def downgrade_write_to_read(self, timeout: Optional[float] = None) -> None:
        else:
            raise LockDowngradeError(self.path)

    def upgrade_read_to_write(self, timeout: Optional[float] = None) -> None:
    def upgrade_read_to_write(self, timeout=None):
        """
        Attempts to upgrade from a shared read lock to an exclusive write.

@@ -571,7 +546,7 @@ def upgrade_read_to_write(self, timeout: Optional[float] = None) -> None:
        else:
            raise LockUpgradeError(self.path)

    def release_read(self, release_fn: ReleaseFnType = None) -> bool:
    def release_read(self, release_fn=None):
        """Releases a read lock.

        Arguments:
@@ -607,7 +582,7 @@ def release_read(self, release_fn: ReleaseFnType = None) -> bool:
            self._reads -= 1
            return False

    def release_write(self, release_fn: ReleaseFnType = None) -> bool:
    def release_write(self, release_fn=None):
        """Releases a write lock.

        Arguments:
@@ -648,65 +623,65 @@ def release_write(self, release_fn: ReleaseFnType = None) -> bool:
        else:
            return False

    def cleanup(self) -> None:
    def cleanup(self):
        if self._reads == 0 and self._writes == 0:
            os.unlink(self.path)
        else:
            raise LockError("Attempting to cleanup active lock.")

    def _get_counts_desc(self) -> str:
    def _get_counts_desc(self):
        return (
            "(reads {0}, writes {1})".format(self._reads, self._writes) if tty.is_verbose() else ""
        )

    def _log_acquired(self, locktype, wait_time, nattempts) -> None:
    def _log_acquired(self, locktype, wait_time, nattempts):
        attempts_part = _attempts_str(wait_time, nattempts)
        now = datetime.now()
        desc = "Acquired at %s" % now.strftime("%H:%M:%S.%f")
        self._log_debug(self._status_msg(locktype, "{0}{1}".format(desc, attempts_part)))

    def _log_acquiring(self, locktype) -> None:
    def _log_acquiring(self, locktype):
        self._log_debug(self._status_msg(locktype, "Acquiring"), level=3)

    def _log_debug(self, *args, **kwargs) -> None:
    def _log_debug(self, *args, **kwargs):
        """Output lock debug messages."""
        kwargs["level"] = kwargs.get("level", 2)
        tty.debug(*args, **kwargs)

    def _log_downgraded(self, wait_time, nattempts) -> None:
    def _log_downgraded(self, wait_time, nattempts):
        attempts_part = _attempts_str(wait_time, nattempts)
        now = datetime.now()
        desc = "Downgraded at %s" % now.strftime("%H:%M:%S.%f")
        self._log_debug(self._status_msg("READ LOCK", "{0}{1}".format(desc, attempts_part)))

    def _log_downgrading(self) -> None:
    def _log_downgrading(self):
        self._log_debug(self._status_msg("WRITE LOCK", "Downgrading"), level=3)

    def _log_released(self, locktype) -> None:
    def _log_released(self, locktype):
        now = datetime.now()
        desc = "Released at %s" % now.strftime("%H:%M:%S.%f")
        self._log_debug(self._status_msg(locktype, desc))

    def _log_releasing(self, locktype) -> None:
    def _log_releasing(self, locktype):
        self._log_debug(self._status_msg(locktype, "Releasing"), level=3)

    def _log_upgraded(self, wait_time, nattempts) -> None:
    def _log_upgraded(self, wait_time, nattempts):
        attempts_part = _attempts_str(wait_time, nattempts)
        now = datetime.now()
        desc = "Upgraded at %s" % now.strftime("%H:%M:%S.%f")
        self._log_debug(self._status_msg("WRITE LOCK", "{0}{1}".format(desc, attempts_part)))

    def _log_upgrading(self) -> None:
    def _log_upgrading(self):
        self._log_debug(self._status_msg("READ LOCK", "Upgrading"), level=3)

    def _status_msg(self, locktype: str, status: str) -> str:
    def _status_msg(self, locktype, status):
        status_desc = "[{0}] {1}".format(status, self._get_counts_desc())
        return "{0}{1.desc}: {1.path}[{1._start}:{1._length}] {2}".format(
            locktype, self, status_desc
        )


class LockTransaction:
class LockTransaction(object):
    """Simple nested transaction context manager that uses a file lock.

    Arguments:
@@ -734,13 +709,7 @@ class LockTransaction:

    """

    def __init__(
        self,
        lock: Lock,
        acquire: Union[ReleaseFnType, ContextManager] = None,
        release: Union[ReleaseFnType, ContextManager] = None,
        timeout: Optional[float] = None,
    ) -> None:
    def __init__(self, lock, acquire=None, release=None, timeout=None):
        self._lock = lock
        self._timeout = timeout
        self._acquire_fn = acquire
@@ -755,20 +724,15 @@ def __enter__(self):
        else:
            return self._as

    def __exit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc_value: Optional[BaseException],
        traceback: Optional[TracebackType],
    ) -> bool:
    def __exit__(self, type, value, traceback):
        suppress = False

        def release_fn():
            if self._release_fn is not None:
                return self._release_fn(exc_type, exc_value, traceback)
                return self._release_fn(type, value, traceback)

        if self._as and hasattr(self._as, "__exit__"):
            if self._as.__exit__(exc_type, exc_value, traceback):
            if self._as.__exit__(type, value, traceback):
                suppress = True

        if self._exit(release_fn):
@@ -776,12 +740,6 @@ def release_fn():

        return suppress

    def _enter(self) -> bool:
        return NotImplemented

    def _exit(self, release_fn: ReleaseFnType) -> bool:
        return NotImplemented


class ReadTransaction(LockTransaction):
    """LockTransaction context manager that does a read and releases it."""
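LockTransaction and its subclasses wrap a Lock in a context manager so acquire/release pairs cannot be forgotten. A minimal usage sketch (hypothetical lock file path; assumes the POSIX Lock shown above):

    from llnl.util.lock import Lock, ReadTransaction

    lock = Lock("/tmp/example.lock")

    # Acquires a shared read lock on entry and releases it on exit,
    # even if the body raises.
    with ReadTransaction(lock, timeout=10):
        pass  # read the resource guarded by the lock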
@@ -812,7 +770,7 @@ class LockDowngradeError(LockError):

    def __init__(self, path):
        msg = "Cannot downgrade lock from write to read on file: %s" % path
        super().__init__(msg)
        super(LockDowngradeError, self).__init__(msg)


class LockLimitError(LockError):
@@ -824,10 +782,10 @@ class LockTimeoutError(LockError):

    def __init__(self, lock_type, path, time, attempts):
        fmt = "Timed out waiting for a {} lock after {}.\n Made {} {} on file: {}"
        super().__init__(
        super(LockTimeoutError, self).__init__(
            fmt.format(
                lock_type,
                lang.pretty_seconds(time),
                pretty_seconds(time),
                attempts,
                "attempt" if attempts == 1 else "attempts",
                path,
@@ -840,7 +798,7 @@ class LockUpgradeError(LockError):

    def __init__(self, path):
        msg = "Cannot upgrade lock from read to write on file: %s" % path
        super().__init__(msg)
        super(LockUpgradeError, self).__init__(msg)


class LockPermissionError(LockError):
@@ -852,7 +810,7 @@ class LockROFileError(LockPermissionError):

    def __init__(self, path):
        msg = "Can't take write lock on read-only file: %s" % path
        super().__init__(msg)
        super(LockROFileError, self).__init__(msg)


class CantCreateLockError(LockPermissionError):
@@ -861,4 +819,4 @@ class CantCreateLockError(LockPermissionError):
    def __init__(self, path):
        msg = "cannot create lock '%s': " % path
        msg += "file does not exist and location is not writable"
        super().__init__(msg)
        super(LockError, self).__init__(msg)
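Every exception hunk in this file makes the same mechanical change: Python 3's zero-argument super() instead of the explicit two-argument form. A minimal sketch of the equivalence (illustrative class names only):

    class Base(Exception):
        pass

    class Derived(Base):
        def __init__(self, msg):
            # Equivalent to super(Derived, self).__init__(msg); the
            # zero-argument form also survives renaming the class.
            super().__init__(msg)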
@@ -2,189 +2,77 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import errno
import os
import re
import shutil
import subprocess
import sys
import tempfile
from os.path import exists, join

from llnl.util import lang, tty

from ..path import system_path_filter
from llnl.util import lang

if sys.platform == "win32":
    from win32file import CreateHardLink

is_windows = sys.platform == "win32"


def symlink(source_path: str, link_path: str, allow_broken_symlinks: bool = not is_windows):
def symlink(real_path, link_path):
    """
    Create a link.
    Create a symbolic link.

    On non-Windows and Windows with System Administrator
    privileges this will be a normal symbolic link via
    os.symlink.

    On Windows without privileges the link will be a
    junction for a directory and a hardlink for a file.
    On Windows the various link types are:

    Symbolic Link: A link to a file or directory on the
    same or different volume (drive letter) or even to
    a remote file or directory (using UNC in its path).
    Need System Administrator privileges to make these.

    Hard Link: A link to a file on the same volume (drive
    letter) only. Every file (file's data) has at least 1
    hard link (file's name). But when this method creates
    a new hard link there will be 2. Deleting all hard
    links effectively deletes the file. Don't need System
    Administrator privileges.

    Junction: A link to a directory on the same or different
    volume (drive letter) but not to a remote directory. Don't
    need System Administrator privileges.

    Parameters:
        source_path (str): The real file or directory that the link points to.
            Must be absolute OR relative to the link.
        link_path (str): The path where the link will exist.
        allow_broken_symlinks (bool): On Linux or Mac, don't raise an exception if the source_path
            doesn't exist. This will still raise an exception on Windows.
    On Windows, use junctions if os.symlink fails.
    """
    source_path = os.path.normpath(source_path)
    win_source_path = source_path
    link_path = os.path.normpath(link_path)

    # Never allow broken links on Windows.
    if sys.platform == "win32" and allow_broken_symlinks:
        raise ValueError("allow_broken_symlinks parameter cannot be True on Windows.")

    if not allow_broken_symlinks:
        # Perform basic checks to make sure symlinking will succeed
        if os.path.lexists(link_path):
            raise AlreadyExistsError(
                f"Link path ({link_path}) already exists. Cannot create link."
            )

        if not os.path.exists(source_path):
            if os.path.isabs(source_path) and not allow_broken_symlinks:
                # An absolute source path that does not exist will result in a broken link.
                raise SymlinkError(
                    f"Source path ({source_path}) is absolute but does not exist. Resulting "
                    f"link would be broken so not making link."
                )
            else:
                # os.symlink can create a link when the given source path is relative to
                # the link path. Emulate this behavior and check to see if the source exists
                # relative to the link path ahead of link creation to prevent broken
                # links from being made.
                link_parent_dir = os.path.dirname(link_path)
                relative_path = os.path.join(link_parent_dir, source_path)
                if os.path.exists(relative_path):
                    # In order to work on windows, the source path needs to be modified to be
                    # relative because hardlink/junction don't resolve relative paths the same
                    # way as os.symlink. This is ignored on other operating systems.
                    win_source_path = relative_path
                elif not allow_broken_symlinks:
                    raise SymlinkError(
                        f"The source path ({source_path}) is not relative to the link path "
                        f"({link_path}). Resulting link would be broken so not making link."
                    )

    # Create the symlink
    if sys.platform == "win32" and not _windows_can_symlink():
        _windows_create_link(win_source_path, link_path)
    if sys.platform != "win32":
        os.symlink(real_path, link_path)
    elif _win32_can_symlink():
        # Windows requires target_is_directory=True when the target is a dir.
        os.symlink(real_path, link_path, target_is_directory=os.path.isdir(real_path))
    else:
        os.symlink(source_path, link_path, target_is_directory=os.path.isdir(source_path))
        try:
            # Try to use junctions
            _win32_junction(real_path, link_path)
        except OSError as e:
            if e.errno == errno.EEXIST:
                # EEXIST error indicates that file we're trying to "link"
                # is already present, don't bother trying to copy which will also fail
                # just raise
                raise
            else:
                # If all else fails, fall back to copying files
                shutil.copyfile(real_path, link_path)
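The rewritten symlink() above validates the source before linking so it never silently produces a broken link. A minimal usage sketch (hypothetical paths; assumes a POSIX host, where allow_broken_symlinks defaults to True):

    from llnl.util.symlink import symlink

    # A relative source is resolved against the link's own directory,
    # mirroring os.symlink semantics.
    symlink("data/config.yaml", "/tmp/config-link.yaml")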
def islink(path: str) -> bool:
    """Override os.islink to give correct answer for spack logic.

    For Non-Windows: a link can be determined with the os.path.islink method.
    Windows-only methods will return false for other operating systems.

    For Windows: spack considers symlinks, hard links, and junctions to
    all be links, so if any of those are True, return True.

    Args:
        path (str): path to check if it is a link.

    Returns:
        bool - whether the path is any kind of link or not.
    """
    return any([os.path.islink(path), _windows_is_junction(path), _windows_is_hardlink(path)])
def islink(path):
    return os.path.islink(path) or _win32_is_junction(path)


def _windows_is_hardlink(path: str) -> bool:
    """Determines if a path is a windows hard link. This is accomplished
    by looking at the number of links using os.stat. A non-hard-linked file
    will have a st_nlink value of 1, whereas a hard link will have a value
    larger than 1. Note that both the original and hard-linked file will
    return True because they share the same inode.
# '_win32' functions based on
# https://github.com/Erotemic/ubelt/blob/master/ubelt/util_links.py
def _win32_junction(path, link):
    # junctions require absolute paths
    if not os.path.isabs(link):
        link = os.path.abspath(link)

    Args:
        path (str): Windows path to check for a hard link
    # os.symlink will fail if link exists, emulate the behavior here
    if exists(link):
        raise OSError(errno.EEXIST, "File exists: %s -> %s" % (link, path))

    Returns:
        bool - Whether the path is a hard link or not.
    """
    if sys.platform != "win32" or os.path.islink(path) or not os.path.exists(path):
        return False
    if not os.path.isabs(path):
        parent = os.path.join(link, os.pardir)
        path = os.path.join(parent, path)
        path = os.path.abspath(path)

    return os.stat(path).st_nlink > 1
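The hard-link check above hinges on st_nlink: every extra hard link bumps the inode's link count past 1. A small standalone sketch of the same idea (temporary files only):

    import os
    import tempfile

    with tempfile.TemporaryDirectory() as tmp:
        original = os.path.join(tmp, "a.txt")
        open(original, "w").close()
        os.link(original, os.path.join(tmp, "b.txt"))  # create a hard link
        # Both names now share one inode, so the link count is 2.
        assert os.stat(original).st_nlink == 2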
def _windows_is_junction(path: str) -> bool:
    """Determines if a path is a windows junction. A junction can be
    determined using a bitwise AND operation between the file's
    attribute bitmask and the known junction bitmask (0x400).

    Args:
        path (str): A non-file path

    Returns:
        bool - whether the path is a junction or not.
    """
    if sys.platform != "win32" or os.path.islink(path) or os.path.isfile(path):
        return False

    import ctypes.wintypes

    get_file_attributes = ctypes.windll.kernel32.GetFileAttributesW  # type: ignore[attr-defined]
    get_file_attributes.argtypes = (ctypes.wintypes.LPWSTR,)
    get_file_attributes.restype = ctypes.wintypes.DWORD

    invalid_file_attributes = 0xFFFFFFFF
    reparse_point = 0x400
    file_attr = get_file_attributes(str(path))

    if file_attr == invalid_file_attributes:
        return False

    return file_attr & reparse_point > 0
    CreateHardLink(link, path)


@lang.memoized
def _windows_can_symlink() -> bool:
    """
    Determines if windows is able to make a symlink depending on
    the system configuration and the level of the user's permissions.
    """
    if sys.platform != "win32":
        tty.warn("windows_can_symlink method can't be used on non-Windows OS.")
        return False

def _win32_can_symlink():
    tempdir = tempfile.mkdtemp()

    dpath = os.path.join(tempdir, "dpath")
    fpath = os.path.join(tempdir, "fpath.txt")
    dpath = join(tempdir, "dpath")
    fpath = join(tempdir, "fpath.txt")

    dlink = os.path.join(tempdir, "dlink")
    flink = os.path.join(tempdir, "flink.txt")
    dlink = join(tempdir, "dlink")
    flink = join(tempdir, "flink.txt")

    import llnl.util.filesystem as fs

@@ -208,140 +96,24 @@ def _windows_can_symlink() -> bool:
    return can_symlink_directories and can_symlink_files


def _windows_create_link(source: str, link: str):
def _win32_is_junction(path):
    """
    Attempts to create a Hard Link or Junction as an alternative
    to a symbolic link. This is called when symbolic links cannot
    be created.
    Determines if a path is a win32 junction
    """
    if sys.platform != "win32":
        raise SymlinkError("windows_create_link method can't be used on non-Windows OS.")
    elif os.path.isdir(source):
        _windows_create_junction(source=source, link=link)
    elif os.path.isfile(source):
        _windows_create_hard_link(path=source, link=link)
    else:
        raise SymlinkError(
            f"Cannot create link from {source}. It is neither a file nor a directory."
        )
    if os.path.islink(path):
        return False

    if sys.platform == "win32":
        import ctypes.wintypes


def _windows_create_junction(source: str, link: str):
    """Duly verify that the path and link are eligible to create a junction,
    then create the junction.
    """
    if sys.platform != "win32":
        raise SymlinkError("windows_create_junction method can't be used on non-Windows OS.")
    elif not os.path.exists(source):
        raise SymlinkError("Source path does not exist, cannot create a junction.")
    elif os.path.lexists(link):
        raise AlreadyExistsError("Link path already exists, cannot create a junction.")
    elif not os.path.isdir(source):
        raise SymlinkError("Source path is not a directory, cannot create a junction.")
        GetFileAttributes = ctypes.windll.kernel32.GetFileAttributesW
        GetFileAttributes.argtypes = (ctypes.wintypes.LPWSTR,)
        GetFileAttributes.restype = ctypes.wintypes.DWORD

    import subprocess
        INVALID_FILE_ATTRIBUTES = 0xFFFFFFFF
        FILE_ATTRIBUTE_REPARSE_POINT = 0x400

    cmd = ["cmd", "/C", "mklink", "/J", link, source]
    proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out, err = proc.communicate()
    tty.debug(out.decode())
    if proc.returncode != 0:
        err = err.decode()
        tty.error(err)
        raise SymlinkError("Make junction command returned a non-zero return code.", err)
        res = GetFileAttributes(path)
        return res != INVALID_FILE_ATTRIBUTES and bool(res & FILE_ATTRIBUTE_REPARSE_POINT)


def _windows_create_hard_link(path: str, link: str):
    """Duly verify that the path and link are eligible to create a hard
    link, then create the hard link.
    """
    if sys.platform != "win32":
        raise SymlinkError("windows_create_hard_link method can't be used on non-Windows OS.")
    elif not os.path.exists(path):
        raise SymlinkError(f"File path {path} does not exist. Cannot create hard link.")
    elif os.path.lexists(link):
        raise AlreadyExistsError(f"Link path ({link}) already exists. Cannot create hard link.")
    elif not os.path.isfile(path):
        raise SymlinkError(f"File path ({link}) is not a file. Cannot create hard link.")
    else:
        tty.debug(f"Creating hard link {link} pointing to {path}")
        CreateHardLink(link, path)


def readlink(path: str):
    """Spack utility to override of os.readlink method to work cross platform"""
    if _windows_is_hardlink(path):
        return _windows_read_hard_link(path)
    elif _windows_is_junction(path):
        return _windows_read_junction(path)
    else:
        return os.readlink(path)


def _windows_read_hard_link(link: str) -> str:
    """Find all of the files that point to the same inode as the link"""
    if sys.platform != "win32":
        raise SymlinkError("Can't read hard link on non-Windows OS.")
    link = os.path.abspath(link)
    fsutil_cmd = ["fsutil", "hardlink", "list", link]
    proc = subprocess.Popen(fsutil_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
    out, err = proc.communicate()
    if proc.returncode != 0:
        raise SymlinkError(f"An error occurred while reading hard link: {err.decode()}")

    # fsutil response does not include the drive name, so append it back to each linked file.
    drive, link_tail = os.path.splitdrive(os.path.abspath(link))
    links = set([os.path.join(drive, p) for p in out.decode().splitlines()])
    links.remove(link)
    if len(links) == 1:
        return links.pop()
    elif len(links) > 1:
        # TODO: How best to handle the case where 3 or more paths point to a single inode?
        raise SymlinkError(f"Found multiple paths pointing to the same inode {links}")
    else:
        raise SymlinkError("Cannot determine hard link source path.")


def _windows_read_junction(link: str):
    """Find the path that a junction points to."""
    if sys.platform != "win32":
        raise SymlinkError("Can't read junction on non-Windows OS.")

    link = os.path.abspath(link)
    link_basename = os.path.basename(link)
    link_parent = os.path.dirname(link)
    fsutil_cmd = ["dir", "/a:l", link_parent]
    proc = subprocess.Popen(fsutil_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
    out, err = proc.communicate()
    if proc.returncode != 0:
        raise SymlinkError(f"An error occurred while reading junction: {err.decode()}")
    matches = re.search(rf"<JUNCTION>\s+{link_basename} \[(.*)]", out.decode())
    if matches:
        return matches.group(1)
    else:
        raise SymlinkError("Could not find junction path.")


@system_path_filter
def resolve_link_target_relative_to_the_link(link):
    """
    os.path.isdir uses os.path.exists, which for links will check
    the existence of the link target. If the link target is relative to
    the link, we need to construct a pathname that is valid from
    our cwd (which may not be the same as the link's directory)
    """
    target = readlink(link)
    if os.path.isabs(target):
        return target
    link_dir = os.path.dirname(os.path.abspath(link))
    return os.path.join(link_dir, target)


class SymlinkError(RuntimeError):
    """Exception class for errors raised while creating symlinks,
    junctions and hard links
    """


class AlreadyExistsError(SymlinkError):
    """Link path already exists."""
    return False
@@ -3,6 +3,8 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from __future__ import unicode_literals

import contextlib
import io
import os
@@ -12,7 +14,6 @@
import traceback
from datetime import datetime
from sys import platform as _platform
from typing import NoReturn

if _platform != "win32":
    import fcntl
@@ -211,7 +212,6 @@ def info(message, *args, **kwargs):
            stream.write(line + "\n")
        else:
            stream.write(indent + _output_filter(str(arg)) + "\n")
    stream.flush()


def verbose(message, *args, **kwargs):
@@ -246,7 +246,7 @@ def warn(message, *args, **kwargs):
    info("Warning: " + str(message), *args, **kwargs)


def die(message, *args, **kwargs) -> NoReturn:
def die(message, *args, **kwargs):
    kwargs.setdefault("countback", 4)
    error(message, *args, **kwargs)
    sys.exit(1)
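One side of the die() hunk adds a typing.NoReturn annotation. A minimal sketch of what the annotation buys (illustrative function only):

    import sys
    from typing import NoReturn

    def fail(msg: str) -> NoReturn:
        # NoReturn tells type checkers this call never falls through, so
        # any code after fail(...) is flagged as unreachable.
        sys.exit(msg)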
@@ -6,6 +6,8 @@
"""
Routines for printing columnar output. See ``colify()`` for more information.
"""
from __future__ import division, unicode_literals

import io
import os
import sys

@@ -59,6 +59,8 @@

To output an @, use '@@'. To output a } inside braces, use '}}'.
"""
from __future__ import unicode_literals

import re
import sys
from contextlib import contextmanager
@@ -68,7 +70,7 @@ class ColorParseError(Exception):
    """Raised when a color format fails to parse."""

    def __init__(self, message):
        super().__init__(message)
        super(ColorParseError, self).__init__(message)


# Text styles for ansi codes
@@ -203,7 +205,7 @@ def color_when(value):
    set_color_when(old_value)


class match_to_ansi:
class match_to_ansi(object):
    def __init__(self, color=True, enclose=False):
        self.color = _color_when_value(color)
        self.enclose = enclose
@@ -319,7 +321,7 @@ def cescape(string):
    return string


class ColorStream:
class ColorStream(object):
    def __init__(self, stream, color=None):
        self._stream = stream
        self._color = color

@@ -5,6 +5,8 @@

"""Utility classes for logging the output of blocks of code.
"""
from __future__ import unicode_literals

import atexit
import ctypes
import errno
@@ -65,7 +67,7 @@ def _strip(line):
    return _escape.sub("", line)


class keyboard_input:
class keyboard_input(object):
    """Context manager to disable line editing and echoing.

    Use this with ``sys.stdin`` for keyboard input, e.g.::
@@ -242,7 +244,7 @@ def __exit__(self, exc_type, exception, traceback):
            signal.signal(signum, old_handler)


class Unbuffered:
class Unbuffered(object):
    """Wrapper for Python streams that forces them to be unbuffered.

    This is implemented by forcing a flush after each write.
@@ -287,7 +289,7 @@ def _file_descriptors_work(*streams):
    return False


class FileWrapper:
class FileWrapper(object):
    """Represents a file. Can be an open stream, a path to a file (not opened
    yet), or neither. When unwrapped, it returns an open file (or file-like)
    object.
@@ -329,7 +331,7 @@ def close(self):
        self.file.close()


class MultiProcessFd:
class MultiProcessFd(object):
    """Return an object which stores a file descriptor and can be passed as an
    argument to a function run with ``multiprocessing.Process``, such that
    the file descriptor is available in the subprocess."""
@@ -429,7 +431,7 @@ def log_output(*args, **kwargs):
    return nixlog(*args, **kwargs)


class nixlog:
class nixlog(object):
    """
    Under the hood, we spawn a daemon and set up a pipe between this
    process and the daemon. The daemon writes our output to both the
@@ -750,7 +752,7 @@ def close(self):
        os.close(self.saved_stream)


class winlog:
class winlog(object):
    """
    Similar to nixlog, with underlying
    functionality ported to support Windows.
@@ -780,7 +782,7 @@ def __enter__(self):
            raise RuntimeError("file argument must be set by __init__ ")

        # Open both write and reading on logfile
        if isinstance(self.logfile, io.StringIO):
        if type(self.logfile) == io.StringIO:
            self._ioflag = True
            # cannot have two streams on tempfile, so we must make our own
            sys.stdout = self.logfile
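The winlog hunk swaps an exact type() comparison for isinstance(), which also accepts subclasses. A minimal sketch of the difference (illustrative subclass):

    import io

    class TaggedStringIO(io.StringIO):
        pass

    buf = TaggedStringIO()
    # isinstance matches subclasses; the exact-type test does not.
    assert isinstance(buf, io.StringIO)
    assert type(buf) is not io.StringIO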
@@ -13,6 +13,8 @@

Note: The functionality in this module is unsupported on Windows
"""
from __future__ import print_function

import multiprocessing
import os
import re
@@ -34,7 +36,7 @@
    pass


class ProcessController:
class ProcessController(object):
    """Wrapper around some fundamental process control operations.

    This allows one process (the controller) to drive another (the
@@ -155,7 +157,7 @@ def wait_running(self):
        self.wait(lambda: "T" not in self.proc_status())


class PseudoShell:
class PseudoShell(object):
    """Sets up controller and minion processes with a PTY.

    You can create a ``PseudoShell`` if you want to test how some

@@ -4,7 +4,7 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

#: PEP440 canonical <major>.<minor>.<micro>.<devN> string
__version__ = "0.22.0.dev0"
__version__ = "0.21.0.dev0"
spack_version = __version__


@@ -8,18 +8,16 @@
from llnl.util.lang import memoized

import spack.spec
import spack.version
from spack.compilers.clang import Clang
from spack.spec import CompilerSpec
from spack.util.executable import Executable, ProcessError


class ABI:
class ABI(object):
    """This class provides methods to test ABI compatibility between specs.
    The current implementation is rather rough and could be improved."""

    def architecture_compatible(
        self, target: spack.spec.Spec, constraint: spack.spec.Spec
    ) -> bool:
    def architecture_compatible(self, target, constraint):
        """Return true if architecture of target spec is ABI compatible
        to the architecture of constraint spec. If either the target
        or constraint specs have no architecture, target is also defined
@@ -36,7 +34,7 @@ def _gcc_get_libstdcxx_version(self, version):
        a compiler's libstdc++ or libgcc_s"""
        from spack.build_environment import dso_suffix

        spec = spack.spec.CompilerSpec("gcc", version)
        spec = CompilerSpec("gcc", version)
        compilers = spack.compilers.compilers_for_spec(spec)
        if not compilers:
            return None
@@ -79,20 +77,16 @@ def _gcc_compiler_compare(self, pversion, cversion):
            return False
        return plib == clib

    def _intel_compiler_compare(
        self, pversion: spack.version.ClosedOpenRange, cversion: spack.version.ClosedOpenRange
    ) -> bool:
    def _intel_compiler_compare(self, pversion, cversion):
        """Returns true iff the intel version pversion and cversion
        are ABI compatible"""

        # Test major and minor versions. Ignore build version.
        pv = pversion.lo
        cv = cversion.lo
        return pv.up_to(2) == cv.up_to(2)
        if len(pversion.version) < 2 or len(cversion.version) < 2:
            return False
        return pversion.version[:2] == cversion.version[:2]

    def compiler_compatible(
        self, parent: spack.spec.Spec, child: spack.spec.Spec, loose: bool = False
    ) -> bool:
    def compiler_compatible(self, parent, child, **kwargs):
        """Return true if compilers for parent and child are ABI compatible."""
        if not parent.compiler or not child.compiler:
            return True
@@ -101,7 +95,7 @@ def compiler_compatible(
            # Different compiler families are assumed ABI incompatible
            return False

        if loose:
        if kwargs.get("loose", False):
            return True

        # TODO: Can we move the specialized ABI matching stuff
@@ -122,10 +116,9 @@ def compiler_compatible(
                return True
        return False

    def compatible(
        self, target: spack.spec.Spec, constraint: spack.spec.Spec, loose: bool = False
    ) -> bool:
    def compatible(self, target, constraint, **kwargs):
        """Returns true if target spec is ABI compatible to constraint spec"""
        loosematch = kwargs.get("loose", False)
        return self.architecture_compatible(target, constraint) and self.compiler_compatible(
            target, constraint, loose=loose
            target, constraint, loose=loosematch
        )
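The ABI hunks replace a **kwargs bag with an explicit, typed loose flag. A minimal usage sketch under the typed signature (hypothetical specs; assumes both specs carry a compiler):

    import spack.spec
    from spack.abi import ABI

    target = spack.spec.Spec("zlib %gcc@12")
    constraint = spack.spec.Spec("zlib %gcc@12")

    # With loose=True only compiler families must match, not exact versions.
    compatible = ABI().compatible(target, constraint, loose=True)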
@@ -38,14 +38,10 @@ def _search_duplicate_compilers(error_cls):
import ast
import collections
import collections.abc
import glob
import inspect
import io
import itertools
import pathlib
import pickle
import re
import warnings
from urllib.request import urlopen

import llnl.util.lang
@@ -55,7 +51,6 @@ def _search_duplicate_compilers(error_cls):
import spack.repo
import spack.spec
import spack.util.crypto
import spack.util.spack_yaml as syaml
import spack.variant

#: Map an audit tag to a list of callables implementing checks
@@ -65,7 +60,7 @@ def _search_duplicate_compilers(error_cls):
GROUPS = collections.defaultdict(list)


class Error:
class Error(object):
    """Information on an error reported in a test."""

    def __init__(self, summary, details):
@@ -252,88 +247,6 @@ def _search_duplicate_specs_in_externals(error_cls):
    return errors


@config_packages
def _deprecated_preferences(error_cls):
    """Search package preferences deprecated in v0.21 (and slated for removal in v0.22)"""
    # TODO (v0.22): remove this audit as the attributes will not be allowed in config
    errors = []
    packages_yaml = spack.config.CONFIG.get_config("packages")

    def make_error(attribute_name, config_data, summary):
        s = io.StringIO()
        s.write("Occurring in the following file:\n")
        dict_view = syaml.syaml_dict((k, v) for k, v in config_data.items() if k == attribute_name)
        syaml.dump_config(dict_view, stream=s, blame=True)
        return error_cls(summary=summary, details=[s.getvalue()])

    if "all" in packages_yaml and "version" in packages_yaml["all"]:
        summary = "Using the deprecated 'version' attribute under 'packages:all'"
        errors.append(make_error("version", packages_yaml["all"], summary))

    for package_name in packages_yaml:
        if package_name == "all":
            continue

        package_conf = packages_yaml[package_name]
        for attribute in ("compiler", "providers", "target"):
            if attribute not in package_conf:
                continue
            summary = (
                f"Using the deprecated '{attribute}' attribute " f"under 'packages:{package_name}'"
            )
            errors.append(make_error(attribute, package_conf, summary))

    return errors


@config_packages
def _avoid_mismatched_variants(error_cls):
    """Warns if variant preferences have mismatched types or names."""
    errors = []
    packages_yaml = spack.config.CONFIG.get_config("packages")

    def make_error(config_data, summary):
        s = io.StringIO()
        s.write("Occurring in the following file:\n")
        syaml.dump_config(config_data, stream=s, blame=True)
        return error_cls(summary=summary, details=[s.getvalue()])

    for pkg_name in packages_yaml:
        # 'all:' must be more forgiving, since it is setting defaults for everything
        if pkg_name == "all" or "variants" not in packages_yaml[pkg_name]:
            continue

        preferences = packages_yaml[pkg_name]["variants"]
        if not isinstance(preferences, list):
            preferences = [preferences]

        for variants in preferences:
            current_spec = spack.spec.Spec(variants)
            pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
            for variant in current_spec.variants.values():
                # Variant does not exist at all
                if variant.name not in pkg_cls.variants:
                    summary = (
                        f"Setting a preference for the '{pkg_name}' package to the "
                        f"non-existing variant '{variant.name}'"
                    )
                    errors.append(make_error(preferences, summary))
                    continue

                # Variant cannot accept this value
                s = spack.spec.Spec(pkg_name)
                try:
                    s.update_variant_validate(variant.name, variant.value)
                except Exception:
                    summary = (
                        f"Setting the variant '{variant.name}' of the '{pkg_name}' package "
                        f"to the invalid value '{str(variant)}'"
                    )
                    errors.append(make_error(preferences, summary))

    return errors


#: Sanity checks on package directives
package_directives = AuditClass(
    group="packages",
@@ -373,7 +286,7 @@ def _check_build_test_callbacks(pkgs, error_cls):
    """Ensure stand-alone test method is not included in build-time callbacks"""
    errors = []
    for pkg_name in pkgs:
        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
        test_callbacks = getattr(pkg_cls, "build_time_test_callbacks", None)

        # TODO (post-34236): "test*"->"test_*" once remove deprecated methods
@@ -391,48 +304,33 @@ def _check_build_test_callbacks(pkgs, error_cls):

@package_directives
def _check_patch_urls(pkgs, error_cls):
    """Ensure that patches fetched from GitHub and GitLab have stable sha256
    hashes."""
    """Ensure that patches fetched from GitHub have stable sha256 hashes."""
    github_patch_url_re = (
        r"^https?://(?:patch-diff\.)?github(?:usercontent)?\.com/"
        r".+/.+/(?:commit|pull)/[a-fA-F0-9]+\.(?:patch|diff)"
    )
    # Only .diff URLs have stable/full hashes:
    # https://forum.gitlab.com/t/patches-with-full-index/29313
    gitlab_patch_url_re = (
        r"^https?://(?:.+)?gitlab(?:.+)/"
        r".+/.+/-/(?:commit|merge_requests)/[a-fA-F0-9]+\.(?:patch|diff)"
        ".+/.+/(?:commit|pull)/[a-fA-F0-9]*.(?:patch|diff)"
    )

    errors = []
    for pkg_name in pkgs:
        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
        for condition, patches in pkg_cls.patches.items():
            for patch in patches:
                if not isinstance(patch, spack.patch.UrlPatch):
                    continue

                if re.match(github_patch_url_re, patch.url):
                    full_index_arg = "?full_index=1"
                    if not patch.url.endswith(full_index_arg):
                        errors.append(
                            error_cls(
                                "patch URL in package {0} must end with {1}".format(
                                    pkg_cls.name, full_index_arg
                                ),
                                [patch.url],
                            )
                        )
                elif re.match(gitlab_patch_url_re, patch.url):
                    if not patch.url.endswith(".diff"):
                        errors.append(
                            error_cls(
                                "patch URL in package {0} must end with .diff".format(
                                    pkg_cls.name
                                ),
                                [patch.url],
                            )
                if not re.match(github_patch_url_re, patch.url):
                    continue

                full_index_arg = "?full_index=1"
                if not patch.url.endswith(full_index_arg):
                    errors.append(
                        error_cls(
                            "patch URL in package {0} must end with {1}".format(
                                pkg_cls.name, full_index_arg
                            ),
                            [patch.url],
                        )
                    )

    return errors
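The audit above exists because GitHub regenerates .patch/.diff files with abbreviated blob indexes unless ?full_index=1 is appended, which silently changes the bytes and breaks recorded sha256 sums. A minimal sketch of a URL that passes (hypothetical repository and commit):

    # full_index pins the diff format, keeping the fetched bytes stable.
    url = "https://github.com/example/project/commit/abc123.patch?full_index=1"
    assert url.endswith("?full_index=1")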
@@ -444,7 +342,7 @@ def _search_for_reserved_attributes_names_in_packages(pkgs, error_cls):
    errors = []
    for pkg_name in pkgs:
        name_definitions = collections.defaultdict(list)
        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)

        for cls_item in inspect.getmro(pkg_cls):
            for name in RESERVED_NAMES:
@@ -485,7 +383,7 @@ def _ensure_packages_are_pickeleable(pkgs, error_cls):
    """Ensure that package objects are pickleable"""
    errors = []
    for pkg_name in pkgs:
        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
        pkg = pkg_cls(spack.spec.Spec(pkg_name))
        try:
            pickle.dumps(pkg)
@@ -526,7 +424,7 @@ def _ensure_all_versions_can_produce_a_fetcher(pkgs, error_cls):
    """Ensure all versions in a package can produce a fetcher"""
    errors = []
    for pkg_name in pkgs:
        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
        pkg = pkg_cls(spack.spec.Spec(pkg_name))
        try:
            spack.fetch_strategy.check_pkg_attributes(pkg)
@@ -551,7 +449,7 @@ def _ensure_docstring_and_no_fixme(pkgs, error_cls):
    ]
    for pkg_name in pkgs:
        details = []
        filename = spack.repo.PATH.filename_for_package_name(pkg_name)
        filename = spack.repo.path.filename_for_package_name(pkg_name)
        with open(filename, "r") as package_file:
            for i, line in enumerate(package_file):
                pattern = next((r for r in fixme_regexes if r.search(line)), None)
@@ -563,7 +461,7 @@ def _ensure_docstring_and_no_fixme(pkgs, error_cls):
            error_msg = "Package '{}' contains boilerplate that needs to be removed"
            errors.append(error_cls(error_msg.format(pkg_name), details))

        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
        if not pkg_cls.__doc__:
            error_msg = "Package '{}' is missing a docstring"
            errors.append(error_cls(error_msg.format(pkg_name), []))
@@ -576,7 +474,7 @@ def _ensure_all_packages_use_sha256_checksums(pkgs, error_cls):
    """Ensure no packages use md5 checksums"""
    errors = []
    for pkg_name in pkgs:
        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
        if pkg_cls.manual_download:
            continue

@@ -613,7 +511,7 @@ def _ensure_env_methods_are_ported_to_builders(pkgs, error_cls):
    """Ensure that methods modifying the build environment are ported to builder classes."""
    errors = []
    for pkg_name in pkgs:
        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
        buildsystem_variant, _ = pkg_cls.variants["build_system"]
        buildsystem_names = [getattr(x, "value", x) for x in buildsystem_variant.values]
        builder_cls_names = [spack.builder.BUILDER_CLS[x].__name__ for x in buildsystem_names]
@@ -640,7 +538,7 @@ def _linting_package_file(pkgs, error_cls):
    """Check for correctness of links"""
    errors = []
    for pkg_name in pkgs:
        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)

        # Does the homepage have http, and if so, does https work?
        if pkg_cls.homepage.startswith("http://"):
@@ -664,7 +562,7 @@ def _unknown_variants_in_directives(pkgs, error_cls):
    """Report unknown or wrong variants in directives for this package"""
    errors = []
    for pkg_name in pkgs:
        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)

        # Check "conflicts" directive
        for conflict, triggers in pkg_cls.conflicts.items():
@@ -730,15 +628,15 @@ def _unknown_variants_in_dependencies(pkgs, error_cls):
    """Report unknown dependencies and wrong variants for dependencies"""
    errors = []
    for pkg_name in pkgs:
        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
        filename = spack.repo.PATH.filename_for_package_name(pkg_name)
        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
        filename = spack.repo.path.filename_for_package_name(pkg_name)
        for dependency_name, dependency_data in pkg_cls.dependencies.items():
            # No need to analyze virtual packages
            if spack.repo.PATH.is_virtual(dependency_name):
            if spack.repo.path.is_virtual(dependency_name):
                continue

            try:
                dependency_pkg_cls = spack.repo.PATH.get_pkg_class(dependency_name)
                dependency_pkg_cls = spack.repo.path.get_pkg_class(dependency_name)
            except spack.repo.UnknownPackageError:
                # This dependency is completely missing, so report
                # and continue the analysis
@@ -777,7 +675,7 @@ def _ensure_variant_defaults_are_parsable(pkgs, error_cls):
    """Ensures that variant defaults are present and parsable from cli"""
    errors = []
    for pkg_name in pkgs:
        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
        for variant_name, entry in pkg_cls.variants.items():
            variant, _ = entry
            default_is_parsable = (
@@ -811,56 +709,30 @@ def _ensure_variant_defaults_are_parsable(pkgs, error_cls):
    return errors


@package_directives
def _ensure_variants_have_descriptions(pkgs, error_cls):
    """Ensures that all variants have a description."""
    errors = []
    for pkg_name in pkgs:
        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
        for variant_name, entry in pkg_cls.variants.items():
            variant, _ = entry
            if not variant.description:
                error_msg = "Variant '{}' in package '{}' is missing a description"
                errors.append(error_cls(error_msg.format(variant_name, pkg_name), []))

    return errors


@package_directives
def _version_constraints_are_satisfiable_by_some_version_in_repo(pkgs, error_cls):
    """Report if version constraints used in directives are not satisfiable"""
    errors = []
    for pkg_name in pkgs:
        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
        filename = spack.repo.PATH.filename_for_package_name(pkg_name)
        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
        filename = spack.repo.path.filename_for_package_name(pkg_name)
        dependencies_to_check = []
        for dependency_name, dependency_data in pkg_cls.dependencies.items():
            # Skip virtual dependencies for the time being; checks on
            # their versions can be added later
            if spack.repo.PATH.is_virtual(dependency_name):
            if spack.repo.path.is_virtual(dependency_name):
                continue

            dependencies_to_check.extend([edge.spec for edge in dependency_data.values()])

        host_architecture = spack.spec.ArchSpec.default_arch()
        for s in dependencies_to_check:
            dependency_pkg_cls = None
            try:
                dependency_pkg_cls = spack.repo.PATH.get_pkg_class(s.name)
                # Some packages have hacks that might cause failures on some platform
                # Allow to explicitly set conditions to skip version checks in that case
                skip_conditions = getattr(dependency_pkg_cls, "skip_version_audit", [])
                skip_version_check = False
                for condition in skip_conditions:
                    if host_architecture.satisfies(spack.spec.Spec(condition).architecture):
                        skip_version_check = True
                        break
                assert skip_version_check or any(
                    v.intersects(s.versions) for v in list(dependency_pkg_cls.versions)
                )
                dependency_pkg_cls = spack.repo.path.get_pkg_class(s.name)
                assert any(v.intersects(s.versions) for v in list(dependency_pkg_cls.versions))
            except Exception:
                summary = (
                    "{0}: dependency on {1} cannot be satisfied by known versions of {1.name}"
                    "{0}: dependency on {1} cannot be satisfied " "by known versions of {1.name}"
                ).format(pkg_name, s)
                details = ["happening in " + filename]
                if dependency_pkg_cls is not None:
@@ -889,7 +761,7 @@ def _analyze_variants_in_directive(pkg, constraint, directive, error_cls):
    except variant_exceptions as e:
        summary = pkg.name + ': wrong variant in "{0}" directive'
        summary = summary.format(directive)
        filename = spack.repo.PATH.filename_for_package_name(pkg.name)
        filename = spack.repo.path.filename_for_package_name(pkg.name)

        error_msg = str(e).strip()
        if isinstance(e, KeyError):
@@ -900,123 +772,3 @@ def _analyze_variants_in_directive(pkg, constraint, directive, error_cls):
        errors.append(err)

    return errors


@package_directives
def _named_specs_in_when_arguments(pkgs, error_cls):
    """Reports named specs in the 'when=' attribute of a directive.

    Note that 'conflicts' is the only directive allowing that.
    """
    errors = []
    for pkg_name in pkgs:
        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)

        def _extracts_errors(triggers, summary):
            _errors = []
            for trigger in list(triggers):
                when_spec = spack.spec.Spec(trigger)
                if when_spec.name is not None and when_spec.name != pkg_name:
                    details = [f"using '{trigger}', should be '^{trigger}'"]
                    _errors.append(error_cls(summary=summary, details=details))
            return _errors

        for dname, triggers in pkg_cls.dependencies.items():
            summary = f"{pkg_name}: wrong 'when=' condition for the '{dname}' dependency"
            errors.extend(_extracts_errors(triggers, summary))

        for vname, (variant, triggers) in pkg_cls.variants.items():
            summary = f"{pkg_name}: wrong 'when=' condition for the '{vname}' variant"
            errors.extend(_extracts_errors(triggers, summary))

        for provided, triggers in pkg_cls.provided.items():
            summary = f"{pkg_name}: wrong 'when=' condition for the '{provided}' virtual"
            errors.extend(_extracts_errors(triggers, summary))

        for _, triggers in pkg_cls.requirements.items():
            triggers = [when_spec for when_spec, _, _ in triggers]
            summary = f"{pkg_name}: wrong 'when=' condition in 'requires' directive"
            errors.extend(_extracts_errors(triggers, summary))

        triggers = list(pkg_cls.patches)
        summary = f"{pkg_name}: wrong 'when=' condition in 'patch' directives"
        errors.extend(_extracts_errors(triggers, summary))

        triggers = list(pkg_cls.resources)
        summary = f"{pkg_name}: wrong 'when=' condition in 'resource' directives"
        errors.extend(_extracts_errors(triggers, summary))

    return llnl.util.lang.dedupe(errors)
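The audit above flags 'when=' conditions that name another package without anchoring it as a dependency. A minimal sketch of the pattern in a hypothetical package recipe:

    # Correct: '^' anchors the condition to the mpi dependency.
    depends_on("hdf5+mpi", when="^mpi@3:")

    # Flagged: without '^' the condition names the 'mpi' package itself.
    # depends_on("hdf5+mpi", when="mpi@3:")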
#: Sanity checks on package directives
external_detection = AuditClass(
    group="externals",
    tag="PKG-EXTERNALS",
    description="Sanity checks for external software detection",
    kwargs=("pkgs",),
)


def packages_with_detection_tests():
    """Return the list of packages with a corresponding detection_test.yaml file."""
    import spack.config
    import spack.util.path

    to_be_tested = []
    for current_repo in spack.repo.PATH.repos:
        namespace = current_repo.namespace
        packages_dir = pathlib.PurePath(current_repo.packages_path)
        pattern = packages_dir / "**" / "detection_test.yaml"
        pkgs_with_tests = [
            f"{namespace}.{str(pathlib.PurePath(x).parent.name)}" for x in glob.glob(str(pattern))
        ]
        to_be_tested.extend(pkgs_with_tests)

    return to_be_tested


@external_detection
def _test_detection_by_executable(pkgs, error_cls):
    """Test drive external detection for packages"""
    import spack.detection

    errors = []

    # Filter the packages and retain only the ones with detection tests
    pkgs_with_tests = packages_with_detection_tests()
    selected_pkgs = []
    for current_package in pkgs_with_tests:
        _, unqualified_name = spack.repo.partition_package_name(current_package)
        # Check for both unqualified name and qualified name
        if unqualified_name in pkgs or current_package in pkgs:
            selected_pkgs.append(current_package)
    selected_pkgs.sort()

    if not selected_pkgs:
        summary = "No detection test to run"
        details = [f' "{p}" has no detection test' for p in pkgs]
        warnings.warn("\n".join([summary] + details))
        return errors

    for pkg_name in selected_pkgs:
        for idx, test_runner in enumerate(
            spack.detection.detection_tests(pkg_name, spack.repo.PATH)
        ):
            specs = test_runner.execute()
            expected_specs = test_runner.expected_specs

            not_detected = set(expected_specs) - set(specs)
            if not_detected:
                summary = pkg_name + ": cannot detect some specs"
                details = [f'"{s}" was not detected [test_id={idx}]' for s in sorted(not_detected)]
                errors.append(error_cls(summary=summary, details=details))

            not_expected = set(specs) - set(expected_specs)
            if not_expected:
                summary = pkg_name + ": detected unexpected specs"
                msg = '"{0}" was detected, but was not expected [test_id={1}]'
                details = [msg.format(s, idx) for s in sorted(not_expected)]
                errors.append(error_cls(summary=summary, details=details))

    return errors
File diff suppressed because it is too large
@@ -4,7 +4,7 @@
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
"""Function and classes needed to bootstrap Spack itself."""
|
||||
|
||||
from .config import ensure_bootstrap_configuration, is_bootstrapping, store_path
|
||||
from .config import ensure_bootstrap_configuration, is_bootstrapping
|
||||
from .core import all_core_root_specs, ensure_core_dependencies, ensure_patchelf_in_path_or_raise
|
||||
from .environment import BootstrapEnvironment, ensure_environment_dependencies
|
||||
from .status import status_message
|
||||
@@ -18,5 +18,4 @@
|
||||
"ensure_environment_dependencies",
|
||||
"BootstrapEnvironment",
|
||||
"status_message",
|
||||
"store_path",
|
||||
]
|
||||
|
@@ -50,7 +50,7 @@ def _try_import_from_store(
|
||||
# We have to run as part of this python interpreter
|
||||
query_spec += " ^" + spec_for_current_python()
|
||||
|
||||
installed_specs = spack.store.STORE.db.query(query_spec, installed=True)
|
||||
installed_specs = spack.store.db.query(query_spec, installed=True)
|
||||
|
||||
for candidate_spec in installed_specs:
|
||||
pkg = candidate_spec["python"].package
|
||||
@@ -183,7 +183,7 @@ def _executables_in_store(
|
||||
executables_str = ", ".join(executables)
|
||||
msg = "[BOOTSTRAP EXECUTABLES {0}] Try installed specs with query '{1}'"
|
||||
tty.debug(msg.format(executables_str, query_spec))
|
||||
installed_specs = spack.store.STORE.db.query(query_spec, installed=True)
|
||||
installed_specs = spack.store.db.query(query_spec, installed=True)
|
||||
if installed_specs:
|
||||
for concrete_spec in installed_specs:
|
||||
bin_dir = concrete_spec.prefix.bin
|
||||
@@ -213,8 +213,7 @@ def _root_spec(spec_str: str) -> str:
|
||||
if str(spack.platforms.host()) == "darwin":
|
||||
spec_str += " %apple-clang"
|
||||
elif str(spack.platforms.host()) == "windows":
|
||||
# TODO (johnwparent): Remove version constraint when clingo patch is up
|
||||
spec_str += " %msvc@:19.37"
|
||||
spec_str += " %msvc"
|
||||
else:
|
||||
spec_str += " %gcc"
|
||||
|
||||
|
@@ -124,9 +124,9 @@ def _read_and_sanitize_configuration() -> Dict[str, Any]:
 def _bootstrap_config_scopes() -> Sequence["spack.config.ConfigScope"]:
     tty.debug("[BOOTSTRAP CONFIG SCOPE] name=_builtin")
     config_scopes: MutableSequence["spack.config.ConfigScope"] = [
-        spack.config.InternalConfigScope("_builtin", spack.config.CONFIG_DEFAULTS)
+        spack.config.InternalConfigScope("_builtin", spack.config.config_defaults)
     ]
-    configuration_paths = (spack.config.CONFIGURATION_DEFAULTS_PATH, ("bootstrap", _config_path()))
+    configuration_paths = (spack.config.configuration_defaults_path, ("bootstrap", _config_path()))
     for name, path in configuration_paths:
         platform = spack.platforms.host().name
         platform_scope = spack.config.ConfigScope(
@@ -143,28 +143,25 @@ def _bootstrap_config_scopes() -> Sequence["spack.config.ConfigScope"]:
 def _add_compilers_if_missing() -> None:
     arch = spack.spec.ArchSpec.frontend_arch()
     if not spack.compilers.compilers_for_arch(arch):
-        new_compilers = spack.compilers.find_new_compilers(
-            mixed_toolchain=sys.platform == "darwin"
-        )
+        new_compilers = spack.compilers.find_new_compilers()
         if new_compilers:
             spack.compilers.add_compilers_to_config(new_compilers, init_config=False)
 
 
 @contextlib.contextmanager
 def _ensure_bootstrap_configuration() -> Generator:
-    spack.store.ensure_singleton_created()
     bootstrap_store_path = store_path()
     user_configuration = _read_and_sanitize_configuration()
     with spack.environment.no_active_environment():
         with spack.platforms.prevent_cray_detection(), spack.platforms.use_platform(
             spack.platforms.real_host()
-        ), spack.repo.use_repositories(spack.paths.packages_path):
+        ), spack.repo.use_repositories(spack.paths.packages_path), spack.store.use_store(
+            bootstrap_store_path
+        ):
             # Default configuration scopes excluding command line
             # and builtin but accounting for platform specific scopes
             config_scopes = _bootstrap_config_scopes()
-            with spack.config.use_configuration(*config_scopes), spack.store.use_store(
-                bootstrap_store_path, extra_data={"padded_length": 0}
-            ):
+            with spack.config.use_configuration(*config_scopes):
                 # We may need to compile code from sources, so ensure we
                 # have compilers for the current platform
                 _add_compilers_if_missing()
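Both sides of this hunk rely on Python stacking several context managers in one with statement. A self-contained sketch of the enter/exit ordering that pattern gives, using a hypothetical scope() manager as a toy stand-in for use_platform()/use_repositories()/use_store():

import contextlib

@contextlib.contextmanager
def scope(name: str):
    # Enter prints on the way in; the finally block runs on the way out.
    print(f"enter {name}")
    try:
        yield name
    finally:
        print(f"exit {name}")

with scope("platform"), scope("repo"), scope("store"):
    print("bootstrap configuration active")
# Exits run in reverse order: store, repo, platform.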
@@ -214,7 +214,7 @@ def _install_and_test(
         with spack.config.override(self.mirror_scope):
             # This index is currently needed to get the compiler used to build some
             # specs that we know by dag hash.
-            spack.binary_distribution.BINARY_INDEX.regenerate_spec_cache()
+            spack.binary_distribution.binary_index.regenerate_spec_cache()
             index = spack.binary_distribution.update_cache_and_get_specs()
 
         if not index:
@@ -228,7 +228,7 @@ def _install_and_test(
            if not abstract_spec.intersects(candidate_spec):
                continue
 
-            if python_spec is not None and not abstract_spec.intersects(f"^{python_spec}"):
+            if python_spec is not None and python_spec not in abstract_spec:
                continue
 
            for _, pkg_hash, pkg_sha256 in item["binaries"]:
@@ -291,10 +291,6 @@ def try_import(self, module: str, abstract_spec_str: str) -> bool:
         with spack_python_interpreter():
             # Add hint to use frontend operating system on Cray
             concrete_spec = spack.spec.Spec(abstract_spec_str + " ^" + spec_for_current_python())
-            # This is needed to help the old concretizer taking the `setuptools` dependency
-            # only when bootstrapping from sources on Python 3.12
-            if spec_for_current_python() == "python@3.12":
-                concrete_spec.constrain("+force_setuptools")
 
             if module == "clingo":
                 # TODO: remove when the old concretizer is deprecated # pylint: disable=fixme
@@ -450,11 +446,16 @@ def ensure_executables_in_path_or_raise(
|
||||
current_bootstrapper.last_search["spec"],
|
||||
current_bootstrapper.last_search["command"],
|
||||
)
|
||||
cmd.add_default_envmod(
|
||||
spack.user_environment.environment_modifications_for_specs(
|
||||
concrete_spec, set_package_py_globals=False
|
||||
env_mods = spack.util.environment.EnvironmentModifications()
|
||||
for dep in concrete_spec.traverse(
|
||||
root=True, order="post", deptype=("link", "run")
|
||||
):
|
||||
env_mods.extend(
|
||||
spack.user_environment.environment_modifications_for_spec(
|
||||
dep, set_package_py_globals=False
|
||||
)
|
||||
)
|
||||
)
|
||||
cmd.add_default_envmod(env_mods)
|
||||
return cmd
|
||||
|
||||
assert exception_handler, (
|
||||
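The "+" side of this hunk accumulates environment modifications by walking the dependency graph in post-order (dependencies before dependents). A toy sketch of that accumulation pattern, with a hypothetical graph; Spack's traverse() likewise visits each node once:

def post_order(node, children, seen=None):
    # Yield dependencies before dependents, visiting each node only once.
    seen = set() if seen is None else seen
    if node in seen:
        return
    seen.add(node)
    for child in children.get(node, ()):
        yield from post_order(child, children, seen)
    yield node

deps = {"clingo": ["python", "re2c"], "python": ["zlib"], "re2c": ["zlib"]}
mods = [f"prepend PATH for {dep}" for dep in post_order("clingo", deps)]
print(mods)  # zlib first, clingo (the root) last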
@@ -475,22 +476,15 @@
 def _add_externals_if_missing() -> None:
     search_list = [
         # clingo
-        "cmake",
-        "bison",
+        spack.repo.path.get_pkg_class("cmake"),
+        spack.repo.path.get_pkg_class("bison"),
         # GnuPG
-        "gawk",
-        # develop deps
-        "git",
+        spack.repo.path.get_pkg_class("gawk"),
     ]
     if IS_WINDOWS:
-        search_list.append("winbison")
-    externals = spack.detection.by_path(search_list)
-    # System git is typically deprecated, so mark as non-buildable to force it as external
-    non_buildable_externals = {k: externals.pop(k) for k in ("git",) if k in externals}
-    spack.detection.update_configuration(externals, scope="bootstrap", buildable=True)
-    spack.detection.update_configuration(
-        non_buildable_externals, scope="bootstrap", buildable=False
-    )
+        search_list.append(spack.repo.path.get_pkg_class("winbison"))
+    detected_packages = spack.detection.by_executable(search_list)
+    spack.detection.update_configuration(detected_packages, scope="bootstrap")
 
 
 def clingo_root_spec() -> str:
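Aside: on the "-" side of the hunk above, the dict comprehension with pop() partitions the detected externals in place, so git can be registered as non-buildable while everything else stays buildable. A toy sketch of the idiom (version strings are made up):

externals = {"cmake": ["cmake@3.27.4"], "git": ["git@2.42.0"], "gawk": ["gawk@5.2.2"]}
# Pop selected keys into a second dict; each group then gets different settings.
non_buildable = {k: externals.pop(k) for k in ("git",) if k in externals}
assert "git" in non_buildable and "git" not in externals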
Some files were not shown because too many files have changed in this diff