Compare commits

2 Commits

Author        SHA1        Message                                     Date
Tom Scogland  d4a607d316  Merge branch 'develop' into load-run-deps   2023-09-29 19:02:09 +01:00
Tom Scogland  da4e825754  cmd/load: only process run deps for load    2023-09-29 19:58:52 +02:00
                          original patch thanks to @tgamblin
                          fixes #37704

860 changed files with 7774 additions and 28987 deletions
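The second commit restricts spack load to run-type dependencies. A minimal sketch of the idea, assuming Spack's Spec.traverse API and a hypothetical helper name (not the actual patch):

    # Hypothetical sketch: collect only the specs `spack load` must activate.
    # Filtering the traversal by deptype skips build-only dependencies
    # (e.g. cmake), which are not needed in the user's runtime environment.
    def specs_to_load(root_spec):
        return list(root_spec.traverse(root=True, deptype=("run",)))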

View File

@@ -22,8 +22,8 @@ jobs:
matrix:
operating_system: ["ubuntu-latest", "macos-latest"]
steps:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2
- uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # @v2
- uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # @v2
- uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # @v2
with:
python-version: ${{inputs.python_version}}
- name: Install Python packages

View File

@@ -24,7 +24,7 @@ jobs:
make patch unzip which xz python3 python3-devel tree \
cmake bison bison-devel libstdc++-static
- name: Checkout
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
with:
fetch-depth: 0
- name: Setup non-root user
@@ -42,8 +42,8 @@ jobs:
shell: runuser -u spack-test -- bash {0}
run: |
source share/spack/setup-env.sh
spack bootstrap disable github-actions-v0.5
spack bootstrap disable github-actions-v0.4
spack bootstrap disable github-actions-v0.3
spack external find cmake bison
spack -d solve zlib
tree ~/.spack/bootstrap/store/
@@ -62,7 +62,7 @@ jobs:
make patch unzip xz-utils python3 python3-dev tree \
cmake bison
- name: Checkout
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
with:
fetch-depth: 0
- name: Setup non-root user
@@ -80,8 +80,8 @@ jobs:
shell: runuser -u spack-test -- bash {0}
run: |
source share/spack/setup-env.sh
spack bootstrap disable github-actions-v0.5
spack bootstrap disable github-actions-v0.4
spack bootstrap disable github-actions-v0.3
spack external find cmake bison
spack -d solve zlib
tree ~/.spack/bootstrap/store/
@@ -99,7 +99,7 @@ jobs:
bzip2 curl file g++ gcc gfortran git gnupg2 gzip \
make patch unzip xz-utils python3 python3-dev tree
- name: Checkout
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
with:
fetch-depth: 0
- name: Setup non-root user
@@ -133,7 +133,7 @@ jobs:
make patch unzip which xz python3 python3-devel tree \
cmake bison
- name: Checkout
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
with:
fetch-depth: 0
- name: Setup repo
@@ -145,8 +145,8 @@ jobs:
- name: Bootstrap clingo
run: |
source share/spack/setup-env.sh
spack bootstrap disable github-actions-v0.5
spack bootstrap disable github-actions-v0.4
spack bootstrap disable github-actions-v0.3
spack external find cmake bison
spack -d solve zlib
tree ~/.spack/bootstrap/store/
@@ -158,16 +158,13 @@ jobs:
run: |
brew install cmake bison@2.7 tree
- name: Checkout
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
- uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # @v2
with:
python-version: "3.12"
uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
- name: Bootstrap clingo
run: |
source share/spack/setup-env.sh
export PATH=/usr/local/opt/bison@2.7/bin:$PATH
spack bootstrap disable github-actions-v0.5
spack bootstrap disable github-actions-v0.4
spack bootstrap disable github-actions-v0.3
spack external find --not-buildable cmake bison
spack -d solve zlib
tree ~/.spack/bootstrap/store/
@@ -182,7 +179,7 @@ jobs:
run: |
brew install tree
- name: Checkout
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
- name: Bootstrap clingo
run: |
set -ex
@@ -207,7 +204,7 @@ jobs:
runs-on: ubuntu-20.04
steps:
- name: Checkout
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
with:
fetch-depth: 0
- name: Setup repo
@@ -250,7 +247,7 @@ jobs:
bzip2 curl file g++ gcc patchelf gfortran git gzip \
make patch unzip xz-utils python3 python3-dev tree
- name: Checkout
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
with:
fetch-depth: 0
- name: Setup non-root user
@@ -268,7 +265,6 @@ jobs:
shell: runuser -u spack-test -- bash {0}
run: |
source share/spack/setup-env.sh
spack bootstrap disable github-actions-v0.4
spack bootstrap disable spack-install
spack -d gpg list
tree ~/.spack/bootstrap/store/
@@ -287,7 +283,7 @@ jobs:
make patch unzip xz-utils python3 python3-dev tree \
gawk
- name: Checkout
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
with:
fetch-depth: 0
- name: Setup non-root user
@@ -306,8 +302,8 @@ jobs:
run: |
source share/spack/setup-env.sh
spack solve zlib
spack bootstrap disable github-actions-v0.5
spack bootstrap disable github-actions-v0.4
spack bootstrap disable github-actions-v0.3
spack -d gpg list
tree ~/.spack/bootstrap/store/
@@ -320,11 +316,10 @@ jobs:
# Remove GnuPG since we want to bootstrap it
sudo rm -rf /usr/local/bin/gpg
- name: Checkout
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
- name: Bootstrap GnuPG
run: |
source share/spack/setup-env.sh
spack bootstrap disable github-actions-v0.4
spack bootstrap disable spack-install
spack -d gpg list
tree ~/.spack/bootstrap/store/
@@ -338,13 +333,13 @@ jobs:
# Remove GnuPG since we want to bootstrap it
sudo rm -rf /usr/local/bin/gpg
- name: Checkout
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
- name: Bootstrap GnuPG
run: |
source share/spack/setup-env.sh
spack solve zlib
spack bootstrap disable github-actions-v0.5
spack bootstrap disable github-actions-v0.4
spack bootstrap disable github-actions-v0.3
spack -d gpg list
tree ~/.spack/bootstrap/store/
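These jobs force Spack to bootstrap its own clingo and GnuPG by first disabling the prebuilt binary sources. The flow, condensed from the hunks above:

    $ source share/spack/setup-env.sh
    $ spack bootstrap disable github-actions-v0.4   # reject prebuilt binaries
    $ spack -d solve zlib                           # first solve bootstraps clingo
    $ tree ~/.spack/bootstrap/store/                # inspect the bootstrap store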

View File

@@ -38,11 +38,12 @@ jobs:
# Meaning of the various items in the matrix list
# 0: Container name (e.g. ubuntu-bionic)
# 1: Platforms to build for
# 2: Base image (e.g. ubuntu:22.04)
# 2: Base image (e.g. ubuntu:18.04)
dockerfile: [[amazon-linux, 'linux/amd64,linux/arm64', 'amazonlinux:2'],
[centos7, 'linux/amd64,linux/arm64,linux/ppc64le', 'centos:7'],
[centos-stream, 'linux/amd64,linux/arm64,linux/ppc64le', 'centos:stream'],
[leap15, 'linux/amd64,linux/arm64,linux/ppc64le', 'opensuse/leap:15'],
[ubuntu-bionic, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:18.04'],
[ubuntu-focal, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:20.04'],
[ubuntu-jammy, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:22.04'],
[almalinux8, 'linux/amd64,linux/arm64,linux/ppc64le', 'almalinux:8'],
@@ -55,22 +56,20 @@ jobs:
if: github.repository == 'spack/spack'
steps:
- name: Checkout
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2
uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # @v2
- uses: docker/metadata-action@96383f45573cb7f253c731d3b3ab81c87ef81934
id: docker_meta
with:
images: |
ghcr.io/${{ github.repository_owner }}/${{ matrix.dockerfile[0] }}
${{ github.repository_owner }}/${{ matrix.dockerfile[0] }}
tags: |
type=schedule,pattern=nightly
type=schedule,pattern=develop
type=semver,pattern={{version}}
type=semver,pattern={{major}}.{{minor}}
type=semver,pattern={{major}}
type=ref,event=branch
type=ref,event=pr
- name: Set Container Tag Normal (Nightly)
run: |
container="${{ matrix.dockerfile[0] }}:latest"
echo "container=${container}" >> $GITHUB_ENV
echo "versioned=${container}" >> $GITHUB_ENV
# On a new release create a container with the same tag as the release.
- name: Set Container Tag on Release
if: github.event_name == 'release'
run: |
versioned="${{matrix.dockerfile[0]}}:${GITHUB_REF##*/}"
echo "versioned=${versioned}" >> $GITHUB_ENV
- name: Generate the Dockerfile
env:
@@ -93,13 +92,13 @@ jobs:
path: dockerfiles
- name: Set up QEMU
uses: docker/setup-qemu-action@68827325e0b33c7199eb31dd4e31fbe9023e06e3
uses: docker/setup-qemu-action@68827325e0b33c7199eb31dd4e31fbe9023e06e3 # @v1
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@f95db51fddba0c2d1ec667646a06c2ce06100226
uses: docker/setup-buildx-action@f95db51fddba0c2d1ec667646a06c2ce06100226 # @v1
- name: Log in to GitHub Container Registry
uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d
uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d # @v1
with:
registry: ghcr.io
username: ${{ github.actor }}
@@ -107,18 +106,21 @@ jobs:
- name: Log in to DockerHub
if: github.event_name != 'pull_request'
uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d
uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d # @v1
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Build & Deploy ${{ matrix.dockerfile[0] }}
uses: docker/build-push-action@0565240e2d4ab88bba5387d719585280857ece09
uses: docker/build-push-action@0565240e2d4ab88bba5387d719585280857ece09 # @v2
with:
context: dockerfiles/${{ matrix.dockerfile[0] }}
platforms: ${{ matrix.dockerfile[1] }}
push: ${{ github.event_name != 'pull_request' }}
cache-from: type=gha
cache-to: type=gha,mode=max
tags: ${{ steps.docker_meta.outputs.tags }}
labels: ${{ steps.docker_meta.outputs.labels }}
tags: |
spack/${{ env.container }}
spack/${{ env.versioned }}
ghcr.io/spack/${{ env.container }}
ghcr.io/spack/${{ env.versioned }}
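Each matrix entry above is a positional three-element list, so steps index into it by number rather than by key. A hypothetical step illustrating the convention documented in the matrix comments:

    # matrix.dockerfile[0] = container name, [1] = platforms, [2] = base image
    - name: Show matrix fields
      run: echo "${{ matrix.dockerfile[0] }} builds on ${{ matrix.dockerfile[2] }}"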

View File

@@ -35,7 +35,7 @@ jobs:
core: ${{ steps.filter.outputs.core }}
packages: ${{ steps.filter.outputs.packages }}
steps:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2
- uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # @v2
if: ${{ github.event_name == 'push' }}
with:
fetch-depth: 0

View File

@@ -14,10 +14,10 @@ jobs:
build-paraview-deps:
runs-on: windows-latest
steps:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
- uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
with:
fetch-depth: 0
- uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236
- uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1
with:
python-version: 3.9
- name: Install Python packages

View File

@@ -1,7 +1,7 @@
black==23.10.1
black==23.9.1
clingo==5.6.2
flake8==6.1.0
isort==5.12.0
mypy==1.6.1
mypy==1.5.1
types-six==1.16.21.9
vermin==1.5.2
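This file pins the style-check toolchain so CI results are reproducible. Assuming it is consumed as an ordinary pip requirements file, the same environment can be recreated locally:

    $ pip install -r requirements.txt
    $ black --version   # should report the pinned version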

View File

@@ -15,7 +15,7 @@ jobs:
strategy:
matrix:
os: [ubuntu-latest]
python-version: ['3.7', '3.8', '3.9', '3.10', '3.11', '3.12']
python-version: ['3.7', '3.8', '3.9', '3.10', '3.11']
concretizer: ['clingo']
on_develop:
- ${{ github.ref == 'refs/heads/develop' }}
@@ -45,16 +45,12 @@ jobs:
os: ubuntu-latest
concretizer: 'clingo'
on_develop: false
- python-version: '3.11'
os: ubuntu-latest
concretizer: 'clingo'
on_develop: false
steps:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2
- uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # @v2
with:
fetch-depth: 0
- uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # @v2
- uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # @v2
with:
python-version: ${{ matrix.python-version }}
- name: Install System packages
@@ -98,10 +94,10 @@ jobs:
shell:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2
- uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # @v2
with:
fetch-depth: 0
- uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # @v2
- uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # @v2
with:
python-version: '3.11'
- name: Install System packages
@@ -137,7 +133,7 @@ jobs:
dnf install -y \
bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
make patch tcl unzip which xz
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2
- uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # @v2
- name: Setup repo and non-root user
run: |
git --version
@@ -156,10 +152,10 @@ jobs:
clingo-cffi:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2
- uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # @v2
with:
fetch-depth: 0
- uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # @v2
- uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # @v2
with:
python-version: '3.11'
- name: Install System packages
@@ -189,12 +185,12 @@ jobs:
runs-on: macos-latest
strategy:
matrix:
python-version: ["3.11"]
python-version: ["3.10"]
steps:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2
- uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # @v2
with:
fetch-depth: 0
- uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # @v2
- uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # @v2
with:
python-version: ${{ matrix.python-version }}
- name: Install Python packages

View File

@@ -18,8 +18,8 @@ jobs:
validate:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
- uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236
- uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
- uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1
with:
python-version: '3.11'
cache: 'pip'
@@ -35,10 +35,10 @@ jobs:
style:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
- uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
with:
fetch-depth: 0
- uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236
- uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1
with:
python-version: '3.11'
cache: 'pip'
@@ -69,7 +69,7 @@ jobs:
dnf install -y \
bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
make patch tcl unzip which xz
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # @v2
- uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # @v2
- name: Setup repo and non-root user
run: |
git --version

View File

@@ -15,10 +15,10 @@ jobs:
unit-tests:
runs-on: windows-latest
steps:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
- uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
with:
fetch-depth: 0
- uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236
- uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1
with:
python-version: 3.9
- name: Install Python packages
@@ -39,10 +39,10 @@ jobs:
unit-tests-cmd:
runs-on: windows-latest
steps:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
- uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
with:
fetch-depth: 0
- uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236
- uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1
with:
python-version: 3.9
- name: Install Python packages
@@ -63,10 +63,10 @@ jobs:
build-abseil:
runs-on: windows-latest
steps:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11
- uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
with:
fetch-depth: 0
- uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236
- uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1
with:
python-version: 3.9
- name: Install Python packages

View File

@@ -1,33 +1,3 @@
# v0.20.3 (2023-10-31)
## Bugfixes
- Fix a bug where `spack mirror set-url` would drop configured connection info (reverts #34210)
- Fix a minor issue with package hash computation for Python 3.12 (#40328)
# v0.20.2 (2023-10-03)
## Features in this release
Spack now supports Python 3.12 (#40155)
## Bugfixes
- Improve escaping in Tcl module files (#38375)
- Make repo cache work on repositories with zero mtime (#39214)
- Ignore errors for newer, incompatible buildcache version (#40279)
- Print an error when git is required, but missing (#40254)
- Ensure missing build dependencies get installed when using `spack install --overwrite` (#40252)
- Fix an issue where Spack freezes when the build process unexpectedly exits (#39015)
- Fix a bug where installation failures cause an unrelated `NameError` to be thrown (#39017)
- Fix an issue where Spack package versions would be incorrectly derived from git tags (#39414)
- Fix a bug triggered when file locking fails internally (#39188)
- Prevent "spack external find" to error out when a directory cannot be accessed (#38755)
- Fix multiple performance regressions in environments (#38771)
- Add more ignored modules to `pyproject.toml` for `mypy` (#38769)
# v0.20.1 (2023-07-10)
## Spack Bugfixes

View File

@@ -27,53 +27,12 @@
# And here's the CITATION.cff format:
#
cff-version: 1.2.0
type: software
message: "If you are referencing Spack in a publication, please cite the paper below."
title: "The Spack Package Manager: Bringing Order to HPC Software Chaos"
abstract: >-
Large HPC centers spend considerable time supporting software for thousands of users, but the complexity of HPC software is quickly outpacing the capabilities of existing software management tools.
Scientific applications require specific versions of compilers, MPI, and other dependency libraries, so using a single, standard software stack is infeasible.
However, managing many configurations is difficult because the configuration space is combinatorial in size.
We introduce Spack, a tool used at Lawrence Livermore National Laboratory to manage this complexity.
Spack provides a novel, recursive specification syntax to invoke parametric builds of packages and dependencies.
It allows any number of builds to coexist on the same system, and it ensures that installed packages can find their dependencies, regardless of the environment.
We show through real-world use cases that Spack supports diverse and demanding applications, bringing order to HPC software chaos.
preferred-citation:
title: "The Spack Package Manager: Bringing Order to HPC Software Chaos"
type: conference-paper
url: "https://tgamblin.github.io/pubs/spack-sc15.pdf"
doi: "10.1145/2807591.2807623"
url: "https://github.com/spack/spack"
authors:
- family-names: "Gamblin"
given-names: "Todd"
- family-names: "LeGendre"
given-names: "Matthew"
- family-names: "Collette"
given-names: "Michael R."
- family-names: "Lee"
given-names: "Gregory L."
- family-names: "Moody"
given-names: "Adam"
- family-names: "de Supinski"
given-names: "Bronis R."
- family-names: "Futral"
given-names: "Scott"
conference:
name: "Supercomputing 2015 (SC15)"
city: "Austin"
region: "Texas"
country: "US"
date-start: 2015-11-15
date-end: 2015-11-20
month: 11
year: 2015
identifiers:
- description: "The concept DOI of the work."
type: doi
value: 10.1145/2807591.2807623
- description: "The DOE Document Release Number of the work"
type: other
value: "LLNL-CONF-669890"
authors:
- family-names: "Gamblin"
given-names: "Todd"
- family-names: "LeGendre"
@@ -88,3 +47,12 @@ authors:
given-names: "Bronis R."
- family-names: "Futral"
given-names: "Scott"
title: "The Spack Package Manager: Bringing Order to HPC Software Chaos"
conference:
name: "Supercomputing 2015 (SC15)"
city: "Austin"
region: "Texas"
country: "USA"
month: November 15-20
year: 2015
notes: LLNL-CONF-669890
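For reference, the preferred citation maps onto BibTeX roughly as follows (assembled from the CFF fields above, not taken verbatim from the repository):

    @inproceedings{gamblin2015spack,
      title     = {The Spack Package Manager: Bringing Order to HPC Software Chaos},
      author    = {Gamblin, Todd and LeGendre, Matthew and Collette, Michael R. and
                   Lee, Gregory L. and Moody, Adam and de Supinski, Bronis R. and Futral, Scott},
      booktitle = {Supercomputing 2015 (SC15)},
      address   = {Austin, Texas, USA},
      month     = nov,
      year      = {2015},
      doi       = {10.1145/2807591.2807623},
      note      = {LLNL-CONF-669890}
    }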

View File

@@ -7,7 +7,6 @@
[![Read the Docs](https://readthedocs.org/projects/spack/badge/?version=latest)](https://spack.readthedocs.io)
[![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black)
[![Slack](https://slack.spack.io/badge.svg)](https://slack.spack.io)
[![Matrix](https://img.shields.io/matrix/spack-space%3Amatrix.org?label=Matrix)](https://matrix.to/#/#spack-space:matrix.org)
Spack is a multi-platform package manager that builds and installs
multiple versions and configurations of software. It works on Linux,
@@ -63,10 +62,7 @@ Resources:
* **Slack workspace**: [spackpm.slack.com](https://spackpm.slack.com).
To get an invitation, visit [slack.spack.io](https://slack.spack.io).
* **Matrix space**: [#spack-space:matrix.org](https://matrix.to/#/#spack-space:matrix.org):
[bridged](https://github.com/matrix-org/matrix-appservice-slack#matrix-appservice-slack) to Slack.
* [**Github Discussions**](https://github.com/spack/spack/discussions):
not just for discussions, but also Q&A.
* [**Github Discussions**](https://github.com/spack/spack/discussions): not just for discussions, also Q&A.
* **Mailing list**: [groups.google.com/d/forum/spack](https://groups.google.com/d/forum/spack)
* **Twitter**: [@spackpm](https://twitter.com/spackpm). Be sure to
`@mention` us!

View File

@@ -9,15 +9,15 @@ bootstrap:
# may not be able to bootstrap all the software that Spack needs,
# depending on its type.
sources:
- name: 'github-actions-v0.5'
metadata: $spack/share/spack/bootstrap/github-actions-v0.5
- name: 'github-actions-v0.4'
metadata: $spack/share/spack/bootstrap/github-actions-v0.4
- name: 'github-actions-v0.3'
metadata: $spack/share/spack/bootstrap/github-actions-v0.3
- name: 'spack-install'
metadata: $spack/share/spack/bootstrap/spack-install
trusted:
# By default we trust bootstrapping from sources and from binaries
# produced on Github via the workflow
github-actions-v0.5: true
github-actions-v0.4: true
github-actions-v0.3: true
spack-install: true
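Each named source here corresponds to a target of the spack bootstrap command used in the workflow hunks above; for example, to refuse source bootstrapping and rely on prebuilt binaries only:

    $ spack bootstrap disable spack-install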

View File

@@ -41,4 +41,4 @@ concretizer:
# "none": allows a single node for any package in the DAG.
# "minimal": allows the duplication of 'build-tools' nodes only (e.g. py-setuptools, cmake etc.)
# "full" (experimental): allows separation of the entire build-tool stack (e.g. the entire "cmake" subDAG)
strategy: minimal
strategy: none
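For context, this knob lives under the duplicates section of concretizer.yaml, as described in the build-settings documentation further down. A minimal sketch:

    concretizer:
      duplicates:
        # "none": one configuration per package (pre-v0.21 behavior)
        # "minimal": duplicate 'build-tools' nodes only (e.g. cmake, py-setuptools)
        strategy: minimal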

View File

@@ -229,11 +229,3 @@ config:
flags:
# Whether to keep -Werror flags active in package builds.
keep_werror: 'none'
# A mapping of aliases that can be used to define new commands. For instance,
# `sp: spec -I` will define a new command `sp` that will execute `spec` with
# the `-I` argument. Aliases cannot override existing commands.
aliases:
concretise: concretize
containerise: containerize
rm: remove
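With a mapping like the one above, Spack expands the alias before normal command parsing; for example:

    $ spack concretise -f    # runs `spack concretize -f`
    $ spack sp zlib          # with `sp: spec -I` configured, runs `spack spec -I zlib`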

View File

@@ -46,12 +46,10 @@ modules:
tcl:
all:
autoload: direct
hide_implicits: true
# Default configurations if lmod is enabled
lmod:
all:
autoload: direct
hide_implicits: true
hierarchy:
- mpi

View File

@@ -1,3 +1,4 @@
package_list.html
command_index.rst
spack*.rst
llnl*.rst

View File

@@ -45,8 +45,7 @@ Listing available packages
To install software with Spack, you need to know what software is
available. You can see a list of available package names at the
`packages.spack.io <https://packages.spack.io>`_ website, or
using the ``spack list`` command.
:ref:`package-list` webpage, or using the ``spack list`` command.
.. _cmd-spack-list:
@@ -61,7 +60,7 @@ can install:
:ellipsis: 10
There are thousands of them, so we've truncated the output above, but you
can find a `full list here <https://packages.spack.io>`_.
can find a :ref:`full list here <package-list>`.
Packages are listed by name in alphabetical order.
A pattern to match with no wildcards, ``*`` or ``?``,
will be treated as though it started and ended with
@@ -1526,30 +1525,6 @@ any MPI implementation will do. If another package depends on
error. Likewise, if you try to plug in some package that doesn't
provide MPI, Spack will raise an error.
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Explicit binding of virtual dependencies
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
There are packages that provide more than just one virtual dependency. When interacting with them, users
might want to utilize just a subset of what they could provide, and use other providers for virtuals they
need.
It is possible to be more explicit and tell Spack which dependency should provide which virtual, using a
special syntax:
.. code-block:: console
$ spack spec strumpack ^[virtuals=mpi] intel-parallel-studio+mkl ^[virtuals=lapack] openblas
Concretizing the spec above produces the following DAG:
.. figure:: images/strumpack_virtuals.svg
:scale: 60 %
:align: center
where ``intel-parallel-studio`` *could* provide ``mpi``, ``lapack``, and ``blas`` but is used only for the former. The ``lapack``
and ``blas`` dependencies are satisfied by ``openblas``.
^^^^^^^^^^^^^^^^^^^^^^^^
Specifying Specs by Hash
^^^^^^^^^^^^^^^^^^^^^^^^

View File

@@ -155,182 +155,16 @@ List of popular build caches
* `Extreme-scale Scientific Software Stack (E4S) <https://e4s-project.github.io/>`_: `build cache <https://oaciss.uoregon.edu/e4s/inventory.html>`_
----------
Relocation
----------
When using buildcaches across different machines, it is likely that the install
root will be different from the one used to build the binaries.
To address this issue, Spack automatically relocates all paths encoded in binaries
and scripts to their new location upon install.
Note that there are some cases where this is not possible: if binaries are built in
a relatively short path, and then installed to a longer path, there may not be enough
space in the binary to encode the new path. In this case, Spack will fail to install
the package from the build cache, and a source build is required.
To reduce the likelihood of this happening, it is highly recommended to add padding to
the install root during the build, as specified in the :ref:`config <config-yaml>`
section of the configuration:
.. code-block:: yaml
config:
install_tree:
root: /opt/spack
padded_length: 128
-----------------------------------------
OCI / Docker V2 registries as build cache
-----------------------------------------
Spack can also use OCI or Docker V2 registries such as Dockerhub, Quay.io,
Github Packages, GitLab Container Registry, JFrog Artifactory, and others
as build caches. This is a convenient way to share binaries using public
infrastructure, or to cache Spack built binaries in Github Actions and
GitLab CI.
To get started, configure an OCI mirror using ``oci://`` as the scheme,
and optionally specify a username and password (or personal access token):
.. code-block:: console
$ spack mirror add --oci-username username --oci-password password my_registry oci://example.com/my_image
Spack follows the naming conventions of Docker, with Dockerhub as the default
registry. To use Dockerhub, you can omit the registry domain:
.. code-block:: console
$ spack mirror add --oci-username username --oci-password password my_registry oci://username/my_image
From here, you can use the mirror as any other build cache:
.. code-block:: console
$ spack buildcache push my_registry <specs...> # push to the registry
$ spack install <specs...> # install from the registry
A unique feature of buildcaches on top of OCI registries is that it's incredibly
easy to generate a runnable container image with the binaries installed. This
is a great way to make applications available to users without requiring them to
install Spack -- all you need is Docker, Podman or any other OCI-compatible container
runtime.
To produce container images, all you need to do is add the ``--base-image`` flag
when pushing to the build cache:
.. code-block:: console
$ spack buildcache push --base-image ubuntu:20.04 my_registry ninja
Pushed to example.com/my_image:ninja-1.11.1-yxferyhmrjkosgta5ei6b4lqf6bxbscz.spack
$ docker run -it example.com/my_image:ninja-1.11.1-yxferyhmrjkosgta5ei6b4lqf6bxbscz.spack
root@e4c2b6f6b3f4:/# ninja --version
1.11.1
If ``--base-image`` is not specified, distroless images are produced. In practice,
you won't be able to run these as containers, since they don't come with libc and
other system dependencies. However, they are still compatible with tools like
``skopeo``, ``podman``, and ``docker`` for pulling and pushing.
.. note::
The docker ``overlayfs2`` storage driver is limited to 128 layers, above which a
``max depth exceeded`` error may be produced when pulling the image. There
are `alternative drivers <https://docs.docker.com/storage/storagedriver/>`_.
------------------------------------
Spack build cache for GitHub Actions
------------------------------------
To significantly speed up Spack in GitHub Actions, binaries can be cached in
GitHub Packages. This service is an OCI registry that can be linked to a GitHub
repository.
A typical workflow is to include a ``spack.yaml`` environment in your repository
that specifies the packages to install, the target architecture, and the build
cache to use under ``mirrors``:
.. code-block:: yaml
spack:
specs:
- python@3.11
config:
install_tree:
root: /opt/spack
padded_length: 128
packages:
all:
require: target=x86_64_v2
mirrors:
local-buildcache: oci://ghcr.io/<organization>/<repository>
A GitHub action can then be used to install the packages and push them to the
build cache:
.. code-block:: yaml
name: Install Spack packages
on: push
env:
SPACK_COLOR: always
jobs:
example:
runs-on: ubuntu-22.04
permissions:
packages: write
steps:
- name: Checkout
uses: actions/checkout@v3
- name: Checkout Spack
uses: actions/checkout@v3
with:
repository: spack/spack
path: spack
- name: Setup Spack
run: echo "$PWD/spack/bin" >> "$GITHUB_PATH"
- name: Concretize
run: spack -e . concretize
- name: Install
run: spack -e . install --no-check-signature
- name: Run tests
run: ./my_view/bin/python3 -c 'print("hello world")'
- name: Push to buildcache
run: |
spack -e . mirror set --oci-username ${{ github.actor }} --oci-password "${{ secrets.GITHUB_TOKEN }}" local-buildcache
spack -e . buildcache push --base-image ubuntu:22.04 --unsigned --update-index local-buildcache
if: ${{ !cancelled() }}
The first time this action runs, it will build the packages from source and
push them to the build cache. Subsequent runs will pull the binaries from the
build cache. The concretizer will ensure that prebuilt binaries are favored
over source builds.
The build cache entries appear in the GitHub Packages section of your repository,
and contain instructions for pulling and running them with ``docker`` or ``podman``.
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Using Spack's public build cache for GitHub Actions
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Spack offers a public build cache for GitHub Actions with a set of common packages,
which lets you get started quickly. See the following resources for more information:
* `spack/github-actions-buildcache <https://github.com/spack/github-actions-buildcache>`_
Initial build and later installation do not necessarily happen at the same
location. Spack provides a relocation capability and corrects for RPATHs and
non-relocatable scripts. However, many packages compile paths into binary
artifacts directly. In such cases, the build instructions of this package would
need to be adjusted for better re-locatability.
.. _cmd-spack-buildcache:

View File

@@ -3,103 +3,6 @@
SPDX-License-Identifier: (Apache-2.0 OR MIT)
.. _concretizer-options:
==========================================
Concretization Settings (concretizer.yaml)
==========================================
The ``concretizer.yaml`` configuration file allows you to customize aspects of the
algorithm used to select the dependencies you install. The default configuration
is the following:
.. literalinclude:: _spack_root/etc/spack/defaults/concretizer.yaml
:language: yaml
--------------------------------
Reuse already installed packages
--------------------------------
The ``reuse`` attribute controls whether Spack will prefer to use installed packages (``true``), or
whether it will do a "fresh" installation and prefer the latest settings from
``package.py`` files and ``packages.yaml`` (``false``).
You can use:
.. code-block:: console
% spack install --reuse <spec>
to enable reuse for a single installation, and you can use:
.. code-block:: console
spack install --fresh <spec>
to do a fresh install if ``reuse`` is enabled by default.
``reuse: true`` is the default.
------------------------------------------
Selection of the target microarchitectures
------------------------------------------
The options under the ``targets`` attribute control which targets are considered during a solve.
Currently the options in this section are only configurable from the ``concretizer.yaml`` file
and there are no corresponding command line arguments to enable them for a single solve.
The ``granularity`` option can take two possible values: ``microarchitectures`` and ``generic``.
If set to:
.. code-block:: yaml
concretizer:
targets:
granularity: microarchitectures
Spack will consider all the microarchitectures known to ``archspec`` to label nodes for
compatibility. If instead the option is set to:
.. code-block:: yaml
concretizer:
targets:
granularity: generic
Spack will consider only generic microarchitectures. For instance, when running on an
Haswell node, Spack will consider ``haswell`` as the best target in the former case and
``x86_64_v3`` as the best target in the latter case.
The ``host_compatible`` option is a Boolean option that determines whether or not the
microarchitectures considered during the solve are constrained to be compatible with the
host Spack is currently running on. For instance, if this option is set to ``true``, a
user cannot concretize for ``target=icelake`` while running on an Haswell node.
---------------
Duplicate nodes
---------------
The ``duplicates`` attribute controls whether the DAG can contain multiple configurations of
the same package. This is mainly relevant for build dependencies, which may have their version
pinned by some nodes, and thus be required at different versions by different nodes in the same
DAG.
The ``strategy`` option controls how the solver deals with duplicates. If the value is ``none``,
then a single configuration per package is allowed in the DAG. This means, for instance, that only
a single ``cmake`` or a single ``py-setuptools`` version is allowed. The result would be a slightly
faster concretization, at the expense of making a few specs unsolvable.
If the value is ``minimal`` Spack will allow packages tagged as ``build-tools`` to have duplicates.
This makes it possible, for instance, to concretize specs whose nodes require different, and incompatible, ranges
of some build tool. For instance, in the figure below the latest `py-shapely` requires a newer `py-setuptools`,
while `py-numpy` still needs an older version:
.. figure:: images/shapely_duplicates.svg
:scale: 70 %
:align: center
Up to Spack v0.20 ``duplicates:strategy:none`` was the default (and only) behavior. From Spack v0.21 the
default behavior is ``duplicates:strategy:minimal``.
.. _build-settings:
================================
@@ -329,6 +232,76 @@ Specific limitations include:
then Spack will not add a new external entry (``spack config blame packages``
can help locate all external entries).
.. _concretizer-options:
----------------------
Concretizer options
----------------------
``packages.yaml`` gives the concretizer preferences for specific packages,
but you can also use ``concretizer.yaml`` to customize aspects of the
algorithm it uses to select the dependencies you install:
.. literalinclude:: _spack_root/etc/spack/defaults/concretizer.yaml
:language: yaml
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Reuse already installed packages
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
The ``reuse`` attribute controls whether Spack will prefer to use installed packages (``true``), or
whether it will do a "fresh" installation and prefer the latest settings from
``package.py`` files and ``packages.yaml`` (``false``).
You can use:
.. code-block:: console
% spack install --reuse <spec>
to enable reuse for a single installation, and you can use:
.. code-block:: console
spack install --fresh <spec>
to do a fresh install if ``reuse`` is enabled by default.
``reuse: true`` is the default.
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Selection of the target microarchitectures
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
The options under the ``targets`` attribute control which targets are considered during a solve.
Currently the options in this section are only configurable from the ``concretizer.yaml`` file
and there are no corresponding command line arguments to enable them for a single solve.
The ``granularity`` option can take two possible values: ``microarchitectures`` and ``generic``.
If set to:
.. code-block:: yaml
concretizer:
targets:
granularity: microarchitectures
Spack will consider all the microarchitectures known to ``archspec`` to label nodes for
compatibility. If instead the option is set to:
.. code-block:: yaml
concretizer:
targets:
granularity: generic
Spack will consider only generic microarchitectures. For instance, when running on an
Haswell node, Spack will consider ``haswell`` as the best target in the former case and
``x86_64_v3`` as the best target in the latter case.
The ``host_compatible`` option is a Boolean option that determines whether or not the
microarchitectures considered during the solve are constrained to be compatible with the
host Spack is currently running on. For instance, if this option is set to ``true``, a
user cannot concretize for ``target=icelake`` while running on an Haswell node.
.. _package-requirements:
--------------------
@@ -526,52 +499,56 @@ Package Preferences
In some cases package requirements can be too strong, and package
preferences are the better option. Package preferences do not impose
constraints on packages for particular versions or variants values,
they rather only set defaults. The concretizer is free to change
them if it must, due to other constraints, and also prefers reusing
installed packages over building new ones that are a better match for
preferences.
they rather only set defaults -- the concretizer is free to change
them if it must due to other constraints. Also note that package
preferences are of lower priority than reuse of already installed
packages.
Most package preferences (``compilers``, ``target`` and ``providers``)
can only be set globally under the ``all`` section of ``packages.yaml``:
.. code-block:: yaml
packages:
all:
compiler: [gcc@12.2.0, clang@12:, oneapi@2023:]
target: [x86_64_v3]
providers:
mpi: [mvapich2, mpich, openmpi]
These preferences override Spack's default and effectively reorder priorities
when looking for the best compiler, target or virtual package provider. Each
preference takes an ordered list of spec constraints, with earlier entries in
the list being preferred over later entries.
In the example above all packages prefer to be compiled with ``gcc@12.2.0``,
to target the ``x86_64_v3`` microarchitecture and to use ``mvapich2`` if they
depend on ``mpi``.
The ``variants`` and ``version`` preferences can be set under
package specific sections of the ``packages.yaml`` file:
Here's an example ``packages.yaml`` file that sets preferred packages:
.. code-block:: yaml
packages:
opencv:
compiler: [gcc@4.9]
variants: +debug
gperftools:
version: [2.2, 2.4, 2.3]
all:
compiler: [gcc@4.4.7, 'gcc@4.6:', intel, clang, pgi]
target: [sandybridge]
providers:
mpi: [mvapich2, mpich, openmpi]
In this case, the preference for ``opencv`` is to build with debug options, while
``gperftools`` prefers version 2.2 over 2.4.
At a high level, this example is specifying how packages are preferably
concretized. The opencv package should prefer using GCC 4.9 and
be built with debug options. The gperftools package should prefer version
2.2 over 2.4. Every package on the system should prefer mvapich2 for
its MPI and GCC 4.4.7 (except for opencv, which overrides this by preferring GCC 4.9).
These options are used to fill in implicit defaults. Any of them can be overwritten
on the command line if explicitly requested.
Any preference can be overwritten on the command line if explicitly requested.
Package preferences accept the following keys or components under
the specific package (or ``all``) section: ``compiler``, ``variants``,
``version``, ``providers``, and ``target``. Each component has an
ordered list of spec ``constraints``, with earlier entries in the
list being preferred over later entries.
Preferences cannot overcome explicit constraints, as they only set a preferred
ordering among homogeneous attribute values. Going back to the example, if
``gperftools@2.3:`` was requested, then Spack will install version 2.4
since the most preferred version 2.2 is prohibited by the version constraint.
Sometimes a package installation may have constraints that forbid
the first concretization rule, in which case Spack will use the first
legal concretization rule. Going back to the example, if a user
requests gperftools 2.3 or later, then Spack will install version 2.4
as the 2.4 version of gperftools is preferred over 2.3.
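In other words, preferences only reorder otherwise-valid choices; an explicit constraint always wins. For example:

    $ spack install gperftools          # picks 2.2, the most preferred version
    $ spack install gperftools@2.3:     # 2.2 is ruled out, so 2.4 is picked over 2.3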
An explicit concretization rule in the preferred section will always
take preference over unlisted concretizations. In the above example,
xlc isn't listed in the compiler list. Every listed compiler from
gcc to pgi will thus be preferred over the xlc compiler.
The syntax for the ``provider`` section differs slightly from other
concretization rules. A provider lists a value that packages may
``depends_on`` (e.g., MPI) and a list of rules for fulfilling that
dependency.
.. _package_permissions:

View File

@@ -127,9 +127,9 @@ check out a commit from the ``master`` branch, you would want to add:
.. code-block:: python
depends_on("autoconf", type="build", when="@master")
depends_on("automake", type="build", when="@master")
depends_on("libtool", type="build", when="@master")
depends_on('autoconf', type='build', when='@master')
depends_on('automake', type='build', when='@master')
depends_on('libtool', type='build', when='@master')
It is typically redundant to list the ``m4`` macro processor package as a
dependency, since ``autoconf`` already depends on it.
@@ -145,7 +145,7 @@ example, the ``bash`` shell is used to run the ``autogen.sh`` script.
.. code-block:: python
def autoreconf(self, spec, prefix):
which("bash")("autogen.sh")
which('bash')('autogen.sh')
"""""""""""""""""""""""""""""""""""""""
patching configure or Makefile.in files
@@ -186,9 +186,9 @@ To opt out of this feature, use the following setting:
To enable it conditionally on different architectures, define a property and
make the package depend on ``gnuconfig`` as a build dependency:
.. code-block:: python
.. code-block
depends_on("gnuconfig", when="@1.0:")
depends_on('gnuconfig', when='@1.0:')
@property
def patch_config_files(self):
@@ -230,7 +230,7 @@ version, this can be done like so:
@property
def force_autoreconf(self):
return self.version == Version("1.2.3")
return self.version == Version('1.2.3')
^^^^^^^^^^^^^^^^^^^^^^^
Finding configure flags
@@ -278,22 +278,13 @@ function like so:
def configure_args(self):
args = []
if self.spec.satisfies("+mpi"):
args.append("--enable-mpi")
if '+mpi' in self.spec:
args.append('--enable-mpi')
else:
args.append("--disable-mpi")
args.append('--disable-mpi')
return args
Alternatively, you can use the :ref:`enable_or_disable <autotools_enable_or_disable>` helper:
.. code-block:: python
def configure_args(self):
return [self.enable_or_disable("mpi")]
Note that we are explicitly disabling MPI support if it is not
requested. This is important, as many Autotools packages will enable
options by default if the dependencies are found, and disable them
@@ -304,11 +295,9 @@ and `here <https://wiki.gentoo.org/wiki/Project:Quality_Assurance/Automagic_depe
for a rationale as to why these so-called "automagic" dependencies
are a problem.
.. note::
By default, Autotools installs packages to ``/usr``. We don't want this,
so Spack automatically adds ``--prefix=/path/to/installation/prefix``
to your list of ``configure_args``. You don't need to add this yourself.
By default, Autotools installs packages to ``/usr``. We don't want this,
so Spack automatically adds ``--prefix=/path/to/installation/prefix``
to your list of ``configure_args``. You don't need to add this yourself.
^^^^^^^^^^^^^^^^
Helper functions
@@ -319,8 +308,6 @@ You may have noticed that most of the Autotools flags are of the form
``--without-baz``. Since these flags are so common, Spack provides a
couple of helper functions to make your life easier.
.. _autotools_enable_or_disable:
"""""""""""""""""
enable_or_disable
"""""""""""""""""
@@ -332,11 +319,11 @@ typically used to enable or disable some feature within the package.
.. code-block:: python
variant(
"memchecker",
'memchecker',
default=False,
description="Memchecker support for debugging [degrades performance]"
description='Memchecker support for debugging [degrades performance]'
)
config_args.extend(self.enable_or_disable("memchecker"))
config_args.extend(self.enable_or_disable('memchecker'))
In this example, specifying the variant ``+memchecker`` will generate
the following configuration options:
@@ -356,15 +343,15 @@ the ``with_or_without`` method.
.. code-block:: python
variant(
"schedulers",
'schedulers',
values=disjoint_sets(
("auto",), ("alps", "lsf", "tm", "slurm", "sge", "loadleveler")
).with_non_feature_values("auto", "none"),
('auto',), ('alps', 'lsf', 'tm', 'slurm', 'sge', 'loadleveler')
).with_non_feature_values('auto', 'none'),
description="List of schedulers for which support is enabled; "
"'auto' lets openmpi determine",
)
if not spec.satisfies("schedulers=auto"):
config_args.extend(self.with_or_without("schedulers"))
if 'schedulers=auto' not in spec:
config_args.extend(self.with_or_without('schedulers'))
In this example, specifying the variant ``schedulers=slurm,sge`` will
generate the following configuration options:
@@ -389,16 +376,16 @@ generated, using the ``activation_value`` argument to
.. code-block:: python
variant(
"fabrics",
'fabrics',
values=disjoint_sets(
("auto",), ("psm", "psm2", "verbs", "mxm", "ucx", "libfabric")
).with_non_feature_values("auto", "none"),
('auto',), ('psm', 'psm2', 'verbs', 'mxm', 'ucx', 'libfabric')
).with_non_feature_values('auto', 'none'),
description="List of fabrics that are enabled; "
"'auto' lets openmpi determine",
)
if not spec.satisfies("fabrics=auto"):
config_args.extend(self.with_or_without("fabrics",
activation_value="prefix"))
if 'fabrics=auto' not in spec:
config_args.extend(self.with_or_without('fabrics',
activation_value='prefix'))
``activation_value`` accepts a callable that generates the configure
parameter value given the variant value; but the special value
@@ -422,16 +409,16 @@ When Spack variants and configure flags do not correspond one-to-one, the
.. code-block:: python
variant("debug_tools", default=False)
config_args += self.enable_or_disable("debug-tools", variant="debug_tools")
variant('debug_tools', default=False)
config_args += self.enable_or_disable('debug-tools', variant='debug_tools')
Or when one variant controls multiple flags:
.. code-block:: python
variant("debug_tools", default=False)
config_args += self.with_or_without("memchecker", variant="debug_tools")
config_args += self.with_or_without("profiler", variant="debug_tools")
variant('debug_tools', default=False)
config_args += self.with_or_without('memchecker', variant='debug_tools')
config_args += self.with_or_without('profiler', variant='debug_tools')
""""""""""""""""""""
@@ -445,8 +432,8 @@ For example:
.. code-block:: python
variant("profiler", when="@2.0:")
config_args += self.with_or_without("profiler")
variant('profiler', when='@2.0:')
config_args += self.with_or_without('profiler')
will neither add ``--with-profiler`` nor ``--without-profiler`` when the version is
below ``2.0``.
@@ -465,10 +452,10 @@ the variant values require atypical behavior.
def with_or_without_verbs(self, activated):
# Up through version 1.6, this option was named --with-openib.
# In version 1.7, it was renamed to be --with-verbs.
opt = "verbs" if self.spec.satisfies("@1.7:") else "openib"
opt = 'verbs' if self.spec.satisfies('@1.7:') else 'openib'
if not activated:
return f"--without-{opt}"
return f"--with-{opt}={self.spec['rdma-core'].prefix}"
return '--without-{0}'.format(opt)
return '--with-{0}={1}'.format(opt, self.spec['rdma-core'].prefix)
Defining ``with_or_without_verbs`` overrides the behavior of a
``fabrics=verbs`` variant, changing the configure-time option to
@@ -492,7 +479,7 @@ do this like so:
.. code-block:: python
configure_directory = "src"
configure_directory = 'src'
^^^^^^^^^^^^^^^^^^^^^^
Building out of source
@@ -504,7 +491,7 @@ This can be done using the ``build_directory`` variable:
.. code-block:: python
build_directory = "spack-build"
build_directory = 'spack-build'
By default, Spack will build the package in the same directory that
contains the ``configure`` script
@@ -527,8 +514,8 @@ library or build the documentation, you can add these like so:
.. code-block:: python
build_targets = ["all", "docs"]
install_targets = ["install", "docs"]
build_targets = ['all', 'docs']
install_targets = ['install', 'docs']
^^^^^^^
Testing

View File

@@ -87,7 +87,7 @@ A typical usage of these methods may look something like this:
.. code-block:: python
def initconfig_mpi_entries(self):
def initconfig_mpi_entries(self)
# Get existing MPI configurations
entries = super(self, Foo).initconfig_mpi_entries()
@@ -95,25 +95,25 @@ A typical usage of these methods may look something like this:
# This spec has an MPI variant, and we need to enable MPI when it is on.
# This hypothetical package controls MPI with the ``FOO_MPI`` option to
# cmake.
if self.spec.satisfies("+mpi"):
entries.append(cmake_cache_option("FOO_MPI", True, "enable mpi"))
if '+mpi' in self.spec:
entries.append(cmake_cache_option('FOO_MPI', True, "enable mpi"))
else:
entries.append(cmake_cache_option("FOO_MPI", False, "disable mpi"))
entries.append(cmake_cache_option('FOO_MPI', False, "disable mpi"))
def initconfig_package_entries(self):
# Package specific options
entries = []
entries.append("#Entries for build options")
entries.append('#Entries for build options')
bar_on = self.spec.satisfies("+bar")
entries.append(cmake_cache_option("FOO_BAR", bar_on, "toggle bar"))
bar_on = '+bar' in self.spec
entries.append(cmake_cache_option('FOO_BAR', bar_on, 'toggle bar'))
entries.append("#Entries for dependencies")
entries.append('#Entries for dependencies')
if self.spec["blas"].name == "baz": # baz is our blas provider
entries.append(cmake_cache_string("FOO_BLAS", "baz", "Use baz"))
entries.append(cmake_cache_path("BAZ_PREFIX", self.spec["baz"].prefix))
if self.spec['blas'].name == 'baz': # baz is our blas provider
entries.append(cmake_cache_string('FOO_BLAS', 'baz', 'Use baz'))
entries.append(cmake_cache_path('BAZ_PREFIX', self.spec['baz'].prefix))
^^^^^^^^^^^^^^^^^^^^^^
External documentation

View File

@@ -54,8 +54,8 @@ to terminate such build attempts with a suitable message:
.. code-block:: python
conflicts("cuda_arch=none", when="+cuda",
msg="CUDA architecture is required")
conflicts('cuda_arch=none', when='+cuda',
msg='CUDA architecture is required')
Similarly, if your software does not support all versions of the property,
you could add ``conflicts`` to your package for those versions. For example,
@@ -66,13 +66,13 @@ custom message should a user attempt such a build:
.. code-block:: python
unsupported_cuda_archs = [
"10", "11", "12", "13",
"20", "21",
"30", "32", "35", "37"
'10', '11', '12', '13',
'20', '21',
'30', '32', '35', '37'
]
for value in unsupported_cuda_archs:
conflicts(f"cuda_arch={value}", when="+cuda",
msg=f"CUDA architecture {value} is not supported")
conflicts('cuda_arch={0}'.format(value), when='+cuda',
msg='CUDA architecture {0} is not supported'.format(value))
^^^^^^^
Methods
@@ -107,16 +107,16 @@ class of your package. For example, you can add it to your
spec = self.spec
args = []
...
if spec.satisfies("+cuda"):
if '+cuda' in spec:
# Set up the cuda macros needed by the build
args.append("-DWITH_CUDA=ON")
cuda_arch_list = spec.variants["cuda_arch"].value
args.append('-DWITH_CUDA=ON')
cuda_arch_list = spec.variants['cuda_arch'].value
cuda_arch = cuda_arch_list[0]
if cuda_arch != "none":
args.append(f"-DCUDA_FLAGS=-arch=sm_{cuda_arch}")
if cuda_arch != 'none':
args.append('-DCUDA_FLAGS=-arch=sm_{0}'.format(cuda_arch))
else:
# Ensure build with cuda is disabled
args.append("-DWITH_CUDA=OFF")
args.append('-DWITH_CUDA=OFF')
...
return args
@@ -125,7 +125,7 @@ You will need to customize options as needed for your build.
This example also illustrates how to check for the ``cuda`` variant using
``self.spec`` and how to retrieve the ``cuda_arch`` variant's value, which
is a list, using ``self.spec.variants["cuda_arch"].value``.
is a list, using ``self.spec.variants['cuda_arch'].value``.
With over 70 packages using ``CudaPackage`` as of January 2021 there are
lots of examples to choose from to get more ideas for using this package.

View File

@@ -57,13 +57,13 @@ If you look at the ``perl`` package, you'll see:
.. code-block:: python
phases = ["configure", "build", "install"]
phases = ['configure', 'build', 'install']
Similarly, ``cmake`` defines:
.. code-block:: python
phases = ["bootstrap", "build", "install"]
phases = ['bootstrap', 'build', 'install']
If we look at the ``cmake`` example, this tells Spack's ``PackageBase``
class to run the ``bootstrap``, ``build``, and ``install`` functions
@@ -78,7 +78,7 @@ If we look at ``perl``, we see that it defines a ``configure`` method:
.. code-block:: python
def configure(self, spec, prefix):
configure = Executable("./Configure")
configure = Executable('./Configure')
configure(*self.configure_args())
There is also a corresponding ``configure_args`` function that handles
@@ -92,7 +92,7 @@ phases are pretty simple:
make()
def install(self, spec, prefix):
make("install")
make('install')
The ``cmake`` package looks very similar, but with a ``bootstrap``
function instead of ``configure``:
@@ -100,14 +100,14 @@ function instead of ``configure``:
.. code-block:: python
def bootstrap(self, spec, prefix):
bootstrap = Executable("./bootstrap")
bootstrap = Executable('./bootstrap')
bootstrap(*self.bootstrap_args())
def build(self, spec, prefix):
make()
def install(self, spec, prefix):
make("install")
make('install')
Again, there is a ``bootstrap_args`` function that determines the
correct bootstrap flags to use.
@@ -128,16 +128,16 @@ before or after a particular phase. For example, in ``perl``, we see:
.. code-block:: python
@run_after("install")
@run_after('install')
def install_cpanm(self):
spec = self.spec
if spec.satisfies("+cpanm"):
with working_dir(join_path("cpanm", "cpanm")):
perl = spec["perl"].command
perl("Makefile.PL")
if '+cpanm' in spec:
with working_dir(join_path('cpanm', 'cpanm')):
perl = spec['perl'].command
perl('Makefile.PL')
make()
make("install")
make('install')
This extra step automatically installs ``cpanm`` in addition to the
base Perl installation.
@@ -174,10 +174,10 @@ In the ``perl`` package, we can see:
.. code-block:: python
@run_after("build")
@run_after('build')
@on_package_attributes(run_tests=True)
def test(self):
make("test")
make('test')
As you can guess, this runs ``make test`` *after* building the package,
if and only if testing is requested. Again, this is not specific to
@@ -189,7 +189,7 @@ custom build systems, it can be added to existing build systems as well.
.. code-block:: python
@run_after("install")
@run_after('install')
@on_package_attributes(run_tests=True)
works as expected. However, if you reverse the ordering:
@@ -197,7 +197,7 @@ custom build systems, it can be added to existing build systems as well.
.. code-block:: python
@on_package_attributes(run_tests=True)
@run_after("install")
@run_after('install')
the tests will always be run regardless of whether or not
``--test=root`` is requested. See https://github.com/spack/spack/issues/3833

View File

@@ -25,8 +25,8 @@ use Spack to build packages with the tools.
The Spack Python class ``IntelOneapiPackage`` is a base class that is
used by ``IntelOneapiCompilers``, ``IntelOneapiMkl``,
``IntelOneapiTbb`` and other classes to implement the oneAPI
packages. Search for ``oneAPI`` at `<packages.spack.io>`_ for the full
list of available oneAPI packages, or use::
packages. See the :ref:`package-list` for the full list of available
oneAPI packages or use::
spack list -d oneAPI

View File

@@ -59,7 +59,7 @@ using GNU Make, you should add a dependency on ``gmake``:
.. code-block:: python
depends_on("gmake", type="build")
depends_on('gmake', type='build')
^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -93,8 +93,8 @@ there are any other variables you need to set, you can do this in the
.. code-block:: python
def edit(self, spec, prefix):
env["PREFIX"] = prefix
env["BLASLIB"] = spec["blas"].libs.ld_flags
env['PREFIX'] = prefix
env['BLASLIB'] = spec['blas'].libs.ld_flags
`cbench <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/cbench/package.py>`_
@@ -113,7 +113,7 @@ you can do this like so:
.. code-block:: python
build_targets = ["CC=cc"]
build_targets = ['CC=cc']
If you do need access to the spec, you can create a property like so:
@@ -125,8 +125,8 @@ If you do need access to the spec, you can create a property like so:
spec = self.spec
return [
"CC=cc",
f"BLASLIB={spec['blas'].libs.ld_flags}",
'CC=cc',
'BLASLIB={0}'.format(spec['blas'].libs.ld_flags),
]
@@ -145,12 +145,12 @@ and a ``filter_file`` method to help with this. For example:
.. code-block:: python
def edit(self, spec, prefix):
makefile = FileFilter("Makefile")
makefile = FileFilter('Makefile')
makefile.filter(r"^\s*CC\s*=.*", f"CC = {spack_cc}")
makefile.filter(r"^\s*CXX\s*=.*", f"CXX = {spack_cxx}")
makefile.filter(r"^\s*F77\s*=.*", f"F77 = {spack_f77}")
makefile.filter(r"^\s*FC\s*=.*", f"FC = {spack_fc}")
makefile.filter(r'^\s*CC\s*=.*', 'CC = ' + spack_cc)
makefile.filter(r'^\s*CXX\s*=.*', 'CXX = ' + spack_cxx)
makefile.filter(r'^\s*F77\s*=.*', 'F77 = ' + spack_f77)
makefile.filter(r'^\s*FC\s*=.*', 'FC = ' + spack_fc)
`stream <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/stream/package.py>`_
@@ -181,16 +181,16 @@ well for storing variables:
def edit(self, spec, prefix):
config = {
"CC": "cc",
"MAKE": "make",
'CC': 'cc',
'MAKE': 'make',
}
if spec.satisfies("+blas"):
config["BLAS_LIBS"] = spec["blas"].libs.joined()
if '+blas' in spec:
config['BLAS_LIBS'] = spec['blas'].libs.joined()
with open("make.inc", "w") as inc:
with open('make.inc', 'w') as inc:
for key in config:
inc.write(f"{key} = {config[key]}\n")
inc.write('{0} = {1}\n'.format(key, config[key]))
`elk <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/elk/package.py>`_
@@ -204,14 +204,14 @@ them in a list:
def edit(self, spec, prefix):
config = [
f"INSTALL_DIR = {prefix}",
"INCLUDE_DIR = $(INSTALL_DIR)/include",
"LIBRARY_DIR = $(INSTALL_DIR)/lib",
'INSTALL_DIR = {0}'.format(prefix),
'INCLUDE_DIR = $(INSTALL_DIR)/include',
'LIBRARY_DIR = $(INSTALL_DIR)/lib',
]
with open("make.inc", "w") as inc:
with open('make.inc', 'w') as inc:
for var in config:
inc.write(f"{var}\n")
inc.write('{0}\n'.format(var))
`hpl <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/hpl/package.py>`_
@@ -284,7 +284,7 @@ can tell Spack where to locate it like so:
.. code-block:: python
build_directory = "src"
build_directory = 'src'
^^^^^^^^^^^^^^^^^^^
@@ -299,8 +299,8 @@ install the package:
def install(self, spec, prefix):
mkdir(prefix.bin)
install("foo", prefix.bin)
install_tree("lib", prefix.lib)
install('foo', prefix.bin)
install_tree('lib', prefix.lib)
^^^^^^^^^^^^^^^^^^^^^^


@@ -152,16 +152,16 @@ set. Once set, ``pypi`` will be used to define the ``homepage``,
.. code-block:: python
homepage = "https://pypi.org/project/setuptools/"
url = "https://pypi.org/packages/source/s/setuptools/setuptools-49.2.0.zip"
list_url = "https://pypi.org/simple/setuptools/"
homepage = 'https://pypi.org/project/setuptools/'
url = 'https://pypi.org/packages/source/s/setuptools/setuptools-49.2.0.zip'
list_url = 'https://pypi.org/simple/setuptools/'
is equivalent to:
.. code-block:: python
pypi = "setuptools/setuptools-49.2.0.zip"
pypi = 'setuptools/setuptools-49.2.0.zip'
If a package has a different homepage listed on PyPI, you can
@@ -208,7 +208,7 @@ dependencies to your package:
.. code-block:: python
depends_on("py-setuptools@42:", type="build")
depends_on('py-setuptools@42:', type='build')
Note that ``py-wheel`` is already listed as a build dependency in the
@@ -232,7 +232,7 @@ Look for dependencies under the following keys:
* ``dependencies`` under ``[project]``
These packages are required for building and installation. You can
add them with ``type=("build", "run")``.
add them with ``type=('build', 'run')``.
* ``[project.optional-dependencies]``
@@ -279,12 +279,12 @@ distutils library, and has almost the exact same API. In addition to
* ``setup_requires``
These packages are usually only needed at build-time, so you can
add them with ``type="build"``.
add them with ``type='build'``.
* ``install_requires``
These packages are required for building and installation. You can
add them with ``type=("build", "run")``.
add them with ``type=('build', 'run')``.
* ``extras_require``
@@ -296,7 +296,7 @@ distutils library, and has almost the exact same API. In addition to
These are packages that are required to run the unit tests for the
package. These dependencies can be specified using the
``type="test"`` dependency type. However, the PyPI tarballs rarely
``type='test'`` dependency type. However, the PyPI tarballs rarely
contain unit tests, so there is usually no reason to add these.
See https://setuptools.pypa.io/en/latest/userguide/dependency_management.html
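As a rough sketch of how these keys translate into a package recipe (the
dependency names below are placeholders, not taken from a real package):

.. code-block:: python

    depends_on("py-setuptools", type="build")           # from setup_requires
    depends_on("py-requests", type=("build", "run"))    # from install_requires
    depends_on("py-pytest", type="test")                # from tests_require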
@@ -321,7 +321,7 @@ older versions of flit may use the following keys:
* ``requires`` under ``[tool.flit.metadata]``
These packages are required for building and installation. You can
add them with ``type=("build", "run")``.
add them with ``type=('build', 'run')``.
* ``[tool.flit.metadata.requires-extra]``
@@ -434,12 +434,12 @@ the BLAS/LAPACK library you want pkg-config to search for:
.. code-block:: python
depends_on("py-pip@22.1:", type="build")
depends_on('py-pip@22.1:', type='build')
def config_settings(self, spec, prefix):
return {
"blas": spec["blas"].libs.names[0],
"lapack": spec["lapack"].libs.names[0],
'blas': spec['blas'].libs.names[0],
'lapack': spec['lapack'].libs.names[0],
}
@@ -463,10 +463,10 @@ has an optional dependency on ``libyaml`` that can be enabled like so:
def global_options(self, spec, prefix):
options = []
if spec.satisfies("+libyaml"):
options.append("--with-libyaml")
if '+libyaml' in spec:
options.append('--with-libyaml')
else:
options.append("--without-libyaml")
options.append('--without-libyaml')
return options
@@ -492,10 +492,10 @@ allows you to specify the directories to search for ``libyaml``:
def install_options(self, spec, prefix):
options = []
if spec.satisfies("+libyaml"):
if '+libyaml' in spec:
options.extend([
spec["libyaml"].libs.search_flags,
spec["libyaml"].headers.include_flags,
spec['libyaml'].libs.search_flags,
spec['libyaml'].headers.include_flags,
])
return options
@@ -556,7 +556,7 @@ detected are wrong, you can provide the names yourself by overriding
.. code-block:: python
import_modules = ["six"]
import_modules = ['six']
Sometimes the list of module names to import depends on how the
@@ -571,9 +571,9 @@ This can be expressed like so:
@property
def import_modules(self):
modules = ["yaml"]
if self.spec.satisfies("+libyaml"):
modules.append("yaml.cyaml")
modules = ['yaml']
if '+libyaml' in self.spec:
modules.append('yaml.cyaml')
return modules
@@ -586,14 +586,14 @@ Instead of defining the ``import_modules`` explicitly, only the subset
of module names to be skipped can be defined by using ``skip_modules``.
If a defined module has submodules, they are skipped as well, e.g.,
if the ``plotting`` modules should be excluded from the
automatically detected ``import_modules`` ``["nilearn", "nilearn.surface",
"nilearn.plotting", "nilearn.plotting.data"]`` set:
automatically detected ``import_modules`` ``['nilearn', 'nilearn.surface',
'nilearn.plotting', 'nilearn.plotting.data']`` set:
.. code-block:: python
skip_modules = ["nilearn.plotting"]
skip_modules = ['nilearn.plotting']
This will set ``import_modules`` to ``["nilearn", "nilearn.surface"]``
This will set ``import_modules`` to ``['nilearn', 'nilearn.surface']``
Import tests can be run during the installation using ``spack install
--test=root`` or at any time after the installation using
@@ -612,11 +612,11 @@ after the ``install`` phase:
.. code-block:: python
@run_after("install")
@run_after('install')
@on_package_attributes(run_tests=True)
def install_test(self):
with working_dir("spack-test", create=True):
python("-c", "import numpy; numpy.test('full', verbose=2)")
with working_dir('spack-test', create=True):
python('-c', 'import numpy; numpy.test("full", verbose=2)')
when testing is enabled during the installation (i.e., ``spack install
@@ -638,7 +638,7 @@ provides Python bindings in a ``python`` directory, you can use:
.. code-block:: python
build_directory = "python"
build_directory = 'python'
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^


@@ -81,27 +81,28 @@ class of your package. For example, you can add it to your
class MyRocmPackage(CMakePackage, ROCmPackage):
...
# Ensure +rocm and amdgpu_targets are passed to dependencies
depends_on("mydeppackage", when="+rocm")
depends_on('mydeppackage', when='+rocm')
for val in ROCmPackage.amdgpu_targets:
depends_on(f"mydeppackage amdgpu_target={val}",
when=f"amdgpu_target={val}")
depends_on('mydeppackage amdgpu_target={0}'.format(val),
when='amdgpu_target={0}'.format(val))
...
def cmake_args(self):
spec = self.spec
args = []
...
if spec.satisfies("+rocm"):
if '+rocm' in spec:
# Set up the hip macros needed by the build
args.extend([
"-DENABLE_HIP=ON",
f"-DHIP_ROOT_DIR={spec['hip'].prefix}"])
rocm_archs = spec.variants["amdgpu_target"].value
if "none" not in rocm_archs:
args.append(f"-DHIP_HIPCC_FLAGS=--amdgpu-target={','.join(rocm_archs}")
'-DENABLE_HIP=ON',
'-DHIP_ROOT_DIR={0}'.format(spec['hip'].prefix)])
rocm_archs = spec.variants['amdgpu_target'].value
if 'none' not in rocm_archs:
args.append('-DHIP_HIPCC_FLAGS=--amdgpu-target={0}'
.format(",".join(rocm_archs)))
else:
# Ensure build with hip is disabled
args.append("-DENABLE_HIP=OFF")
args.append('-DENABLE_HIP=OFF')
...
return args
...
@@ -113,7 +114,7 @@ build.
This example also illustrates how to check for the ``rocm`` variant using
``self.spec`` and how to retrieve the ``amdgpu_target`` variant's value
using ``self.spec.variants["amdgpu_target"].value``.
using ``self.spec.variants['amdgpu_target'].value``.
All five packages using ``ROCmPackage`` as of January 2021 also use the
:ref:`CudaPackage <cudapackage>`. So it is worth looking at those packages


@@ -57,7 +57,7 @@ overridden like so:
.. code-block:: python
def test(self):
scons("check")
scons('check')
^^^^^^^^^^^^^^^
@@ -88,7 +88,7 @@ base class already contains:
.. code-block:: python
depends_on("scons", type="build")
depends_on('scons', type='build')
If you want to specify a particular version requirement, you can override
@@ -96,7 +96,7 @@ this in your package:
.. code-block:: python
depends_on("scons@2.3.0:", type="build")
depends_on('scons@2.3.0:', type='build')
^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -238,14 +238,14 @@ the package build phase. This is done by overriding ``build_args`` like so:
def build_args(self, spec, prefix):
args = [
f"PREFIX={prefix}",
f"ZLIB={spec['zlib'].prefix}",
'PREFIX={0}'.format(prefix),
'ZLIB={0}'.format(spec['zlib'].prefix),
]
if spec.satisfies("+debug"):
args.append("DEBUG=yes")
if '+debug' in spec:
args.append('DEBUG=yes')
else:
args.append("DEBUG=no")
args.append('DEBUG=no')
return args
@@ -275,8 +275,8 @@ environment variables. For example, cantera has the following option:
* env_vars: [ string ]
Environment variables to propagate through to SCons. Either the
string "all" or a comma separated list of variable names, e.g.
"LD_LIBRARY_PATH,HOME".
- default: "LD_LIBRARY_PATH,PYTHONPATH"
'LD_LIBRARY_PATH,HOME'.
- default: 'LD_LIBRARY_PATH,PYTHONPATH'
In the case of cantera, using ``env_vars=all`` allows us to use


@@ -48,6 +48,9 @@
os.environ["COLIFY_SIZE"] = "25x120"
os.environ["COLUMNS"] = "120"
# Generate full package list if needed
subprocess.call(["spack", "list", "--format=html", "--update=package_list.html"])
# Generate a command index if an update is needed
subprocess.call(
[
@@ -204,7 +207,6 @@ def setup(sphinx):
("py:class", "clingo.Control"),
("py:class", "six.moves.urllib.parse.ParseResult"),
("py:class", "TextIO"),
("py:class", "hashlib._Hash"),
# Spack classes that are private and we don't want to expose
("py:class", "spack.provider_index._IndexBase"),
("py:class", "spack.repo._PrependFileLoader"),


@@ -304,17 +304,3 @@ To work properly, this requires your terminal to reset its title after
Spack has finished its work, otherwise Spack's status information will
remain in the terminal's title indefinitely. Most terminals should already
be set up this way and clear Spack's status information.
-----------
``aliases``
-----------
Aliases can be used to define new Spack commands. They can be either shortcuts
for longer commands or include specific arguments for convenience. For instance,
if users want to use ``spack install``'s ``-v`` argument all the time, they can
create a new alias called ``inst`` that will always call ``install -v``:
.. code-block:: yaml
aliases:
inst: install -v


@@ -212,12 +212,18 @@ under the ``container`` attribute of environments:
final:
- libgomp
# Extra instructions
extra_instructions:
final: |
RUN echo 'export PS1="\[$(tput bold)\]\[$(tput setaf 1)\][gromacs]\[$(tput setaf 2)\]\u\[$(tput sgr0)\]:\w $ "' >> ~/.bashrc
# Labels for the image
labels:
app: "gromacs"
mpi: "mpich"
A detailed description of the options available can be found in the :ref:`container_config_options` section.
A detailed description of the options available can be found in the
:ref:`container_config_options` section.
-------------------
Setting Base Images
@@ -519,13 +525,6 @@ the example below:
COPY data /share/myapp/data
{% endblock %}
The Dockerfile is generated by running:
.. code-block:: console
$ spack -e /opt/environment containerize
Note that the environment must be active for Spack to read the template.
The recipe that gets generated contains the two extra instructions that we added in our template extension:
.. code-block:: Dockerfile


@@ -310,11 +310,53 @@ Once all of the dependencies are installed, you can try building the documentati
$ make clean
$ make
If you see any warning or error messages, you will have to correct those before your PR
is accepted. If you are editing the documentation, you should be running the
documentation tests to make sure there are no errors. Documentation changes can result
in some obfuscated warning messages. If you don't understand what they mean, feel free
to ask when you submit your PR.
If you see any warning or error messages, you will have to correct those before
your PR is accepted.
If you are editing the documentation, you should obviously be running the
documentation tests. But even if you are simply adding a new package, your
changes could cause the documentation tests to fail:
.. code-block:: console
package_list.rst:8745: WARNING: Block quote ends without a blank line; unexpected unindent.
At first, this error message will mean nothing to you, since you didn't edit
that file. Until you look at line 8745 of the file in question:
.. code-block:: rst
Description:
NetCDF is a set of software libraries and self-describing, machine-
independent data formats that support the creation, access, and sharing
of array-oriented scientific data.
Our documentation includes :ref:`a list of all Spack packages <package-list>`.
If you add a new package, its docstring is added to this page. The problem in
this case was that the docstring looked like:
.. code-block:: python
class Netcdf(Package):
"""
NetCDF is a set of software libraries and self-describing,
machine-independent data formats that support the creation,
access, and sharing of array-oriented scientific data.
"""
Docstrings cannot start with a newline character, or else Sphinx will complain.
Instead, they should look like:
.. code-block:: python
class Netcdf(Package):
"""NetCDF is a set of software libraries and self-describing,
machine-independent data formats that support the creation,
access, and sharing of array-oriented scientific data."""
Documentation changes can result in much more obfuscated warning messages.
If you don't understand what they mean, feel free to ask when you submit
your PR.
--------
Coverage

[Two changed image files are not shown here: one binary image diff (before size 296 KiB) and one image whose diff was suppressed as too large (before size 108 KiB).]
@@ -1,534 +0,0 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?><!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd"><!-- Generated by graphviz version 2.40.1 (20161225.0304)
--><!-- Title: G Pages: 1 --><svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" width="3044pt" height="1683pt" viewBox="0.00 0.00 3043.65 1682.80">
<g id="graph0" class="graph" transform="scale(1 1) rotate(0) translate(4 1678.8)">
<title>G</title>
<polygon fill="#ffffff" stroke="transparent" points="-4,4 -4,-1678.8 3039.6456,-1678.8 3039.6456,4 -4,4"/>
<!-- hkcrbrtf2qex6rvzuok5tzdrbam55pdn -->
<g id="node1" class="node">
<title>hkcrbrtf2qex6rvzuok5tzdrbam55pdn</title>
<path fill="#add8e6" stroke="#000000" stroke-width="4" d="M2407.965,-1198.3002C2407.965,-1198.3002 1948.1742,-1198.3002 1948.1742,-1198.3002 1942.1742,-1198.3002 1936.1742,-1192.3002 1936.1742,-1186.3002 1936.1742,-1186.3002 1936.1742,-1123.6998 1936.1742,-1123.6998 1936.1742,-1117.6998 1942.1742,-1111.6998 1948.1742,-1111.6998 1948.1742,-1111.6998 2407.965,-1111.6998 2407.965,-1111.6998 2413.965,-1111.6998 2419.965,-1117.6998 2419.965,-1123.6998 2419.965,-1123.6998 2419.965,-1186.3002 2419.965,-1186.3002 2419.965,-1192.3002 2413.965,-1198.3002 2407.965,-1198.3002"/>
<text text-anchor="middle" x="2178.0696" y="-1147.8" font-family="Monaco" font-size="24.00" fill="#000000">netlib-scalapack@2.2.0%gcc@9.4.0/hkcrbrt</text>
</g>
<!-- o524gebsxavobkte3k5fglgwnedfkadf -->
<g id="node8" class="node">
<title>o524gebsxavobkte3k5fglgwnedfkadf</title>
<path fill="#add8e6" stroke="#000000" stroke-width="4" d="M901.2032,-1039.5002C901.2032,-1039.5002 486.936,-1039.5002 486.936,-1039.5002 480.936,-1039.5002 474.936,-1033.5002 474.936,-1027.5002 474.936,-1027.5002 474.936,-964.8998 474.936,-964.8998 474.936,-958.8998 480.936,-952.8998 486.936,-952.8998 486.936,-952.8998 901.2032,-952.8998 901.2032,-952.8998 907.2032,-952.8998 913.2032,-958.8998 913.2032,-964.8998 913.2032,-964.8998 913.2032,-1027.5002 913.2032,-1027.5002 913.2032,-1033.5002 907.2032,-1039.5002 901.2032,-1039.5002"/>
<text text-anchor="middle" x="694.0696" y="-989" font-family="Monaco" font-size="24.00" fill="#000000">openblas@0.3.21%gcc@9.4.0/o524geb</text>
</g>
<!-- hkcrbrtf2qex6rvzuok5tzdrbam55pdn&#45;&gt;o524gebsxavobkte3k5fglgwnedfkadf -->
<g id="edge10" class="edge">
<title>hkcrbrtf2qex6rvzuok5tzdrbam55pdn-&gt;o524gebsxavobkte3k5fglgwnedfkadf</title>
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M1936.1981,-1113.832C1933.0949,-1113.4088 1930.0059,-1112.9948 1926.9392,-1112.5915 1575.405,-1066.3348 1485.3504,-1074.0879 1131.9752,-1040.5955 1064.2267,-1034.1713 990.6114,-1026.9648 923.4066,-1020.2975"/>
<path fill="none" stroke="#dc143c" stroke-width="2" d="M1936.4684,-1111.8504C1933.3606,-1111.4265 1930.2716,-1111.0125 1927.2,-1110.6085 1575.2335,-1064.3422 1485.1789,-1072.0953 1132.164,-1038.6045 1064.4216,-1032.1808 990.8062,-1024.9744 923.604,-1018.3073"/>
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="923.505,-1015.7853 913.2081,-1018.2801 922.8133,-1022.751 923.505,-1015.7853"/>
<text text-anchor="middle" x="1368.79" y="-1067.6346" font-family="Times,serif" font-size="14.00" fill="#000000">virtuals=blas,lapack</text>
</g>
<!-- 2w3nq3n3hcj2tqlvcpewsryamltlu5tw -->
<g id="node23" class="node">
<title>2w3nq3n3hcj2tqlvcpewsryamltlu5tw</title>
<path fill="#add8e6" stroke="#000000" stroke-width="4" d="M2767.3081,-1039.5002C2767.3081,-1039.5002 2166.8311,-1039.5002 2166.8311,-1039.5002 2160.8311,-1039.5002 2154.8311,-1033.5002 2154.8311,-1027.5002 2154.8311,-1027.5002 2154.8311,-964.8998 2154.8311,-964.8998 2154.8311,-958.8998 2160.8311,-952.8998 2166.8311,-952.8998 2166.8311,-952.8998 2767.3081,-952.8998 2767.3081,-952.8998 2773.3081,-952.8998 2779.3081,-958.8998 2779.3081,-964.8998 2779.3081,-964.8998 2779.3081,-1027.5002 2779.3081,-1027.5002 2779.3081,-1033.5002 2773.3081,-1039.5002 2767.3081,-1039.5002"/>
<text text-anchor="middle" x="2467.0696" y="-989" font-family="Monaco" font-size="24.00" fill="#000000">intel-parallel-studio@cluster.2020.4%gcc@9.4.0/2w3nq3n</text>
</g>
<!-- hkcrbrtf2qex6rvzuok5tzdrbam55pdn&#45;&gt;2w3nq3n3hcj2tqlvcpewsryamltlu5tw -->
<g id="edge29" class="edge">
<title>hkcrbrtf2qex6rvzuok5tzdrbam55pdn-&gt;2w3nq3n3hcj2tqlvcpewsryamltlu5tw</title>
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M2256.5586,-1110.7308C2294.3103,-1089.9869 2339.6329,-1065.083 2378.4976,-1043.7276"/>
<path fill="none" stroke="#dc143c" stroke-width="2" d="M2257.5217,-1112.4836C2295.2735,-1091.7397 2340.5961,-1066.8358 2379.4607,-1045.4804"/>
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="2381.116,-1047.4235 2388.1946,-1039.5403 2377.745,-1041.2886 2381.116,-1047.4235"/>
<text text-anchor="middle" x="2286.6606" y="-1079.8414" font-family="Times,serif" font-size="14.00" fill="#000000">virtuals=mpi</text>
</g>
<!-- gguve5icmo5e4cw5o3hvvfsxremc46if -->
<g id="node27" class="node">
<title>gguve5icmo5e4cw5o3hvvfsxremc46if</title>
<path fill="#add8e6" stroke="#000000" stroke-width="4" d="M1539.1928,-1039.5002C1539.1928,-1039.5002 1152.9464,-1039.5002 1152.9464,-1039.5002 1146.9464,-1039.5002 1140.9464,-1033.5002 1140.9464,-1027.5002 1140.9464,-1027.5002 1140.9464,-964.8998 1140.9464,-964.8998 1140.9464,-958.8998 1146.9464,-952.8998 1152.9464,-952.8998 1152.9464,-952.8998 1539.1928,-952.8998 1539.1928,-952.8998 1545.1928,-952.8998 1551.1928,-958.8998 1551.1928,-964.8998 1551.1928,-964.8998 1551.1928,-1027.5002 1551.1928,-1027.5002 1551.1928,-1033.5002 1545.1928,-1039.5002 1539.1928,-1039.5002"/>
<text text-anchor="middle" x="1346.0696" y="-989" font-family="Monaco" font-size="24.00" fill="#000000">cmake@3.25.1%gcc@9.4.0/gguve5i</text>
</g>
<!-- hkcrbrtf2qex6rvzuok5tzdrbam55pdn&#45;&gt;gguve5icmo5e4cw5o3hvvfsxremc46if -->
<g id="edge17" class="edge">
<title>hkcrbrtf2qex6rvzuok5tzdrbam55pdn-&gt;gguve5icmo5e4cw5o3hvvfsxremc46if</title>
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M1950.9968,-1111.6597C1829.5529,-1088.4802 1680.8338,-1060.0949 1561.2457,-1037.2697"/>
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="1561.7091,-1033.795 1551.2303,-1035.3581 1560.3967,-1040.6709 1561.7091,-1033.795"/>
</g>
<!-- i4avrindvhcamhurzbfdaggbj2zgsrrh -->
<g id="node2" class="node">
<title>i4avrindvhcamhurzbfdaggbj2zgsrrh</title>
<path fill="#ff7f50" stroke="#000000" stroke-width="4" d="M1536.3649,-86.7002C1536.3649,-86.7002 1155.7743,-86.7002 1155.7743,-86.7002 1149.7743,-86.7002 1143.7743,-80.7002 1143.7743,-74.7002 1143.7743,-74.7002 1143.7743,-12.0998 1143.7743,-12.0998 1143.7743,-6.0998 1149.7743,-.0998 1155.7743,-.0998 1155.7743,-.0998 1536.3649,-.0998 1536.3649,-.0998 1542.3649,-.0998 1548.3649,-6.0998 1548.3649,-12.0998 1548.3649,-12.0998 1548.3649,-74.7002 1548.3649,-74.7002 1548.3649,-80.7002 1542.3649,-86.7002 1536.3649,-86.7002"/>
<text text-anchor="middle" x="1346.0696" y="-36.2" font-family="Monaco" font-size="24.00" fill="#000000">pkgconf@1.8.0%gcc@9.4.0/i4avrin</text>
</g>
<!-- ywrpvv2hgooeepdke33exkqrtdpd5gkl -->
<g id="node3" class="node">
<title>ywrpvv2hgooeepdke33exkqrtdpd5gkl</title>
<path fill="#ff7f50" stroke="#000000" stroke-width="4" d="M849.3673,-721.9002C849.3673,-721.9002 480.7719,-721.9002 480.7719,-721.9002 474.7719,-721.9002 468.7719,-715.9002 468.7719,-709.9002 468.7719,-709.9002 468.7719,-647.2998 468.7719,-647.2998 468.7719,-641.2998 474.7719,-635.2998 480.7719,-635.2998 480.7719,-635.2998 849.3673,-635.2998 849.3673,-635.2998 855.3673,-635.2998 861.3673,-641.2998 861.3673,-647.2998 861.3673,-647.2998 861.3673,-709.9002 861.3673,-709.9002 861.3673,-715.9002 855.3673,-721.9002 849.3673,-721.9002"/>
<text text-anchor="middle" x="665.0696" y="-671.4" font-family="Monaco" font-size="24.00" fill="#000000">perl@5.36.0%gcc@9.4.0/ywrpvv2</text>
</g>
<!-- h3ujmb3ts4kxxxv77knh2knuystuerbx -->
<g id="node7" class="node">
<title>h3ujmb3ts4kxxxv77knh2knuystuerbx</title>
<path fill="#ff7f50" stroke="#000000" stroke-width="4" d="M392.4016,-563.1002C392.4016,-563.1002 19.7376,-563.1002 19.7376,-563.1002 13.7376,-563.1002 7.7376,-557.1002 7.7376,-551.1002 7.7376,-551.1002 7.7376,-488.4998 7.7376,-488.4998 7.7376,-482.4998 13.7376,-476.4998 19.7376,-476.4998 19.7376,-476.4998 392.4016,-476.4998 392.4016,-476.4998 398.4016,-476.4998 404.4016,-482.4998 404.4016,-488.4998 404.4016,-488.4998 404.4016,-551.1002 404.4016,-551.1002 404.4016,-557.1002 398.4016,-563.1002 392.4016,-563.1002"/>
<text text-anchor="middle" x="206.0696" y="-512.6" font-family="Monaco" font-size="24.00" fill="#000000">bzip2@1.0.8%gcc@9.4.0/h3ujmb3</text>
</g>
<!-- ywrpvv2hgooeepdke33exkqrtdpd5gkl&#45;&gt;h3ujmb3ts4kxxxv77knh2knuystuerbx -->
<g id="edge9" class="edge">
<title>ywrpvv2hgooeepdke33exkqrtdpd5gkl-&gt;h3ujmb3ts4kxxxv77knh2knuystuerbx</title>
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M539.3189,-636.1522C477.7157,-614.8394 403.4197,-589.1353 340.5959,-567.4002"/>
<path fill="none" stroke="#dc143c" stroke-width="2" d="M539.9728,-634.2622C478.3696,-612.9494 404.0736,-587.2452 341.2498,-565.5101"/>
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="341.9365,-563.1023 331.3417,-563.1403 339.6478,-569.7176 341.9365,-563.1023"/>
</g>
<!-- uabgssx6lsgrevwbttslldnr5nzguprj -->
<g id="node19" class="node">
<title>uabgssx6lsgrevwbttslldnr5nzguprj</title>
<path fill="#ff7f50" stroke="#000000" stroke-width="4" d="M1298.2296,-563.1002C1298.2296,-563.1002 937.9096,-563.1002 937.9096,-563.1002 931.9096,-563.1002 925.9096,-557.1002 925.9096,-551.1002 925.9096,-551.1002 925.9096,-488.4998 925.9096,-488.4998 925.9096,-482.4998 931.9096,-476.4998 937.9096,-476.4998 937.9096,-476.4998 1298.2296,-476.4998 1298.2296,-476.4998 1304.2296,-476.4998 1310.2296,-482.4998 1310.2296,-488.4998 1310.2296,-488.4998 1310.2296,-551.1002 1310.2296,-551.1002 1310.2296,-557.1002 1304.2296,-563.1002 1298.2296,-563.1002"/>
<text text-anchor="middle" x="1118.0696" y="-512.6" font-family="Monaco" font-size="24.00" fill="#000000">gdbm@1.23%gcc@9.4.0/uabgssx</text>
</g>
<!-- ywrpvv2hgooeepdke33exkqrtdpd5gkl&#45;&gt;uabgssx6lsgrevwbttslldnr5nzguprj -->
<g id="edge44" class="edge">
<title>ywrpvv2hgooeepdke33exkqrtdpd5gkl-&gt;uabgssx6lsgrevwbttslldnr5nzguprj</title>
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M788.523,-634.2635C849.3209,-612.9507 922.6457,-587.2465 984.6483,-565.5114"/>
<path fill="none" stroke="#dc143c" stroke-width="2" d="M789.1847,-636.1509C849.9825,-614.8381 923.3073,-589.1339 985.3099,-567.3988"/>
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="986.1559,-569.7515 994.435,-563.1403 983.8402,-563.1456 986.1559,-569.7515"/>
</g>
<!-- gkw4dg2p7rdnhru3m6lcnsjbzyr7g3hb -->
<g id="node20" class="node">
<title>gkw4dg2p7rdnhru3m6lcnsjbzyr7g3hb</title>
<path fill="#ff7f50" stroke="#000000" stroke-width="4" d="M896.1744,-563.1002C896.1744,-563.1002 433.9648,-563.1002 433.9648,-563.1002 427.9648,-563.1002 421.9648,-557.1002 421.9648,-551.1002 421.9648,-551.1002 421.9648,-488.4998 421.9648,-488.4998 421.9648,-482.4998 427.9648,-476.4998 433.9648,-476.4998 433.9648,-476.4998 896.1744,-476.4998 896.1744,-476.4998 902.1744,-476.4998 908.1744,-482.4998 908.1744,-488.4998 908.1744,-488.4998 908.1744,-551.1002 908.1744,-551.1002 908.1744,-557.1002 902.1744,-563.1002 896.1744,-563.1002"/>
<text text-anchor="middle" x="665.0696" y="-512.6" font-family="Monaco" font-size="24.00" fill="#000000">berkeley-db@18.1.40%gcc@9.4.0/gkw4dg2</text>
</g>
<!-- ywrpvv2hgooeepdke33exkqrtdpd5gkl&#45;&gt;gkw4dg2p7rdnhru3m6lcnsjbzyr7g3hb -->
<g id="edge23" class="edge">
<title>ywrpvv2hgooeepdke33exkqrtdpd5gkl-&gt;gkw4dg2p7rdnhru3m6lcnsjbzyr7g3hb</title>
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M664.0696,-635.2072C664.0696,-616.1263 664.0696,-593.5257 664.0696,-573.4046"/>
<path fill="none" stroke="#dc143c" stroke-width="2" d="M666.0696,-635.2072C666.0696,-616.1263 666.0696,-593.5257 666.0696,-573.4046"/>
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="668.5697,-573.1403 665.0696,-563.1403 661.5697,-573.1404 668.5697,-573.1403"/>
</g>
<!-- nizxi5u5bbrzhzwfy2qb7hatlhuswlrz -->
<g id="node24" class="node">
<title>nizxi5u5bbrzhzwfy2qb7hatlhuswlrz</title>
<path fill="#add8e6" stroke="#000000" stroke-width="4" d="M2195.2248,-563.1002C2195.2248,-563.1002 1840.9144,-563.1002 1840.9144,-563.1002 1834.9144,-563.1002 1828.9144,-557.1002 1828.9144,-551.1002 1828.9144,-551.1002 1828.9144,-488.4998 1828.9144,-488.4998 1828.9144,-482.4998 1834.9144,-476.4998 1840.9144,-476.4998 1840.9144,-476.4998 2195.2248,-476.4998 2195.2248,-476.4998 2201.2248,-476.4998 2207.2248,-482.4998 2207.2248,-488.4998 2207.2248,-488.4998 2207.2248,-551.1002 2207.2248,-551.1002 2207.2248,-557.1002 2201.2248,-563.1002 2195.2248,-563.1002"/>
<text text-anchor="middle" x="2018.0696" y="-512.6" font-family="Monaco" font-size="24.00" fill="#000000">zlib@1.2.13%gcc@9.4.0/nizxi5u</text>
</g>
<!-- ywrpvv2hgooeepdke33exkqrtdpd5gkl&#45;&gt;nizxi5u5bbrzhzwfy2qb7hatlhuswlrz -->
<g id="edge4" class="edge">
<title>ywrpvv2hgooeepdke33exkqrtdpd5gkl-&gt;nizxi5u5bbrzhzwfy2qb7hatlhuswlrz</title>
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M861.3292,-654.5584C1116.9929,-624.5514 1561.4447,-572.3867 1818.5758,-542.2075"/>
<path fill="none" stroke="#dc143c" stroke-width="2" d="M861.5624,-656.5447C1117.2261,-626.5378 1561.6778,-574.373 1818.8089,-544.1939"/>
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="1819.373,-546.6449 1828.8968,-542.003 1818.5569,-539.6926 1819.373,-546.6449"/>
</g>
<!-- idvshq5nqmygzd4uo62mdispwgxsw7id -->
<g id="node4" class="node">
<title>idvshq5nqmygzd4uo62mdispwgxsw7id</title>
<path fill="#add8e6" stroke="#000000" stroke-width="4" d="M2383.212,-1674.7002C2383.212,-1674.7002 1972.9272,-1674.7002 1972.9272,-1674.7002 1966.9272,-1674.7002 1960.9272,-1668.7002 1960.9272,-1662.7002 1960.9272,-1662.7002 1960.9272,-1600.0998 1960.9272,-1600.0998 1960.9272,-1594.0998 1966.9272,-1588.0998 1972.9272,-1588.0998 1972.9272,-1588.0998 2383.212,-1588.0998 2383.212,-1588.0998 2389.212,-1588.0998 2395.212,-1594.0998 2395.212,-1600.0998 2395.212,-1600.0998 2395.212,-1662.7002 2395.212,-1662.7002 2395.212,-1668.7002 2389.212,-1674.7002 2383.212,-1674.7002"/>
<text text-anchor="middle" x="2178.0696" y="-1624.2" font-family="Monaco" font-size="24.00" fill="#000000">strumpack@7.0.1%gcc@9.4.0/idvshq5</text>
</g>
<!-- idvshq5nqmygzd4uo62mdispwgxsw7id&#45;&gt;hkcrbrtf2qex6rvzuok5tzdrbam55pdn -->
<g id="edge33" class="edge">
<title>idvshq5nqmygzd4uo62mdispwgxsw7id-&gt;hkcrbrtf2qex6rvzuok5tzdrbam55pdn</title>
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M2177.0696,-1587.8598C2177.0696,-1500.5185 2177.0696,-1304.1624 2177.0696,-1208.8885"/>
<path fill="none" stroke="#dc143c" stroke-width="2" d="M2179.0696,-1587.8598C2179.0696,-1500.5185 2179.0696,-1304.1624 2179.0696,-1208.8885"/>
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="2181.5697,-1208.611 2178.0696,-1198.611 2174.5697,-1208.611 2181.5697,-1208.611"/>
<text text-anchor="middle" x="2125.9224" y="-1397.5399" font-family="Times,serif" font-size="14.00" fill="#000000">virtuals=scalapack</text>
</g>
<!-- idvshq5nqmygzd4uo62mdispwgxsw7id&#45;&gt;o524gebsxavobkte3k5fglgwnedfkadf -->
<g id="edge8" class="edge">
<title>idvshq5nqmygzd4uo62mdispwgxsw7id-&gt;o524gebsxavobkte3k5fglgwnedfkadf</title>
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M1960.6199,-1629.1097C1600.5855,-1621.4505 897.1143,-1596.5054 662.748,-1516.9469 459.8544,-1447.9506 281.1117,-1289.236 401.2427,-1111.0377 418.213,-1086.3492 472.759,-1062.01 530.3793,-1041.9698"/>
<path fill="none" stroke="#dc143c" stroke-width="2" d="M1960.6625,-1627.1101C1600.6564,-1619.4517 897.1852,-1594.5067 663.3912,-1515.0531 461.1823,-1446.4551 282.4397,-1287.7405 402.8965,-1112.1623 419.028,-1088.1757 473.574,-1063.8364 531.0362,-1043.8589"/>
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="532.0142,-1046.1665 540.3395,-1039.6137 529.7449,-1039.5445 532.0142,-1046.1665"/>
<text text-anchor="middle" x="1175.5163" y="-1600.8866" font-family="Times,serif" font-size="14.00" fill="#000000">virtuals=blas,lapack</text>
</g>
<!-- imopnxjmv7cwzyiecdw2saq42qvpnauh -->
<g id="node12" class="node">
<title>imopnxjmv7cwzyiecdw2saq42qvpnauh</title>
<path fill="#add8e6" stroke="#000000" stroke-width="4" d="M3003.3872,-1357.1002C3003.3872,-1357.1002 2606.752,-1357.1002 2606.752,-1357.1002 2600.752,-1357.1002 2594.752,-1351.1002 2594.752,-1345.1002 2594.752,-1345.1002 2594.752,-1282.4998 2594.752,-1282.4998 2594.752,-1276.4998 2600.752,-1270.4998 2606.752,-1270.4998 2606.752,-1270.4998 3003.3872,-1270.4998 3003.3872,-1270.4998 3009.3872,-1270.4998 3015.3872,-1276.4998 3015.3872,-1282.4998 3015.3872,-1282.4998 3015.3872,-1345.1002 3015.3872,-1345.1002 3015.3872,-1351.1002 3009.3872,-1357.1002 3003.3872,-1357.1002"/>
<text text-anchor="middle" x="2805.0696" y="-1306.6" font-family="Monaco" font-size="24.00" fill="#000000">parmetis@4.0.3%gcc@9.4.0/imopnxj</text>
</g>
<!-- idvshq5nqmygzd4uo62mdispwgxsw7id&#45;&gt;imopnxjmv7cwzyiecdw2saq42qvpnauh -->
<g id="edge51" class="edge">
<title>idvshq5nqmygzd4uo62mdispwgxsw7id-&gt;imopnxjmv7cwzyiecdw2saq42qvpnauh</title>
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M2393.6993,-1587.0809C2455.3565,-1569.7539 2521.1771,-1546.2699 2577.5864,-1515.1245 2649.1588,-1475.6656 2717.4141,-1409.6691 2759.9512,-1363.9364"/>
<path fill="none" stroke="#dc143c" stroke-width="2" d="M2394.2404,-1589.0062C2456.0286,-1571.6376 2521.8491,-1548.1536 2578.5528,-1516.8755 2650.5491,-1477.1034 2718.8043,-1411.107 2761.4156,-1365.2986"/>
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="2763.3454,-1366.8938 2767.5512,-1357.1695 2758.1992,-1362.1485 2763.3454,-1366.8938"/>
</g>
<!-- ern66gyp6qmhmpod4jaynxx4weoberfm -->
<g id="node13" class="node">
<title>ern66gyp6qmhmpod4jaynxx4weoberfm</title>
<path fill="#add8e6" stroke="#000000" stroke-width="4" d="M2928.3784,-1198.3002C2928.3784,-1198.3002 2563.7608,-1198.3002 2563.7608,-1198.3002 2557.7608,-1198.3002 2551.7608,-1192.3002 2551.7608,-1186.3002 2551.7608,-1186.3002 2551.7608,-1123.6998 2551.7608,-1123.6998 2551.7608,-1117.6998 2557.7608,-1111.6998 2563.7608,-1111.6998 2563.7608,-1111.6998 2928.3784,-1111.6998 2928.3784,-1111.6998 2934.3784,-1111.6998 2940.3784,-1117.6998 2940.3784,-1123.6998 2940.3784,-1123.6998 2940.3784,-1186.3002 2940.3784,-1186.3002 2940.3784,-1192.3002 2934.3784,-1198.3002 2928.3784,-1198.3002"/>
<text text-anchor="middle" x="2746.0696" y="-1147.8" font-family="Monaco" font-size="24.00" fill="#000000">metis@5.1.0%gcc@9.4.0/ern66gy</text>
</g>
<!-- idvshq5nqmygzd4uo62mdispwgxsw7id&#45;&gt;ern66gyp6qmhmpod4jaynxx4weoberfm -->
<g id="edge25" class="edge">
<title>idvshq5nqmygzd4uo62mdispwgxsw7id-&gt;ern66gyp6qmhmpod4jaynxx4weoberfm</title>
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M2371.6269,-1587.103C2443.5875,-1567.249 2513.691,-1542.0963 2537.3223,-1515.3355 2611.3482,-1433.6645 2525.4748,-1364.8484 2585.2274,-1269.8608 2602.2478,-1243.3473 2627.3929,-1221.1402 2652.8797,-1203.3777"/>
<path fill="none" stroke="#dc143c" stroke-width="2" d="M2372.1589,-1589.0309C2444.2629,-1569.1315 2514.3664,-1543.9788 2538.8169,-1516.6645 2612.5989,-1432.1038 2526.7255,-1363.2878 2586.9118,-1270.9392 2603.5717,-1244.8464 2628.7168,-1222.6393 2654.0229,-1205.0188"/>
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="2655.7411,-1206.8749 2662.0621,-1198.3722 2651.8184,-1201.0773 2655.7411,-1206.8749"/>
</g>
<!-- nqiyrxlid6tikfpvoqdpvsjt5drs2obf -->
<g id="node14" class="node">
<title>nqiyrxlid6tikfpvoqdpvsjt5drs2obf</title>
<path fill="#add8e6" stroke="#000000" stroke-width="4" d="M1964.017,-1357.1002C1964.017,-1357.1002 1532.1222,-1357.1002 1532.1222,-1357.1002 1526.1222,-1357.1002 1520.1222,-1351.1002 1520.1222,-1345.1002 1520.1222,-1345.1002 1520.1222,-1282.4998 1520.1222,-1282.4998 1520.1222,-1276.4998 1526.1222,-1270.4998 1532.1222,-1270.4998 1532.1222,-1270.4998 1964.017,-1270.4998 1964.017,-1270.4998 1970.017,-1270.4998 1976.017,-1276.4998 1976.017,-1282.4998 1976.017,-1282.4998 1976.017,-1345.1002 1976.017,-1345.1002 1976.017,-1351.1002 1970.017,-1357.1002 1964.017,-1357.1002"/>
<text text-anchor="middle" x="1748.0696" y="-1306.6" font-family="Monaco" font-size="24.00" fill="#000000">butterflypack@2.2.2%gcc@9.4.0/nqiyrxl</text>
</g>
<!-- idvshq5nqmygzd4uo62mdispwgxsw7id&#45;&gt;nqiyrxlid6tikfpvoqdpvsjt5drs2obf -->
<g id="edge26" class="edge">
<title>idvshq5nqmygzd4uo62mdispwgxsw7id-&gt;nqiyrxlid6tikfpvoqdpvsjt5drs2obf</title>
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M2118.5874,-1588.7094C2039.1194,-1530.0139 1897.9154,-1425.72 1814.4793,-1364.0937"/>
<path fill="none" stroke="#dc143c" stroke-width="2" d="M2119.7757,-1587.1006C2040.3076,-1528.4052 1899.1036,-1424.1112 1815.6675,-1362.485"/>
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="1817.0581,-1360.404 1806.9348,-1357.2781 1812.8992,-1366.0347 1817.0581,-1360.404"/>
</g>
<!-- 4bu62kyfuh4ikdkuyxfxjxanf7e7qopu -->
<g id="node16" class="node">
<title>4bu62kyfuh4ikdkuyxfxjxanf7e7qopu</title>
<path fill="#add8e6" stroke="#000000" stroke-width="4" d="M1106.2192,-1515.9002C1106.2192,-1515.9002 683.92,-1515.9002 683.92,-1515.9002 677.92,-1515.9002 671.92,-1509.9002 671.92,-1503.9002 671.92,-1503.9002 671.92,-1441.2998 671.92,-1441.2998 671.92,-1435.2998 677.92,-1429.2998 683.92,-1429.2998 683.92,-1429.2998 1106.2192,-1429.2998 1106.2192,-1429.2998 1112.2192,-1429.2998 1118.2192,-1435.2998 1118.2192,-1441.2998 1118.2192,-1441.2998 1118.2192,-1503.9002 1118.2192,-1503.9002 1118.2192,-1509.9002 1112.2192,-1515.9002 1106.2192,-1515.9002"/>
<text text-anchor="middle" x="895.0696" y="-1465.4" font-family="Monaco" font-size="24.00" fill="#000000">slate@2022.07.00%gcc@9.4.0/4bu62ky</text>
</g>
<!-- idvshq5nqmygzd4uo62mdispwgxsw7id&#45;&gt;4bu62kyfuh4ikdkuyxfxjxanf7e7qopu -->
<g id="edge5" class="edge">
<title>idvshq5nqmygzd4uo62mdispwgxsw7id-&gt;4bu62kyfuh4ikdkuyxfxjxanf7e7qopu</title>
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M1960.6663,-1605.4991C1729.5518,-1576.8935 1365.2868,-1531.8075 1128.237,-1502.4673"/>
<path fill="none" stroke="#dc143c" stroke-width="2" d="M1960.912,-1603.5143C1729.7975,-1574.9086 1365.5325,-1529.8227 1128.4827,-1500.4825"/>
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="1128.5789,-1497.9754 1118.2247,-1500.2204 1127.719,-1504.9224 1128.5789,-1497.9754"/>
</g>
<!-- idvshq5nqmygzd4uo62mdispwgxsw7id&#45;&gt;2w3nq3n3hcj2tqlvcpewsryamltlu5tw -->
<g id="edge20" class="edge">
<title>idvshq5nqmygzd4uo62mdispwgxsw7id-&gt;2w3nq3n3hcj2tqlvcpewsryamltlu5tw</title>
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M2395.1113,-1591.5061C2621.5772,-1545.7968 2953.3457,-1462.5053 3023.2362,-1356.6473 3049.986,-1316.785 3021.2047,-1131.5143 3003.3326,-1112.2759 2971.8969,-1077.7826 2884.3944,-1052.6467 2789.1441,-1034.9179"/>
<path fill="none" stroke="#dc143c" stroke-width="2" d="M2395.507,-1593.4665C2622.0642,-1547.7366 2953.8327,-1464.4452 3024.903,-1357.7527 3051.9623,-1316.478 3023.181,-1131.2073 3004.8066,-1110.9241 2972.4491,-1075.8603 2884.9466,-1050.7244 2789.5102,-1032.9517"/>
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="2789.9449,-1030.4898 2779.4781,-1032.132 2788.6845,-1037.3754 2789.9449,-1030.4898"/>
<text text-anchor="middle" x="2611.7445" y="-1537.8321" font-family="Times,serif" font-size="14.00" fill="#000000">virtuals=mpi</text>
</g>
<!-- 7rzbmgoxhmm2jhellkgcjmn62uklf22x -->
<g id="node25" class="node">
<title>7rzbmgoxhmm2jhellkgcjmn62uklf22x</title>
<path fill="#add8e6" stroke="#000000" stroke-width="4" d="M1749.1952,-1515.9002C1749.1952,-1515.9002 1398.944,-1515.9002 1398.944,-1515.9002 1392.944,-1515.9002 1386.944,-1509.9002 1386.944,-1503.9002 1386.944,-1503.9002 1386.944,-1441.2998 1386.944,-1441.2998 1386.944,-1435.2998 1392.944,-1429.2998 1398.944,-1429.2998 1398.944,-1429.2998 1749.1952,-1429.2998 1749.1952,-1429.2998 1755.1952,-1429.2998 1761.1952,-1435.2998 1761.1952,-1441.2998 1761.1952,-1441.2998 1761.1952,-1503.9002 1761.1952,-1503.9002 1761.1952,-1509.9002 1755.1952,-1515.9002 1749.1952,-1515.9002"/>
<text text-anchor="middle" x="1574.0696" y="-1465.4" font-family="Monaco" font-size="24.00" fill="#000000">zfp@0.5.5%gcc@9.4.0/7rzbmgo</text>
</g>
<!-- idvshq5nqmygzd4uo62mdispwgxsw7id&#45;&gt;7rzbmgoxhmm2jhellkgcjmn62uklf22x -->
<g id="edge36" class="edge">
<title>idvshq5nqmygzd4uo62mdispwgxsw7id-&gt;7rzbmgoxhmm2jhellkgcjmn62uklf22x</title>
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M2012.7697,-1588.9743C1930.7903,-1567.4208 1831.729,-1541.3762 1748.4742,-1519.4874"/>
<path fill="none" stroke="#dc143c" stroke-width="2" d="M2013.2782,-1587.0401C1931.2989,-1565.4866 1832.2376,-1539.442 1748.9827,-1517.5531"/>
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="1749.477,-1515.0982 1738.9157,-1515.9403 1747.697,-1521.8681 1749.477,-1515.0982"/>
</g>
<!-- idvshq5nqmygzd4uo62mdispwgxsw7id&#45;&gt;gguve5icmo5e4cw5o3hvvfsxremc46if -->
<g id="edge3" class="edge">
<title>idvshq5nqmygzd4uo62mdispwgxsw7id-&gt;gguve5icmo5e4cw5o3hvvfsxremc46if</title>
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M2229.2864,-1587.9836C2336.2076,-1492.3172 2562.5717,-1260.0833 2429.0696,-1111.6 2372.2327,-1048.3851 1860.8259,-1017.0375 1561.5401,-1003.9799"/>
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="1561.5673,-1000.4779 1551.4253,-1003.5421 1561.2645,-1007.4714 1561.5673,-1000.4779"/>
</g>
<!-- mujlx42xgttdc6u6rmiftsktpsrcmpbs -->
<g id="node5" class="node">
<title>mujlx42xgttdc6u6rmiftsktpsrcmpbs</title>
<path fill="#add8e6" stroke="#000000" stroke-width="4" d="M912.4048,-1198.3002C912.4048,-1198.3002 475.7344,-1198.3002 475.7344,-1198.3002 469.7344,-1198.3002 463.7344,-1192.3002 463.7344,-1186.3002 463.7344,-1186.3002 463.7344,-1123.6998 463.7344,-1123.6998 463.7344,-1117.6998 469.7344,-1111.6998 475.7344,-1111.6998 475.7344,-1111.6998 912.4048,-1111.6998 912.4048,-1111.6998 918.4048,-1111.6998 924.4048,-1117.6998 924.4048,-1123.6998 924.4048,-1123.6998 924.4048,-1186.3002 924.4048,-1186.3002 924.4048,-1192.3002 918.4048,-1198.3002 912.4048,-1198.3002"/>
<text text-anchor="middle" x="694.0696" y="-1147.8" font-family="Monaco" font-size="24.00" fill="#000000">blaspp@2022.07.00%gcc@9.4.0/mujlx42</text>
</g>
<!-- mujlx42xgttdc6u6rmiftsktpsrcmpbs&#45;&gt;o524gebsxavobkte3k5fglgwnedfkadf -->
<g id="edge16" class="edge">
<title>mujlx42xgttdc6u6rmiftsktpsrcmpbs-&gt;o524gebsxavobkte3k5fglgwnedfkadf</title>
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M693.0696,-1111.6072C693.0696,-1092.5263 693.0696,-1069.9257 693.0696,-1049.8046"/>
<path fill="none" stroke="#dc143c" stroke-width="2" d="M695.0696,-1111.6072C695.0696,-1092.5263 695.0696,-1069.9257 695.0696,-1049.8046"/>
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="697.5697,-1049.5403 694.0696,-1039.5403 690.5697,-1049.5404 697.5697,-1049.5403"/>
<text text-anchor="middle" x="657.8516" y="-1079.8482" font-family="Times,serif" font-size="14.00" fill="#000000">virtuals=blas</text>
</g>
<!-- mujlx42xgttdc6u6rmiftsktpsrcmpbs&#45;&gt;gguve5icmo5e4cw5o3hvvfsxremc46if -->
<g id="edge28" class="edge">
<title>mujlx42xgttdc6u6rmiftsktpsrcmpbs-&gt;gguve5icmo5e4cw5o3hvvfsxremc46if</title>
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M872.2315,-1111.6072C960.9952,-1089.988 1068.311,-1063.8504 1158.3512,-1041.9204"/>
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="1159.2354,-1045.3074 1168.1232,-1039.5403 1157.5789,-1038.5062 1159.2354,-1045.3074"/>
</g>
<!-- htzjns66gmq6pjofohp26djmjnpbegho -->
<g id="node6" class="node">
<title>htzjns66gmq6pjofohp26djmjnpbegho</title>
<path fill="#ff7f50" stroke="#000000" stroke-width="4" d="M2663.3553,-880.7002C2663.3553,-880.7002 2270.7839,-880.7002 2270.7839,-880.7002 2264.7839,-880.7002 2258.7839,-874.7002 2258.7839,-868.7002 2258.7839,-868.7002 2258.7839,-806.0998 2258.7839,-806.0998 2258.7839,-800.0998 2264.7839,-794.0998 2270.7839,-794.0998 2270.7839,-794.0998 2663.3553,-794.0998 2663.3553,-794.0998 2669.3553,-794.0998 2675.3553,-800.0998 2675.3553,-806.0998 2675.3553,-806.0998 2675.3553,-868.7002 2675.3553,-868.7002 2675.3553,-874.7002 2669.3553,-880.7002 2663.3553,-880.7002"/>
<text text-anchor="middle" x="2467.0696" y="-830.2" font-family="Monaco" font-size="24.00" fill="#000000">patchelf@0.16.1%gcc@9.4.0/htzjns6</text>
</g>
<!-- xm3ldz3y3msfdc3hzshvxpbpg5hnt6o6 -->
<g id="node15" class="node">
<title>xm3ldz3y3msfdc3hzshvxpbpg5hnt6o6</title>
<path fill="#ff7f50" stroke="#000000" stroke-width="4" d="M394.2232,-404.3002C394.2232,-404.3002 17.916,-404.3002 17.916,-404.3002 11.916,-404.3002 5.916,-398.3002 5.916,-392.3002 5.916,-392.3002 5.916,-329.6998 5.916,-329.6998 5.916,-323.6998 11.916,-317.6998 17.916,-317.6998 17.916,-317.6998 394.2232,-317.6998 394.2232,-317.6998 400.2232,-317.6998 406.2232,-323.6998 406.2232,-329.6998 406.2232,-329.6998 406.2232,-392.3002 406.2232,-392.3002 406.2232,-398.3002 400.2232,-404.3002 394.2232,-404.3002"/>
<text text-anchor="middle" x="206.0696" y="-353.8" font-family="Monaco" font-size="24.00" fill="#000000">diffutils@3.8%gcc@9.4.0/xm3ldz3</text>
</g>
<!-- h3ujmb3ts4kxxxv77knh2knuystuerbx&#45;&gt;xm3ldz3y3msfdc3hzshvxpbpg5hnt6o6 -->
<g id="edge1" class="edge">
<title>h3ujmb3ts4kxxxv77knh2knuystuerbx-&gt;xm3ldz3y3msfdc3hzshvxpbpg5hnt6o6</title>
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M206.0696,-476.4072C206.0696,-457.3263 206.0696,-434.7257 206.0696,-414.6046"/>
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="209.5697,-414.3403 206.0696,-404.3403 202.5697,-414.3404 209.5697,-414.3403"/>
</g>
<!-- o524gebsxavobkte3k5fglgwnedfkadf&#45;&gt;ywrpvv2hgooeepdke33exkqrtdpd5gkl -->
<g id="edge11" class="edge">
<title>o524gebsxavobkte3k5fglgwnedfkadf-&gt;ywrpvv2hgooeepdke33exkqrtdpd5gkl</title>
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M690.0981,-952.705C684.8522,-895.2533 675.6173,-794.1153 669.9514,-732.0637"/>
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="673.4345,-731.7184 669.0396,-722.0781 666.4635,-732.355 673.4345,-731.7184"/>
</g>
<!-- 4vsmjofkhntilgzh4zebluqak5mdsu3x -->
<g id="node9" class="node">
<title>4vsmjofkhntilgzh4zebluqak5mdsu3x</title>
<path fill="#ff7f50" stroke="#000000" stroke-width="4" d="M1977.9121,-721.9002C1977.9121,-721.9002 1386.2271,-721.9002 1386.2271,-721.9002 1380.2271,-721.9002 1374.2271,-715.9002 1374.2271,-709.9002 1374.2271,-709.9002 1374.2271,-647.2998 1374.2271,-647.2998 1374.2271,-641.2998 1380.2271,-635.2998 1386.2271,-635.2998 1386.2271,-635.2998 1977.9121,-635.2998 1977.9121,-635.2998 1983.9121,-635.2998 1989.9121,-641.2998 1989.9121,-647.2998 1989.9121,-647.2998 1989.9121,-709.9002 1989.9121,-709.9002 1989.9121,-715.9002 1983.9121,-721.9002 1977.9121,-721.9002"/>
<text text-anchor="middle" x="1682.0696" y="-671.4" font-family="Monaco" font-size="24.00" fill="#000000">ca-certificates-mozilla@2023-01-10%gcc@9.4.0/4vsmjof</text>
</g>
<!-- xiro2z6na56qdd4czjhj54eag3ekbiow -->
<g id="node10" class="node">
<title>xiro2z6na56qdd4czjhj54eag3ekbiow</title>
<path fill="#add8e6" stroke="#000000" stroke-width="4" d="M988.1824,-1357.1002C988.1824,-1357.1002 533.9568,-1357.1002 533.9568,-1357.1002 527.9568,-1357.1002 521.9568,-1351.1002 521.9568,-1345.1002 521.9568,-1345.1002 521.9568,-1282.4998 521.9568,-1282.4998 521.9568,-1276.4998 527.9568,-1270.4998 533.9568,-1270.4998 533.9568,-1270.4998 988.1824,-1270.4998 988.1824,-1270.4998 994.1824,-1270.4998 1000.1824,-1276.4998 1000.1824,-1282.4998 1000.1824,-1282.4998 1000.1824,-1345.1002 1000.1824,-1345.1002 1000.1824,-1351.1002 994.1824,-1357.1002 988.1824,-1357.1002"/>
<text text-anchor="middle" x="761.0696" y="-1306.6" font-family="Monaco" font-size="24.00" fill="#000000">lapackpp@2022.07.00%gcc@9.4.0/xiro2z6</text>
</g>
<!-- xiro2z6na56qdd4czjhj54eag3ekbiow&#45;&gt;mujlx42xgttdc6u6rmiftsktpsrcmpbs -->
<g id="edge37" class="edge">
<title>xiro2z6na56qdd4czjhj54eag3ekbiow-&gt;mujlx42xgttdc6u6rmiftsktpsrcmpbs</title>
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M741.8402,-1270.7959C733.6789,-1251.4525 723.9915,-1228.4917 715.4149,-1208.1641"/>
<path fill="none" stroke="#dc143c" stroke-width="2" d="M743.6829,-1270.0185C735.5216,-1250.675 725.8342,-1227.7143 717.2576,-1207.3866"/>
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="719.4676,-1206.1933 712.3555,-1198.3403 713.0181,-1208.9144 719.4676,-1206.1933"/>
</g>
<!-- xiro2z6na56qdd4czjhj54eag3ekbiow&#45;&gt;o524gebsxavobkte3k5fglgwnedfkadf -->
<g id="edge35" class="edge">
<title>xiro2z6na56qdd4czjhj54eag3ekbiow-&gt;o524gebsxavobkte3k5fglgwnedfkadf</title>
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M597.2326,-1271.3826C534.1471,-1251.0571 472.8527,-1225.5904 454.2471,-1198.9688 432.1275,-1166.6075 433.5639,-1144.2113 454.2226,-1111.0684 472.6194,-1081.8657 500.3255,-1060.004 530.6572,-1043.4601"/>
<path fill="none" stroke="#dc143c" stroke-width="2" d="M597.8458,-1269.4789C534.9144,-1249.2102 473.6201,-1223.7435 455.8921,-1197.8312 434.1234,-1166.7355 435.5598,-1144.3393 455.9166,-1112.1316 473.8583,-1083.4358 501.5644,-1061.5741 531.6142,-1045.2163"/>
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="532.9062,-1047.362 540.1422,-1039.6231 529.6595,-1041.1605 532.9062,-1047.362"/>
<text text-anchor="middle" x="474.3109" y="-1250.2598" font-family="Times,serif" font-size="14.00" fill="#000000">virtuals=blas,lapack</text>
</g>
<!-- xiro2z6na56qdd4czjhj54eag3ekbiow&#45;&gt;gguve5icmo5e4cw5o3hvvfsxremc46if -->
<g id="edge45" class="edge">
<title>xiro2z6na56qdd4czjhj54eag3ekbiow-&gt;gguve5icmo5e4cw5o3hvvfsxremc46if</title>
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M833.5823,-1270.3956C865.3249,-1250.0918 902.2709,-1224.6296 933.0696,-1198.4 973.2414,-1164.1878 969.8532,-1140.395 1014.0696,-1111.6 1058.5051,-1082.6623 1111.0286,-1060.0733 1161.029,-1042.8573"/>
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="1162.313,-1046.1177 1170.6621,-1039.5953 1160.0678,-1039.4876 1162.313,-1046.1177"/>
</g>
<!-- j5rupoqliu7kasm6xndl7ui32wgawkru -->
<g id="node11" class="node">
<title>j5rupoqliu7kasm6xndl7ui32wgawkru</title>
<path fill="#add8e6" stroke="#000000" stroke-width="4" d="M1527.3625,-245.5002C1527.3625,-245.5002 1164.7767,-245.5002 1164.7767,-245.5002 1158.7767,-245.5002 1152.7767,-239.5002 1152.7767,-233.5002 1152.7767,-233.5002 1152.7767,-170.8998 1152.7767,-170.8998 1152.7767,-164.8998 1158.7767,-158.8998 1164.7767,-158.8998 1164.7767,-158.8998 1527.3625,-158.8998 1527.3625,-158.8998 1533.3625,-158.8998 1539.3625,-164.8998 1539.3625,-170.8998 1539.3625,-170.8998 1539.3625,-233.5002 1539.3625,-233.5002 1539.3625,-239.5002 1533.3625,-245.5002 1527.3625,-245.5002"/>
<text text-anchor="middle" x="1346.0696" y="-195" font-family="Monaco" font-size="24.00" fill="#000000">ncurses@6.4%gcc@9.4.0/j5rupoq</text>
</g>
<!-- j5rupoqliu7kasm6xndl7ui32wgawkru&#45;&gt;i4avrindvhcamhurzbfdaggbj2zgsrrh -->
<g id="edge15" class="edge">
<title>j5rupoqliu7kasm6xndl7ui32wgawkru-&gt;i4avrindvhcamhurzbfdaggbj2zgsrrh</title>
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M1346.0696,-158.8072C1346.0696,-139.7263 1346.0696,-117.1257 1346.0696,-97.0046"/>
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="1349.5697,-96.7403 1346.0696,-86.7403 1342.5697,-96.7404 1349.5697,-96.7403"/>
<text text-anchor="middle" x="1292.7436" y="-127.0482" font-family="Times,serif" font-size="14.00" fill="#000000">virtuals=pkgconfig</text>
</g>
<!-- imopnxjmv7cwzyiecdw2saq42qvpnauh&#45;&gt;ern66gyp6qmhmpod4jaynxx4weoberfm -->
<g id="edge19" class="edge">
<title>imopnxjmv7cwzyiecdw2saq42qvpnauh-&gt;ern66gyp6qmhmpod4jaynxx4weoberfm</title>
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M2788.0102,-1270.7555C2780.8234,-1251.412 2772.2926,-1228.4513 2764.7402,-1208.1236"/>
<path fill="none" stroke="#dc143c" stroke-width="2" d="M2789.885,-1270.0589C2782.6982,-1250.7155 2774.1674,-1227.7547 2766.615,-1207.4271"/>
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="2768.9358,-1206.4953 2762.1721,-1198.3403 2762.3741,-1208.9332 2768.9358,-1206.4953"/>
</g>
<!-- imopnxjmv7cwzyiecdw2saq42qvpnauh&#45;&gt;2w3nq3n3hcj2tqlvcpewsryamltlu5tw -->
<g id="edge12" class="edge">
<title>imopnxjmv7cwzyiecdw2saq42qvpnauh-&gt;2w3nq3n3hcj2tqlvcpewsryamltlu5tw</title>
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M2907.2846,-1269.5018C2936.475,-1251.8137 2964.9158,-1228.1116 2981.1904,-1197.9236 2999.477,-1164.2363 3005.2125,-1141.4693 2981.289,-1112.225 2954.5472,-1078.5579 2876.5297,-1053.8974 2789.2983,-1036.3535"/>
<path fill="none" stroke="#dc143c" stroke-width="2" d="M2908.3216,-1271.2119C2937.7554,-1253.3501 2966.1962,-1229.648 2982.9488,-1198.8764 3001.4164,-1164.7249 3007.1519,-1141.9579 2982.8502,-1110.975 2955.15,-1076.6509 2877.1325,-1051.9904 2789.6927,-1034.3928"/>
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="2790.125,-1031.93 2779.6364,-1033.4269 2788.7692,-1038.7974 2790.125,-1031.93"/>
<text text-anchor="middle" x="2836.0561" y="-1059.5023" font-family="Times,serif" font-size="14.00" fill="#000000">virtuals=mpi</text>
</g>
<!-- imopnxjmv7cwzyiecdw2saq42qvpnauh&#45;&gt;gguve5icmo5e4cw5o3hvvfsxremc46if -->
<g id="edge49" class="edge">
<title>imopnxjmv7cwzyiecdw2saq42qvpnauh-&gt;gguve5icmo5e4cw5o3hvvfsxremc46if</title>
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M2883.731,-1270.4691C2909.4451,-1251.9243 2934.9956,-1227.7144 2949.0696,-1198.4 2965.7663,-1163.6227 2975.3506,-1139.841 2949.0696,-1111.6 2925.7161,-1086.5049 1993.0368,-1031.9055 1561.3071,-1007.9103"/>
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="1561.3813,-1004.4092 1551.2026,-1007.3492 1560.9931,-1011.3984 1561.3813,-1004.4092"/>
</g>
<!-- ern66gyp6qmhmpod4jaynxx4weoberfm&#45;&gt;gguve5icmo5e4cw5o3hvvfsxremc46if -->
<g id="edge50" class="edge">
<title>ern66gyp6qmhmpod4jaynxx4weoberfm-&gt;gguve5icmo5e4cw5o3hvvfsxremc46if</title>
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M2551.6031,-1113.7387C2547.0531,-1112.9948 2542.537,-1112.2802 2538.0696,-1111.6 2198.5338,-1059.8997 1800.8632,-1026.8711 1561.4583,-1009.9443"/>
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="1561.4619,-1006.436 1551.2407,-1009.2249 1560.9702,-1013.4187 1561.4619,-1006.436"/>
</g>
<!-- nqiyrxlid6tikfpvoqdpvsjt5drs2obf&#45;&gt;hkcrbrtf2qex6rvzuok5tzdrbam55pdn -->
<g id="edge34" class="edge">
<title>nqiyrxlid6tikfpvoqdpvsjt5drs2obf-&gt;hkcrbrtf2qex6rvzuok5tzdrbam55pdn</title>
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M1865.2226,-1269.4691C1922.6966,-1248.2438 1991.964,-1222.6632 2050.6644,-1200.985"/>
<path fill="none" stroke="#dc143c" stroke-width="2" d="M1865.9154,-1271.3453C1923.3894,-1250.12 1992.6569,-1224.5394 2051.3572,-1202.8612"/>
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="2052.5441,-1205.088 2060.7123,-1198.3403 2050.119,-1198.5215 2052.5441,-1205.088"/>
<text text-anchor="middle" x="1910.9073" y="-1238.6056" font-family="Times,serif" font-size="14.00" fill="#000000">virtuals=scalapack</text>
</g>
<!-- nqiyrxlid6tikfpvoqdpvsjt5drs2obf&#45;&gt;o524gebsxavobkte3k5fglgwnedfkadf -->
<g id="edge52" class="edge">
<title>nqiyrxlid6tikfpvoqdpvsjt5drs2obf-&gt;o524gebsxavobkte3k5fglgwnedfkadf</title>
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M1519.9696,-1290.6844C1394.6018,-1273.3057 1237.6631,-1244.7294 1102.7507,-1199.3478 1021.8138,-1171.8729 1008.1992,-1149.8608 932.6248,-1112.4956 887.1715,-1089.9216 836.578,-1065.4054 793.6914,-1044.8018"/>
<path fill="none" stroke="#dc143c" stroke-width="2" d="M1520.2442,-1288.7034C1394.9601,-1271.3381 1238.0214,-1242.7618 1103.3885,-1197.4522 1023.5148,-1170.8208 1009.9002,-1148.8087 933.5144,-1110.7044 888.0436,-1088.1218 837.4502,-1063.6056 794.5574,-1042.999"/>
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="795.6235,-1040.7377 785.0938,-1039.565 792.5939,-1047.0482 795.6235,-1040.7377"/>
<text text-anchor="middle" x="1046.8307" y="-1202.5988" font-family="Times,serif" font-size="14.00" fill="#000000">virtuals=blas,lapack</text>
</g>
<!-- lfh3aovn65e66cs24qiehq3nd2ddojef -->
<g id="node21" class="node">
<title>lfh3aovn65e66cs24qiehq3nd2ddojef</title>
<path fill="#add8e6" stroke="#000000" stroke-width="4" d="M1547.9922,-1198.3002C1547.9922,-1198.3002 1144.147,-1198.3002 1144.147,-1198.3002 1138.147,-1198.3002 1132.147,-1192.3002 1132.147,-1186.3002 1132.147,-1186.3002 1132.147,-1123.6998 1132.147,-1123.6998 1132.147,-1117.6998 1138.147,-1111.6998 1144.147,-1111.6998 1144.147,-1111.6998 1547.9922,-1111.6998 1547.9922,-1111.6998 1553.9922,-1111.6998 1559.9922,-1117.6998 1559.9922,-1123.6998 1559.9922,-1123.6998 1559.9922,-1186.3002 1559.9922,-1186.3002 1559.9922,-1192.3002 1553.9922,-1198.3002 1547.9922,-1198.3002"/>
<text text-anchor="middle" x="1346.0696" y="-1147.8" font-family="Monaco" font-size="24.00" fill="#000000">arpack-ng@3.8.0%gcc@9.4.0/lfh3aov</text>
</g>
<!-- nqiyrxlid6tikfpvoqdpvsjt5drs2obf&#45;&gt;lfh3aovn65e66cs24qiehq3nd2ddojef -->
<g id="edge46" class="edge">
<title>nqiyrxlid6tikfpvoqdpvsjt5drs2obf-&gt;lfh3aovn65e66cs24qiehq3nd2ddojef</title>
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M1637.8539,-1271.3373C1584.2332,-1250.1557 1519.6324,-1224.6368 1464.827,-1202.9873"/>
<path fill="none" stroke="#dc143c" stroke-width="2" d="M1638.5887,-1269.4771C1584.968,-1248.2956 1520.3672,-1222.7767 1465.5618,-1201.1272"/>
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="1466.3716,-1198.7592 1455.785,-1198.3403 1463.7998,-1205.2696 1466.3716,-1198.7592"/>
</g>
<!-- 57joith2sqq6sehge54vlloyolm36mdu -->
<g id="node22" class="node">
<title>57joith2sqq6sehge54vlloyolm36mdu</title>
<path fill="#ff7f50" stroke="#000000" stroke-width="4" d="M1906.2352,-1198.3002C1906.2352,-1198.3002 1589.904,-1198.3002 1589.904,-1198.3002 1583.904,-1198.3002 1577.904,-1192.3002 1577.904,-1186.3002 1577.904,-1186.3002 1577.904,-1123.6998 1577.904,-1123.6998 1577.904,-1117.6998 1583.904,-1111.6998 1589.904,-1111.6998 1589.904,-1111.6998 1906.2352,-1111.6998 1906.2352,-1111.6998 1912.2352,-1111.6998 1918.2352,-1117.6998 1918.2352,-1123.6998 1918.2352,-1123.6998 1918.2352,-1186.3002 1918.2352,-1186.3002 1918.2352,-1192.3002 1912.2352,-1198.3002 1906.2352,-1198.3002"/>
<text text-anchor="middle" x="1748.0696" y="-1147.8" font-family="Monaco" font-size="24.00" fill="#000000">sed@4.8%gcc@9.4.0/57joith</text>
</g>
<!-- nqiyrxlid6tikfpvoqdpvsjt5drs2obf&#45;&gt;57joith2sqq6sehge54vlloyolm36mdu -->
<g id="edge27" class="edge">
<title>nqiyrxlid6tikfpvoqdpvsjt5drs2obf-&gt;57joith2sqq6sehge54vlloyolm36mdu</title>
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M1748.0696,-1270.4072C1748.0696,-1251.3263 1748.0696,-1228.7257 1748.0696,-1208.6046"/>
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="1751.5697,-1208.3403 1748.0696,-1198.3403 1744.5697,-1208.3404 1751.5697,-1208.3403"/>
</g>
<!-- nqiyrxlid6tikfpvoqdpvsjt5drs2obf&#45;&gt;2w3nq3n3hcj2tqlvcpewsryamltlu5tw -->
<g id="edge24" class="edge">
<title>nqiyrxlid6tikfpvoqdpvsjt5drs2obf-&gt;2w3nq3n3hcj2tqlvcpewsryamltlu5tw</title>
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M1975.9734,-1301.684C2148.2819,-1288.3961 2365.6859,-1259.5384 2428.3689,-1197.6866 2466.9261,-1160.1438 2472.9783,-1095.7153 2471.5152,-1049.9701"/>
<path fill="none" stroke="#dc143c" stroke-width="2" d="M1976.1272,-1303.678C2148.5451,-1290.3788 2365.949,-1261.521 2429.7703,-1199.1134 2468.9173,-1160.3309 2474.9695,-1095.9024 2473.5142,-1049.9065"/>
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="2476.0078,-1049.7027 2472.0657,-1039.8686 2469.0147,-1050.0146 2476.0078,-1049.7027"/>
<text text-anchor="middle" x="2207.8884" y="-1273.0053" font-family="Times,serif" font-size="14.00" fill="#000000">virtuals=mpi</text>
</g>
<!-- nqiyrxlid6tikfpvoqdpvsjt5drs2obf&#45;&gt;gguve5icmo5e4cw5o3hvvfsxremc46if -->
<g id="edge6" class="edge">
<title>nqiyrxlid6tikfpvoqdpvsjt5drs2obf-&gt;gguve5icmo5e4cw5o3hvvfsxremc46if</title>
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M1520.1614,-1301.6771C1362.9712,-1287.992 1173.582,-1259.0928 1123.0696,-1198.4 1098.3914,-1168.7481 1103.0165,-1144.5563 1123.0696,-1111.6 1140.5998,-1082.79 1167.9002,-1060.8539 1197.4647,-1044.2681"/>
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="1199.1408,-1047.3408 1206.2789,-1039.5114 1195.8163,-1041.1806 1199.1408,-1047.3408"/>
</g>
<!-- ogcucq2eod3xusvvied5ol2iobui4nsb -->
<g id="node18" class="node">
<title>ogcucq2eod3xusvvied5ol2iobui4nsb</title>
<path fill="#ff7f50" stroke="#000000" stroke-width="4" d="M400.2088,-245.5002C400.2088,-245.5002 11.9304,-245.5002 11.9304,-245.5002 5.9304,-245.5002 -.0696,-239.5002 -.0696,-233.5002 -.0696,-233.5002 -.0696,-170.8998 -.0696,-170.8998 -.0696,-164.8998 5.9304,-158.8998 11.9304,-158.8998 11.9304,-158.8998 400.2088,-158.8998 400.2088,-158.8998 406.2088,-158.8998 412.2088,-164.8998 412.2088,-170.8998 412.2088,-170.8998 412.2088,-233.5002 412.2088,-233.5002 412.2088,-239.5002 406.2088,-245.5002 400.2088,-245.5002"/>
<text text-anchor="middle" x="206.0696" y="-195" font-family="Monaco" font-size="24.00" fill="#000000">libiconv@1.17%gcc@9.4.0/ogcucq2</text>
</g>
<!-- xm3ldz3y3msfdc3hzshvxpbpg5hnt6o6&#45;&gt;ogcucq2eod3xusvvied5ol2iobui4nsb -->
<g id="edge47" class="edge">
<title>xm3ldz3y3msfdc3hzshvxpbpg5hnt6o6-&gt;ogcucq2eod3xusvvied5ol2iobui4nsb</title>
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M205.0696,-317.6072C205.0696,-298.5263 205.0696,-275.9257 205.0696,-255.8046"/>
<path fill="none" stroke="#dc143c" stroke-width="2" d="M207.0696,-317.6072C207.0696,-298.5263 207.0696,-275.9257 207.0696,-255.8046"/>
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="209.5697,-255.5403 206.0696,-245.5403 202.5697,-255.5404 209.5697,-255.5403"/>
<text text-anchor="middle" x="165.5739" y="-285.8482" font-family="Times,serif" font-size="14.00" fill="#000000">virtuals=iconv</text>
</g>
<!-- 4bu62kyfuh4ikdkuyxfxjxanf7e7qopu&#45;&gt;mujlx42xgttdc6u6rmiftsktpsrcmpbs -->
<g id="edge42" class="edge">
<title>4bu62kyfuh4ikdkuyxfxjxanf7e7qopu-&gt;mujlx42xgttdc6u6rmiftsktpsrcmpbs</title>
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M672.6614,-1430.2151C600.7916,-1411.3548 534.1254,-1386.9583 512.2667,-1357.7962 489.0909,-1326.029 493.54,-1304.0273 512.1928,-1269.9192 527.5256,-1242.0821 552.3382,-1220.1508 578.9347,-1203.0434"/>
<path fill="none" stroke="#dc143c" stroke-width="2" d="M673.169,-1428.2806C601.4789,-1409.4766 534.8127,-1385.0802 513.8725,-1356.6038 491.0512,-1326.4254 495.5003,-1304.4237 513.9464,-1270.8808 528.8502,-1243.5806 553.6627,-1221.6493 580.016,-1204.7259"/>
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="581.46,-1206.7724 588.1193,-1198.532 577.7747,-1200.8211 581.46,-1206.7724"/>
</g>
<!-- 4bu62kyfuh4ikdkuyxfxjxanf7e7qopu&#45;&gt;o524gebsxavobkte3k5fglgwnedfkadf -->
<g id="edge43" class="edge">
<title>4bu62kyfuh4ikdkuyxfxjxanf7e7qopu-&gt;o524gebsxavobkte3k5fglgwnedfkadf</title>
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M680.4783,-1430.2246C600.8632,-1410.3933 522.8724,-1385.2921 493.3877,-1357.9314 411.1392,-1281.1573 374.1678,-1206.1582 435.2305,-1111.0561 454.3431,-1081.6726 482.5021,-1059.8261 513.5088,-1043.3725"/>
<path fill="none" stroke="#dc143c" stroke-width="2" d="M680.9617,-1428.2839C601.476,-1408.4895 523.4851,-1383.3883 494.7515,-1356.4686 412.9331,-1280.273 375.9616,-1205.2739 436.9087,-1112.1439 455.569,-1083.2528 483.728,-1061.4063 514.4455,-1045.1396"/>
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="515.8631,-1047.2236 523.1893,-1039.5699 512.6893,-1040.9844 515.8631,-1047.2236"/>
<text text-anchor="middle" x="453.0969" y="-1356.92" font-family="Times,serif" font-size="14.00" fill="#000000">virtuals=blas</text>
</g>
<!-- 4bu62kyfuh4ikdkuyxfxjxanf7e7qopu&#45;&gt;xiro2z6na56qdd4czjhj54eag3ekbiow -->
<g id="edge38" class="edge">
<title>4bu62kyfuh4ikdkuyxfxjxanf7e7qopu-&gt;xiro2z6na56qdd4czjhj54eag3ekbiow</title>
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M857.6892,-1429.8521C840.9235,-1409.9835 820.9375,-1386.2985 803.4466,-1365.5705"/>
<path fill="none" stroke="#dc143c" stroke-width="2" d="M859.2178,-1428.5623C842.4521,-1408.6937 822.466,-1385.0087 804.9751,-1364.2807"/>
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="806.7654,-1362.5258 797.6414,-1357.1403 801.4156,-1367.0402 806.7654,-1362.5258"/>
</g>
<!-- 4bu62kyfuh4ikdkuyxfxjxanf7e7qopu&#45;&gt;2w3nq3n3hcj2tqlvcpewsryamltlu5tw -->
<g id="edge13" class="edge">
<title>4bu62kyfuh4ikdkuyxfxjxanf7e7qopu-&gt;2w3nq3n3hcj2tqlvcpewsryamltlu5tw</title>
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M1118.1783,-1450.5735C1412.4221,-1422.447 1902.6188,-1374.0528 1984.8578,-1356.2227 2203.916,-1308.9943 2329.6342,-1377.1305 2461.2658,-1197.8052 2492.3675,-1156.1664 2488.743,-1094.1171 2480.3694,-1050.0521"/>
<path fill="none" stroke="#dc143c" stroke-width="2" d="M1118.3686,-1452.5644C1412.6186,-1424.4374 1902.8153,-1376.0432 1985.2814,-1358.1773 2202.963,-1310.7526 2328.6812,-1378.8889 2462.8734,-1198.9948 2494.3641,-1156.0498 2490.7395,-1094.0005 2482.3343,-1049.6791"/>
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="2484.7438,-1048.9818 2479.3189,-1039.8812 2477.8845,-1050.3784 2484.7438,-1048.9818"/>
<text text-anchor="middle" x="1820.4407" y="-1379.7188" font-family="Times,serif" font-size="14.00" fill="#000000">virtuals=mpi</text>
</g>
<!-- 4bu62kyfuh4ikdkuyxfxjxanf7e7qopu&#45;&gt;gguve5icmo5e4cw5o3hvvfsxremc46if -->
<g id="edge32" class="edge">
<title>4bu62kyfuh4ikdkuyxfxjxanf7e7qopu-&gt;gguve5icmo5e4cw5o3hvvfsxremc46if</title>
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M947.2173,-1428.5496C968.7089,-1408.5917 992.2747,-1383.3345 1008.2117,-1356.6861 1067.0588,-1259.8646 1008.3745,-1197.6371 1084.3226,-1110.9351 1110.3076,-1081.7965 1144.7149,-1059.7578 1180.1804,-1043.0531"/>
<path fill="none" stroke="#daa520" stroke-width="2" d="M948.5783,-1430.0151C970.1712,-1409.9561 993.737,-1384.6989 1009.9275,-1357.7139 1068.5139,-1258.4924 1009.8295,-1196.2649 1085.8166,-1112.2649 1111.3864,-1083.4807 1145.7936,-1061.442 1181.0322,-1044.8626"/>
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="1182.4567,-1046.9607 1190.1008,-1039.6246 1179.5503,-1040.5926 1182.4567,-1046.9607"/>
</g>
<!-- 5xerf6imlgo4xlubacr4mljacc3edexo -->
<g id="node17" class="node">
<title>5xerf6imlgo4xlubacr4mljacc3edexo</title>
<path fill="#add8e6" stroke="#000000" stroke-width="4" d="M1822.3657,-880.7002C1822.3657,-880.7002 1437.7735,-880.7002 1437.7735,-880.7002 1431.7735,-880.7002 1425.7735,-874.7002 1425.7735,-868.7002 1425.7735,-868.7002 1425.7735,-806.0998 1425.7735,-806.0998 1425.7735,-800.0998 1431.7735,-794.0998 1437.7735,-794.0998 1437.7735,-794.0998 1822.3657,-794.0998 1822.3657,-794.0998 1828.3657,-794.0998 1834.3657,-800.0998 1834.3657,-806.0998 1834.3657,-806.0998 1834.3657,-868.7002 1834.3657,-868.7002 1834.3657,-874.7002 1828.3657,-880.7002 1822.3657,-880.7002"/>
<text text-anchor="middle" x="1630.0696" y="-830.2" font-family="Monaco" font-size="24.00" fill="#000000">openssl@1.1.1s%gcc@9.4.0/5xerf6i</text>
</g>
<!-- 5xerf6imlgo4xlubacr4mljacc3edexo&#45;&gt;ywrpvv2hgooeepdke33exkqrtdpd5gkl -->
<g id="edge22" class="edge">
<title>5xerf6imlgo4xlubacr4mljacc3edexo-&gt;ywrpvv2hgooeepdke33exkqrtdpd5gkl</title>
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M1425.7129,-803.7711C1262.7545,-776.9548 1035.5151,-739.5603 871.9084,-712.6373"/>
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="872.1525,-709.1305 861.7169,-710.9602 871.0158,-716.0376 872.1525,-709.1305"/>
</g>
<!-- 5xerf6imlgo4xlubacr4mljacc3edexo&#45;&gt;4vsmjofkhntilgzh4zebluqak5mdsu3x -->
<g id="edge48" class="edge">
<title>5xerf6imlgo4xlubacr4mljacc3edexo-&gt;4vsmjofkhntilgzh4zebluqak5mdsu3x</title>
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M1644.2788,-794.0072C1650.5843,-774.7513 1658.0636,-751.9107 1664.6976,-731.6514"/>
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="1668.0917,-732.533 1667.8776,-721.9403 1661.4393,-730.3546 1668.0917,-732.533"/>
</g>
<!-- 5xerf6imlgo4xlubacr4mljacc3edexo&#45;&gt;nizxi5u5bbrzhzwfy2qb7hatlhuswlrz -->
<g id="edge41" class="edge">
<title>5xerf6imlgo4xlubacr4mljacc3edexo-&gt;nizxi5u5bbrzhzwfy2qb7hatlhuswlrz</title>
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M1834.3289,-793.5645C1906.6817,-774.1673 1975.9199,-749.2273 1998.2925,-721.3707 2031.5218,-680.681 2032.1636,-617.9031 2027.044,-573.3921"/>
<path fill="none" stroke="#dc143c" stroke-width="2" d="M1834.8468,-795.4962C1907.3595,-776.0489 1976.5977,-751.1089 1999.8467,-722.6293 2033.5217,-680.7015 2034.1635,-617.9235 2029.0309,-573.1639"/>
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="2031.4885,-572.6712 2026.7474,-563.1964 2024.5451,-573.5598 2031.4885,-572.6712"/>
</g>
<!-- v32wejd4d5lc6uka4qlrogwh5xae2h3r -->
<g id="node26" class="node">
<title>v32wejd4d5lc6uka4qlrogwh5xae2h3r</title>
<path fill="#ff7f50" stroke="#000000" stroke-width="4" d="M1306.1776,-404.3002C1306.1776,-404.3002 929.9616,-404.3002 929.9616,-404.3002 923.9616,-404.3002 917.9616,-398.3002 917.9616,-392.3002 917.9616,-392.3002 917.9616,-329.6998 917.9616,-329.6998 917.9616,-323.6998 923.9616,-317.6998 929.9616,-317.6998 929.9616,-317.6998 1306.1776,-317.6998 1306.1776,-317.6998 1312.1776,-317.6998 1318.1776,-323.6998 1318.1776,-329.6998 1318.1776,-329.6998 1318.1776,-392.3002 1318.1776,-392.3002 1318.1776,-398.3002 1312.1776,-404.3002 1306.1776,-404.3002"/>
<text text-anchor="middle" x="1118.0696" y="-353.8" font-family="Monaco" font-size="24.00" fill="#000000">readline@8.2%gcc@9.4.0/v32wejd</text>
</g>
<!-- uabgssx6lsgrevwbttslldnr5nzguprj&#45;&gt;v32wejd4d5lc6uka4qlrogwh5xae2h3r -->
<g id="edge7" class="edge">
<title>uabgssx6lsgrevwbttslldnr5nzguprj-&gt;v32wejd4d5lc6uka4qlrogwh5xae2h3r</title>
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M1117.0696,-476.4072C1117.0696,-457.3263 1117.0696,-434.7257 1117.0696,-414.6046"/>
<path fill="none" stroke="#dc143c" stroke-width="2" d="M1119.0696,-476.4072C1119.0696,-457.3263 1119.0696,-434.7257 1119.0696,-414.6046"/>
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="1121.5697,-414.3403 1118.0696,-404.3403 1114.5697,-414.3404 1121.5697,-414.3403"/>
</g>
<!-- lfh3aovn65e66cs24qiehq3nd2ddojef&#45;&gt;o524gebsxavobkte3k5fglgwnedfkadf -->
<g id="edge14" class="edge">
<title>lfh3aovn65e66cs24qiehq3nd2ddojef-&gt;o524gebsxavobkte3k5fglgwnedfkadf</title>
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M1167.6711,-1112.5788C1078.9073,-1090.9596 971.5916,-1064.822 881.5513,-1042.892"/>
<path fill="none" stroke="#dc143c" stroke-width="2" d="M1168.1444,-1110.6356C1079.3806,-1089.0165 972.0649,-1062.8788 882.0246,-1040.9488"/>
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="882.5603,-1038.5062 872.016,-1039.5403 880.9038,-1045.3074 882.5603,-1038.5062"/>
<text text-anchor="middle" x="963.904" y="-1079.817" font-family="Times,serif" font-size="14.00" fill="#000000">virtuals=blas,lapack</text>
</g>
<!-- lfh3aovn65e66cs24qiehq3nd2ddojef&#45;&gt;2w3nq3n3hcj2tqlvcpewsryamltlu5tw -->
<g id="edge31" class="edge">
<title>lfh3aovn65e66cs24qiehq3nd2ddojef-&gt;2w3nq3n3hcj2tqlvcpewsryamltlu5tw</title>
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M1559.7922,-1112.1043C1562.8511,-1111.5975 1565.8904,-1111.1002 1568.9103,-1110.6128 1759.2182,-1079.8992 1973.2397,-1052.1328 2144.6143,-1031.5343"/>
<path fill="none" stroke="#dc143c" stroke-width="2" d="M1560.1191,-1114.0774C1563.1741,-1113.5712 1566.2134,-1113.0739 1569.2289,-1112.5872 1759.4755,-1081.8826 1973.497,-1054.1161 2144.8529,-1033.52"/>
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="2145.1529,-1036.002 2154.6648,-1031.3357 2144.3191,-1029.0518 2145.1529,-1036.002"/>
<text text-anchor="middle" x="1828.178" y="-1072.4692" font-family="Times,serif" font-size="14.00" fill="#000000">virtuals=mpi</text>
</g>
<!-- lfh3aovn65e66cs24qiehq3nd2ddojef&#45;&gt;gguve5icmo5e4cw5o3hvvfsxremc46if -->
<g id="edge21" class="edge">
<title>lfh3aovn65e66cs24qiehq3nd2ddojef-&gt;gguve5icmo5e4cw5o3hvvfsxremc46if</title>
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M1346.0696,-1111.6072C1346.0696,-1092.5263 1346.0696,-1069.9257 1346.0696,-1049.8046"/>
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="1349.5697,-1049.5403 1346.0696,-1039.5403 1342.5697,-1049.5404 1349.5697,-1049.5403"/>
</g>
<!-- 2w3nq3n3hcj2tqlvcpewsryamltlu5tw&#45;&gt;htzjns66gmq6pjofohp26djmjnpbegho -->
<g id="edge30" class="edge">
<title>2w3nq3n3hcj2tqlvcpewsryamltlu5tw-&gt;htzjns66gmq6pjofohp26djmjnpbegho</title>
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M2467.0696,-952.8072C2467.0696,-933.7263 2467.0696,-911.1257 2467.0696,-891.0046"/>
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="2470.5697,-890.7403 2467.0696,-880.7403 2463.5697,-890.7404 2470.5697,-890.7403"/>
</g>
<!-- 7rzbmgoxhmm2jhellkgcjmn62uklf22x&#45;&gt;gguve5icmo5e4cw5o3hvvfsxremc46if -->
<g id="edge2" class="edge">
<title>7rzbmgoxhmm2jhellkgcjmn62uklf22x-&gt;gguve5icmo5e4cw5o3hvvfsxremc46if</title>
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M1422.351,-1429.2133C1312.2528,-1388.8872 1171.1589,-1316.8265 1103.0696,-1198.4 1083.8409,-1164.956 1082.4563,-1144.2088 1103.0696,-1111.6 1121.4102,-1082.5864 1149.2483,-1060.7204 1179.6189,-1044.2895"/>
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="1181.4205,-1047.2977 1188.6801,-1039.5809 1178.1927,-1041.0863 1181.4205,-1047.2977"/>
</g>
<!-- v32wejd4d5lc6uka4qlrogwh5xae2h3r&#45;&gt;j5rupoqliu7kasm6xndl7ui32wgawkru -->
<g id="edge39" class="edge">
<title>v32wejd4d5lc6uka4qlrogwh5xae2h3r-&gt;j5rupoqliu7kasm6xndl7ui32wgawkru</title>
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M1179.8001,-316.7866C1209.2065,-296.3053 1244.4355,-271.7686 1274.8343,-250.5961"/>
<path fill="none" stroke="#dc143c" stroke-width="2" d="M1180.9431,-318.4278C1210.3495,-297.9465 1245.5785,-273.4098 1275.9774,-252.2373"/>
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="1277.6375,-254.1277 1283.8429,-245.5403 1273.6367,-248.3836 1277.6375,-254.1277"/>
</g>
<!-- gguve5icmo5e4cw5o3hvvfsxremc46if&#45;&gt;j5rupoqliu7kasm6xndl7ui32wgawkru -->
<g id="edge18" class="edge">
<title>gguve5icmo5e4cw5o3hvvfsxremc46if-&gt;j5rupoqliu7kasm6xndl7ui32wgawkru</title>
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M1345.0696,-952.7909C1345.0696,-891.6316 1345.0696,-776.6094 1345.0696,-678.6 1345.0696,-678.6 1345.0696,-678.6 1345.0696,-519.8 1345.0696,-426.9591 1345.0696,-318.8523 1345.0696,-255.7237"/>
<path fill="none" stroke="#dc143c" stroke-width="2" d="M1347.0696,-952.7909C1347.0696,-891.6316 1347.0696,-776.6094 1347.0696,-678.6 1347.0696,-678.6 1347.0696,-678.6 1347.0696,-519.8 1347.0696,-426.9591 1347.0696,-318.8523 1347.0696,-255.7237"/>
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="1349.5697,-255.6091 1346.0696,-245.6091 1342.5697,-255.6092 1349.5697,-255.6091"/>
</g>
<!-- gguve5icmo5e4cw5o3hvvfsxremc46if&#45;&gt;5xerf6imlgo4xlubacr4mljacc3edexo -->
<g id="edge40" class="edge">
<title>gguve5icmo5e4cw5o3hvvfsxremc46if-&gt;5xerf6imlgo4xlubacr4mljacc3edexo</title>
<path fill="none" stroke="#1e90ff" stroke-width="2" d="M1423.1858,-951.9344C1460.2844,-931.1905 1504.8229,-906.2866 1543.0151,-884.9312"/>
<path fill="none" stroke="#dc143c" stroke-width="2" d="M1424.1619,-953.68C1461.2605,-932.9361 1505.799,-908.0322 1543.9912,-886.6769"/>
<polygon fill="#1e90ff" stroke="#1e90ff" stroke-width="2" points="1545.5391,-888.6757 1552.5592,-880.7403 1542.1228,-882.5659 1545.5391,-888.6757"/>
</g>
</g>
</svg>


View File

@@ -54,16 +54,9 @@ or refer to the full manual below.
features
getting_started
basic_usage
Tutorial: Spack 101 <https://spack-tutorial.readthedocs.io>
replace_conda_homebrew
.. toctree::
:maxdepth: 2
:caption: Links
Tutorial (spack-tutorial.rtfd.io) <https://spack-tutorial.readthedocs.io>
Packages (packages.spack.io) <https://packages.spack.io>
Binaries (binaries.spack.io) <https://cache.spack.io>
.. toctree::
:maxdepth: 2
:caption: Reference
@@ -79,6 +72,7 @@ or refer to the full manual below.
repositories
binary_caches
command_index
package_list
chain
extensions
pipelines

View File

@@ -519,11 +519,11 @@ inspections and customize them per-module-set.
modules:
prefix_inspections:
./bin:
bin:
- PATH
./man:
man:
- MANPATH
./:
'':
- CMAKE_PREFIX_PATH
Prefix inspections are only applied if the relative path inside the
@@ -579,7 +579,7 @@ the view.
view_relative_modules:
use_view: my_view
prefix_inspections:
./bin:
bin:
- PATH
view:
my_view:

View File

@@ -0,0 +1,17 @@
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
Spack Project Developers. See the top-level COPYRIGHT file for details.
SPDX-License-Identifier: (Apache-2.0 OR MIT)
.. _package-list:
============
Package List
============
This is a list of things you can install using Spack. It is
automatically generated based on the packages in this Spack
version.
.. raw:: html
:file: package_list.html

View File

@@ -1549,7 +1549,7 @@ its value:
def configure_args(self):
...
if self.spec.satisfies("+shared"):
if "+shared" in self.spec:
extra_args.append("--enable-shared")
else:
extra_args.append("--disable-shared")
@@ -1636,7 +1636,7 @@ Within a package recipe a multi-valued variant is tested using a ``key=value`` s
.. code-block:: python
if spec.satisfies("languages=jit"):
if "languages=jit" in spec:
options.append("--enable-host-shared")
"""""""""""""""""""""""""""""""""""""""""""
@@ -2352,7 +2352,7 @@ the following at the command line of a bash shell:
.. code-block:: console
$ for i in {1..12}; do nohup spack install -j 4 mpich@3.3.2 >> mpich_install.txt 2>&1 & done
$ for i in {1..12}; do nohup spack install -j 4 mpich@3.3.2 >> mpich_install.txt 2>&1 &; done
.. note::
@@ -2557,10 +2557,9 @@ Conditional dependencies
^^^^^^^^^^^^^^^^^^^^^^^^
You may have a package that only requires a dependency under certain
conditions. For example, you may have a package with optional MPI support.
You would then provide a variant to reflect that the feature is optional
and specify the MPI dependency only applies when MPI support is enabled.
In that case, you could say something like:
conditions. For example, you may have a package that has optional MPI support,
- MPI is only a dependency when you want to enable MPI support for the
package. In that case, you could say something like:
.. code-block:: python
@@ -2568,39 +2567,13 @@ In that case, you could say something like:
depends_on("mpi", when="+mpi")
``when`` can include constraints on the variant, version, compiler, etc. and
the :mod:`syntax<spack.spec>` is the same as for Specs written on the command
line.
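As a minimal sketch (the ``Example`` package and its constraints are invented
for illustration), a variant combined with conditional dependencies might read:

.. code-block:: python

   class Example(Package):
       # Expose the optional feature as a variant
       variant("mpi", default=False, description="Enable MPI support")

       # The dependency applies only when the variant is enabled
       depends_on("mpi", when="+mpi")
       # 'when' can also constrain versions, compilers, etc.: here hdf5 with
       # MPI support is needed only from version 2.0 of this package onwards
       depends_on("hdf5+mpi", when="@2.0: +mpi")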
Suppose the above package also has, since version 3, optional `Trilinos`
support and you want them both to build either with or without MPI. Further
suppose you require a version of `Trilinos` no older than 12.6. In that case,
the `trilinos` variant and dependency directives would be:
.. code-block:: python
variant("trilinos", default=False, description="Enable Trilinos support")
depends_on("trilinos@12.6:", when="@3: +trilinos")
depends_on("trilinos@12.6: +mpi", when="@3: +trilinos +mpi")
Alternatively, you could use the `when` context manager to equivalently specify
the `trilinos` variant dependencies as follows:
.. code-block:: python
with when("@3: +trilinos"):
depends_on("trilinos@12.6:")
depends_on("trilinos +mpi", when="+mpi")
The argument to ``when`` in either case can include any Spec constraints that
are supported on the command line using the same :ref:`syntax <sec-specs>`.
.. note::
If a dependency isn't typically used, you can save time by making it
conditional since Spack will not build the dependency unless it is
required for the Spec.
If a dependency/feature of a package isn't typically used, you can save time
by making it conditional (since Spack will not build the dependency unless it
is required for the Spec).
.. _dependency_dependency_patching:
@@ -2688,6 +2661,60 @@ appear in the package file (or in this case, in the list).
right version. If two packages depend on ``binutils`` patched *the
same* way, they can both use a single installation of ``binutils``.
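A hedged sketch of such a directive (the patch file and variant are invented)
uses the ``patches=`` keyword of ``depends_on``:

.. code-block:: python

   depends_on(
       "binutils",
       patches=[patch("special-binutils-feature.patch", level=0)],
       when="+special_binutils",
   )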
.. _setup-dependent-environment:
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Influence how dependents are built or run
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Spack provides a mechanism for dependencies to influence the
environment of their dependents by overriding the
:meth:`setup_dependent_run_environment <spack.package_base.PackageBase.setup_dependent_run_environment>`
or the
:meth:`setup_dependent_build_environment <spack.builder.Builder.setup_dependent_build_environment>`
methods.
The Qt package, for instance, uses this call:
.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/qt/package.py
:pyobject: Qt.setup_dependent_build_environment
:linenos:
to set the ``QTDIR`` environment variable so that packages
that depend on a particular Qt installation will find it.
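A minimal sketch of such an override (assuming the dependent only needs the
installation prefix) could read:

.. code-block:: python

   def setup_dependent_build_environment(self, env, dependent_spec):
       # Let every dependent's build find this Qt installation
       env.set("QTDIR", self.prefix)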
Another good example of how a dependency can influence
the build environment of dependents is the Python package:
.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/python/package.py
:pyobject: Python.setup_dependent_build_environment
:linenos:
The method above ensures that any package that depends on Python
will have the ``PYTHONPATH``, ``PYTHONHOME`` and ``PATH`` environment
variables set appropriately before its installation starts. To make things
even simpler, the ``python setup.py`` command is also inserted into the module
scope of dependents by overriding a third method called
:meth:`setup_dependent_package <spack.package_base.PackageBase.setup_dependent_package>`:
.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/python/package.py
:pyobject: Python.setup_dependent_package
:linenos:
This allows most Python packages to have a very simple install procedure,
like the following:
.. code-block:: python
def install(self, spec, prefix):
setup_py("install", "--prefix={0}".format(prefix))
Finally, the Python package also takes care of the modifications to ``PYTHONPATH``
that allow its dependents to run correctly:
.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/python/package.py
:pyobject: Python.setup_dependent_run_environment
:linenos:
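A minimal hedged sketch of such a method (the path handling is simplified
compared to the real package) might be:

.. code-block:: python

   def setup_dependent_run_environment(self, env, dependent_spec):
       # Called once per dependent: make its modules importable at runtime
       if dependent_spec.package.extends(self.spec):
           env.prepend_path("PYTHONPATH", dependent_spec.prefix)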
.. _packaging_conflicts:
@@ -2832,70 +2859,6 @@ variant(s) are selected. This may be accomplished with conditional
extends("python", when="+python")
...
.. _setup-environment:
--------------------------------------------
Runtime and build time environment variables
--------------------------------------------
Spack provides a few methods to help package authors set up the required environment variables for
their package. Environment variables typically depend on how the package is used: variables that
make sense during the build phase may not be needed at runtime, and vice versa. Further, sometimes
it makes sense to let a dependency set the environment variables for its dependents. To allow all
this, Spack provides four different methods that can be overridden in a package:
1. :meth:`setup_build_environment <spack.builder.Builder.setup_build_environment>`
2. :meth:`setup_run_environment <spack.package_base.PackageBase.setup_run_environment>`
3. :meth:`setup_dependent_build_environment <spack.builder.Builder.setup_dependent_build_environment>`
4. :meth:`setup_dependent_run_environment <spack.package_base.PackageBase.setup_dependent_run_environment>`
The Qt package, for instance, uses this call:
.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/qt/package.py
:pyobject: Qt.setup_dependent_build_environment
:linenos:
to set the ``QTDIR`` environment variable so that packages that depend on a particular Qt
installation will find it.
The following diagram will give you an idea of when each of these methods is called in a build
context:
.. image:: images/setup_env.png
:align: center
Notice that ``setup_dependent_run_environment`` can be called multiple times, once for each
dependent package, whereas ``setup_run_environment`` is called only once for the package itself.
This means that the former should only be used if the environment variables depend on the dependent
package, whereas the latter should be used if the environment variables depend only on the package
itself.
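As a hedged sketch (the variable names are made up for illustration), a
package overriding the first two methods might look like:

.. code-block:: python

   class Example(Package):
       def setup_build_environment(self, env):
           # Visible only while this package itself is being built
           env.set("EXAMPLE_BUILD_MODE", "release")

       def setup_run_environment(self, env):
           # Visible whenever this package is loaded for use
           env.prepend_path("PATH", self.prefix.bin)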
--------------------------------
Setting package module variables
--------------------------------
Apart from modifying environment variables of the dependent package, you can also define Python
variables to be used by the dependent. This is done by implementing
:meth:`setup_dependent_package <spack.package_base.PackageBase.setup_dependent_package>`. An
example of this can be found in the ``Python`` package:
.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/python/package.py
:pyobject: Python.setup_dependent_package
:linenos:
This allows Python packages to directly use these variables:
.. code-block:: python
def install(self, spec, prefix):
...
install("script.py", python_platlib)
.. note::
We recommend using ``setup_dependent_package`` sparingly, as it is not always clear where
global variables are coming from when editing a ``package.py`` file.
-----
Views
-----
@@ -2974,33 +2937,6 @@ The ``provides("mpi")`` call tells Spack that the ``mpich`` package
can be used to satisfy the dependency of any package that
``depends_on("mpi")``.
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Providing multiple virtuals simultaneously
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Packages can provide more than one virtual dependency. Sometimes, due to implementation details,
there are subsets of those virtuals that need to be provided together by the same package.
A well-known example is ``openblas``, which provides both the ``lapack`` and ``blas`` API in a single ``libopenblas``
library. A package that needs ``lapack`` and ``blas`` must either use ``openblas`` to provide both, or not use
``openblas`` at all. It cannot pick one or the other.
To express this constraint in a package, the two virtual dependencies must be listed in the same ``provides`` directive:
.. code-block:: python
provides('blas', 'lapack')
This makes it impossible to select ``openblas`` as a provider for one of the two
virtual dependencies and not for the other. If you try to, Spack will report an error:
.. code-block:: console
$ spack spec netlib-scalapack ^[virtuals=lapack] openblas ^[virtuals=blas] atlas
==> Error: concretization failed for the following reasons:
1. Package 'openblas' needs to provide both 'lapack' and 'blas' together, but provides only 'lapack'
^^^^^^^^^^^^^^^^^^^^
Versioned Interfaces
^^^^^^^^^^^^^^^^^^^^
@@ -3503,56 +3439,6 @@ is equivalent to:
Constraints from nested context managers are also combined together, but they are rarely
needed or recommended.
.. _default_args:
------------------------
Common default arguments
------------------------
Similarly, if directives have a common set of default arguments, you can
group them together in a ``with default_args()`` block:
.. code-block:: python
class PyExample(PythonPackage):
with default_args(type=("build", "run")):
depends_on("py-foo")
depends_on("py-foo@2:", when="@2:")
depends_on("py-bar")
depends_on("py-bz")
The above is short for:
.. code-block:: python
class PyExample(PythonPackage):
depends_on("py-foo", type=("build", "run"))
depends_on("py-foo@2:", when="@2:", type=("build", "run"))
depends_on("py-bar", type=("build", "run"))
depends_on("py-bz", type=("build", "run"))
.. note::
The ``with when()`` context manager is composable, while ``with default_args()``
merely overrides the default. For example:
.. code-block:: python
with default_args(when="+feature"):
depends_on("foo")
depends_on("bar")
depends_on("baz", when="+baz")
is equivalent to:
.. code-block:: python
depends_on("foo", when="+feature")
depends_on("bar", when="+feature")
depends_on("baz", when="+baz") # Note: not when="+feature+baz"
.. _install-method:
------------------
@@ -3615,7 +3501,7 @@ need to override methods like ``configure_args``:
def configure_args(self):
args = ["--enable-cxx"] + self.enable_or_disable("libs")
if self.spec.satisfies("libs=static"):
if "libs=static" in self.spec:
args.append("--with-pic")
return args
@@ -3749,8 +3635,7 @@ regardless of the build system. The arguments for the phase are:
The arguments ``spec`` and ``prefix`` are passed only for convenience, as they always
correspond to ``self.spec`` and ``self.spec.prefix`` respectively.
If the ``package.py`` has build instructions in a separate
:ref:`builder class <multiple_build_systems>`, the signature for a phase changes slightly:
If the ``package.py`` encodes builders explicitly, the signature for a phase changes slightly:
.. code-block:: python
@@ -3760,6 +3645,56 @@ If the ``package.py`` has build instructions in a separate
In this case the package is passed as the second argument, and ``self`` is the builder instance.
.. _multiple_build_systems:
^^^^^^^^^^^^^^^^^^^^^^
Multiple build systems
^^^^^^^^^^^^^^^^^^^^^^
There are cases where software actively supports two build systems, or changes build systems
as it evolves, or needs different build systems on different platforms. Spack can handle
these cases natively, provided the recipe is written using builders explicitly.
For instance, software that supports two build systems unconditionally should derive from
both ``*Package`` base classes, and declare the possible use of multiple build systems using
a directive:
.. code-block:: python
class ArpackNg(CMakePackage, AutotoolsPackage):
build_system("cmake", "autotools", default="cmake")
In this case the software can be built with both ``autotools`` and ``cmake``. Since the package
supports multiple build systems, it is necessary to declare which one is the default. The ``package.py``
will likely override some of the default builder methods:
.. code-block:: python
class CMakeBuilder(spack.build_systems.cmake.CMakeBuilder):
def cmake_args(self):
pass
class AutotoolsBuilder(spack.build_systems.autotools.AutotoolsBuilder):
def configure_args(self):
pass
In more complex cases it might happen that the build system changes according to certain conditions,
for instance across versions. That can be expressed with conditional variant values:
.. code-block:: python
class ArpackNg(CMakePackage, AutotoolsPackage):
build_system(
conditional("cmake", when="@0.64:"),
conditional("autotools", when="@:0.63"),
default="cmake",
)
In the example, the directive imposes a change from ``Autotools`` to ``CMake`` when going
from ``v0.63`` to ``v0.64``.
^^^^^^^^^^^^^^^^^^
Mixin base classes
^^^^^^^^^^^^^^^^^^
@@ -3806,106 +3741,6 @@ for instance:
In the example above ``Cp2k`` inherits all the conflicts and variants that ``CudaPackage`` defines.
.. _multiple_build_systems:
----------------------
Multiple build systems
----------------------
There are cases where a package actively supports two build systems, or changes build systems
as it evolves, or needs different build systems on different platforms. Spack allows dealing with
these cases by splitting the build instructions into separate builder classes.
For instance, software that supports two build systems unconditionally should derive from
both ``*Package`` base classes, and declare the possible use of multiple build systems using
a directive:
.. code-block:: python
class Example(CMakePackage, AutotoolsPackage):
variant("my_feature", default=True)
build_system("cmake", "autotools", default="cmake")
In this case the software can be built with both ``autotools`` and ``cmake``. Since the package
supports multiple build systems, it is necessary to declare which one is the default.
Additional build instructions are split into separate builder classes:
.. code-block:: python
class CMakeBuilder(spack.build_systems.cmake.CMakeBuilder):
def cmake_args(self):
return [
self.define_from_variant("MY_FEATURE", "my_feature")
]
class AutotoolsBuilder(spack.build_systems.autotools.AutotoolsBuilder):
def configure_args(self):
return self.with_or_without("my-feature", variant="my_feature")
In this example, ``spack install example +my_feature build_system=cmake`` will
pick the ``CMakeBuilder`` and invoke ``cmake -DMY_FEATURE:BOOL=ON``.
Similarly, ``spack install example +my_feature build_system=autotools`` will pick
the ``AutotoolsBuilder`` and invoke ``./configure --with-my-feature``.
Dependencies are always specified in the package class. When some dependencies
depend on the choice of the build system, it is possible to use when conditions as
usual:
.. code-block:: python
class Example(CMakePackage, AutotoolsPackage):
build_system("cmake", "autotools", default="cmake")
# Runtime dependencies
depends_on("ncurses")
depends_on("libxml2")
# Lowerbounds for cmake only apply when using cmake as the build system
with when("build_system=cmake"):
depends_on("cmake@3.18:", when="@2.0:", type="build")
depends_on("cmake@3:", type="build")
# Specify extra build dependencies used only in the configure script
with when("build_system=autotools"):
depends_on("perl", type="build")
depends_on("pkgconfig", type="build")
Very often projects switch from one build system to another, or add support
for a new build system from a certain version, which means that the choice
of the build system typically depends on a version range. Those situations can
be handled by using conditional values in the ``build_system`` directive:
.. code-block:: python
class Example(CMakePackage, AutotoolsPackage):
build_system(
conditional("cmake", when="@0.64:"),
conditional("autotools", when="@:0.63"),
default="cmake",
)
In the example, the directive imposes a change from ``Autotools`` to ``CMake`` when going
from ``v0.63`` to ``v0.64``.
The ``build_system`` can be used as an ordinary variant, which also means that it can
be used in ``depends_on`` statements. This can be useful when a package *requires* that
its dependency has a CMake config file, meaning that the dependent can only build when the
dependency is built with CMake, and not Autotools. In that case, you can force the choice
of the build system in the dependent:
.. code-block:: python
class Dependent(CMakePackage):
depends_on("example build_system=cmake")
.. _install-environment:
-----------------------
@@ -4478,7 +4313,7 @@ for supported features, for instance:
.. code-block:: python
if spec.satisfies("target=avx512"):
if "avx512" in spec.target:
args.append("--with-avx512")
The snippet above will append the ``--with-avx512`` item to a list of arguments only if the corresponding
@@ -6913,63 +6748,3 @@ To achieve backward compatibility with the single-class format Spack creates in
Overall the role of the adapter is to route access to attributes or methods first through the ``*Package``
hierarchy, and then back to the base class builder. This is schematically shown in the diagram above, where
the adapter role is to "emulate" a method resolution order like the one represented by the red arrows.
------------------------------
Specifying License Information
------------------------------
Most of the software in Spack is open source, and most open source software is released
under one or more `common open source licenses <https://opensource.org/licenses/>`_.
Specifying the license that a package is released under in a project's
`package.py` is good practice. To specify a license, find the `SPDX identifier
<https://spdx.org/licenses/>`_ for a project and then add it using the license
directive:
.. code-block:: python
license("<SPDX Identifier HERE>")
For example, the SPDX ID for the Apache Software License, version 2.0 is ``Apache-2.0``,
so you'd write:
.. code-block:: python
license("Apache-2.0")
Or, for a dual-licensed package like Spack, you would use an `SPDX Expression
<https://spdx.github.io/spdx-spec/v2-draft/SPDX-license-expressions/>`_ with both of its
licenses:
.. code-block:: python
license("Apache-2.0 OR MIT")
Note that specifying a license without a when clause makes it apply to all
versions and variants of the package, which might not actually be the case.
For example, a project might have switched licenses at some point or have
certain build configurations that include files that are licensed differently.
Spack itself used to be under the ``LGPL-2.1`` license, until it was relicensed
in version ``0.12`` in 2018.
You can specify when a ``license()`` directive applies using a ``when=``
clause, just like other directives. For example, to specify that a specific
license identifier should only apply to versions up to ``0.11``, but another
license should apply for later versions, you could write:
.. code-block:: python
license("LGPL-2.1", when="@:0.11")
license("Apache-2.0 OR MIT", when="@0.12:")
Note that unlike for most other directives, the ``when=`` constraints in the
``license()`` directive can't intersect. Spack needs to be able to resolve
exactly one license identifier expression for any given version. To specify
*multiple* licenses, use SPDX expressions and operators as above. The operators
you probably care most about are:
* ``OR``: user chooses one license to adhere to; and
* ``AND``: user has to adhere to all the licenses.
You may also care about `license exceptions
<https://spdx.org/licenses/exceptions-index.html>`_ that use the ``WITH`` operator,
e.g. ``Apache-2.0 WITH LLVM-exception``.
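As a hedged sketch, these operators compose in a single SPDX expression:

.. code-block:: python

   # Users must follow MIT and may use Apache-2.0 with the LLVM exception
   license("MIT AND (Apache-2.0 WITH LLVM-exception)")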

View File

@@ -213,16 +213,6 @@ pipeline jobs.
``spack ci generate``
^^^^^^^^^^^^^^^^^^^^^
Throughout this documentation, references to the "mirror" mean the target
mirror which is checked for the presence of up-to-date specs, and where
any scheduled jobs should push built binary packages. In the past, this
defaulted to the mirror at index 0 in the mirror configs, and could be
overridden using the ``--buildcache-destination`` argument. Starting with
Spack 0.23, ``spack ci generate`` will require you to identify this mirror
by the name "buildcache-destination". While you can configure any number
of mirrors as sources for your pipelines, you will need to identify the
destination mirror by name.
Concretizes the specs in the active environment, stages them (as described in
:ref:`staging_algorithm`), and writes the resulting ``.gitlab-ci.yml`` to disk.
During concretization of the environment, ``spack ci generate`` also writes a

View File

@@ -4,7 +4,7 @@
SPDX-License-Identifier: (Apache-2.0 OR MIT)
=====================================
Spack for Homebrew/Conda Users
Using Spack to Replace Homebrew/Conda
=====================================
Spack is an incredibly powerful package manager, designed for supercomputers
@@ -191,18 +191,18 @@ The ``--fresh`` flag tells Spack to use the latest version of every package
where possible instead of trying to optimize for reuse of existing installed
packages.
The ``--force`` flag in addition tells Spack to overwrite its previous
concretization decisions, allowing you to choose a new version of Python.
If any of the new packages like Bash are already installed, ``spack install``
won't re-install them, it will keep the symlinks in place.
-----------------------------------
Updating & Cleaning Up Old Packages
-----------------------------------
If you're looking to mimic the behavior of Homebrew, you may also want to
clean up out-of-date packages from your environment after an upgrade. To
upgrade your entire software stack within an environment and clean up old
package versions, simply run the following commands:
.. code-block:: console
@@ -212,9 +212,9 @@ package versions, simply run the following commands:
$ spack concretize --fresh --force
$ spack install
$ spack gc
Running ``spack mark -i --all`` tells Spack to mark all of the existing
packages within an environment as "implicitly" installed. This tells
spack's garbage collection system that these packages should be cleaned up.
Don't worry however, this will not remove your entire environment.
@@ -223,8 +223,8 @@ a fresh concretization and will re-mark any packages that should remain
installed as "explicitly" installed.
**Note:** if you use multiple spack environments you should re-run ``spack install``
in each of your environments prior to running ``spack gc`` to prevent spack
from uninstalling any shared packages that are no longer required by the
environment you just upgraded.
--------------

View File

@@ -2,12 +2,12 @@ sphinx==7.2.6
sphinxcontrib-programoutput==0.17
sphinx_design==0.5.0
sphinx-rtd-theme==1.3.0
python-levenshtein==0.23.0
python-levenshtein==0.21.1
docutils==0.18.1
pygments==2.16.1
urllib3==2.0.7
pytest==7.4.3
urllib3==2.0.5
pytest==7.4.2
isort==5.12.0
black==23.10.1
black==23.9.1
flake8==6.1.0
mypy==1.6.1
mypy==1.5.1

View File

@@ -1,7 +1,9 @@
Name, Supported Versions, Notes, Requirement Reason
Python, 3.6--3.12, , Interpreter for Spack
Python, 3.6--3.11, , Interpreter for Spack
C/C++ Compilers, , , Building software
make, , , Build software
patch, , , Build software
bash, , , Compiler wrappers
tar, , , Extract/create archives
gzip, , , Compress/Decompress archives
unzip, , , Compress/Decompress archives

View File

@@ -18,7 +18,7 @@
* Homepage: https://pypi.python.org/pypi/archspec
* Usage: Labeling, comparison and detection of microarchitectures
* Version: 0.2.2 (commit 1dc58a5776dd77e6fc6e4ba5626af5b1fb24996e)
* Version: 0.2.1 (commit df43a1834460bf94516136951c4729a3100603ec)
astunparse
----------------

View File

@@ -1,2 +1,2 @@
"""Init file to avoid namespace packages"""
__version__ = "0.2.2"
__version__ = "0.2.1"

View File

@@ -2318,26 +2318,6 @@
]
}
},
"power10": {
"from": ["power9"],
"vendor": "IBM",
"generation": 10,
"features": [],
"compilers": {
"gcc": [
{
"versions": "11.1:",
"flags": "-mcpu={name} -mtune={name}"
}
],
"clang": [
{
"versions": "11.0:",
"flags": "-mcpu={name} -mtune={name}"
}
]
}
},
"ppc64le": {
"from": [],
"vendor": "generic",
@@ -2425,29 +2405,6 @@
]
}
},
"power10le": {
"from": ["power9le"],
"vendor": "IBM",
"generation": 10,
"features": [],
"compilers": {
"gcc": [
{
"name": "power10",
"versions": "11.1:",
"flags": "-mcpu={name} -mtune={name}"
}
],
"clang": [
{
"versions": "11.0:",
"family": "ppc64le",
"name": "power10",
"flags": "-mcpu={name} -mtune={name}"
}
]
}
},
"aarch64": {
"from": [],
"vendor": "generic",
@@ -2635,37 +2592,6 @@
]
}
},
"armv9.0a": {
"from": ["armv8.5a"],
"vendor": "generic",
"features": [],
"compilers": {
"gcc": [
{
"versions": "12:",
"flags": "-march=armv9-a -mtune=generic"
}
],
"clang": [
{
"versions": "14:",
"flags": "-march=armv9-a -mtune=generic"
}
],
"apple-clang": [
{
"versions": ":",
"flags": "-march=armv9-a -mtune=generic"
}
],
"arm": [
{
"versions": ":",
"flags": "-march=armv9-a -mtune=generic"
}
]
}
},
"thunderx2": {
"from": ["armv8.1a"],
"vendor": "Cavium",
@@ -2887,12 +2813,8 @@
],
"arm" : [
{
"versions": "20:21.9",
"versions": "20:",
"flags" : "-march=armv8.2-a+fp16+rcpc+dotprod+crypto"
},
{
"versions": "22:",
"flags" : "-mcpu=neoverse-n1"
}
],
"nvhpc" : [
@@ -3020,7 +2942,7 @@
},
{
"versions": "22:",
"flags" : "-mcpu=neoverse-v1"
"flags" : "-march=armv8.4-a+sve+ssbs+fp16+bf16+crypto+i8mm+rng"
}
],
"nvhpc" : [
@@ -3032,126 +2954,6 @@
]
}
},
"neoverse_v2": {
"from": ["neoverse_n1", "armv9.0a"],
"vendor": "ARM",
"features": [
"fp",
"asimd",
"evtstrm",
"aes",
"pmull",
"sha1",
"sha2",
"crc32",
"atomics",
"fphp",
"asimdhp",
"cpuid",
"asimdrdm",
"jscvt",
"fcma",
"lrcpc",
"dcpop",
"sha3",
"sm3",
"sm4",
"asimddp",
"sha512",
"sve",
"asimdfhm",
"dit",
"uscat",
"ilrcpc",
"flagm",
"ssbs",
"sb",
"paca",
"pacg",
"dcpodp",
"sve2",
"sveaes",
"svepmull",
"svebitperm",
"svesha3",
"svesm4",
"flagm2",
"frint",
"svei8mm",
"svebf16",
"i8mm",
"bf16",
"dgh",
"bti"
],
"compilers" : {
"gcc": [
{
"versions": "4.8:5.99",
"flags": "-march=armv8-a"
},
{
"versions": "6:6.99",
"flags" : "-march=armv8.1-a"
},
{
"versions": "7.0:7.99",
"flags" : "-march=armv8.2-a -mtune=cortex-a72"
},
{
"versions": "8.0:8.99",
"flags" : "-march=armv8.4-a+sve -mtune=cortex-a72"
},
{
"versions": "9.0:9.99",
"flags" : "-march=armv8.5-a+sve -mtune=cortex-a76"
},
{
"versions": "10.0:11.99",
"flags" : "-march=armv8.5-a+sve+sve2+i8mm+bf16 -mtune=cortex-a77"
},
{
"versions": "12.0:12.99",
"flags" : "-march=armv9-a+i8mm+bf16 -mtune=cortex-a710"
},
{
"versions": "13.0:",
"flags" : "-mcpu=neoverse-v2"
}
],
"clang" : [
{
"versions": "9.0:10.99",
"flags" : "-march=armv8.5-a+sve"
},
{
"versions": "11.0:13.99",
"flags" : "-march=armv8.5-a+sve+sve2+i8mm+bf16"
},
{
"versions": "14.0:15.99",
"flags" : "-march=armv9-a+i8mm+bf16"
},
{
"versions": "16.0:",
"flags" : "-mcpu=neoverse-v2"
}
],
"arm" : [
{
"versions": "23.04.0:",
"flags" : "-mcpu=neoverse-v2"
}
],
"nvhpc" : [
{
"versions": "23.3:",
"name": "neoverse-v2",
"flags": "-tp {name}"
}
]
}
},
"m1": {
"from": ["armv8.4a"],
"vendor": "Apple",

View File

@@ -156,37 +156,6 @@ def lookup(name):
shutil.copystat = copystat
def polite_path(components: Iterable[str]):
"""
Given a list of strings which are intended to be path components,
generate a path, and format each component to avoid generating extra
path entries.
For example all "/", "\", and ":" characters will be replaced with
"_". Other characters like "=" will also be replaced.
"""
return os.path.join(*[polite_filename(x) for x in components])
@memoized
def _polite_antipattern():
# A regex of all the characters we don't want in a filename
return re.compile(r"[^A-Za-z0-9_.-]")
def polite_filename(filename: str) -> str:
"""
Replace generally problematic filename characters with underscores.
This differs from sanitize_filename in that it is more aggressive in
changing characters in the name. For example it removes "=" which can
confuse path parsing in external tools.
"""
# This character set applies for both Windows and Linux. It does not
# account for reserved filenames in Windows.
return _polite_antipattern().sub("_", filename)
def getuid():
if sys.platform == "win32":
import ctypes

View File

@@ -211,7 +211,6 @@ def info(message, *args, **kwargs):
stream.write(line + "\n")
else:
stream.write(indent + _output_filter(str(arg)) + "\n")
stream.flush()
def verbose(message, *args, **kwargs):

View File

@@ -8,8 +8,8 @@
from llnl.util.lang import memoized
import spack.spec
import spack.version
from spack.compilers.clang import Clang
from spack.spec import CompilerSpec
from spack.util.executable import Executable, ProcessError
@@ -17,9 +17,7 @@ class ABI:
"""This class provides methods to test ABI compatibility between specs.
The current implementation is rather rough and could be improved."""
def architecture_compatible(
self, target: spack.spec.Spec, constraint: spack.spec.Spec
) -> bool:
def architecture_compatible(self, target, constraint):
"""Return true if architecture of target spec is ABI compatible
to the architecture of constraint spec. If either the target
or constraint specs have no architecture, target is also defined
@@ -36,7 +34,7 @@ def _gcc_get_libstdcxx_version(self, version):
a compiler's libstdc++ or libgcc_s"""
from spack.build_environment import dso_suffix
spec = spack.spec.CompilerSpec("gcc", version)
spec = CompilerSpec("gcc", version)
compilers = spack.compilers.compilers_for_spec(spec)
if not compilers:
return None
@@ -79,20 +77,16 @@ def _gcc_compiler_compare(self, pversion, cversion):
return False
return plib == clib
def _intel_compiler_compare(
self, pversion: spack.version.ClosedOpenRange, cversion: spack.version.ClosedOpenRange
) -> bool:
def _intel_compiler_compare(self, pversion, cversion):
"""Returns true iff the intel version pversion and cversion
are ABI compatible"""
# Test major and minor versions. Ignore build version.
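# e.g. 2021.4.0 and 2021.4.1 agree on "2021.4", hence treated as compatible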
pv = pversion.lo
cv = cversion.lo
return pv.up_to(2) == cv.up_to(2)
if len(pversion.version) < 2 or len(cversion.version) < 2:
return False
return pversion.version[:2] == cversion.version[:2]
def compiler_compatible(
self, parent: spack.spec.Spec, child: spack.spec.Spec, loose: bool = False
) -> bool:
def compiler_compatible(self, parent, child, **kwargs):
"""Return true if compilers for parent and child are ABI compatible."""
if not parent.compiler or not child.compiler:
return True
@@ -101,7 +95,7 @@ def compiler_compatible(
# Different compiler families are assumed ABI incompatible
return False
if loose:
if kwargs.get("loose", False):
return True
# TODO: Can we move the specialized ABI matching stuff
@@ -122,10 +116,9 @@ def compiler_compatible(
return True
return False
def compatible(
self, target: spack.spec.Spec, constraint: spack.spec.Spec, loose: bool = False
) -> bool:
def compatible(self, target, constraint, **kwargs):
"""Returns true if target spec is ABI compatible to constraint spec"""
loosematch = kwargs.get("loose", False)
return self.architecture_compatible(target, constraint) and self.compiler_compatible(
target, constraint, loose=loose
target, constraint, loose=loosematch
)

View File

@@ -307,17 +307,10 @@ def _check_build_test_callbacks(pkgs, error_cls):
@package_directives
def _check_patch_urls(pkgs, error_cls):
"""Ensure that patches fetched from GitHub and GitLab have stable sha256
hashes."""
"""Ensure that patches fetched from GitHub have stable sha256 hashes."""
github_patch_url_re = (
r"^https?://(?:patch-diff\.)?github(?:usercontent)?\.com/"
r".+/.+/(?:commit|pull)/[a-fA-F0-9]+\.(?:patch|diff)"
)
# Only .diff URLs have stable/full hashes:
# https://forum.gitlab.com/t/patches-with-full-index/29313
gitlab_patch_url_re = (
r"^https?://(?:.+)?gitlab(?:.+)/"
r".+/.+/-/(?:commit|merge_requests)/[a-fA-F0-9]+\.(?:patch|diff)"
".+/.+/(?:commit|pull)/[a-fA-F0-9]*.(?:patch|diff)"
)
errors = []
@@ -328,27 +321,19 @@ def _check_patch_urls(pkgs, error_cls):
if not isinstance(patch, spack.patch.UrlPatch):
continue
if re.match(github_patch_url_re, patch.url):
full_index_arg = "?full_index=1"
if not patch.url.endswith(full_index_arg):
errors.append(
error_cls(
"patch URL in package {0} must end with {1}".format(
pkg_cls.name, full_index_arg
),
[patch.url],
)
)
elif re.match(gitlab_patch_url_re, patch.url):
if not patch.url.endswith(".diff"):
errors.append(
error_cls(
"patch URL in package {0} must end with .diff".format(
pkg_cls.name
),
[patch.url],
)
if not re.match(github_patch_url_re, patch.url):
continue
full_index_arg = "?full_index=1"
if not patch.url.endswith(full_index_arg):
errors.append(
error_cls(
"patch URL in package {0} must end with {1}".format(
pkg_cls.name, full_index_arg
),
[patch.url],
)
)
return errors
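# As a hedged illustration (repository and digest invented), a package patch
# that passes this audit pins the full index:
#
#     patch(
#         "https://github.com/org/repo/pull/1234.patch?full_index=1",
#         sha256="<full 64-character sha256 digest>",
#     )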

View File

@@ -5,13 +5,11 @@
import codecs
import collections
import errno
import hashlib
import io
import itertools
import json
import os
import pathlib
import re
import shutil
import sys
@@ -25,7 +23,7 @@
import warnings
from contextlib import closing, contextmanager
from gzip import GzipFile
from typing import Dict, List, NamedTuple, Optional, Set, Tuple
from typing import List, NamedTuple, Optional, Union
from urllib.error import HTTPError, URLError
import llnl.util.filesystem as fsys
@@ -33,7 +31,6 @@
import llnl.util.tty as tty
from llnl.util.filesystem import BaseDirectoryVisitor, mkdirp, visit_directory_tree
import spack.caches
import spack.cmd
import spack.config as config
import spack.database as spack_db
@@ -41,9 +38,6 @@
import spack.hooks
import spack.hooks.sbang
import spack.mirror
import spack.oci.image
import spack.oci.oci
import spack.oci.opener
import spack.platforms
import spack.relocate as relocate
import spack.repo
@@ -53,7 +47,6 @@
import spack.util.crypto
import spack.util.file_cache as file_cache
import spack.util.gpg
import spack.util.path
import spack.util.spack_json as sjson
import spack.util.spack_yaml as syaml
import spack.util.timer as timer
@@ -131,25 +124,25 @@ class BinaryCacheIndex:
mean we should have paid the price to update the cache earlier?
"""
def __init__(self, cache_root: Optional[str] = None):
self._index_cache_root: str = cache_root or binary_index_location()
def __init__(self, cache_root):
self._index_cache_root = cache_root
# the key associated with the serialized _local_index_cache
self._index_contents_key = "contents.json"
# a FileCache instance storing copies of remote binary cache indices
self._index_file_cache: Optional[file_cache.FileCache] = None
self._index_file_cache = None
# stores a map of mirror URL to index hash and cache key (index path)
self._local_index_cache: Optional[dict] = None
self._local_index_cache = None
# hashes of remote indices already ingested into the concrete spec
# cache (_mirrors_for_spec)
self._specs_already_associated: Set[str] = set()
self._specs_already_associated = set()
# mapping from mirror urls to the time.time() of the last index fetch and a bool indicating
# whether the fetch succeeded or not.
self._last_fetch_times: Dict[str, float] = {}
self._last_fetch_times = {}
# _mirrors_for_spec is a dictionary mapping DAG hashes to lists of
# entries indicating mirrors where that concrete spec can be found.
@@ -159,7 +152,7 @@ def __init__(self, cache_root: Optional[str] = None):
# - the concrete spec itself, keyed by ``spec`` (including the
# full hash, since the dag hash may match but we want to
# use the updated source if available)
self._mirrors_for_spec: Dict[str, dict] = {}
self._mirrors_for_spec = {}
def _init_local_index_cache(self):
if not self._index_file_cache:
@@ -223,11 +216,11 @@ def _associate_built_specs_with_mirror(self, cache_key, mirror_url):
with self._index_file_cache.read_transaction(cache_key):
db._read_from_file(cache_path)
except spack_db.InvalidDatabaseVersionError as e:
tty.warn(
msg = (
f"you need a newer Spack version to read the buildcache index for the "
f"following mirror: '{mirror_url}'. {e.database_version_message}"
)
return
raise BuildcacheIndexError(msg) from e
spec_list = db.query_local(installed=False, in_buildcache=True)
@@ -478,18 +471,14 @@ def _fetch_and_cache_index(self, mirror_url, cache_entry={}):
FetchIndexError
"""
# TODO: get rid of this request, handle 404 better
scheme = urllib.parse.urlparse(mirror_url).scheme
if scheme != "oci" and not web_util.url_exists(
if not web_util.url_exists(
url_util.join(mirror_url, _build_cache_relative_path, "index.json")
):
return False
if scheme == "oci":
# TODO: Actually etag and OCI are not mutually exclusive...
fetcher = OCIIndexFetcher(mirror_url, cache_entry.get("index_hash", None))
elif cache_entry.get("etag"):
fetcher = EtagIndexFetcher(mirror_url, cache_entry["etag"])
etag = cache_entry.get("etag", None)
if etag:
fetcher = EtagIndexFetcher(mirror_url, etag)
else:
fetcher = DefaultIndexFetcher(
mirror_url, local_hash=cache_entry.get("index_hash", None)
@@ -530,8 +519,15 @@ def binary_index_location():
return spack.util.path.canonicalize_path(cache_root)
#: Default binary cache index instance
BINARY_INDEX: BinaryCacheIndex = llnl.util.lang.Singleton(BinaryCacheIndex) # type: ignore
def _binary_index():
"""Get the singleton store instance."""
return BinaryCacheIndex(binary_index_location())
#: Singleton binary_index instance
binary_index: Union[BinaryCacheIndex, llnl.util.lang.Singleton] = llnl.util.lang.Singleton(
_binary_index
)
class NoOverwriteException(spack.error.SpackError):
@@ -626,14 +622,22 @@ def build_cache_prefix(prefix):
def buildinfo_file_name(prefix):
"""Filename of the binary package meta-data file"""
return os.path.join(prefix, ".spack", "binary_distribution")
"""
Filename of the binary package meta-data file
"""
name = os.path.join(prefix, ".spack/binary_distribution")
return name
def read_buildinfo_file(prefix):
"""Read buildinfo file"""
with open(buildinfo_file_name(prefix), "r") as f:
return syaml.load(f)
"""
Read buildinfo file
"""
filename = buildinfo_file_name(prefix)
with open(filename, "r") as inputfile:
content = inputfile.read()
buildinfo = syaml.load(content)
return buildinfo
class BuildManifestVisitor(BaseDirectoryVisitor):
@@ -794,7 +798,11 @@ def tarball_directory_name(spec):
Return name of the tarball directory according to the convention
<os>-<architecture>/<compiler>/<package>-<version>/
"""
return spec.format_path("{architecture}/{compiler.name}-{compiler.version}/{name}-{version}")
return os.path.join(
str(spec.architecture),
f"{spec.compiler.name}-{spec.compiler.version}",
f"{spec.name}-{spec.version}",
)
def tarball_name(spec, ext):
@@ -802,10 +810,10 @@ def tarball_name(spec, ext):
Return the name of the tarfile according to the convention
<os>-<architecture>-<package>-<dag_hash><ext>
"""
spec_formatted = spec.format_path(
"{architecture}-{compiler.name}-{compiler.version}-{name}-{version}-{hash}"
return (
f"{spec.architecture}-{spec.compiler.name}-{spec.compiler.version}-"
f"{spec.name}-{spec.version}-{spec.dag_hash()}{ext}"
)
return f"{spec_formatted}{ext}"
def tarball_path_name(spec, ext):
@@ -816,6 +824,18 @@ def tarball_path_name(spec, ext):
return os.path.join(tarball_directory_name(spec), tarball_name(spec, ext))
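As a hedged illustration of the two naming conventions above (all spec fields hypothetical), tarball_path_name(spec, ".spack") composes into something like:

linux-ubuntu22.04-x86_64/gcc-12.2.0/zlib-1.2.13/linux-ubuntu22.04-x86_64-gcc-12.2.0-zlib-1.2.13-<dag_hash>.spack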
def checksum_tarball(file):
# calculate sha256 hash of tar file
block_size = 65536
hasher = hashlib.sha256()
with open(file, "rb") as tfile:
buf = tfile.read(block_size)
while len(buf) > 0:
hasher.update(buf)
buf = tfile.read(block_size)
return hasher.hexdigest()
def select_signing_key(key=None):
if key is None:
keys = spack.util.gpg.signing_keys()
@@ -894,7 +914,7 @@ def _read_specs_and_push_index(file_list, read_method, cache_prefix, db, temp_di
index_json_path,
url_util.join(cache_prefix, "index.json"),
keep_original=False,
extra_args={"ContentType": "application/json", "CacheControl": "no-cache"},
extra_args={"ContentType": "application/json"},
)
# Push the hash
@@ -902,7 +922,7 @@ def _read_specs_and_push_index(file_list, read_method, cache_prefix, db, temp_di
index_hash_path,
url_util.join(cache_prefix, "index.json.hash"),
keep_original=False,
extra_args={"ContentType": "text/plain", "CacheControl": "no-cache"},
extra_args={"ContentType": "text/plain"},
)
@@ -1132,190 +1152,63 @@ def gzip_compressed_tarfile(path):
# compresslevel=6 gzip default: llvm takes 4mins, roughly 2.1GB
# compresslevel=9 python default: llvm takes 12mins, roughly 2.1GB
# So we follow gzip.
with open(path, "wb") as f, ChecksumWriter(f) as inner_checksum, closing(
GzipFile(filename="", mode="wb", compresslevel=6, mtime=0, fileobj=inner_checksum)
) as gzip_file, ChecksumWriter(gzip_file) as outer_checksum, tarfile.TarFile(
name="", mode="w", fileobj=outer_checksum
) as tar:
yield tar, inner_checksum, outer_checksum
with open(path, "wb") as fileobj, closing(
GzipFile(filename="", mode="wb", compresslevel=6, mtime=0, fileobj=fileobj)
) as gzip_file, tarfile.TarFile(name="", mode="w", fileobj=gzip_file) as tar:
yield tar
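The determinism comes from pinning the gzip header as well as the compression level. A minimal standalone sketch, using only the standard library, of why filename="" and mtime=0 make independently created archives byte-identical:

import gzip
import io

def deterministic_gzip(data: bytes) -> bytes:
    buf = io.BytesIO()
    # filename="" and mtime=0 keep the gzip header constant, so equal
    # inputs always compress to equal bytes.
    with gzip.GzipFile(filename="", mode="wb", compresslevel=6, mtime=0, fileobj=buf) as gz:
        gz.write(data)
    return buf.getvalue()

assert deterministic_gzip(b"payload") == deterministic_gzip(b"payload")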
def _tarinfo_name(absolute_path: str, *, _path=pathlib.PurePath) -> str:
"""Compute tarfile entry name as the relative path from the (system) root."""
return _path(*_path(absolute_path).parts[1:]).as_posix()
def deterministic_tarinfo(tarinfo: tarfile.TarInfo):
# We only add files, symlinks, hardlinks, and directories
# No character devices, block devices and FIFOs should ever enter a tarball.
if tarinfo.isdev():
return None
# For distribution, it makes no sense to keep user/group data, since (a) they don't exist
# on other machines, and (b) they lead to surprises as `tar x` run as root will change
# ownership if it can. We want to extract as the current user. By setting owner to root,
# root will extract as root, and non-privileged user will extract as themselves.
tarinfo.uid = 0
tarinfo.gid = 0
tarinfo.uname = ""
tarinfo.gname = ""
# Reset mtime to epoch time, our prefixes are not truly immutable, so files may get
# touched; as long as the content does not change, this ensures we get stable tarballs.
tarinfo.mtime = 0
# Normalize mode
if tarinfo.isfile() or tarinfo.islnk():
# If user can execute, use 0o755; else 0o644
# This is to avoid potentially unsafe world writable & exeutable files that may get
# extracted when Python or tar is run with privileges
tarinfo.mode = 0o644 if tarinfo.mode & 0o100 == 0 else 0o755
else: # symbolic link and directories
tarinfo.mode = 0o755
return tarinfo
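A minimal usage sketch (paths are hypothetical): the function plugs straight into tarfile's filter hook, so every entry is normalized on its way into the archive:

import tarfile

with tarfile.open("/tmp/pkg.tar", "w") as tar:
    # Each entry passes through deterministic_tarinfo; returning None from
    # the filter drops it (devices and FIFOs).
    tar.add("/opt/spack/pkg", arcname="pkg", filter=deterministic_tarinfo)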
def tarfile_of_spec_prefix(tar: tarfile.TarFile, prefix: str) -> None:
"""Create a tarfile of an install prefix of a spec. Skips existing buildinfo file.
Only adds regular files, symlinks and dirs. Skips devices, fifos. Preserves hardlinks.
Normalizes permissions like git. Tar entries are added in depth-first pre-order, with
dir entries partitioned by file | dir, and sorted alphabetically, for reproducibility.
Partitioning ensures only one dir is in memory at a time, and sorting improves compression.
Args:
tar: tarfile object to add files to
prefix: absolute install prefix of spec"""
if not os.path.isabs(prefix) or not os.path.isdir(prefix):
raise ValueError(f"prefix '{prefix}' must be an absolute path to a directory")
hardlink_to_tarinfo_name: Dict[Tuple[int, int], str] = dict()
stat_key = lambda stat: (stat.st_dev, stat.st_ino)
try: # skip buildinfo file if it exists
files_to_skip = [stat_key(os.lstat(buildinfo_file_name(prefix)))]
except OSError:
files_to_skip = []
dir_stack = [prefix]
while dir_stack:
dir = dir_stack.pop()
# Add the dir before its contents
dir_info = tarfile.TarInfo(_tarinfo_name(dir))
dir_info.type = tarfile.DIRTYPE
dir_info.mode = 0o755
tar.addfile(dir_info)
# Sort by name: reproducible & improves compression
with os.scandir(dir) as it:
entries = sorted(it, key=lambda entry: entry.name)
new_dirs = []
for entry in entries:
if entry.is_dir(follow_symlinks=False):
new_dirs.append(entry.path)
continue
file_info = tarfile.TarInfo(_tarinfo_name(entry.path))
s = entry.stat(follow_symlinks=False)
# Skip existing binary distribution files.
id = stat_key(s)
if id in files_to_skip:
continue
# Normalize the mode
file_info.mode = 0o644 if s.st_mode & 0o100 == 0 else 0o755
if entry.is_symlink():
file_info.type = tarfile.SYMTYPE
file_info.linkname = os.readlink(entry.path)
tar.addfile(file_info)
elif entry.is_file(follow_symlinks=False):
# Deduplicate hardlinks
if s.st_nlink > 1:
if id in hardlink_to_tarinfo_name:
file_info.type = tarfile.LNKTYPE
file_info.linkname = hardlink_to_tarinfo_name[id]
tar.addfile(file_info)
continue
hardlink_to_tarinfo_name[id] = file_info.name
# If file not yet seen, copy it.
file_info.type = tarfile.REGTYPE
file_info.size = s.st_size
with open(entry.path, "rb") as f:
tar.addfile(file_info, f)
dir_stack.extend(reversed(new_dirs)) # we pop, so reverse to stay alphabetical
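The hardlink deduplication above hinges on (st_dev, st_ino) identity. A small sketch of that check, with hypothetical file names:

import os

stat_key = lambda st: (st.st_dev, st.st_ino)
first = os.lstat("libfoo.so.1.2.13")  # hypothetical regular file
second = os.lstat("libfoo.so")        # hypothetical hardlink to it
# True for hardlinks: same inode on the same device, so the second entry
# can be archived as a tarfile.LNKTYPE member pointing at the first name.
same_file = stat_key(first) == stat_key(second)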
def tar_add_metadata(tar: tarfile.TarFile, path: str, data: dict):
# Serialize buildinfo for the tarball
bstring = syaml.dump(data, default_flow_style=True).encode("utf-8")
tarinfo = tarfile.TarInfo(name=path)
tarinfo.size = len(bstring)
tar.addfile(deterministic_tarinfo(tarinfo), io.BytesIO(bstring))
class ChecksumWriter(io.BufferedIOBase):
"""Checksum writer computes a checksum while writing to a file."""
def deterministic_tarinfo_without_buildinfo(tarinfo: tarfile.TarInfo):
"""Skip buildinfo file when creating a tarball, and normalize other tarinfo fields."""
if tarinfo.name.endswith("/.spack/binary_distribution"):
return None
myfileobj = None
def __init__(self, fileobj, algorithm=hashlib.sha256):
self.fileobj = fileobj
self.hasher = algorithm()
self.length = 0
def hexdigest(self):
return self.hasher.hexdigest()
def write(self, data):
if isinstance(data, (bytes, bytearray)):
length = len(data)
else:
data = memoryview(data)
length = data.nbytes
if length > 0:
self.fileobj.write(data)
self.hasher.update(data)
self.length += length
return length
def read(self, size=-1):
raise OSError(errno.EBADF, "read() on write-only object")
def read1(self, size=-1):
raise OSError(errno.EBADF, "read1() on write-only object")
def peek(self, n):
raise OSError(errno.EBADF, "peek() on write-only object")
@property
def closed(self):
return self.fileobj is None
def close(self):
fileobj = self.fileobj
if fileobj is None:
return
self.fileobj.close()
self.fileobj = None
def flush(self):
self.fileobj.flush()
def fileno(self):
return self.fileobj.fileno()
def rewind(self):
raise OSError("Can't rewind while computing checksum")
def readable(self):
return False
def writable(self):
return True
def seekable(self):
return True
def tell(self):
return self.fileobj.tell()
def seek(self, offset, whence=io.SEEK_SET):
# In principle forward seek is possible with b"0" padding,
# but this is not implemented.
if offset == 0 and whence == io.SEEK_CUR:
return
raise OSError("Can't seek while computing checksum")
def readline(self, size=-1):
raise OSError(errno.EBADF, "readline() on write-only object")
return deterministic_tarinfo(tarinfo)
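A usage sketch for the class above: wrap any writable file object, and the sha256 accumulates as a side effect of write():

import io

buf = io.BytesIO()
w = ChecksumWriter(buf)  # defaults to hashlib.sha256
w.write(b"hello ")
w.write(b"world")
digest = w.hexdigest()  # sha256 of everything written through the wrapper
assert buf.getvalue() == b"hello world"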
def _do_create_tarball(tarfile_path: str, binaries_dir: str, buildinfo: dict):
with gzip_compressed_tarfile(tarfile_path) as (tar, inner_checksum, outer_checksum):
# Tarball the install prefix
tarfile_of_spec_prefix(tar, binaries_dir)
# Serialize buildinfo for the tarball
bstring = syaml.dump(buildinfo, default_flow_style=True).encode("utf-8")
tarinfo = tarfile.TarInfo(name=_tarinfo_name(buildinfo_file_name(binaries_dir)))
tarinfo.type = tarfile.REGTYPE
tarinfo.size = len(bstring)
tarinfo.mode = 0o644
tar.addfile(tarinfo, io.BytesIO(bstring))
return inner_checksum.hexdigest(), outer_checksum.hexdigest()
def _do_create_tarball(tarfile_path: str, binaries_dir: str, pkg_dir: str, buildinfo: dict):
with gzip_compressed_tarfile(tarfile_path) as tar:
tar.add(name=binaries_dir, arcname=pkg_dir, filter=deterministic_tarinfo_without_buildinfo)
tar_add_metadata(tar, buildinfo_file_name(pkg_dir), buildinfo)
class PushOptions(NamedTuple):
@@ -1387,14 +1280,20 @@ def _build_tarball_in_stage_dir(spec: Spec, out_url: str, stage_dir: str, option
):
raise NoOverwriteException(url_util.format(remote_specfile_path))
pkg_dir = os.path.basename(spec.prefix.rstrip(os.path.sep))
binaries_dir = spec.prefix
# create info for later relocation and create tar
buildinfo = get_buildinfo_dict(spec)
checksum, _ = _do_create_tarball(tarfile_path, binaries_dir, buildinfo)
_do_create_tarball(tarfile_path, binaries_dir, pkg_dir, buildinfo)
# get the sha256 checksum of the tarball
checksum = checksum_tarball(tarfile_path)
# add sha256 checksum to spec.json
with open(spec_file, "r") as inputfile:
content = inputfile.read()
if spec_file.endswith(".json"):
@@ -1437,21 +1336,10 @@ def _build_tarball_in_stage_dir(spec: Spec, out_url: str, stage_dir: str, option
return None
class NotInstalledError(spack.error.SpackError):
"""Raised when a spec is not installed but picked to be packaged."""
def __init__(self, specs: List[Spec]):
super().__init__(
"Cannot push non-installed packages",
", ".join(s.cformat("{name}{@version}{/hash:7}") for s in specs),
)
def specs_to_be_packaged(
specs: List[Spec], root: bool = True, dependencies: bool = True
) -> List[Spec]:
"""Return the list of nodes to be packaged, given a list of specs.
Raises NotInstalledError if a spec is not installed but picked to be packaged.
Args:
specs: list of root specs to be processed
@@ -1459,35 +1347,19 @@ def specs_to_be_packaged(
dependencies: include the dependencies of each
spec in the nodes
"""
if not root and not dependencies:
return []
elif dependencies:
nodes = traverse.traverse_nodes(specs, root=root, deptype="all")
else:
nodes = set(specs)
# Filter packageable roots
# Limit to installed non-externals.
packageable = lambda n: not n.external and n.installed
# Mass install check
with spack.store.STORE.db.read_transaction():
if root:
# Error on uninstalled roots, when roots are requested
uninstalled_roots = list(s for s in specs if not s.installed)
if uninstalled_roots:
raise NotInstalledError(uninstalled_roots)
roots = specs
else:
roots = []
if dependencies:
# Error on uninstalled deps, when deps are requested
deps = list(
traverse.traverse_nodes(
specs, deptype="all", order="breadth", root=False, key=traverse.by_dag_hash
)
)
uninstalled_deps = list(s for s in deps if not s.installed)
if uninstalled_deps:
raise NotInstalledError(uninstalled_deps)
else:
deps = []
return [s for s in itertools.chain(roots, deps) if not s.external]
return list(filter(packageable, nodes))
def push(spec: Spec, mirror_url: str, options: PushOptions):
@@ -1595,6 +1467,8 @@ def download_tarball(spec, unsigned=False, mirrors_for_spec=None):
tarball = tarball_path_name(spec, ".spack")
specfile_prefix = tarball_name(spec, ".spec")
mirrors_to_try = []
# Note on try_first and try_next:
# mirrors_for_spec most likely came from spack caching remote
# mirror indices locally and adding their specs to a local data
@@ -1607,116 +1481,63 @@ def download_tarball(spec, unsigned=False, mirrors_for_spec=None):
try_first = [i["mirror_url"] for i in mirrors_for_spec] if mirrors_for_spec else []
try_next = [i.fetch_url for i in configured_mirrors if i.fetch_url not in try_first]
mirrors = try_first + try_next
for url in try_first + try_next:
mirrors_to_try.append(
{
"specfile": url_util.join(url, _build_cache_relative_path, specfile_prefix),
"spackfile": url_util.join(url, _build_cache_relative_path, tarball),
}
)
tried_to_verify_sigs = []
# Assumes we care more about finding a spec file by preferred ext
# than by mirror priority. This can be made less complicated as
# we remove support for deprecated spec formats and buildcache layouts.
for try_signed in (True, False):
for mirror in mirrors:
# If it's an OCI index, do things differently, since we cannot compose URLs.
parsed = urllib.parse.urlparse(mirror)
for ext in ["json.sig", "json"]:
for mirror_to_try in mirrors_to_try:
specfile_url = "{0}.{1}".format(mirror_to_try["specfile"], ext)
spackfile_url = mirror_to_try["spackfile"]
local_specfile_stage = try_fetch(specfile_url)
if local_specfile_stage:
local_specfile_path = local_specfile_stage.save_filename
signature_verified = False
# TODO: refactor this to some "nice" place.
if parsed.scheme == "oci":
ref = spack.oci.image.ImageReference.from_string(mirror[len("oci://") :]).with_tag(
spack.oci.image.default_tag(spec)
)
if ext.endswith(".sig") and not unsigned:
# If we found a signed specfile at the root, try to verify
# the signature immediately. We will not download the
# tarball if we could not verify the signature.
tried_to_verify_sigs.append(specfile_url)
signature_verified = try_verify(local_specfile_path)
if not signature_verified:
tty.warn("Failed to verify: {0}".format(specfile_url))
# Fetch the manifest
try:
response = spack.oci.opener.urlopen(
urllib.request.Request(
url=ref.manifest_url(),
headers={"Accept": "application/vnd.oci.image.manifest.v1+json"},
)
)
except Exception:
continue
if unsigned or signature_verified or not ext.endswith(".sig"):
# We will download the tarball in one of three cases:
# 1. user asked for --no-check-signature
# 2. user didn't ask for --no-check-signature, but we
# found a spec.json.sig and verified the signature already
# 3. neither of the first two cases are true, but this file
# is *not* a signed json (not a spec.json.sig file). That
# means we already looked at all the mirrors and either didn't
# find any .sig files or couldn't verify any of them. But it
# is still possible to find an old style binary package where
# the signature is a detached .asc file in the outer archive
# of the tarball, and in that case, the only way to know is to
# download the tarball. This is a deprecated use case, so if
# something goes wrong during the extraction process (can't
# verify signature, checksum doesn't match) we will fail at
# that point instead of trying to download more tarballs from
# the remaining mirrors, looking for one we can use.
tarball_stage = try_fetch(spackfile_url)
if tarball_stage:
return {
"tarball_stage": tarball_stage,
"specfile_stage": local_specfile_stage,
"signature_verified": signature_verified,
}
# Download the config = spec.json and the relevant tarball
try:
manifest = json.loads(response.read())
spec_digest = spack.oci.image.Digest.from_string(manifest["config"]["digest"])
tarball_digest = spack.oci.image.Digest.from_string(
manifest["layers"][-1]["digest"]
)
except Exception:
continue
with spack.oci.oci.make_stage(
ref.blob_url(spec_digest), spec_digest, keep=True
) as local_specfile_stage:
try:
local_specfile_stage.fetch()
local_specfile_stage.check()
except Exception:
continue
local_specfile_stage.cache_local()
with spack.oci.oci.make_stage(
ref.blob_url(tarball_digest), tarball_digest, keep=True
) as tarball_stage:
try:
tarball_stage.fetch()
tarball_stage.check()
except Exception:
continue
tarball_stage.cache_local()
return {
"tarball_stage": tarball_stage,
"specfile_stage": local_specfile_stage,
"signature_verified": False,
}
else:
ext = "json.sig" if try_signed else "json"
specfile_path = url_util.join(mirror, _build_cache_relative_path, specfile_prefix)
specfile_url = f"{specfile_path}.{ext}"
spackfile_url = url_util.join(mirror, _build_cache_relative_path, tarball)
local_specfile_stage = try_fetch(specfile_url)
if local_specfile_stage:
local_specfile_path = local_specfile_stage.save_filename
signature_verified = False
if try_signed and not unsigned:
# If we found a signed specfile at the root, try to verify
# the signature immediately. We will not download the
# tarball if we could not verify the signature.
tried_to_verify_sigs.append(specfile_url)
signature_verified = try_verify(local_specfile_path)
if not signature_verified:
tty.warn("Failed to verify: {0}".format(specfile_url))
if unsigned or signature_verified or not try_signed:
# We will download the tarball in one of three cases:
# 1. user asked for --no-check-signature
# 2. user didn't ask for --no-check-signature, but we
# found a spec.json.sig and verified the signature already
# 3. neither of the first two cases are true, but this file
# is *not* a signed json (not a spec.json.sig file). That
# means we already looked at all the mirrors and either didn't
# find any .sig files or couldn't verify any of them. But it
# is still possible to find an old style binary package where
# the signature is a detached .asc file in the outer archive
# of the tarball, and in that case, the only way to know is to
# download the tarball. This is a deprecated use case, so if
# something goes wrong during the extraction process (can't
# verify signature, checksum doesn't match) we will fail at
# that point instead of trying to download more tarballs from
# the remaining mirrors, looking for one we can use.
tarball_stage = try_fetch(spackfile_url)
if tarball_stage:
return {
"tarball_stage": tarball_stage,
"specfile_stage": local_specfile_stage,
"signature_verified": signature_verified,
}
local_specfile_stage.destroy()
local_specfile_stage.destroy()
# Falling through the nested loops means we exhaustively searched
# for all known kinds of spec files on all mirrors and did not find
@@ -1949,7 +1770,7 @@ def _extract_inner_tarball(spec, filename, extract_to, unsigned, remote_checksum
)
# compute the sha256 checksum of the tarball
local_checksum = spack.util.crypto.checksum(hashlib.sha256, tarfile_path)
local_checksum = checksum_tarball(tarfile_path)
expected = remote_checksum["hash"]
# if the checksums don't match don't install
@@ -2010,7 +1831,6 @@ def extract_tarball(spec, download_result, unsigned=False, force=False, timer=ti
spec_dict = sjson.load(content)
bchecksum = spec_dict["binary_cache_checksum"]
filename = download_result["tarball_stage"].save_filename
signature_verified = download_result["signature_verified"]
tmpdir = None
@@ -2043,7 +1863,7 @@ def extract_tarball(spec, download_result, unsigned=False, force=False, timer=ti
)
# compute the sha256 checksum of the tarball
local_checksum = spack.util.crypto.checksum(hashlib.sha256, tarfile_path)
local_checksum = checksum_tarball(tarfile_path)
expected = bchecksum["hash"]
# if the checksums don't match don't install
@@ -2249,7 +2069,7 @@ def get_mirrors_for_spec(spec=None, mirrors_to_check=None, index_only=False):
tty.debug("No Spack mirrors are currently configured")
return {}
results = BINARY_INDEX.find_built_spec(spec, mirrors_to_check=mirrors_to_check)
results = binary_index.find_built_spec(spec, mirrors_to_check=mirrors_to_check)
# The index may be out-of-date. If we aren't only considering indices, try
# to fetch directly since we know where the file should be.
@@ -2258,7 +2078,7 @@ def get_mirrors_for_spec(spec=None, mirrors_to_check=None, index_only=False):
# We found a spec by the direct fetch approach, we might as well
# add it to our mapping.
if results:
BINARY_INDEX.update_spec(spec, results)
binary_index.update_spec(spec, results)
return results
@@ -2274,12 +2094,12 @@ def update_cache_and_get_specs():
Throws:
FetchCacheError
"""
BINARY_INDEX.update()
return BINARY_INDEX.get_all_built_specs()
binary_index.update()
return binary_index.get_all_built_specs()
def clear_spec_cache():
BINARY_INDEX.clear()
binary_index.clear()
def get_keys(install=False, trust=False, force=False, mirrors=None):
@@ -2602,7 +2422,7 @@ def get_remote_hash(self):
return None
return remote_hash.decode("utf-8")
def conditional_fetch(self) -> FetchIndexResult:
def conditional_fetch(self):
# Do an intermediate fetch for the hash
# and a conditional fetch for the contents
@@ -2616,12 +2436,12 @@ def conditional_fetch(self) -> FetchIndexResult:
try:
response = self.urlopen(urllib.request.Request(url_index, headers=self.headers))
except urllib.error.URLError as e:
raise FetchIndexError("Could not fetch index from {}".format(url_index), e) from e
raise FetchIndexError("Could not fetch index from {}".format(url_index), e)
try:
result = codecs.getreader("utf-8")(response).read()
except ValueError as e:
raise FetchIndexError("Remote index {} is invalid".format(url_index), e) from e
return FetchCacheError("Remote index {} is invalid".format(url_index), e)
computed_hash = compute_hash(result)
@@ -2653,7 +2473,7 @@ def __init__(self, url, etag, urlopen=web_util.urlopen):
self.etag = etag
self.urlopen = urlopen
def conditional_fetch(self) -> FetchIndexResult:
def conditional_fetch(self):
# Just do a conditional fetch immediately
url = url_util.join(self.url, _build_cache_relative_path, "index.json")
headers = {
@@ -2684,59 +2504,3 @@ def conditional_fetch(self) -> FetchIndexResult:
data=result,
fresh=False,
)
class OCIIndexFetcher:
def __init__(self, url: str, local_hash, urlopen=None) -> None:
self.local_hash = local_hash
# Remove oci:// prefix
assert url.startswith("oci://")
self.ref = spack.oci.image.ImageReference.from_string(url[6:])
self.urlopen = urlopen or spack.oci.opener.urlopen
def conditional_fetch(self) -> FetchIndexResult:
"""Download an index from an OCI registry type mirror."""
url_manifest = self.ref.with_tag(spack.oci.image.default_index_tag).manifest_url()
try:
response = self.urlopen(
urllib.request.Request(
url=url_manifest,
headers={"Accept": "application/vnd.oci.image.manifest.v1+json"},
)
)
except urllib.error.URLError as e:
raise FetchIndexError(
"Could not fetch manifest from {}".format(url_manifest), e
) from e
try:
manifest = json.loads(response.read())
except Exception as e:
raise FetchIndexError("Remote index {} is invalid".format(url_manifest), e) from e
# Get first blob hash, which should be the index.json
try:
index_digest = spack.oci.image.Digest.from_string(manifest["layers"][0]["digest"])
except Exception as e:
raise FetchIndexError("Remote index {} is invalid".format(url_manifest), e) from e
# Fresh?
if index_digest.digest == self.local_hash:
return FetchIndexResult(etag=None, hash=None, data=None, fresh=True)
# Otherwise fetch the blob / index.json
response = self.urlopen(
urllib.request.Request(
url=self.ref.blob_url(index_digest),
headers={"Accept": "application/vnd.oci.image.layer.v1.tar+gzip"},
)
)
result = codecs.getreader("utf-8")(response).read()
# Make sure the blob we download has the advertised hash
if compute_hash(result) != index_digest.digest:
raise FetchIndexError(f"Remote index {url_manifest} is invalid")
return FetchIndexResult(etag=None, hash=index_digest.digest, data=result, fresh=False)

View File

@@ -143,9 +143,7 @@ def _bootstrap_config_scopes() -> Sequence["spack.config.ConfigScope"]:
def _add_compilers_if_missing() -> None:
arch = spack.spec.ArchSpec.frontend_arch()
if not spack.compilers.compilers_for_arch(arch):
new_compilers = spack.compilers.find_new_compilers(
mixed_toolchain=sys.platform == "darwin"
)
new_compilers = spack.compilers.find_new_compilers()
if new_compilers:
spack.compilers.add_compilers_to_config(new_compilers, init_config=False)

View File

@@ -214,7 +214,7 @@ def _install_and_test(
with spack.config.override(self.mirror_scope):
# This index is currently needed to get the compiler used to build some
# specs that we know by dag hash.
spack.binary_distribution.BINARY_INDEX.regenerate_spec_cache()
spack.binary_distribution.binary_index.regenerate_spec_cache()
index = spack.binary_distribution.update_cache_and_get_specs()
if not index:
@@ -228,7 +228,7 @@ def _install_and_test(
if not abstract_spec.intersects(candidate_spec):
continue
if python_spec is not None and not abstract_spec.intersects(f"^{python_spec}"):
if python_spec is not None and python_spec not in abstract_spec:
continue
for _, pkg_hash, pkg_sha256 in item["binaries"]:
@@ -291,10 +291,6 @@ def try_import(self, module: str, abstract_spec_str: str) -> bool:
with spack_python_interpreter():
# Add hint to use frontend operating system on Cray
concrete_spec = spack.spec.Spec(abstract_spec_str + " ^" + spec_for_current_python())
# This is needed to help the old concretizer take the `setuptools` dependency
# only when bootstrapping from sources on Python 3.12
if spec_for_current_python() == "python@3.12":
concrete_spec.constrain("+force_setuptools")
if module == "clingo":
# TODO: remove when the old concretizer is deprecated # pylint: disable=fixme
@@ -450,11 +446,16 @@ def ensure_executables_in_path_or_raise(
current_bootstrapper.last_search["spec"],
current_bootstrapper.last_search["command"],
)
cmd.add_default_envmod(
spack.user_environment.environment_modifications_for_specs(
concrete_spec, set_package_py_globals=False
env_mods = spack.util.environment.EnvironmentModifications()
for dep in concrete_spec.traverse(
root=True, order="post", deptype=("link", "run")
):
env_mods.extend(
spack.user_environment.environment_modifications_for_spec(
dep, set_package_py_globals=False
)
)
)
cmd.add_default_envmod(env_mods)
return cmd
assert exception_handler, (

View File

@@ -40,15 +40,12 @@
import sys
import traceback
import types
from collections import defaultdict
from enum import Flag, auto
from itertools import chain
from typing import List, Tuple
import llnl.util.tty as tty
from llnl.string import plural
from llnl.util.filesystem import join_path
from llnl.util.lang import dedupe, stable_partition
from llnl.util.lang import dedupe
from llnl.util.symlink import symlink
from llnl.util.tty.color import cescape, colorize
from llnl.util.tty.log import MultiProcessFd
@@ -58,21 +55,17 @@
import spack.build_systems.python
import spack.builder
import spack.config
import spack.deptypes as dt
import spack.main
import spack.package_base
import spack.paths
import spack.platforms
import spack.repo
import spack.schema.environment
import spack.spec
import spack.store
import spack.subprocess_context
import spack.user_environment
import spack.util.path
import spack.util.pattern
from spack import traverse
from spack.context import Context
from spack.error import NoHeadersError, NoLibrariesError
from spack.install_test import spack_install_test_log
from spack.installer import InstallError
@@ -83,6 +76,7 @@
env_flag,
filter_system_paths,
get_path,
inspect_path,
is_system_path,
validate,
)
@@ -115,6 +109,7 @@
SPACK_CCACHE_BINARY = "SPACK_CCACHE_BINARY"
SPACK_SYSTEM_DIRS = "SPACK_SYSTEM_DIRS"
# Platform-specific library suffix.
if sys.platform == "darwin":
dso_suffix = "dylib"
@@ -411,13 +406,19 @@ def set_compiler_environment_variables(pkg, env):
def set_wrapper_variables(pkg, env):
"""Set environment variables used by the Spack compiler wrapper (which have the prefix
`SPACK_`) and also add the compiler wrappers to PATH.
"""Set environment variables used by the Spack compiler wrapper
(which have the prefix `SPACK_`) and also add the compiler wrappers
to PATH.
This determines the injected -L/-I/-rpath options; each of these specifies a search order and
this function computes these options in a manner that is intended to match the DAG traversal
order in `SetupContext`. TODO: this is not the case yet, we're using post order, SetupContext
is using topo order."""
This determines the injected -L/-I/-rpath options; each
of these specifies a search order and this function computes these
options in a manner that is intended to match the DAG traversal order
in `modifications_from_dependencies`: that method uses a post-order
traversal so that `PrependPath` actions from dependencies take lower
precedence; we use a post-order traversal here to match the visitation
order of `modifications_from_dependencies` (so we are visiting the
lowest priority packages first).
"""
# Set environment variables if specified for
# the given compiler
compiler = pkg.compiler
@@ -536,42 +537,45 @@ def update_compiler_args_for_dep(dep):
env.set(SPACK_RPATH_DIRS, ":".join(rpath_dirs))
def set_package_py_globals(pkg, context: Context = Context.BUILD):
def set_module_variables_for_package(pkg):
"""Populate the Python module of a package with some useful global names.
This makes things easier for package writers.
"""
# Put a marker on this module so that it won't execute the body of this
# function again, since it is not needed
marker = "_set_run_already_called"
if getattr(pkg.module, marker, False):
return
module = ModuleChangePropagator(pkg)
jobs = determine_number_of_jobs(parallel=pkg.parallel)
m = module
m.make_jobs = jobs
if context == Context.BUILD:
jobs = determine_number_of_jobs(parallel=pkg.parallel)
m.make_jobs = jobs
# TODO: make these build deps that can be installed if not found.
m.make = MakeExecutable("make", jobs)
m.ninja = MakeExecutable("ninja", jobs, supports_jobserver=False)
# TODO: johnwparent: add package or builder support to define these build tools
# for now there is no entrypoint for builders to define these on their
# own
if sys.platform == "win32":
m.nmake = Executable("nmake")
m.msbuild = Executable("msbuild")
# analog to configure for win32
m.cscript = Executable("cscript")
# TODO: make these build deps that can be installed if not found.
m.make = MakeExecutable("make", jobs)
m.gmake = MakeExecutable("gmake", jobs)
m.ninja = MakeExecutable("ninja", jobs, supports_jobserver=False)
# TODO: johnwparent: add package or builder support to define these build tools
# for now there is no entrypoint for builders to define these on their
# own
if sys.platform == "win32":
m.nmake = Executable("nmake")
m.msbuild = Executable("msbuild")
# analog to configure for win32
m.cscript = Executable("cscript")
# Find the configure script in the archive path
# Don't use which for this; we want to find it in the current dir.
m.configure = Executable("./configure")
# Find the configure script in the archive path
# Don't use which for this; we want to find it in the current dir.
m.configure = Executable("./configure")
# Standard CMake arguments
m.std_cmake_args = spack.build_systems.cmake.CMakeBuilder.std_args(pkg)
m.std_meson_args = spack.build_systems.meson.MesonBuilder.std_args(pkg)
m.std_pip_args = spack.build_systems.python.PythonPipBuilder.std_args(pkg)
# Standard CMake arguments
m.std_cmake_args = spack.build_systems.cmake.CMakeBuilder.std_args(pkg)
m.std_meson_args = spack.build_systems.meson.MesonBuilder.std_args(pkg)
m.std_pip_args = spack.build_systems.python.PythonPipBuilder.std_args(pkg)
# Put spack compiler paths in module scope. (Some packages use it
# in setup_run_environment etc, so don't restrict it to context == build)
# Put spack compiler paths in module scope.
link_dir = spack.paths.build_env_path
m.spack_cc = os.path.join(link_dir, pkg.compiler.link_paths["cc"])
m.spack_cxx = os.path.join(link_dir, pkg.compiler.link_paths["cxx"])
@@ -595,6 +599,9 @@ def static_to_shared_library(static_lib, shared_lib=None, **kwargs):
m.static_to_shared_library = static_to_shared_library
# Put a marker on this module so that it won't execute the body of this
# function again, since it is not needed
setattr(m, marker, True)
module.propagate_changes_to_mro()
@@ -720,15 +727,12 @@ def load_external_modules(pkg):
load_module(external_module)
def setup_package(pkg, dirty, context: Context = Context.BUILD):
def setup_package(pkg, dirty, context="build"):
"""Execute all environment setup routines."""
if context not in (Context.BUILD, Context.TEST):
raise ValueError(f"'context' must be Context.BUILD or Context.TEST - got {context}")
if context not in ["build", "test"]:
raise ValueError("'context' must be one of ['build', 'test'] - got: {0}".format(context))
# First populate the package.py's module with the relevant globals that could be used in any
# of the setup_* functions.
setup_context = SetupContext(pkg.spec, context=context)
setup_context.set_all_package_py_globals()
set_module_variables_for_package(pkg)
# Keep track of env changes from packages separately, since we want to
# issue warnings when packages make "suspicious" modifications.
@@ -736,15 +740,13 @@ def setup_package(pkg, dirty, context: Context = Context.BUILD):
env_mods = EnvironmentModifications()
# setup compilers for build contexts
need_compiler = context == Context.BUILD or (
context == Context.TEST and pkg.test_requires_compiler
)
need_compiler = context == "build" or (context == "test" and pkg.test_requires_compiler)
if need_compiler:
set_compiler_environment_variables(pkg, env_mods)
set_wrapper_variables(pkg, env_mods)
tty.debug("setup_package: grabbing modifications from dependencies")
env_mods.extend(setup_context.get_env_modifications())
env_mods.extend(modifications_from_dependencies(pkg.spec, context, custom_mods_only=False))
tty.debug("setup_package: collected all modifications from dependencies")
# architecture specific setup
@@ -752,13 +754,28 @@ def setup_package(pkg, dirty, context: Context = Context.BUILD):
target = platform.target(pkg.spec.architecture.target)
platform.setup_platform_environment(pkg, env_mods)
if context == Context.TEST:
env_mods.prepend_path("PATH", ".")
elif context == Context.BUILD and not dirty and not env_mods.is_unset("CPATH"):
tty.debug(
"A dependency has updated CPATH, this may lead pkg-config to assume that the package "
"is part of the system includes and omit it when invoked with '--cflags'."
if context == "build":
tty.debug("setup_package: setup build environment for root")
builder = spack.builder.create(pkg)
builder.setup_build_environment(env_mods)
if (not dirty) and (not env_mods.is_unset("CPATH")):
tty.debug(
"A dependency has updated CPATH, this may lead pkg-"
"config to assume that the package is part of the system"
" includes and omit it when invoked with '--cflags'."
)
elif context == "test":
tty.debug("setup_package: setup test environment for root")
env_mods.extend(
inspect_path(
pkg.spec.prefix,
spack.user_environment.prefix_inspections(pkg.spec.platform),
exclude=is_system_path,
)
)
pkg.setup_run_environment(env_mods)
env_mods.prepend_path("PATH", ".")
# First apply the clean environment changes
env_base.apply_modifications()
@@ -796,257 +813,158 @@ def setup_package(pkg, dirty, context: Context = Context.BUILD):
return env_base
class EnvironmentVisitor:
def __init__(self, *roots: spack.spec.Spec, context: Context):
# For the roots (well, marked specs) we follow different edges
# than for their deps, depending on the context.
self.root_hashes = set(s.dag_hash() for s in roots)
def _make_runnable(pkg, env):
# Helper method which prepends a Package's bin/ prefix to the PATH
# environment variable
prefix = pkg.prefix
if context == Context.BUILD:
# Drop direct run deps in build context
# We don't really distinguish between install and build time test deps,
# so we include them here as build-time test deps.
self.root_depflag = dt.BUILD | dt.TEST | dt.LINK
elif context == Context.TEST:
# This is more of an extended run environment
self.root_depflag = dt.TEST | dt.RUN | dt.LINK
elif context == Context.RUN:
self.root_depflag = dt.RUN | dt.LINK
def neighbors(self, item):
spec = item.edge.spec
if spec.dag_hash() in self.root_hashes:
depflag = self.root_depflag
else:
depflag = dt.LINK | dt.RUN
return traverse.sort_edges(spec.edges_to_dependencies(depflag=depflag))
for dirname in ["bin", "bin64"]:
bin_dir = os.path.join(prefix, dirname)
if os.path.isdir(bin_dir):
env.prepend_path("PATH", bin_dir)
class UseMode(Flag):
#: Entrypoint spec (a spec to be built; an env root, etc)
ROOT = auto()
def modifications_from_dependencies(
spec, context, custom_mods_only=True, set_package_py_globals=True
):
"""Returns the environment modifications that are required by
the dependencies of a spec and also applies modifications
to this spec's package at module scope, if need be.
#: A spec used at runtime, but no executables in PATH
RUNTIME = auto()
Environment modifications include:
#: A spec used at runtime, with executables in PATH
RUNTIME_EXECUTABLE = auto()
- Updating PATH so that executables can be found
- Updating CMAKE_PREFIX_PATH and PKG_CONFIG_PATH so that their respective
tools can find Spack-built dependencies
- Running custom package environment modifications
#: A spec that's a direct build or test dep
BUILDTIME_DIRECT = auto()
Custom package modifications can conflict with the default PATH changes
we make (specifically for the PATH, CMAKE_PREFIX_PATH, and PKG_CONFIG_PATH
environment variables), so this applies changes in a fixed order:
#: A spec that should be visible in search paths in a build env.
BUILDTIME = auto()
- All modifications (custom and default) from external deps first
- All modifications from non-external deps afterwards
#: Flag is set when the (node, mode) is finalized
ADDED = auto()
With that order, `PrependPath` actions from non-external default
environment modifications will take precedence over custom modifications
from external packages.
A secondary constraint is that custom and default modifications are
grouped on a per-package basis: combined with the post-order traversal this
means that default modifications of dependents can override custom
modifications of dependencies (again, this would only occur for PATH,
CMAKE_PREFIX_PATH, or PKG_CONFIG_PATH).
def effective_deptypes(
*specs: spack.spec.Spec, context: Context = Context.BUILD
) -> List[Tuple[spack.spec.Spec, UseMode]]:
"""Given a list of input specs and a context, return a list of tuples of
all specs that contribute to (environment) modifications, together with
a flag specifying in what way they do so. The list is ordered topologically
from root to leaf, meaning that environment modifications should be applied
in reverse so that dependents override dependencies, not the other way around."""
visitor = traverse.TopoVisitor(
EnvironmentVisitor(*specs, context=context),
key=lambda x: x.dag_hash(),
root=True,
all_edges=True,
)
traverse.traverse_depth_first_with_visitor(traverse.with_artificial_edges(specs), visitor)
# Dictionary with "no mode" as default value, so it's easy to write modes[x] |= flag.
use_modes = defaultdict(lambda: UseMode(0))
nodes_with_type = []
for edge in visitor.edges:
parent, child, depflag = edge.parent, edge.spec, edge.depflag
# Mark the starting point
if parent is None:
use_modes[child] = UseMode.ROOT
continue
parent_mode = use_modes[parent]
# Nothing to propagate.
if not parent_mode:
continue
# Depending on the context, include particular deps from the root.
if UseMode.ROOT & parent_mode:
if context == Context.BUILD:
if (dt.BUILD | dt.TEST) & depflag:
use_modes[child] |= UseMode.BUILDTIME_DIRECT
if dt.LINK & depflag:
use_modes[child] |= UseMode.BUILDTIME
elif context == Context.TEST:
if (dt.RUN | dt.TEST) & depflag:
use_modes[child] |= UseMode.RUNTIME_EXECUTABLE
elif dt.LINK & depflag:
use_modes[child] |= UseMode.RUNTIME
elif context == Context.RUN:
if dt.RUN & depflag:
use_modes[child] |= UseMode.RUNTIME_EXECUTABLE
elif dt.LINK & depflag:
use_modes[child] |= UseMode.RUNTIME
# Propagate RUNTIME and RUNTIME_EXECUTABLE through link and run deps.
if (UseMode.RUNTIME | UseMode.RUNTIME_EXECUTABLE | UseMode.BUILDTIME_DIRECT) & parent_mode:
if dt.LINK & depflag:
use_modes[child] |= UseMode.RUNTIME
if dt.RUN & depflag:
use_modes[child] |= UseMode.RUNTIME_EXECUTABLE
# Propagate BUILDTIME through link deps.
if UseMode.BUILDTIME & parent_mode:
if dt.LINK & depflag:
use_modes[child] |= UseMode.BUILDTIME
# Finalize the spec; the invariant is that all in-edges are processed
# before out-edges, meaning that parent is done.
if not (UseMode.ADDED & parent_mode):
use_modes[parent] |= UseMode.ADDED
nodes_with_type.append((parent, parent_mode))
# Attach the leaf nodes, since we only added nodes with out-edges.
for spec, parent_mode in use_modes.items():
if parent_mode and not (UseMode.ADDED & parent_mode):
nodes_with_type.append((spec, parent_mode))
return nodes_with_type
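The bookkeeping above relies on a defaultdict of empty Flag values, so every propagation step is a single |= per edge. A self-contained sketch of the idiom (toy node names, not real specs):

from collections import defaultdict
from enum import Flag, auto

class Mode(Flag):
    ROOT = auto()
    RUNTIME = auto()
    RUNTIME_EXECUTABLE = auto()

use_modes = defaultdict(lambda: Mode(0))      # "no mode" default makes |= safe
use_modes["zlib"] |= Mode.RUNTIME             # reached via a link edge
use_modes["zlib"] |= Mode.RUNTIME_EXECUTABLE  # also reached via a run edge
assert Mode.RUNTIME & use_modes["zlib"]       # flags accumulate per node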
class SetupContext:
"""This class encapsulates the logic to determine environment modifications, and is used as
well to set globals in modules of package.py."""
def __init__(self, *specs: spack.spec.Spec, context: Context) -> None:
"""Construct a ModificationsFromDag object.
Args:
specs: single root spec for build/test context, possibly more for run context
context: build, run, or test"""
if (context == Context.BUILD or context == Context.TEST) and not len(specs) == 1:
raise ValueError("Cannot setup build environment for multiple specs")
specs_with_type = effective_deptypes(*specs, context=context)
self.specs = specs
self.context = context
self.external: List[Tuple[spack.spec.Spec, UseMode]]
self.nonexternal: List[Tuple[spack.spec.Spec, UseMode]]
# Reverse so we go from leaf to root
self.nodes_in_subdag = set(id(s) for s, _ in specs_with_type)
# Split into non-external and external, maintaining topo order per group.
self.external, self.nonexternal = stable_partition(
reversed(specs_with_type), lambda t: t[0].external
Args:
spec (spack.spec.Spec): spec for which we want the modifications
context (str): either 'build' for build-time modifications or 'run'
for run-time modifications
custom_mods_only (bool): if True returns only custom modifications, if False
returns custom and default modifications
set_package_py_globals (bool): whether or not to set the global variables in the
package.py files (this may be problematic when using buildcaches that have
been built on a different but compatible OS)
"""
if context not in ["build", "run", "test"]:
raise ValueError(
"Expecting context to be one of ['build', 'run', 'test'], " "got: {0}".format(context)
)
self.should_be_runnable = UseMode.BUILDTIME_DIRECT | UseMode.RUNTIME_EXECUTABLE
self.should_setup_run_env = (
UseMode.BUILDTIME_DIRECT | UseMode.RUNTIME | UseMode.RUNTIME_EXECUTABLE
)
self.should_setup_dependent_build_env = UseMode.BUILDTIME | UseMode.BUILDTIME_DIRECT
self.should_setup_build_env = UseMode.ROOT if context == Context.BUILD else UseMode(0)
if context == Context.RUN or context == Context.TEST:
self.should_be_runnable |= UseMode.ROOT
self.should_setup_run_env |= UseMode.ROOT
env = EnvironmentModifications()
# Everything that calls setup_run_environment and setup_dependent_* needs globals set.
self.should_set_package_py_globals = (
self.should_setup_dependent_build_env | self.should_setup_run_env | UseMode.ROOT
)
# In a build context, the root and direct build deps need build-specific globals set.
self.needs_build_context = UseMode.ROOT | UseMode.BUILDTIME_DIRECT
# Note: see computation of 'custom_mod_deps' and 'exe_deps' later in this
# function; these sets form the building blocks of those collections.
build_deps = set(spec.dependencies(deptype=("build", "test")))
link_deps = set(spec.traverse(root=False, deptype="link"))
build_link_deps = build_deps | link_deps
build_and_supporting_deps = set()
for build_dep in build_deps:
build_and_supporting_deps.update(build_dep.traverse(deptype="run"))
run_and_supporting_deps = set(spec.traverse(root=False, deptype=("run", "link")))
test_and_supporting_deps = set()
for test_dep in set(spec.dependencies(deptype="test")):
test_and_supporting_deps.update(test_dep.traverse(deptype="run"))
def set_all_package_py_globals(self):
"""Set the globals in modules of package.py files."""
for dspec, flag in chain(self.external, self.nonexternal):
pkg = dspec.package
# All dependencies that might have environment modifications to apply
custom_mod_deps = set()
if context == "build":
custom_mod_deps.update(build_and_supporting_deps)
# Tests may be performed after build
custom_mod_deps.update(test_and_supporting_deps)
else:
# test/run context
custom_mod_deps.update(run_and_supporting_deps)
if context == "test":
custom_mod_deps.update(test_and_supporting_deps)
custom_mod_deps.update(link_deps)
if self.should_set_package_py_globals & flag:
if self.context == Context.BUILD and self.needs_build_context & flag:
set_package_py_globals(pkg, context=Context.BUILD)
else:
# This includes runtime dependencies, also runtime deps of direct build deps.
set_package_py_globals(pkg, context=Context.RUN)
# Determine 'exe_deps': the set of packages with binaries we want to use
if context == "build":
exe_deps = build_and_supporting_deps | test_and_supporting_deps
elif context == "run":
exe_deps = set(spec.traverse(deptype="run"))
elif context == "test":
exe_deps = test_and_supporting_deps
for spec in dspec.dependents():
# Note: some specs have dependents that are unreachable from the root, so avoid
# setting globals for those.
if id(spec) not in self.nodes_in_subdag:
continue
dependent_module = ModuleChangePropagator(spec.package)
pkg.setup_dependent_package(dependent_module, spec)
dependent_module.propagate_changes_to_mro()
def default_modifications_for_dep(dep):
if dep in build_link_deps and not is_system_path(dep.prefix) and context == "build":
prefix = dep.prefix
def get_env_modifications(self) -> EnvironmentModifications:
"""Returns the environment variable modifications for the given input specs and context.
Environment modifications include:
- Updating PATH for packages that are required at runtime
- Updating CMAKE_PREFIX_PATH and PKG_CONFIG_PATH so that their respective
tools can find Spack-built dependencies (when context=build)
- Running custom package environment modifications: setup_run_environment,
setup_dependent_run_environment, setup_build_environment,
setup_dependent_build_environment.
env.prepend_path("CMAKE_PREFIX_PATH", prefix)
The (partial) order imposed on the specs is externals first, then topological
from leaf to root. That way externals cannot contribute search paths that would shadow
Spack's prefixes, and dependents override variables set by dependencies."""
env = EnvironmentModifications()
for dspec, flag in chain(self.external, self.nonexternal):
tty.debug(f"Adding env modifications for {dspec.name}")
pkg = dspec.package
for directory in ("lib", "lib64", "share"):
pcdir = os.path.join(prefix, directory, "pkgconfig")
if os.path.isdir(pcdir):
env.prepend_path("PKG_CONFIG_PATH", pcdir)
if self.should_setup_dependent_build_env & flag:
self._make_buildtime_detectable(dspec, env)
if dep in exe_deps and not is_system_path(dep.prefix):
_make_runnable(dep, env)
for root in self.specs: # there is only one root in build context
spack.builder.create(pkg).setup_dependent_build_environment(env, root)
def add_modifications_for_dep(dep):
tty.debug("Adding env modifications for {0}".format(dep.name))
# Some callers of this function only want the custom modifications.
# For callers that want both custom and default modifications, we want
# to perform the default modifications here (this groups custom
# and default modifications together on a per-package basis).
if not custom_mods_only:
default_modifications_for_dep(dep)
if self.should_setup_build_env & flag:
spack.builder.create(pkg).setup_build_environment(env)
# Perform custom modifications here (PrependPath actions performed in
# the custom method override the default environment modifications
# we do to help the build, namely for PATH, CMAKE_PREFIX_PATH, and
# PKG_CONFIG_PATH)
if dep in custom_mod_deps:
dpkg = dep.package
if set_package_py_globals:
set_module_variables_for_package(dpkg)
if self.should_be_runnable & flag:
self._make_runnable(dspec, env)
current_module = ModuleChangePropagator(spec.package)
dpkg.setup_dependent_package(current_module, spec)
current_module.propagate_changes_to_mro()
if self.should_setup_run_env & flag:
run_env_mods = EnvironmentModifications()
for spec in dspec.dependents(deptype=dt.LINK | dt.RUN):
if id(spec) in self.nodes_in_subdag:
pkg.setup_dependent_run_environment(run_env_mods, spec)
pkg.setup_run_environment(run_env_mods)
if self.context == Context.BUILD:
# Don't let the runtime environment of compiler-like dependencies leak into the
# build env
run_env_mods.drop("CC", "CXX", "F77", "FC")
env.extend(run_env_mods)
if context == "build":
builder = spack.builder.create(dpkg)
builder.setup_dependent_build_environment(env, spec)
else:
dpkg.setup_dependent_run_environment(env, spec)
tty.debug("Added env modifications for {0}".format(dep.name))
return env
# Note that we want to perform environment modifications in a fixed order.
# The Spec.traverse method provides this: i.e. in addition to
# the post-order semantics, it also guarantees a fixed traversal order
# among dependencies which are not constrained by post-order semantics.
for dspec in spec.traverse(root=False, order="post"):
if dspec.external:
add_modifications_for_dep(dspec)
def _make_buildtime_detectable(self, dep: spack.spec.Spec, env: EnvironmentModifications):
if is_system_path(dep.prefix):
return
for dspec in spec.traverse(root=False, order="post"):
# Default env modifications for non-external packages can override
# custom modifications of external packages (this can only occur
# for modifications to PATH, CMAKE_PREFIX_PATH, and PKG_CONFIG_PATH)
if not dspec.external:
add_modifications_for_dep(dspec)
env.prepend_path("CMAKE_PREFIX_PATH", dep.prefix)
for d in ("lib", "lib64", "share"):
pcdir = os.path.join(dep.prefix, d, "pkgconfig")
if os.path.isdir(pcdir):
env.prepend_path("PKG_CONFIG_PATH", pcdir)
def _make_runnable(self, dep: spack.spec.Spec, env: EnvironmentModifications):
if is_system_path(dep.prefix):
return
for d in ("bin", "bin64"):
bin_dir = os.path.join(dep.prefix, d)
if os.path.isdir(bin_dir):
env.prepend_path("PATH", bin_dir)
return env
def get_cmake_prefix_path(pkg):
@@ -1078,7 +996,7 @@ def get_cmake_prefix_path(pkg):
def _setup_pkg_and_run(
serialized_pkg, function, kwargs, write_pipe, input_multiprocess_fd, jsfd1, jsfd2
):
context: str = kwargs.get("context", "build")
context = kwargs.get("context", "build")
try:
# We are in the child process. Python sets sys.stdin to
@@ -1094,7 +1012,7 @@ def _setup_pkg_and_run(
if not kwargs.get("fake", False):
kwargs["unmodified_env"] = os.environ.copy()
kwargs["env_modifications"] = setup_package(
pkg, dirty=kwargs.get("dirty", False), context=Context.from_string(context)
pkg, dirty=kwargs.get("dirty", False), context=context
)
return_value = function(pkg, kwargs)
write_pipe.send(return_value)

View File

@@ -46,7 +46,6 @@ class AutotoolsPackage(spack.package_base.PackageBase):
depends_on("gnuconfig", type="build", when="target=ppc64le:")
depends_on("gnuconfig", type="build", when="target=aarch64:")
depends_on("gnuconfig", type="build", when="target=riscv64:")
depends_on("gmake", type="build")
conflicts("platform=windows")
def flags_to_build_system_args(self, flags):

View File

@@ -142,10 +142,10 @@ def flags_to_build_system_args(self, flags):
# We specify for each of them.
if flags["ldflags"]:
ldflags = " ".join(flags["ldflags"])
ld_string = "-DCMAKE_{0}_LINKER_FLAGS={1}"
# cmake has separate linker arguments for types of builds.
self.cmake_flag_args.append(f"-DCMAKE_EXE_LINKER_FLAGS={ldflags}")
self.cmake_flag_args.append(f"-DCMAKE_MODULE_LINKER_FLAGS={ldflags}")
self.cmake_flag_args.append(f"-DCMAKE_SHARED_LINKER_FLAGS={ldflags}")
for type in ["EXE", "MODULE", "SHARED", "STATIC"]:
self.cmake_flag_args.append(ld_string.format(type, ldflags))
# CMake has libs options separated by language. Apply ours to each.
if flags["ldlibs"]:

View File

@@ -9,8 +9,7 @@
import spack.builder
import spack.package_base
from spack.directives import build_system, conflicts, depends_on
from spack.multimethod import when
from spack.directives import build_system, conflicts
from ._checks import (
BaseBuilder,
@@ -30,10 +29,7 @@ class MakefilePackage(spack.package_base.PackageBase):
legacy_buildsystem = "makefile"
build_system("makefile")
with when("build_system=makefile"):
conflicts("platform=windows")
depends_on("gmake", type="build")
conflicts("platform=windows", when="build_system=makefile")
@spack.builder.builder("makefile")

View File

@@ -10,7 +10,7 @@
import spack.builder
import spack.package_base
from spack.directives import build_system, conflicts, depends_on, variant
from spack.directives import build_system, depends_on, variant
from spack.multimethod import when
from ._checks import BaseBuilder, execute_build_time_tests
@@ -47,13 +47,6 @@ class MesonPackage(spack.package_base.PackageBase):
variant("strip", default=False, description="Strip targets on install")
depends_on("meson", type="build")
depends_on("ninja", type="build")
# Python detection in meson requires distutils to be importable, but distutils no longer
# exists in Python 3.12. In Spack, we can't use setuptools as distutils replacement,
# because the distutils-precedence.pth startup file that setuptools ships with is not run
# when setuptools is in PYTHONPATH; it has to be in system site-packages. In a future meson
# release, the distutils requirement will be dropped, so this conflict can be relaxed.
# We have patches to make it work with meson 1.1 and above.
conflicts("^python@3.12:", when="^meson@:1.0")
def flags_to_build_system_args(self, flags):
"""Produces a list of all command line arguments to pass the specified

View File

@@ -23,29 +23,13 @@
import spack.spec
import spack.store
from spack.directives import build_system, depends_on, extends, maintainers
from spack.error import NoHeadersError, NoLibrariesError
from spack.error import NoHeadersError, NoLibrariesError, SpecError
from spack.install_test import test_part
from spack.version import Version
from ._checks import BaseBuilder, execute_install_time_tests
def _flatten_dict(dictionary):
"""Iterable that yields KEY=VALUE paths through a dictionary.
Args:
dictionary: Possibly nested dictionary of arbitrary keys and values.
Yields:
A single path through the dictionary.
"""
for key, item in dictionary.items():
if isinstance(item, dict):
# Recursive case
for value in _flatten_dict(item):
yield f"{key}={value}"
else:
# Base case
yield f"{key}={item}"
class PythonExtension(spack.package_base.PackageBase):
maintainers("adamjstewart")
@@ -378,7 +362,7 @@ class PythonPipBuilder(BaseBuilder):
legacy_long_methods = ("install_options", "global_options", "config_settings")
#: Names associated with package attributes in the old build-system format
legacy_attributes = ("archive_files", "build_directory", "install_time_test_callbacks")
legacy_attributes = ("build_directory", "install_time_test_callbacks")
#: Callback names for install-time test
install_time_test_callbacks = ["test"]
@@ -423,15 +407,14 @@ def build_directory(self):
def config_settings(self, spec, prefix):
"""Configuration settings to be passed to the PEP 517 build backend.
Requires pip 22.1 or newer for keys that appear only a single time,
or pip 23.1 or newer if the same key appears multiple times.
Requires pip 22.1 or newer.
Args:
spec (spack.spec.Spec): build spec
prefix (spack.util.prefix.Prefix): installation prefix
Returns:
dict: Possibly nested dictionary of KEY, VALUE settings
dict: dictionary of KEY, VALUE settings
"""
return {}
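# A minimal sketch of overriding this hook in a package; the key/value pair
# is hypothetical, not taken from any real package:
#
#     def config_settings(self, spec, prefix):
#         return {"builddir": "build"}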
@@ -467,28 +450,29 @@ def global_options(self, spec, prefix):
def install(self, pkg, spec, prefix):
"""Install everything from build directory."""
args = PythonPipBuilder.std_args(pkg) + [f"--prefix={prefix}"]
args = PythonPipBuilder.std_args(pkg) + ["--prefix=" + prefix]
for key, value in self.config_settings(spec, prefix).items():
if spec["py-pip"].version < Version("22.1"):
raise SpecError(
"'{}' package uses 'config_settings' which is only supported by "
"pip 22.1+. Add the following line to the package to fix this:\n\n"
' depends_on("py-pip@22.1:", type="build")'.format(spec.name)
)
args.append("--config-settings={}={}".format(key, value))
for setting in _flatten_dict(self.config_settings(spec, prefix)):
args.append(f"--config-settings={setting}")
for option in self.install_options(spec, prefix):
args.append(f"--install-option={option}")
args.append("--install-option=" + option)
for option in self.global_options(spec, prefix):
args.append(f"--global-option={option}")
args.append("--global-option=" + option)
if pkg.stage.archive_file and pkg.stage.archive_file.endswith(".whl"):
args.append(pkg.stage.archive_file)
else:
args.append(".")
pip = spec["python"].command
# Hide user packages, since we don't have build isolation. This is
# necessary because pip / setuptools may run hooks from arbitrary
# packages during the build. There is no equivalent variable to hide
# system packages, so this is not reliable for external Python.
pip.add_default_env("PYTHONNOUSERSITE", "1")
pip.add_default_arg("-m")
pip.add_default_arg("pip")
pip = inspect.getmodule(pkg).pip
with fs.working_dir(self.build_directory):
pip(*args)
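# Net effect (illustrative, arguments abridged): from the build directory
# this runs roughly
#   python -m pip <std args> --prefix=<prefix> --config-settings=K=V ... .
# with PYTHONNOUSERSITE=1 exported, whereas the other side of the hunk calls
# the `pip` executable injected into the package module by the build env.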

View File

@@ -64,7 +64,7 @@ class RacketBuilder(spack.builder.Builder):
@property
def subdirectory(self):
if self.pkg.racket_name:
if self.racket_name:
return "pkgs/{0}".format(self.pkg.racket_name)
return None

View File

@@ -25,7 +25,6 @@
import llnl.util.filesystem as fs
import llnl.util.tty as tty
from llnl.util.lang import memoized
from llnl.util.tty.color import cescape, colorize
import spack
import spack.binary_distribution as bindist
@@ -46,30 +45,11 @@
from spack.reporters import CDash, CDashConfiguration
from spack.reporters.cdash import build_stamp as cdash_build_stamp
# See https://docs.gitlab.com/ee/ci/yaml/#retry for descriptions of conditions
JOB_RETRY_CONDITIONS = [
# "always",
"unknown_failure",
"script_failure",
"api_failure",
"stuck_or_timeout_failure",
"runner_system_failure",
"runner_unsupported",
"stale_schedule",
# "job_execution_timeout",
"archived_failure",
"unmet_prerequisites",
"scheduler_failure",
"data_integrity_failure",
]
JOB_RETRY_CONDITIONS = ["always"]
TEMP_STORAGE_MIRROR_NAME = "ci_temporary_mirror"
SPACK_RESERVED_TAGS = ["public", "protected", "notary"]
# TODO: Remove this in Spack 0.23
SHARED_PR_MIRROR_URL = "s3://spack-binaries-prs/shared_pr_mirror"
JOB_NAME_FORMAT = (
"{name}{@version} {/hash:7} {%compiler.name}{@compiler.version}{arch=architecture}"
)
spack_gpg = spack.main.SpackCommand("gpg")
spack_compiler = spack.main.SpackCommand("compiler")
@@ -89,23 +69,48 @@ def __exit__(self, exc_type, exc_value, exc_traceback):
return False
def get_job_name(spec: spack.spec.Spec, build_group: str = ""):
"""Given a spec and possibly a build group, return the job name. If the
resulting name is longer than 255 characters, it will be truncated.
def get_job_name(spec, osarch, build_group):
"""Given the necessary parts, format the gitlab job name
Arguments:
spec (spack.spec.Spec): Spec job will build
osarch: Architecture TODO: (this is a spack.spec.ArchSpec,
but sphinx doesn't recognize the type and fails).
build_group (str): Name of build group this job belongs to (a CDash
notion)
Returns: The job name
"""
job_name = spec.format(JOB_NAME_FORMAT)
item_idx = 0
format_str = ""
format_args = []
format_str += "{{{0}}}".format(item_idx)
format_args.append(spec.name)
item_idx += 1
format_str += "/{{{0}}}".format(item_idx)
format_args.append(spec.dag_hash(7))
item_idx += 1
format_str += " {{{0}}}".format(item_idx)
format_args.append(spec.version)
item_idx += 1
format_str += " {{{0}}}".format(item_idx)
format_args.append(spec.compiler)
item_idx += 1
format_str += " {{{0}}}".format(item_idx)
format_args.append(osarch)
item_idx += 1
if build_group:
job_name = "{0} {1}".format(job_name, build_group)
format_str += " {{{0}}}".format(item_idx)
format_args.append(build_group)
item_idx += 1
return job_name[:255]
return format_str.format(*format_args)
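# Example (hypothetical spec): rendered through JOB_NAME_FORMAT, a job name
# comes out roughly as
#   zlib@1.2.13 /abcdefg %gcc@12.3.0 arch=linux-ubuntu22.04-x86_64 group
# clipped to GitLab's 255-character limit, while the hand-built variant
# assembles "name/hash version compiler osarch group" piece by piece.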
def _remove_reserved_tags(tags):
@@ -113,6 +118,15 @@ def _remove_reserved_tags(tags):
return [tag for tag in tags if tag not in SPACK_RESERVED_TAGS]
def _get_spec_string(spec):
format_elements = ["{name}{@version}", "{%compiler}"]
if spec.architecture:
format_elements.append(" {arch=architecture}")
return spec.format("".join(format_elements))
def _spec_deps_key(s):
return "{0}/{1}".format(s.name, s.dag_hash(7))
@@ -217,22 +231,22 @@ def _print_staging_summary(spec_labels, stages, mirrors_to_check, rebuild_decisi
tty.msg("Staging summary ([x] means a job needs rebuilding):")
for stage_index, stage in enumerate(stages):
tty.msg(f" stage {stage_index} ({len(stage)} jobs):")
tty.msg(" stage {0} ({1} jobs):".format(stage_index, len(stage)))
for job in sorted(stage, key=lambda j: (not rebuild_decisions[j].rebuild, j)):
for job in sorted(stage):
s = spec_labels[job]
rebuild = rebuild_decisions[job].rebuild
reason = rebuild_decisions[job].reason
reason_msg = f" ({reason})" if reason else ""
spec_fmt = "{name}{@version}{%compiler}{/hash:7}"
if rebuild_decisions[job].rebuild:
status = colorize("@*g{[x]} ")
msg = f" {status}{s.cformat(spec_fmt)}{reason_msg}"
else:
msg = f"{s.format(spec_fmt)}{reason_msg}"
if rebuild_decisions[job].mirrors:
msg += f" [{', '.join(rebuild_decisions[job].mirrors)}]"
msg = colorize(f" @K - {cescape(msg)}@.")
tty.msg(msg)
reason_msg = " ({0})".format(reason) if reason else ""
tty.msg(
" [{1}] {0} -> {2}{3}".format(
job, "x" if rebuild else " ", _get_spec_string(s), reason_msg
)
)
if rebuild_decisions[job].mirrors:
tty.msg(" found on the following mirrors:")
for murl in rebuild_decisions[job].mirrors:
tty.msg(" {0}".format(murl))
def _compute_spec_deps(spec_list):
@@ -323,7 +337,7 @@ def _spec_matches(spec, match_string):
def _format_job_needs(
dep_jobs, build_group, prune_dag, rebuild_decisions, enable_artifacts_buildcache
dep_jobs, osname, build_group, prune_dag, rebuild_decisions, enable_artifacts_buildcache
):
needs_list = []
for dep_job in dep_jobs:
@@ -333,7 +347,7 @@ def _format_job_needs(
if not prune_dag or rebuild:
needs_list.append(
{
"job": get_job_name(dep_job, build_group),
"job": get_job_name(dep_job, dep_job.architecture, build_group),
"artifacts": enable_artifacts_buildcache,
}
)
@@ -686,7 +700,7 @@ def generate_gitlab_ci_yaml(
remote_mirror_override (str): Typically only needed when one spack.yaml
is used to populate several mirrors with binaries, based on some
criteria. Spack protected pipelines populate different mirrors based
on branch name, facilitated by this option. DEPRECATED
on branch name, facilitated by this option.
"""
with spack.concretize.disable_compiler_existence_check():
with env.write_transaction():
@@ -783,39 +797,17 @@ def generate_gitlab_ci_yaml(
"instead.",
)
pipeline_mirrors = spack.mirror.MirrorCollection(binary=True)
deprecated_mirror_config = False
buildcache_destination = None
if "buildcache-destination" in pipeline_mirrors:
if remote_mirror_override:
tty.die(
"Using the deprecated --buildcache-destination cli option and "
"having a mirror named 'buildcache-destination' at the same time "
"is not allowed"
)
buildcache_destination = pipeline_mirrors["buildcache-destination"]
else:
deprecated_mirror_config = True
# TODO: This will be an error in Spack 0.23
if "mirrors" not in yaml_root or len(yaml_root["mirrors"].values()) < 1:
tty.die("spack ci generate requires an env containing a mirror")
# TODO: Remove this block in spack 0.23
remote_mirror_url = None
if deprecated_mirror_config:
if "mirrors" not in yaml_root or len(yaml_root["mirrors"].values()) < 1:
tty.die("spack ci generate requires an env containing a mirror")
ci_mirrors = yaml_root["mirrors"]
mirror_urls = [url for url in ci_mirrors.values()]
remote_mirror_url = mirror_urls[0]
ci_mirrors = yaml_root["mirrors"]
mirror_urls = [url for url in ci_mirrors.values()]
remote_mirror_url = mirror_urls[0]
spack_buildcache_copy = os.environ.get("SPACK_COPY_BUILDCACHE", None)
if spack_buildcache_copy:
buildcache_copies = {}
buildcache_copy_src_prefix = (
buildcache_destination.fetch_url
if buildcache_destination
else remote_mirror_override or remote_mirror_url
)
buildcache_copy_src_prefix = remote_mirror_override or remote_mirror_url
buildcache_copy_dest_prefix = spack_buildcache_copy
# Check for a list of "known broken" specs that we should not bother
@@ -827,7 +819,6 @@ def generate_gitlab_ci_yaml(
enable_artifacts_buildcache = False
if "enable-artifacts-buildcache" in ci_config:
tty.warn("Support for enable-artifacts-buildcache will be removed in Spack 0.23")
enable_artifacts_buildcache = ci_config["enable-artifacts-buildcache"]
rebuild_index_enabled = True
@@ -836,15 +827,13 @@ def generate_gitlab_ci_yaml(
temp_storage_url_prefix = None
if "temporary-storage-url-prefix" in ci_config:
tty.warn("Support for temporary-storage-url-prefix will be removed in Spack 0.23")
temp_storage_url_prefix = ci_config["temporary-storage-url-prefix"]
# If a remote mirror override (alternate buildcache destination) was
# specified, add it here in case it has already built hashes we might
# generate.
# TODO: Remove this block in Spack 0.23
mirrors_to_check = None
if deprecated_mirror_config and remote_mirror_override:
if remote_mirror_override:
if spack_pipeline_type == "spack_protected_branch":
# Overriding the main mirror in this case might result
# in skipping jobs on a release pipeline because specs are
@@ -864,9 +853,8 @@ def generate_gitlab_ci_yaml(
cfg.default_modify_scope(),
)
# TODO: Remove this block in Spack 0.23
shared_pr_mirror = None
if deprecated_mirror_config and spack_pipeline_type == "spack_pull_request":
if spack_pipeline_type == "spack_pull_request":
stack_name = os.environ.get("SPACK_CI_STACK_NAME", "")
shared_pr_mirror = url_util.join(SHARED_PR_MIRROR_URL, stack_name)
spack.mirror.add(
@@ -918,7 +906,6 @@ def generate_gitlab_ci_yaml(
job_log_dir = os.path.join(pipeline_artifacts_dir, "logs")
job_repro_dir = os.path.join(pipeline_artifacts_dir, "reproduction")
job_test_dir = os.path.join(pipeline_artifacts_dir, "tests")
# TODO: Remove this line in Spack 0.23
local_mirror_dir = os.path.join(pipeline_artifacts_dir, "mirror")
user_artifacts_dir = os.path.join(pipeline_artifacts_dir, "user_data")
@@ -933,13 +920,13 @@ def generate_gitlab_ci_yaml(
rel_job_log_dir = os.path.relpath(job_log_dir, ci_project_dir)
rel_job_repro_dir = os.path.relpath(job_repro_dir, ci_project_dir)
rel_job_test_dir = os.path.relpath(job_test_dir, ci_project_dir)
# TODO: Remove this line in Spack 0.23
rel_local_mirror_dir = os.path.join(local_mirror_dir, ci_project_dir)
rel_user_artifacts_dir = os.path.relpath(user_artifacts_dir, ci_project_dir)
# Speed up staging by first fetching binary indices from all mirrors
# (including the override mirror we may have just added above).
try:
bindist.BINARY_INDEX.update()
bindist.binary_index.update()
except bindist.FetchCacheError as e:
tty.warn(e)
@@ -1036,7 +1023,8 @@ def main_script_replacements(cmd):
if "after_script" in job_object:
job_object["after_script"] = _unpack_script(job_object["after_script"])
job_name = get_job_name(release_spec, build_group)
osname = str(release_spec.architecture)
job_name = get_job_name(release_spec, osname, build_group)
job_vars = job_object.setdefault("variables", {})
job_vars["SPACK_JOB_SPEC_DAG_HASH"] = release_spec_dag_hash
@@ -1063,6 +1051,7 @@ def main_script_replacements(cmd):
job_object["needs"].extend(
_format_job_needs(
dep_jobs,
osname,
build_group,
prune_dag,
rebuild_decisions,
@@ -1148,7 +1137,6 @@ def main_script_replacements(cmd):
},
)
# TODO: Remove this block in Spack 0.23
if enable_artifacts_buildcache:
bc_root = os.path.join(local_mirror_dir, "build_cache")
job_object["artifacts"]["paths"].extend(
@@ -1178,12 +1166,10 @@ def main_script_replacements(cmd):
_print_staging_summary(spec_labels, stages, mirrors_to_check, rebuild_decisions)
# Clean up remote mirror override if enabled
# TODO: Remove this block in Spack 0.23
if deprecated_mirror_config:
if remote_mirror_override:
spack.mirror.remove("ci_pr_mirror", cfg.default_modify_scope())
if spack_pipeline_type == "spack_pull_request":
spack.mirror.remove("ci_shared_pr_mirror", cfg.default_modify_scope())
if remote_mirror_override:
spack.mirror.remove("ci_pr_mirror", cfg.default_modify_scope())
if spack_pipeline_type == "spack_pull_request":
spack.mirror.remove("ci_shared_pr_mirror", cfg.default_modify_scope())
tty.debug("{0} build jobs generated in {1} stages".format(job_id, stage_id))
@@ -1214,28 +1200,10 @@ def main_script_replacements(cmd):
sync_job["needs"] = [
{"job": generate_job_name, "pipeline": "{0}".format(parent_pipeline_id)}
]
if "variables" not in sync_job:
sync_job["variables"] = {}
sync_job["variables"]["SPACK_COPY_ONLY_DESTINATION"] = (
buildcache_destination.fetch_url
if buildcache_destination
else remote_mirror_override or remote_mirror_url
)
if "buildcache-source" in pipeline_mirrors:
buildcache_source = pipeline_mirrors["buildcache-source"].fetch_url
else:
# TODO: Remove this condition in Spack 0.23
buildcache_source = os.environ.get("SPACK_SOURCE_MIRROR", None)
sync_job["variables"]["SPACK_BUILDCACHE_SOURCE"] = buildcache_source
output_object["copy"] = sync_job
job_id += 1
if job_id > 0:
# TODO: Remove this block in Spack 0.23
if temp_storage_url_prefix:
# There were some rebuild jobs scheduled, so we will need to
# schedule a job to clean up the temporary storage location
@@ -1269,13 +1237,6 @@ def main_script_replacements(cmd):
signing_job["when"] = "always"
signing_job["retry"] = {"max": 2, "when": ["always"]}
signing_job["interruptible"] = True
if "variables" not in signing_job:
signing_job["variables"] = {}
signing_job["variables"]["SPACK_BUILDCACHE_DESTINATION"] = (
buildcache_destination.push_url # need the s3 url for aws s3 sync
if buildcache_destination
else remote_mirror_override or remote_mirror_url
)
output_object["sign-pkgs"] = signing_job
@@ -1284,13 +1245,13 @@ def main_script_replacements(cmd):
stage_names.append("stage-rebuild-index")
final_job = spack_ci_ir["jobs"]["reindex"]["attributes"]
index_target_mirror = mirror_urls[0]
if remote_mirror_override:
index_target_mirror = remote_mirror_override
final_job["stage"] = "stage-rebuild-index"
target_mirror = remote_mirror_override or remote_mirror_url
if buildcache_destination:
target_mirror = buildcache_destination.push_url
final_job["script"] = _unpack_script(
final_job["script"],
op=lambda cmd: cmd.replace("{index_target_mirror}", target_mirror),
op=lambda cmd: cmd.replace("{index_target_mirror}", index_target_mirror),
)
final_job["when"] = "always"
@@ -1312,24 +1273,20 @@ def main_script_replacements(cmd):
"SPACK_CONCRETE_ENV_DIR": rel_concrete_env_dir,
"SPACK_VERSION": spack_version,
"SPACK_CHECKOUT_VERSION": version_to_clone,
# TODO: Remove this line in Spack 0.23
"SPACK_REMOTE_MIRROR_URL": remote_mirror_url,
"SPACK_JOB_LOG_DIR": rel_job_log_dir,
"SPACK_JOB_REPRO_DIR": rel_job_repro_dir,
"SPACK_JOB_TEST_DIR": rel_job_test_dir,
# TODO: Remove this line in Spack 0.23
"SPACK_LOCAL_MIRROR_DIR": rel_local_mirror_dir,
"SPACK_PIPELINE_TYPE": str(spack_pipeline_type),
"SPACK_CI_STACK_NAME": os.environ.get("SPACK_CI_STACK_NAME", "None"),
# TODO: Remove this line in Spack 0.23
"SPACK_CI_SHARED_PR_MIRROR_URL": shared_pr_mirror or "None",
"SPACK_REBUILD_CHECK_UP_TO_DATE": str(prune_dag),
"SPACK_REBUILD_EVERYTHING": str(rebuild_everything),
"SPACK_REQUIRE_SIGNING": os.environ.get("SPACK_REQUIRE_SIGNING", "False"),
}
# TODO: Remove this block in Spack 0.23
if deprecated_mirror_config and remote_mirror_override:
if remote_mirror_override:
(output_object["variables"]["SPACK_REMOTE_MIRROR_OVERRIDE"]) = remote_mirror_override
spack_stack_name = os.environ.get("SPACK_CI_STACK_NAME", None)
@@ -2069,23 +2026,43 @@ def process_command(name, commands, repro_dir, run=True, exit_on_failure=True):
def create_buildcache(
input_spec: spack.spec.Spec, *, destination_mirror_urls: List[str], sign_binaries: bool = False
input_spec: spack.spec.Spec,
*,
pipeline_mirror_url: Optional[str] = None,
buildcache_mirror_url: Optional[str] = None,
sign_binaries: bool = False,
) -> List[PushResult]:
"""Create the buildcache at the provided mirror(s).
Arguments:
input_spec: Installed spec to package and push
destination_mirror_urls: List of urls to push to
buildcache_mirror_url: URL for the buildcache mirror
pipeline_mirror_url: URL for the pipeline mirror
sign_binaries: Whether or not to sign buildcache entry
Returns: A list of PushResults, indicating success or failure.
"""
results = []
for mirror_url in destination_mirror_urls:
# Create buildcache in either the main remote mirror, or in the
# per-PR mirror, if this is a PR pipeline
if buildcache_mirror_url:
results.append(
PushResult(
success=push_mirror_contents(input_spec, mirror_url, sign_binaries), url=mirror_url
success=push_mirror_contents(input_spec, buildcache_mirror_url, sign_binaries),
url=buildcache_mirror_url,
)
)
# Create another copy of that buildcache in the per-pipeline
# temporary storage mirror (this is only done if either
# artifacts buildcache is enabled or a temporary storage url
# prefix is set)
if pipeline_mirror_url:
results.append(
PushResult(
success=push_mirror_contents(input_spec, pipeline_mirror_url, sign_binaries),
url=pipeline_mirror_url,
)
)
@@ -2265,13 +2242,13 @@ def build_name(self):
spec.architecture,
self.build_group,
)
tty.debug(
tty.verbose(
"Generated CDash build name ({0}) from the {1}".format(build_name, spec.name)
)
return build_name
build_name = os.environ.get("SPACK_CDASH_BUILD_NAME")
tty.debug("Using CDash build name ({0}) from the environment".format(build_name))
tty.verbose("Using CDash build name ({0}) from the environment".format(build_name))
return build_name
@property # type: ignore
@@ -2285,11 +2262,11 @@ def build_stamp(self):
Returns: (str) current CDash build stamp"""
build_stamp = os.environ.get("SPACK_CDASH_BUILD_STAMP")
if build_stamp:
tty.debug("Using build stamp ({0}) from the environment".format(build_stamp))
tty.verbose("Using build stamp ({0}) from the environment".format(build_stamp))
return build_stamp
build_stamp = cdash_build_stamp(self.build_group, time.time())
tty.debug("Generated new build stamp ({0})".format(build_stamp))
tty.verbose("Generated new build stamp ({0})".format(build_stamp))
return build_stamp
@property # type: ignore

View File

@@ -3,7 +3,6 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import spack.cmd.common.env_utility as env_utility
from spack.context import Context
description = (
"run a command in a spec's install environment, or dump its environment to screen or file"
@@ -15,4 +14,4 @@
def build_env(parser, args):
env_utility.emulate_env_utility("build-env", Context.BUILD, args)
env_utility.emulate_env_utility("build-env", "build", args)

View File

@@ -3,19 +3,16 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import argparse
import copy
import glob
import hashlib
import json
import multiprocessing.pool
import os
import shutil
import sys
import tempfile
import urllib.request
from typing import Dict, List, Optional, Tuple
from typing import List
import llnl.util.tty as tty
import llnl.util.tty.color as clr
from llnl.string import plural
from llnl.util.lang import elide_list
@@ -25,37 +22,17 @@
import spack.config
import spack.environment as ev
import spack.error
import spack.hash_types as ht
import spack.mirror
import spack.oci.oci
import spack.oci.opener
import spack.relocate
import spack.repo
import spack.spec
import spack.stage
import spack.store
import spack.user_environment
import spack.util.crypto
import spack.util.url as url_util
import spack.util.web as web_util
from spack.build_environment import determine_number_of_jobs
from spack.cmd import display_specs
from spack.oci.image import (
Digest,
ImageReference,
default_config,
default_index_tag,
default_manifest,
default_tag,
tag_is_spec,
)
from spack.oci.oci import (
copy_missing_layers_with_retry,
get_manifest_and_config_with_retry,
upload_blob_with_retry,
upload_manifest_with_retry,
)
from spack.spec import Spec, save_dependency_specfiles
from spack.stage import Stage
description = "create, download and install binary packages"
section = "packaging"
@@ -81,9 +58,7 @@ def setup_parser(subparser: argparse.ArgumentParser):
push_sign.add_argument(
"--key", "-k", metavar="key", type=str, default=None, help="key for signing"
)
push.add_argument(
"mirror", type=arguments.mirror_name_or_url, help="mirror name, path, or URL"
)
push.add_argument("mirror", type=str, help="mirror name, path, or URL")
push.add_argument(
"--update-index",
"--rebuild-index",
@@ -109,10 +84,7 @@ def setup_parser(subparser: argparse.ArgumentParser):
action="store_true",
help="stop pushing on first failure (default is best effort)",
)
push.add_argument(
"--base-image", default=None, help="specify the base image for the buildcache. "
)
arguments.add_common_arguments(push, ["specs", "jobs"])
arguments.add_common_arguments(push, ["specs"])
push.set_defaults(func=push_fn)
install = subparsers.add_parser("install", help=install_fn.__doc__)
@@ -296,21 +268,6 @@ def _matching_specs(specs: List[Spec]) -> List[Spec]:
return [spack.cmd.disambiguate_spec(s, ev.active_environment(), installed=any) for s in specs]
def _format_spec(spec: Spec) -> str:
return spec.cformat("{name}{@version}{/hash:7}")
def _progress(i: int, total: int):
if total > 1:
digits = len(str(total))
return f"[{i+1:{digits}}/{total}] "
return ""
def _make_pool():
return multiprocessing.pool.Pool(determine_number_of_jobs(parallel=True))
def push_fn(args):
"""create a binary package and push it to a mirror"""
if args.spec_file:
@@ -324,80 +281,63 @@ def push_fn(args):
else:
specs = spack.cmd.require_active_env("buildcache push").all_specs()
mirror = arguments.mirror_name_or_url(args.mirror)
if args.allow_root:
tty.warn(
"The flag `--allow-root` is the default in Spack 0.21, will be removed in Spack 0.22"
)
# Check if this is an OCI image.
try:
image_ref = spack.oci.oci.image_from_mirror(args.mirror)
except ValueError:
image_ref = None
url = mirror.push_url
# For OCI images, we require dependencies to be pushed for now.
if image_ref:
if "dependencies" not in args.things_to_install:
tty.die("Dependencies must be pushed for OCI images.")
if not args.unsigned:
tty.warn(
"Code signing is currently not supported for OCI images. "
"Use --unsigned to silence this warning."
)
# This is a list of installed, non-external specs.
specs = bindist.specs_to_be_packaged(
specs,
root="package" in args.things_to_install,
dependencies="dependencies" in args.things_to_install,
)
url = args.mirror.push_url
# When pushing multiple specs, print the url once ahead of time, as well as how
# many specs are being pushed.
if len(specs) > 1:
tty.info(f"Selected {len(specs)} specs to push to {url}")
skipped = []
failed = []
# TODO: unify this logic in the future.
if image_ref:
with tempfile.TemporaryDirectory(
dir=spack.stage.get_stage_root()
) as tmpdir, _make_pool() as pool:
skipped = _push_oci(args, image_ref, specs, tmpdir, pool)
else:
skipped = []
# tty printing
color = clr.get_color_when()
format_spec = lambda s: s.format("{name}{@version}{/hash:7}", color=color)
total_specs = len(specs)
digits = len(str(total_specs))
for i, spec in enumerate(specs):
try:
bindist.push_or_raise(
spec,
url,
bindist.PushOptions(
force=args.force,
unsigned=args.unsigned,
key=args.key,
regenerate_index=args.update_index,
),
)
for i, spec in enumerate(specs):
try:
bindist.push_or_raise(
spec,
url,
bindist.PushOptions(
force=args.force,
unsigned=args.unsigned,
key=args.key,
regenerate_index=args.update_index,
),
)
msg = f"{_progress(i, len(specs))}Pushed {_format_spec(spec)}"
if len(specs) == 1:
msg += f" to {url}"
tty.info(msg)
if total_specs > 1:
msg = f"[{i+1:{digits}}/{total_specs}] Pushed {format_spec(spec)}"
else:
msg = f"Pushed {format_spec(spec)} to {url}"
except bindist.NoOverwriteException:
skipped.append(_format_spec(spec))
tty.info(msg)
# Catch any other exception unless the fail fast option is set
except Exception as e:
if args.fail_fast or isinstance(
e, (bindist.PickKeyException, bindist.NoKeyException)
):
raise
failed.append((_format_spec(spec), e))
except bindist.NoOverwriteException:
skipped.append(format_spec(spec))
# Catch any other exception unless the fail fast option is set
except Exception as e:
if args.fail_fast or isinstance(e, (bindist.PickKeyException, bindist.NoKeyException)):
raise
failed.append((format_spec(spec), e))
if skipped:
if len(specs) == 1:
@@ -424,341 +364,6 @@ def push_fn(args):
),
)
# Update the index if requested
# TODO: remove update index logic out of bindist; should be once after all specs are pushed
# not once per spec.
if image_ref and len(skipped) < len(specs) and args.update_index:
with tempfile.TemporaryDirectory(
dir=spack.stage.get_stage_root()
) as tmpdir, _make_pool() as pool:
_update_index_oci(image_ref, tmpdir, pool)
def _get_spack_binary_blob(image_ref: ImageReference) -> Optional[spack.oci.oci.Blob]:
"""Get the spack tarball layer digests and size if it exists"""
try:
manifest, config = get_manifest_and_config_with_retry(image_ref)
return spack.oci.oci.Blob(
compressed_digest=Digest.from_string(manifest["layers"][-1]["digest"]),
uncompressed_digest=Digest.from_string(config["rootfs"]["diff_ids"][-1]),
size=manifest["layers"][-1]["size"],
)
except Exception:
return None
def _push_single_spack_binary_blob(image_ref: ImageReference, spec: spack.spec.Spec, tmpdir: str):
filename = os.path.join(tmpdir, f"{spec.dag_hash()}.tar.gz")
# Create an oci.image.layer aka tarball of the package
compressed_tarfile_checksum, tarfile_checksum = spack.oci.oci.create_tarball(spec, filename)
blob = spack.oci.oci.Blob(
Digest.from_sha256(compressed_tarfile_checksum),
Digest.from_sha256(tarfile_checksum),
os.path.getsize(filename),
)
# Upload the blob
upload_blob_with_retry(image_ref, file=filename, digest=blob.compressed_digest)
# delete the file
os.unlink(filename)
return blob
def _retrieve_env_dict_from_config(config: dict) -> dict:
"""Retrieve the environment variables from the image config file.
Sets a default value for PATH if it is not present.
Args:
config (dict): The image config file.
Returns:
dict: The environment variables.
"""
env = {"PATH": "/bin:/usr/bin"}
if "Env" in config.get("config", {}):
for entry in config["config"]["Env"]:
key, value = entry.split("=", 1)
env[key] = value
return env
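# Sketch (hypothetical image config): a fragment like
#   {"config": {"Env": ["PATH=/usr/bin", "LANG=C.UTF-8"]}}
# yields {"PATH": "/usr/bin", "LANG": "C.UTF-8"}; the "/bin:/usr/bin"
# default above survives only when no PATH entry appears in Env.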
def _archspec_to_gooarch(spec: spack.spec.Spec) -> str:
name = spec.target.family.name
name_map = {"aarch64": "arm64", "x86_64": "amd64"}
return name_map.get(name, name)
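# e.g. the "x86_64" family maps to the GOARCH/OCI name "amd64", "aarch64"
# to "arm64", and any other family name passes through unchanged.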
def _put_manifest(
base_images: Dict[str, Tuple[dict, dict]],
checksums: Dict[str, spack.oci.oci.Blob],
spec: spack.spec.Spec,
image_ref: ImageReference,
tmpdir: str,
):
architecture = _archspec_to_gooarch(spec)
dependencies = list(
reversed(
list(
s
for s in spec.traverse(order="topo", deptype=("link", "run"), root=True)
if not s.external
)
)
)
base_manifest, base_config = base_images[architecture]
env = _retrieve_env_dict_from_config(base_config)
spack.user_environment.environment_modifications_for_specs(spec).apply_modifications(env)
# Create an oci.image.config file
config = copy.deepcopy(base_config)
# Add the diff ids of the dependencies
for s in dependencies:
config["rootfs"]["diff_ids"].append(str(checksums[s.dag_hash()].uncompressed_digest))
# Set the environment variables
config["config"]["Env"] = [f"{k}={v}" for k, v in env.items()]
# From the OCI v1.0 spec:
# > Any extra fields in the Image JSON struct are considered implementation
# > specific and MUST be ignored by any implementations which are unable to
# > interpret them.
# We use this to store the Spack spec, so we can use it to create an index.
spec_dict = spec.to_dict(hash=ht.dag_hash)
spec_dict["buildcache_layout_version"] = 1
spec_dict["binary_cache_checksum"] = {
"hash_algorithm": "sha256",
"hash": checksums[spec.dag_hash()].compressed_digest.digest,
}
config.update(spec_dict)
config_file = os.path.join(tmpdir, f"{spec.dag_hash()}.config.json")
with open(config_file, "w") as f:
json.dump(config, f, separators=(",", ":"))
config_file_checksum = Digest.from_sha256(
spack.util.crypto.checksum(hashlib.sha256, config_file)
)
# Upload the config file
upload_blob_with_retry(image_ref, file=config_file, digest=config_file_checksum)
oci_manifest = {
"mediaType": "application/vnd.oci.image.manifest.v1+json",
"schemaVersion": 2,
"config": {
"mediaType": base_manifest["config"]["mediaType"],
"digest": str(config_file_checksum),
"size": os.path.getsize(config_file),
},
"layers": [
*(layer for layer in base_manifest["layers"]),
*(
{
"mediaType": "application/vnd.oci.image.layer.v1.tar+gzip",
"digest": str(checksums[s.dag_hash()].compressed_digest),
"size": checksums[s.dag_hash()].size,
}
for s in dependencies
),
],
"annotations": {"org.opencontainers.image.description": spec.format()},
}
image_ref_for_spec = image_ref.with_tag(default_tag(spec))
# Finally upload the manifest
upload_manifest_with_retry(image_ref_for_spec, oci_manifest=oci_manifest)
# delete the config file
os.unlink(config_file)
return image_ref_for_spec
def _push_oci(
args,
image_ref: ImageReference,
installed_specs_with_deps: List[Spec],
tmpdir: str,
pool: multiprocessing.pool.Pool,
) -> List[str]:
"""Push specs to an OCI registry
Args:
args: The command line arguments.
image_ref: The image reference.
installed_specs_with_deps: The installed specs to push, excluding externals,
including deps, ordered from roots to leaves.
Returns:
List[str]: The list of skipped specs (already in the buildcache).
"""
# Reverse the order
installed_specs_with_deps = list(reversed(installed_specs_with_deps))
# The base image to use for the package. When not set, we use
# the OCI registry only for storage, and do not use any base image.
base_image_ref: Optional[ImageReference] = (
ImageReference.from_string(args.base_image) if args.base_image else None
)
# Spec dag hash -> blob
checksums: Dict[str, spack.oci.oci.Blob] = {}
# arch -> (manifest, config)
base_images: Dict[str, Tuple[dict, dict]] = {}
# Specs not uploaded because they already exist
skipped = []
if not args.force:
tty.info("Checking for existing specs in the buildcache")
to_be_uploaded = []
tags_to_check = (image_ref.with_tag(default_tag(s)) for s in installed_specs_with_deps)
available_blobs = pool.map(_get_spack_binary_blob, tags_to_check)
for spec, maybe_blob in zip(installed_specs_with_deps, available_blobs):
if maybe_blob is not None:
checksums[spec.dag_hash()] = maybe_blob
skipped.append(_format_spec(spec))
else:
to_be_uploaded.append(spec)
else:
to_be_uploaded = installed_specs_with_deps
if not to_be_uploaded:
return skipped
tty.info(
f"{len(to_be_uploaded)} specs need to be pushed to {image_ref.domain}/{image_ref.name}"
)
# Upload blobs
new_blobs = pool.starmap(
_push_single_spack_binary_blob, ((image_ref, spec, tmpdir) for spec in to_be_uploaded)
)
# And update the spec to blob mapping
for spec, blob in zip(to_be_uploaded, new_blobs):
checksums[spec.dag_hash()] = blob
# Copy base image layers, probably fine to do sequentially.
for spec in to_be_uploaded:
architecture = _archspec_to_gooarch(spec)
# Get base image details, if we don't have them yet
if architecture in base_images:
continue
if base_image_ref is None:
base_images[architecture] = (default_manifest(), default_config(architecture, "linux"))
else:
base_images[architecture] = copy_missing_layers_with_retry(
base_image_ref, image_ref, architecture
)
# Upload manifests
tty.info("Uploading manifests")
pushed_image_ref = pool.starmap(
_put_manifest,
((base_images, checksums, spec, image_ref, tmpdir) for spec in to_be_uploaded),
)
# Print the image names of the top-level specs
for spec, ref in zip(to_be_uploaded, pushed_image_ref):
tty.info(f"Pushed {_format_spec(spec)} to {ref}")
return skipped
def _config_from_tag(image_ref: ImageReference, tag: str) -> Optional[dict]:
# Don't allow recursion here, since Spack itself always uploads
# vnd.oci.image.manifest.v1+json, not vnd.oci.image.index.v1+json
_, config = get_manifest_and_config_with_retry(image_ref.with_tag(tag), tag, recurse=0)
# Do very basic validation: if "spec" is a key in the config, it
# must be a Spec object too.
return config if "spec" in config else None
def _update_index_oci(
image_ref: ImageReference, tmpdir: str, pool: multiprocessing.pool.Pool
) -> None:
response = spack.oci.opener.urlopen(urllib.request.Request(url=image_ref.tags_url()))
spack.oci.opener.ensure_status(response, 200)
tags = json.load(response)["tags"]
# Fetch all image config files in parallel
spec_dicts = pool.starmap(
_config_from_tag, ((image_ref, tag) for tag in tags if tag_is_spec(tag))
)
# Populate the database
db_root_dir = os.path.join(tmpdir, "db_root")
db = bindist.BuildCacheDatabase(db_root_dir)
for spec_dict in spec_dicts:
spec = Spec.from_dict(spec_dict)
db.add(spec, directory_layout=None)
db.mark(spec, "in_buildcache", True)
# Create the index.json file
index_json_path = os.path.join(tmpdir, "index.json")
with open(index_json_path, "w") as f:
db._write_to_file(f)
# Create an empty config.json file
empty_config_json_path = os.path.join(tmpdir, "config.json")
with open(empty_config_json_path, "wb") as f:
f.write(b"{}")
# Upload the index.json file
index_shasum = Digest.from_sha256(spack.util.crypto.checksum(hashlib.sha256, index_json_path))
upload_blob_with_retry(image_ref, file=index_json_path, digest=index_shasum)
# Upload the config.json file
empty_config_digest = Digest.from_sha256(
spack.util.crypto.checksum(hashlib.sha256, empty_config_json_path)
)
upload_blob_with_retry(image_ref, file=empty_config_json_path, digest=empty_config_digest)
# Push a manifest file that references the index.json file as a layer
# Notice that we push this as if it is an image, which it of course is not.
# When the ORAS spec becomes official, we can use that instead of a fake image.
# For now we just use the OCI image spec, so that we don't run into issues with
# automatic garbage collection of blobs that are not referenced by any image manifest.
oci_manifest = {
"mediaType": "application/vnd.oci.image.manifest.v1+json",
"schemaVersion": 2,
# Config is just an empty {} file for now, and irrelevant
"config": {
"mediaType": "application/vnd.oci.image.config.v1+json",
"digest": str(empty_config_digest),
"size": os.path.getsize(empty_config_json_path),
},
# The buildcache index is the only layer, and is not a tarball, we lie here.
"layers": [
{
"mediaType": "application/vnd.oci.image.layer.v1.tar+gzip",
"digest": str(index_shasum),
"size": os.path.getsize(index_json_path),
}
],
}
upload_manifest_with_retry(image_ref.with_tag(default_index_tag), oci_manifest)
def install_fn(args):
"""install from a binary package"""
@@ -809,7 +414,7 @@ def preview_fn(args):
)
def check_fn(args: argparse.Namespace):
def check_fn(args):
"""check specs against remote binary mirror(s) to see if any need to be rebuilt
this command uses the process exit code to indicate its result, specifically, if the
@@ -824,7 +429,7 @@ def check_fn(args: argparse.Namespace):
specs = spack.cmd.parse_specs(args.spec or args.spec_file)
if specs:
specs = _matching_specs(specs)
specs = _matching_specs(specs, specs)
else:
specs = spack.cmd.require_active_env("buildcache check").all_specs()
@@ -917,7 +522,7 @@ def copy_buildcache_file(src_url, dest_url, local_path=None):
local_path = os.path.join(tmpdir, os.path.basename(src_url))
try:
temp_stage = spack.stage.Stage(src_url, path=os.path.dirname(local_path))
temp_stage = Stage(src_url, path=os.path.dirname(local_path))
try:
temp_stage.create()
temp_stage.fetch()
@@ -1011,20 +616,6 @@ def manifest_copy(manifest_file_list):
def update_index(mirror: spack.mirror.Mirror, update_keys=False):
# Special case OCI images for now.
try:
image_ref = spack.oci.oci.image_from_mirror(mirror)
except ValueError:
image_ref = None
if image_ref:
with tempfile.TemporaryDirectory(
dir=spack.stage.get_stage_root()
) as tmpdir, _make_pool() as pool:
_update_index_oci(image_ref, tmpdir, pool)
return
# Otherwise, assume a normal mirror.
url = mirror.push_url
bindist.generate_package_index(url_util.join(url, bindist.build_cache_relative_path()))

View File

@@ -3,10 +3,10 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import argparse
import re
import sys
import llnl.string
import llnl.util.lang
from llnl.util import tty
@@ -15,11 +15,11 @@
import spack.spec
import spack.stage
import spack.util.crypto
import spack.util.web as web_util
from spack.cmd.common import arguments
from spack.package_base import PackageBase, deprecated_version, preferred_version
from spack.util.editor import editor
from spack.util.format import get_version_lines
from spack.util.naming import valid_fully_qualified_module_name
from spack.version import Version
description = "checksum available versions of a package"
@@ -35,30 +35,30 @@ def setup_parser(subparser):
help="don't clean up staging area when command completes",
)
subparser.add_argument(
"--batch",
"-b",
"--batch",
action="store_true",
default=False,
help="don't ask which versions to checksum",
)
subparser.add_argument(
"--latest",
"-l",
"--latest",
action="store_true",
default=False,
help="checksum the latest available version",
)
subparser.add_argument(
"--preferred",
"-p",
"--preferred",
action="store_true",
default=False,
help="checksum the known Spack preferred version",
)
modes_parser = subparser.add_mutually_exclusive_group()
modes_parser.add_argument(
"--add-to-package",
"-a",
"--add-to-package",
action="store_true",
default=False,
help="add new versions to package",
@@ -66,26 +66,27 @@ def setup_parser(subparser):
modes_parser.add_argument(
"--verify", action="store_true", default=False, help="verify known package checksums"
)
subparser.add_argument("package", help="name or spec (e.g. `cmake` or `cmake@3.18`)")
arguments.add_common_arguments(subparser, ["package", "jobs"])
subparser.add_argument(
"versions",
nargs="*",
help="checksum these specific versions (if omitted, Spack searches for remote versions)",
)
arguments.add_common_arguments(subparser, ["jobs"])
subparser.epilog = (
"examples:\n"
" `spack checksum zlib@1.2` autodetects versions 1.2.0 to 1.2.13 from the remote\n"
" `spack checksum zlib 1.2.13` checksums exact version 1.2.13 directly without search\n"
"versions", nargs=argparse.REMAINDER, help="versions to generate checksums for"
)
def checksum(parser, args):
spec = spack.spec.Spec(args.package)
# Did the user pass 'package@version' string?
if len(args.versions) == 0 and "@" in args.package:
args.versions = [args.package.split("@")[1]]
args.package = args.package.split("@")[0]
# Make sure the user provided a package and not a URL
if not valid_fully_qualified_module_name(args.package):
tty.die("`spack checksum` accepts package names, not URLs.")
# Get the package we're going to generate checksums for
pkg = spack.repo.PATH.get_pkg_class(spec.name)(spec)
pkg_cls = spack.repo.PATH.get_pkg_class(args.package)
pkg = pkg_cls(spack.spec.Spec(args.package))
# Build a list of versions to checksum
versions = [Version(v) for v in args.versions]
# Define placeholder for remote versions.
@@ -127,41 +128,18 @@ def checksum(parser, args):
remote_versions = pkg.fetch_remote_versions(args.jobs)
url_dict = remote_versions
# A spidered URL can differ from the package.py *computed* URL, pointing to different tarballs.
# For example, GitHub release pages sometimes have multiple tarballs with different shasum:
# - releases/download/1.0/<pkg>-1.0.tar.gz (uploaded tarball)
# - archive/refs/tags/1.0.tar.gz (generated tarball)
# We want to ensure that `spack checksum` and `spack install` ultimately use the same URL, so
# here we check whether the crawled and computed URLs disagree, and if so, prioritize the
# former if that URL exists (checked by just sending a HEAD request).
url_changed_for_version = set()
for version, url in url_dict.items():
possible_urls = pkg.all_urls_for_version(version)
if url not in possible_urls:
for possible_url in possible_urls:
if web_util.url_exists(possible_url):
url_dict[version] = possible_url
break
else:
url_changed_for_version.add(version)
if not url_dict:
tty.die(f"Could not find any remote versions for {pkg.name}")
elif len(url_dict) > 1 and not args.batch and sys.stdin.isatty():
filtered_url_dict = spack.stage.interactive_version_filter(
url_dict,
pkg.versions,
url_changes=url_changed_for_version,
initial_verion_filter=spec.versions,
)
if not filtered_url_dict:
exit(0)
url_dict = filtered_url_dict
else:
tty.info(f"Found {llnl.string.plural(len(url_dict), 'version')} of {pkg.name}")
# print an empty line to create a new output section block
print()
version_hashes = spack.stage.get_checksums_for_versions(
url_dict, pkg.name, keep_stage=args.keep_stage, fetch_options=pkg.fetch_options
url_dict,
pkg.name,
keep_stage=args.keep_stage,
batch=(args.batch or len(versions) > 0 or len(url_dict) == 1),
fetch_options=pkg.fetch_options,
)
if args.verify:

View File

@@ -191,14 +191,6 @@ def ci_generate(args):
"""
env = spack.cmd.require_active_env(cmd_name="ci generate")
if args.copy_to:
tty.warn("The flag --copy-to is deprecated and will be removed in Spack 0.23")
if args.buildcache_destination:
tty.warn(
"The flag --buildcache-destination is deprecated and will be removed in Spack 0.23"
)
output_file = args.output_file
copy_yaml_to = args.copy_to
run_optimizer = args.optimize
@@ -272,6 +264,12 @@ def ci_rebuild(args):
if not ci_config:
tty.die("spack ci rebuild requires an env containing ci cfg")
tty.msg(
"SPACK_BUILDCACHE_DESTINATION={0}".format(
os.environ.get("SPACK_BUILDCACHE_DESTINATION", None)
)
)
# Grab the environment variables we need. These either come from the
# pipeline generation step ("spack ci generate"), where they were written
# out as variables, or else provided by GitLab itself.
@@ -279,7 +277,6 @@ def ci_rebuild(args):
job_log_dir = os.environ.get("SPACK_JOB_LOG_DIR")
job_test_dir = os.environ.get("SPACK_JOB_TEST_DIR")
repro_dir = os.environ.get("SPACK_JOB_REPRO_DIR")
# TODO: Remove this in Spack 0.23
local_mirror_dir = os.environ.get("SPACK_LOCAL_MIRROR_DIR")
concrete_env_dir = os.environ.get("SPACK_CONCRETE_ENV_DIR")
ci_pipeline_id = os.environ.get("CI_PIPELINE_ID")
@@ -288,12 +285,9 @@ def ci_rebuild(args):
job_spec_pkg_name = os.environ.get("SPACK_JOB_SPEC_PKG_NAME")
job_spec_dag_hash = os.environ.get("SPACK_JOB_SPEC_DAG_HASH")
spack_pipeline_type = os.environ.get("SPACK_PIPELINE_TYPE")
# TODO: Remove this in Spack 0.23
remote_mirror_override = os.environ.get("SPACK_REMOTE_MIRROR_OVERRIDE")
# TODO: Remove this in Spack 0.23
remote_mirror_url = os.environ.get("SPACK_REMOTE_MIRROR_URL")
spack_ci_stack_name = os.environ.get("SPACK_CI_STACK_NAME")
# TODO: Remove this in Spack 0.23
shared_pr_mirror_url = os.environ.get("SPACK_CI_SHARED_PR_MIRROR_URL")
rebuild_everything = os.environ.get("SPACK_REBUILD_EVERYTHING")
require_signing = os.environ.get("SPACK_REQUIRE_SIGNING")
@@ -350,36 +344,21 @@ def ci_rebuild(args):
full_rebuild = True if rebuild_everything and rebuild_everything.lower() == "true" else False
pipeline_mirrors = spack.mirror.MirrorCollection(binary=True)
deprecated_mirror_config = False
buildcache_destination = None
if "buildcache-destination" in pipeline_mirrors:
buildcache_destination = pipeline_mirrors["buildcache-destination"]
else:
deprecated_mirror_config = True
# TODO: This will be an error in Spack 0.23
# If no override url exists, then just push binary package to the
# normal remote mirror url.
# TODO: Remove in Spack 0.23
buildcache_mirror_url = remote_mirror_override or remote_mirror_url
if buildcache_destination:
buildcache_mirror_url = buildcache_destination.push_url
# Figure out what our temporary storage mirror is: is it the artifacts
# buildcache, or temporary-storage-url-prefix? In some cases we need to
# force one, or pipelines might not have a way to propagate build
# artifacts from upstream to downstream jobs.
# TODO: Remove this in Spack 0.23
pipeline_mirror_url = None
# TODO: Remove this in Spack 0.23
temp_storage_url_prefix = None
if "temporary-storage-url-prefix" in ci_config:
temp_storage_url_prefix = ci_config["temporary-storage-url-prefix"]
pipeline_mirror_url = url_util.join(temp_storage_url_prefix, ci_pipeline_id)
# TODO: Remove this in Spack 0.23
enable_artifacts_mirror = False
if "enable-artifacts-buildcache" in ci_config:
enable_artifacts_mirror = ci_config["enable-artifacts-buildcache"]
@@ -475,14 +454,12 @@ def ci_rebuild(args):
# If we decided there should be a temporary storage mechanism, add that
# mirror now so it's used when we check for a hash match already
# built for this spec.
# TODO: Remove this block in Spack 0.23
if pipeline_mirror_url:
mirror = spack.mirror.Mirror(pipeline_mirror_url, name=spack_ci.TEMP_STORAGE_MIRROR_NAME)
spack.mirror.add(mirror, cfg.default_modify_scope())
pipeline_mirrors.append(pipeline_mirror_url)
# Check configured mirrors for a built spec with a matching hash
# TODO: Remove this block in Spack 0.23
mirrors_to_check = None
if remote_mirror_override:
if spack_pipeline_type == "spack_protected_branch":
@@ -500,8 +477,7 @@ def ci_rebuild(args):
)
pipeline_mirrors.append(remote_mirror_override)
# TODO: Remove this in Spack 0.23
if deprecated_mirror_config and spack_pipeline_type == "spack_pull_request":
if spack_pipeline_type == "spack_pull_request":
if shared_pr_mirror_url != "None":
pipeline_mirrors.append(shared_pr_mirror_url)
@@ -523,7 +499,6 @@ def ci_rebuild(args):
tty.msg("No need to rebuild {0}, found hash match at: ".format(job_spec_pkg_name))
for match in matches:
tty.msg(" {0}".format(match["mirror_url"]))
# TODO: Remove this block in Spack 0.23
if enable_artifacts_mirror:
matching_mirror = matches[0]["mirror_url"]
build_cache_dir = os.path.join(local_mirror_dir, "build_cache")
@@ -538,8 +513,7 @@ def ci_rebuild(args):
# only want to keep the mirror being used by the current pipeline as it's binary
# package destination. This ensures that the when we rebuild everything, we only
# consume binary dependencies built in this pipeline.
# TODO: Remove this in Spack 0.23
if deprecated_mirror_config and full_rebuild:
if full_rebuild:
spack_ci.remove_other_mirrors(pipeline_mirrors, cfg.default_modify_scope())
# No hash match anywhere means we need to rebuild spec
@@ -605,9 +579,7 @@ def ci_rebuild(args):
"SPACK_COLOR=always",
"SPACK_INSTALL_FLAGS={}".format(args_to_string(deps_install_args)),
"-j$(nproc)",
"install-deps/{}".format(
ev.depfile.MakefileSpec(job_spec).safe_format("{name}-{version}-{hash}")
),
"install-deps/{}".format(job_spec.format("{name}-{version}-{hash}")),
],
spack_cmd + ["install"] + root_install_args,
]
@@ -704,25 +676,21 @@ def ci_rebuild(args):
# print out some instructions on how to reproduce this build failure
# outside of the pipeline environment.
if install_exit_code == 0:
mirror_urls = [buildcache_mirror_url]
# TODO: Remove this block in Spack 0.23
if pipeline_mirror_url:
mirror_urls.append(pipeline_mirror_url)
for result in spack_ci.create_buildcache(
input_spec=job_spec,
destination_mirror_urls=mirror_urls,
sign_binaries=spack_ci.can_sign_binaries(),
):
msg = tty.msg if result.success else tty.warn
msg(
"{} {} to {}".format(
"Pushed" if result.success else "Failed to push",
job_spec.format("{name}{@version}{/hash:7}", color=clr.get_color_when()),
result.url,
if buildcache_mirror_url or pipeline_mirror_url:
for result in spack_ci.create_buildcache(
input_spec=job_spec,
buildcache_mirror_url=buildcache_mirror_url,
pipeline_mirror_url=pipeline_mirror_url,
sign_binaries=spack_ci.can_sign_binaries(),
):
msg = tty.msg if result.success else tty.warn
msg(
"{} {} to {}".format(
"Pushed" if result.success else "Failed to push",
job_spec.format("{name}{@version}{/hash:7}", color=clr.get_color_when()),
result.url,
)
)
)
# If this is a develop pipeline, check if the spec that we just built is
# on the broken-specs list. If so, remove it.

View File

@@ -796,9 +796,7 @@ def names(args: Namespace, out: IO) -> None:
commands = copy.copy(spack.cmd.all_commands())
if args.aliases:
aliases = spack.config.get("config:aliases")
if aliases:
commands.extend(aliases.keys())
commands.extend(spack.main.aliases.keys())
colify(commands, output=out)
@@ -814,10 +812,8 @@ def bash(args: Namespace, out: IO) -> None:
parser = spack.main.make_argument_parser()
spack.main.add_all_commands(parser)
aliases_config = spack.config.get("config:aliases")
if aliases_config:
aliases = ";".join(f"{key}:{val}" for key, val in aliases_config.items())
out.write(f'SPACK_ALIASES="{aliases}"\n\n')
aliases = ";".join(f"{key}:{val}" for key, val in spack.main.aliases.items())
out.write(f'SPACK_ALIASES="{aliases}"\n\n')
writer = BashCompletionWriter(parser.prog, out, args.aliases)
writer.write(parser)

View File

@@ -543,7 +543,7 @@ def add_concretizer_args(subparser):
)
def add_connection_args(subparser, add_help):
def add_s3_connection_args(subparser, add_help):
subparser.add_argument(
"--s3-access-key-id", help="ID string to use to connect to this S3 mirror"
)
@@ -559,8 +559,6 @@ def add_connection_args(subparser, add_help):
subparser.add_argument(
"--s3-endpoint-url", help="endpoint URL to use to connect to this S3 mirror"
)
subparser.add_argument("--oci-username", help="username to use to connect to this OCI mirror")
subparser.add_argument("--oci-password", help="password to use to connect to this OCI mirror")
def use_buildcache(cli_arg_value):

View File

@@ -7,6 +7,7 @@
import llnl.util.tty as tty
import spack.build_environment as build_environment
import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.deptypes as dt
@@ -14,8 +15,7 @@
import spack.paths
import spack.spec
import spack.store
from spack import build_environment, traverse
from spack.context import Context
from spack import traverse
from spack.util.environment import dump_environment, pickle_environment
@@ -42,14 +42,14 @@ def setup_parser(subparser):
class AreDepsInstalledVisitor:
def __init__(self, context: Context = Context.BUILD):
if context == Context.BUILD:
# TODO: run deps shouldn't be required for build env.
def __init__(self, context="build"):
if context not in ("build", "test"):
raise ValueError("context can only be build or test")
if context == "build":
self.direct_deps = dt.BUILD | dt.LINK | dt.RUN
elif context == Context.TEST:
self.direct_deps = dt.BUILD | dt.TEST | dt.LINK | dt.RUN
else:
raise ValueError("context can only be Context.BUILD or Context.TEST")
self.direct_deps = dt.BUILD | dt.TEST | dt.LINK | dt.RUN
self.has_uninstalled_deps = False
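# Illustration of the two masks: the build context walks build|link|run
# edges of the root, while the test context additionally ORs in dt.TEST so
# test-only dependencies of the root are also checked for installation.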
@@ -76,7 +76,7 @@ def neighbors(self, item):
return item.edge.spec.edges_to_dependencies(depflag=depflag)
def emulate_env_utility(cmd_name, context: Context, args):
def emulate_env_utility(cmd_name, context, args):
if not args.spec:
tty.die("spack %s requires a spec." % cmd_name)
@@ -120,7 +120,7 @@ def emulate_env_utility(cmd_name, context: Context, args):
hashes=True,
# This shows more than necessary, but we cannot dynamically change deptypes
# in Spec.tree(...).
deptypes="all" if context == Context.BUILD else ("build", "test", "link", "run"),
deptypes="all" if context == "build" else ("build", "test", "link", "run"),
),
)

View File

@@ -31,19 +31,6 @@ def setup_parser(subparser):
aliases=["add"],
help="search the system for compilers to add to Spack configuration",
)
mixed_toolchain_group = find_parser.add_mutually_exclusive_group()
mixed_toolchain_group.add_argument(
"--mixed-toolchain",
action="store_true",
default=sys.platform == "darwin",
help="Allow mixed toolchains (for example: clang, clang++, gfortran)",
)
mixed_toolchain_group.add_argument(
"--no-mixed-toolchain",
action="store_false",
dest="mixed_toolchain",
help="Do not allow mixed toolchains (for example: clang, clang++, gfortran)",
)
find_parser.add_argument("add_paths", nargs=argparse.REMAINDER)
find_parser.add_argument(
"--scope",
@@ -99,9 +86,7 @@ def compiler_find(args):
# Below scope=None because we want new compilers that don't appear
# in any other configuration.
new_compilers = spack.compilers.find_new_compilers(
paths, scope=None, mixed_toolchain=args.mixed_toolchain
)
new_compilers = spack.compilers.find_new_compilers(paths, scope=None)
if new_compilers:
spack.compilers.add_compilers_to_config(new_compilers, scope=args.scope, init_config=False)
n = len(new_compilers)

View File

@@ -407,9 +407,7 @@ def config_prefer_upstream(args):
pkgs = {}
for spec in pref_specs:
# Collect all the upstream compilers and versions for this package.
pkg = pkgs.get(spec.name, {"version": []})
all = pkgs.get("all", {"compiler": []})
pkgs["all"] = all
pkg = pkgs.get(spec.name, {"version": [], "compiler": []})
pkgs[spec.name] = pkg
# We have no existing variant if this is our first added version.
@@ -420,8 +418,8 @@ def config_prefer_upstream(args):
pkg["version"].append(version)
compiler = str(spec.compiler)
if compiler not in all["compiler"]:
all["compiler"].append(compiler)
if compiler not in pkg["compiler"]:
pkg["compiler"].append(compiler)
# Get and list all the variants that differ from the default.
variants = []

View File

@@ -5,7 +5,6 @@
import os
import re
import sys
import urllib.parse
import llnl.util.tty as tty
@@ -63,10 +62,6 @@ class {class_name}({base_class_name}):
# notify when the package is updated.
# maintainers("github_user1", "github_user2")
# FIXME: Add the SPDX identifier of the project's license below.
# See https://spdx.org/licenses/ for a list.
license("UNKNOWN")
{versions}
{dependencies}
@@ -828,11 +823,6 @@ def get_versions(args, name):
# Find available versions
try:
url_dict = spack.url.find_versions_of_archive(args.url)
if len(url_dict) > 1 and not args.batch and sys.stdin.isatty():
url_dict_filtered = spack.stage.interactive_version_filter(url_dict)
if url_dict_filtered is None:
exit(0)
url_dict = url_dict_filtered
except UndetectableVersionError:
# Use fake versions
tty.warn("Couldn't detect version in: {0}".format(args.url))
@@ -844,7 +834,11 @@ def get_versions(args, name):
url_dict = {version: args.url}
version_hashes = spack.stage.get_checksums_for_versions(
url_dict, name, first_stage_function=guesser, keep_stage=args.keep_stage
url_dict,
name,
first_stage_function=guesser,
keep_stage=args.keep_stage,
batch=(args.batch or len(url_dict) == 1),
)
versions = get_version_lines(version_hashes, url_dict)

View File

@@ -99,7 +99,10 @@ def dev_build(self, args):
spec = specs[0]
if not spack.repo.PATH.exists(spec.name):
raise spack.repo.UnknownPackageError(spec.name)
tty.die(
"No package for '{0}' was found.".format(spec.name),
" Use `spack create` to create a new package",
)
if not spec.versions.concrete_range_as_version:
tty.die(

View File

@@ -43,7 +43,10 @@ def edit_package(name, repo_path, namespace):
if not os.access(path, os.R_OK):
tty.die("Insufficient permissions on '%s'!" % path)
else:
raise spack.repo.UnknownPackageError(spec.name)
tty.die(
"No package for '{0}' was found.".format(spec.name),
" Use `spack create` to create a new package",
)
editor(path)

View File

@@ -5,11 +5,9 @@
import argparse
import os
import shlex
import shutil
import sys
import tempfile
from typing import Optional
import llnl.string as string
import llnl.util.filesystem as fs
@@ -98,16 +96,22 @@ def env_activate_setup_parser(subparser):
view_options = subparser.add_mutually_exclusive_group()
view_options.add_argument(
"--with-view",
"-v",
metavar="name",
help="set runtime environment variables for specific view",
"--with-view",
action="store_const",
dest="with_view",
const=True,
default=True,
help="update PATH, etc., with associated view",
)
view_options.add_argument(
"--without-view",
"-V",
action="store_true",
help="do not set runtime environment variables for any view",
"--without-view",
action="store_const",
dest="with_view",
const=False,
default=True,
help="do not update PATH, etc., with associated view",
)
subparser.add_argument(
@@ -145,13 +149,10 @@ def create_temp_env_directory():
return tempfile.mkdtemp(prefix="spack-")
def _tty_info(msg):
"""tty.info like function that prints the equivalent printf statement for eval."""
decorated = f'{colorize("@*b{==>}")} {msg}\n'
print(f"printf {shlex.quote(decorated)};")
def env_activate(args):
if not args.activate_env and not args.dir and not args.temp:
tty.die("spack env activate requires an environment name, directory, or --temp")
if not args.shell:
spack.cmd.common.shell_init_instructions(
"spack env activate", " eval `spack env activate {sh_arg} [...]`"
@@ -164,25 +165,12 @@ def env_activate(args):
env_name_or_dir = args.activate_env or args.dir
# When executing `spack env activate` without further arguments, activate
# the default environment. It's created when it doesn't exist yet.
if not env_name_or_dir and not args.temp:
short_name = "default"
if not ev.exists(short_name):
ev.create(short_name)
action = "Created and activated"
else:
action = "Activated"
env_path = ev.root(short_name)
_tty_info(f"{action} default environment in {env_path}")
# Temporary environment
elif args.temp:
if args.temp:
env = create_temp_env_directory()
env_path = os.path.abspath(env)
short_name = os.path.basename(env_path)
ev.create_in_dir(env).write(regenerate=False)
_tty_info(f"Created and activated temporary environment in {env_path}")
# Managed environment
elif ev.exists(env_name_or_dir) and not args.dir:
@@ -209,20 +197,10 @@ def env_activate(args):
# Activate new environment
active_env = ev.Environment(env_path)
# Check if runtime environment variables are requested, and if so, for what view.
view: Optional[str] = None
if args.with_view:
view = args.with_view
if not active_env.has_view(view):
tty.die(f"The environment does not have a view named '{view}'")
elif not args.without_view and active_env.has_view(ev.default_view_name):
view = ev.default_view_name
cmds += spack.environment.shell.activate_header(
env=active_env, shell=args.shell, prompt=env_prompt if args.prompt else None, view=view
env=active_env, shell=args.shell, prompt=env_prompt if args.prompt else None
)
env_mods.extend(spack.environment.shell.activate(env=active_env, view=view))
env_mods.extend(spack.environment.shell.activate(env=active_env, add_view=args.with_view))
cmds += env_mods.shell_modifications(args.shell)
sys.stdout.write(cmds)
@@ -397,33 +375,28 @@ def env_remove(args):
and manifests embedded in repositories should be removed manually.
"""
read_envs = []
bad_envs = []
for env_name in args.rm_env:
try:
env = ev.read(env_name)
read_envs.append(env)
except (spack.config.ConfigFormatError, ev.SpackEnvironmentConfigError):
bad_envs.append(env_name)
env = ev.read(env_name)
read_envs.append(env)
if not args.yes_to_all:
environments = string.plural(len(args.rm_env), "environment", show_n=False)
envs = string.comma_and(args.rm_env)
answer = tty.get_yes_or_no(f"Really remove {environments} {envs}?", default=False)
answer = tty.get_yes_or_no(
"Really remove %s %s?"
% (
string.plural(len(args.rm_env), "environment", show_n=False),
string.comma_and(args.rm_env),
),
default=False,
)
if not answer:
tty.die("Will not remove any environments")
for env in read_envs:
name = env.name
if env.active:
tty.die(f"Environment {name} can't be removed while activated.")
env.destroy()
tty.msg(f"Successfully removed environment '{name}'")
tty.die("Environment %s can't be removed while activated." % env.name)
for bad_env_name in bad_envs:
shutil.rmtree(
spack.environment.environment.environment_dir_from_name(bad_env_name, exists_ok=True)
)
tty.msg(f"Successfully removed environment '{bad_env_name}'")
env.destroy()
tty.msg("Successfully removed environment '%s'" % env.name)
#
@@ -570,8 +543,8 @@ def env_update_setup_parser(subparser):
def env_update(args):
manifest_file = ev.manifest_file(args.update_env)
backup_file = manifest_file + ".bkp"
needs_update = not ev.is_latest_format(manifest_file)
if not needs_update:
tty.msg('No update needed for the environment "{0}"'.format(args.update_env))
return
@@ -689,31 +662,18 @@ def env_depfile(args):
# Currently only make is supported.
spack.cmd.require_active_env(cmd_name="env depfile")
env = ev.active_environment()
# What things do we build when running make? By default, we build the
# root specs. If specific specs are provided as input, we build those.
filter_specs = spack.cmd.parse_specs(args.specs) if args.specs else None
template = spack.tengine.make_environment().get_template(os.path.join("depfile", "Makefile"))
model = depfile.MakefileModel.from_env(
env,
ev.active_environment(),
filter_specs=filter_specs,
pkg_buildcache=depfile.UseBuildCache.from_string(args.use_buildcache[0]),
dep_buildcache=depfile.UseBuildCache.from_string(args.use_buildcache[1]),
make_prefix=args.make_prefix,
jobserver=args.jobserver,
)
# Warn in case we're generating a depfile for an empty environment. We don't automatically
# concretize; the user should do that explicitly. Could be changed in the future if requested.
if model.empty:
if not env.user_specs:
tty.warn("no specs in the environment")
elif filter_specs is not None:
tty.warn("no concrete matching specs found in environment")
else:
tty.warn("environment is not concretized. Run `spack concretize` first")
makefile = template.render(model.to_dict())
# Finally write to stdout/file.

View File

@@ -72,10 +72,6 @@ def variant(s):
return spack.spec.ENABLED_VARIANT_COLOR + s + plain_format
def license(s):
return spack.spec.VERSION_COLOR + s + plain_format
class VariantFormatter:
def __init__(self, variants):
self.variants = variants
@@ -352,22 +348,6 @@ def print_virtuals(pkg):
color.cprint(" None")
def print_licenses(pkg):
"""Output the licenses of the project."""
color.cprint("")
color.cprint(section_title("Licenses: "))
if len(pkg.licenses) == 0:
color.cprint(" None")
else:
pad = padder(pkg.licenses, 4)
for when_spec in pkg.licenses:
license_identifier = pkg.licenses[when_spec]
line = license(" {0}".format(pad(license_identifier))) + color.cescape(when_spec)
color.cprint(line)
def info(parser, args):
spec = spack.spec.Spec(args.package)
pkg_cls = spack.repo.PATH.get_pkg_class(spec.name)
@@ -397,7 +377,6 @@ def info(parser, args):
(args.all or not args.no_dependencies, print_dependencies),
(args.all or args.virtuals, print_virtuals),
(args.all or args.tests, print_tests),
(args.all or True, print_licenses),
]
for print_it, func in sections:
if print_it:

View File

@@ -240,7 +240,8 @@ def default_log_file(spec):
"""Computes the default filename for the log file and creates
the corresponding directory if not present
"""
basename = spec.format_path("test-{name}-{version}-{hash}.xml")
fmt = "test-{x.name}-{x.version}-{hash}.xml"
basename = fmt.format(x=spec, hash=spec.dag_hash())
dirname = fs.os.path.join(spack.paths.reports_path, "junit")
fs.mkdirp(dirname)
return fs.os.path.join(dirname, basename)

View File

@@ -5,8 +5,6 @@
import sys
import llnl.util.tty as tty
import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.cmd.find
@@ -110,14 +108,18 @@ def load(parser, args):
)
return 1
if args.things_to_load != "package,dependencies":
tty.warn(
"The `--only` flag in spack load is deprecated and will be removed in Spack v0.22"
)
with spack.store.STORE.db.read_transaction():
env_mod = uenv.environment_modifications_for_specs(*specs)
if "dependencies" in args.things_to_load:
include_roots = "package" in args.things_to_load
specs = [
dep
for spec in specs
for dep in spec.traverse(root=include_roots, order="post", deptype=("run"))
]
env_mod = spack.util.environment.EnvironmentModifications()
for spec in specs:
env_mod.extend(uenv.environment_modifications_for_spec(spec))
env_mod.prepend_path(uenv.spack_loaded_hashes_var, spec.dag_hash())
cmds = env_mod.shell_modifications(args.shell)
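
Editor's note: the hunk above is the heart of this PR (cmd/load: only process run deps): each requested spec is expanded to its run-type dependency closure in post-order, optionally including the roots. A toy traversal over a dict-based DAG showing the same expansion (package names and graph hypothetical):

def traverse_run_deps(spec, graph, include_root=True):
    # Post-order DFS over "run"-type edges only; build/link-only
    # dependencies never reach the loaded environment.
    ordered, seen = [], set()

    def visit(node):
        if node in seen:
            return
        seen.add(node)
        for dep, deptypes in graph[node]:
            if "run" in deptypes:
                visit(dep)
        ordered.append(node)

    visit(spec)
    # The single root is last in post-order, so dropping it is a slice.
    return ordered if include_root else ordered[:-1]

graph = {
    "py-numpy": [("python", ("build", "run")), ("openblas", ("build", "link"))],
    "python": [],
    "openblas": [],
}
assert traverse_run_deps("py-numpy", graph) == ["python", "py-numpy"]
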

View File

@@ -111,7 +111,7 @@ def setup_parser(subparser):
"and source use `--type binary --type source` (default)"
),
)
arguments.add_connection_args(add_parser, False)
arguments.add_s3_connection_args(add_parser, False)
# Remove
remove_parser = sp.add_parser("remove", aliases=["rm"], help=mirror_remove.__doc__)
remove_parser.add_argument("name", help="mnemonic name for mirror", metavar="mirror")
@@ -141,7 +141,7 @@ def setup_parser(subparser):
default=spack.config.default_modify_scope(),
help="configuration scope to modify",
)
arguments.add_connection_args(set_url_parser, False)
arguments.add_s3_connection_args(set_url_parser, False)
# Set
set_parser = sp.add_parser("set", help=mirror_set.__doc__)
@@ -170,7 +170,7 @@ def setup_parser(subparser):
default=spack.config.default_modify_scope(),
help="configuration scope to modify",
)
arguments.add_connection_args(set_parser, False)
arguments.add_s3_connection_args(set_parser, False)
# List
list_parser = sp.add_parser("list", help=mirror_list.__doc__)
@@ -192,8 +192,6 @@ def mirror_add(args):
or args.s3_profile
or args.s3_endpoint_url
or args.type
or args.oci_username
or args.oci_password
):
connection = {"url": args.url}
if args.s3_access_key_id and args.s3_access_key_secret:
@@ -204,8 +202,6 @@ def mirror_add(args):
connection["profile"] = args.s3_profile
if args.s3_endpoint_url:
connection["endpoint_url"] = args.s3_endpoint_url
if args.oci_username and args.oci_password:
connection["access_pair"] = [args.oci_username, args.oci_password]
if args.type:
connection["binary"] = "binary" in args.type
connection["source"] = "source" in args.type
@@ -239,8 +235,6 @@ def _configure_mirror(args):
changes["profile"] = args.s3_profile
if args.s3_endpoint_url:
changes["endpoint_url"] = args.s3_endpoint_url
if args.oci_username and args.oci_password:
changes["access_pair"] = [args.oci_username, args.oci_password]
# argparse cannot distinguish between --binary and --no-binary when same dest :(
# notice that set-url does not have these args, so getattr

View File

@@ -3,7 +3,6 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import spack.cmd.common.env_utility as env_utility
from spack.context import Context
description = (
"run a command in a spec's test environment, or dump its environment to screen or file"
@@ -15,4 +14,4 @@
def test_env(parser, args):
env_utility.emulate_env_utility("test-env", Context.TEST, args)
env_utility.emulate_env_utility("test-env", "test", args)

View File

@@ -23,7 +23,7 @@
# tutorial configuration parameters
tutorial_branch = "releases/v0.21"
tutorial_branch = "releases/v0.20"
tutorial_mirror = "file:///mirror"
tutorial_key = os.path.join(spack.paths.share_path, "keys", "tutorial.pub")

View File

@@ -88,8 +88,9 @@ def unload(parser, args):
)
return 1
env_mod = uenv.environment_modifications_for_specs(*specs).reversed()
env_mod = spack.util.environment.EnvironmentModifications()
for spec in specs:
env_mod.extend(uenv.environment_modifications_for_spec(spec).reversed())
env_mod.remove_path(uenv.spack_loaded_hashes_var, spec.dag_hash())
cmds = env_mod.shell_modifications(args.shell)

View File

@@ -10,7 +10,7 @@
import itertools
import multiprocessing.pool
import os
from typing import Dict, List, Optional, Tuple
from typing import Dict, List
import archspec.cpu
@@ -21,7 +21,6 @@
import spack.compiler
import spack.config
import spack.error
import spack.operating_systems
import spack.paths
import spack.platforms
import spack.spec
@@ -224,16 +223,13 @@ def all_compiler_specs(scope=None, init_config=True):
]
def find_compilers(
path_hints: Optional[List[str]] = None, *, mixed_toolchain=False
) -> List["spack.compiler.Compiler"]:
def find_compilers(path_hints=None):
"""Return the list of compilers found in the paths given as arguments.
Args:
path_hints: list of paths to search for compilers. A sensible default based on the ``PATH``
environment variable will be used if the value is None
mixed_toolchain: allow mixing compilers from different toolchains if otherwise missing for
a certain language
path_hints (list or None): list of path hints where to look for.
A sensible default based on the ``PATH`` environment variable
will be used if the value is None
"""
if path_hints is None:
path_hints = get_path("PATH")
@@ -254,7 +250,7 @@ def find_compilers(
finally:
tp.close()
def valid_version(item: Tuple[Optional[DetectVersionArgs], Optional[str]]) -> bool:
def valid_version(item):
value, error = item
if error is None:
return True
@@ -266,37 +262,25 @@ def valid_version(item: Tuple[Optional[DetectVersionArgs], Optional[str]]) -> bo
pass
return False
def remove_errors(
item: Tuple[Optional[DetectVersionArgs], Optional[str]]
) -> DetectVersionArgs:
def remove_errors(item):
value, _ = item
assert value is not None
return value
return make_compiler_list(
[remove_errors(detected) for detected in detected_versions if valid_version(detected)],
mixed_toolchain=mixed_toolchain,
)
return make_compiler_list(map(remove_errors, filter(valid_version, detected_versions)))
def find_new_compilers(
path_hints: Optional[List[str]] = None,
scope: Optional[str] = None,
*,
mixed_toolchain: bool = False,
):
def find_new_compilers(path_hints=None, scope=None):
"""Same as ``find_compilers`` but return only the compilers that are not
already in compilers.yaml.
Args:
path_hints: list of paths to search for compilers. A sensible default based on the ``PATH``
environment variable will be used if the value is None
scope: scope to look for a compiler. If None consider the merged configuration.
mixed_toolchain: allow mixing compilers from different toolchains if otherwise missing for
a certain language
path_hints (list or None): list of path hints where to look for.
A sensible default based on the ``PATH`` environment variable
will be used if the value is None
scope (str): scope to look for a compiler. If None consider the
merged configuration.
"""
compilers = find_compilers(path_hints, mixed_toolchain=mixed_toolchain)
compilers = find_compilers(path_hints)
return select_new_compilers(compilers, scope)
@@ -654,9 +638,7 @@ def all_compiler_types():
)
def arguments_to_detect_version_fn(
operating_system: spack.operating_systems.OperatingSystem, paths: List[str]
) -> List[DetectVersionArgs]:
def arguments_to_detect_version_fn(operating_system, paths):
"""Returns a list of DetectVersionArgs tuples to be used in a
corresponding function to detect compiler versions.
@@ -664,7 +646,8 @@ def arguments_to_detect_version_fn(
function by providing a method called with the same name.
Args:
operating_system: the operating system on which we are looking for compilers
operating_system (spack.operating_systems.OperatingSystem): the operating system
on which we are looking for compilers
paths: paths to search for compilers
Returns:
@@ -673,10 +656,10 @@ def arguments_to_detect_version_fn(
compilers in this OS.
"""
def _default(search_paths: List[str]) -> List[DetectVersionArgs]:
command_arguments: List[DetectVersionArgs] = []
def _default(search_paths):
command_arguments = []
files_to_be_tested = fs.files_in(*search_paths)
for compiler_name in supported_compilers_for_host_platform():
for compiler_name in spack.compilers.supported_compilers_for_host_platform():
compiler_cls = class_for_compiler_name(compiler_name)
for language in ("cc", "cxx", "f77", "fc"):
@@ -701,9 +684,7 @@ def _default(search_paths: List[str]) -> List[DetectVersionArgs]:
return fn(paths)
def detect_version(
detect_version_args: DetectVersionArgs,
) -> Tuple[Optional[DetectVersionArgs], Optional[str]]:
def detect_version(detect_version_args):
"""Computes the version of a compiler and adds it to the information
passed as input.
@@ -712,7 +693,8 @@ def detect_version(
needs to be checked by the code dispatching the calls.
Args:
detect_version_args: information on the compiler for which we should detect the version.
detect_version_args (DetectVersionArgs): information on the
compiler for which we should detect the version.
Returns:
A ``(DetectVersionArgs, error)`` tuple. If ``error`` is ``None`` the
@@ -728,7 +710,7 @@ def _default(fn_args):
path = fn_args.path
# Get compiler names and the callback to detect their versions
callback = getattr(compiler_cls, f"{language}_version")
callback = getattr(compiler_cls, "{0}_version".format(language))
try:
version = callback(path)
@@ -754,15 +736,13 @@ def _default(fn_args):
return fn(detect_version_args)
def make_compiler_list(
detected_versions: List[DetectVersionArgs], mixed_toolchain: bool = False
) -> List["spack.compiler.Compiler"]:
def make_compiler_list(detected_versions):
"""Process a list of detected versions and turn them into a list of
compiler specs.
Args:
detected_versions: list of DetectVersionArgs containing a valid version
mixed_toolchain: allow mixing compilers from different toolchains if a language is missing
detected_versions (list): list of DetectVersionArgs containing a
valid version
Returns:
list: list of Compiler objects
@@ -771,7 +751,7 @@ def make_compiler_list(
sorted_compilers = sorted(detected_versions, key=group_fn)
# Gather items in a dictionary by the id, name variation and language
compilers_d: Dict[CompilerID, Dict[NameVariation, dict]] = {}
compilers_d = {}
for sort_key, group in itertools.groupby(sorted_compilers, key=group_fn):
compiler_id, name_variation, language = sort_key
by_compiler_id = compilers_d.setdefault(compiler_id, {})
@@ -780,7 +760,7 @@ def make_compiler_list(
def _default_make_compilers(cmp_id, paths):
operating_system, compiler_name, version = cmp_id
compiler_cls = class_for_compiler_name(compiler_name)
compiler_cls = spack.compilers.class_for_compiler_name(compiler_name)
spec = spack.spec.CompilerSpec(compiler_cls.name, f"={version}")
paths = [paths.get(x, None) for x in ("cc", "cxx", "f77", "fc")]
# TODO: johnwparent - revisit the following line as per discussion at:
@@ -802,14 +782,13 @@ def _default_make_compilers(cmp_id, paths):
getattr(variation, "suffix", None),
)
# Flatten to a list of compiler id, primary variation and compiler dictionary
flat_compilers: List[Tuple[CompilerID, NameVariation, dict]] = []
compilers = []
for compiler_id, by_compiler_id in compilers_d.items():
ordered = sorted(by_compiler_id, key=sort_fn)
selected_variation = ordered[0]
selected = by_compiler_id[selected_variation]
# Fill any missing parts from subsequent entries (without mixing toolchains)
# fill any missing parts from subsequent entries
for lang in ["cxx", "f77", "fc"]:
if lang not in selected:
next_lang = next(
@@ -818,63 +797,14 @@ def _default_make_compilers(cmp_id, paths):
if next_lang:
selected[lang] = next_lang
flat_compilers.append((compiler_id, selected_variation, selected))
operating_system, _, _ = compiler_id
make_compilers = getattr(operating_system, "make_compilers", _default_make_compilers)
# Next, fill out the blanks of missing compilers by creating a mixed toolchain (if requested)
if mixed_toolchain:
make_mixed_toolchain(flat_compilers)
# Finally, create the compiler list
compilers = []
for compiler_id, _, compiler in flat_compilers:
make_compilers = getattr(compiler_id.os, "make_compilers", _default_make_compilers)
compilers.extend(make_compilers(compiler_id, compiler))
compilers.extend(make_compilers(compiler_id, selected))
return compilers
def make_mixed_toolchain(compilers: List[Tuple[CompilerID, NameVariation, dict]]) -> None:
"""Add missing compilers across toolchains when they are missing for a particular language.
This currently only adds the most sensible gfortran to (apple)-clang if it doesn't have a
fortran compiler (no flang)."""
# First collect the clangs that are missing a fortran compiler
clangs_without_flang = [
(id, variation, compiler)
for id, variation, compiler in compilers
if id.compiler_name in ("clang", "apple-clang")
and "f77" not in compiler
and "fc" not in compiler
]
if not clangs_without_flang:
return
# Filter on GCCs with fortran compiler
gccs_with_fortran = [
(id, variation, compiler)
for id, variation, compiler in compilers
if id.compiler_name == "gcc" and "f77" in compiler and "fc" in compiler
]
# Sort these GCCs by "best variation" (no prefix / suffix first)
gccs_with_fortran.sort(
key=lambda x: (getattr(x[1], "prefix", None), getattr(x[1], "suffix", None))
)
# Attach the optimal GCC fortran compiler to the clangs that don't have one
for clang_id, _, clang_compiler in clangs_without_flang:
gcc_compiler = next(
(gcc[2] for gcc in gccs_with_fortran if gcc[0].os == clang_id.os), None
)
if not gcc_compiler:
continue
# Update the fc / f77 entries
clang_compiler["f77"] = gcc_compiler["f77"]
clang_compiler["fc"] = gcc_compiler["fc"]
def is_mixed_toolchain(compiler):
"""Returns True if the current compiler is a mixed toolchain,
False otherwise.

View File

@@ -5,6 +5,7 @@
import os
import re
import sys
import llnl.util.lang
@@ -113,6 +114,17 @@ def extract_version_from_output(cls, output):
return ".".join(match.groups())
return "unknown"
@classmethod
def fc_version(cls, fortran_compiler):
if sys.platform == "darwin":
return cls.default_version("clang")
return cls.default_version(fortran_compiler)
@classmethod
def f77_version(cls, f77):
return cls.fc_version(f77)
@property
def stdcxx_libs(self):
return ("-lstdc++",)

View File

@@ -5,6 +5,7 @@
import os
import re
import sys
import llnl.util.lang
@@ -38,10 +39,10 @@ class Clang(Compiler):
cxx_names = ["clang++"]
# Subclasses use possible names of Fortran 77 compiler
f77_names = ["flang"]
f77_names = ["flang", "gfortran", "xlf_r"]
# Subclasses use possible names of Fortran 90 compiler
fc_names = ["flang"]
fc_names = ["flang", "gfortran", "xlf90_r"]
version_argument = "--version"
@@ -181,3 +182,16 @@ def extract_version_from_output(cls, output):
if match:
ver = match.group(match.lastindex)
return ver
@classmethod
def fc_version(cls, fc):
# We could map from gcc/gfortran version to clang version, but on macOS
# we normally mix any version of gfortran with any version of clang.
if sys.platform == "darwin":
return cls.default_version("clang")
else:
return cls.default_version(fc)
@classmethod
def f77_version(cls, f77):
return cls.fc_version(f77)

View File

@@ -155,7 +155,7 @@ def _valid_virtuals_and_externals(self, spec):
),
)
def choose_virtual_or_external(self, spec: spack.spec.Spec):
def choose_virtual_or_external(self, spec):
"""Given a list of candidate virtual and external packages, try to
find one that is most ABI compatible.
"""

View File

@@ -69,7 +69,6 @@
SECTION_SCHEMAS = {
"compilers": spack.schema.compilers.schema,
"concretizer": spack.schema.concretizer.schema,
"definitions": spack.schema.definitions.schema,
"mirrors": spack.schema.mirrors.schema,
"repos": spack.schema.repos.schema,
"packages": spack.schema.packages.schema,
@@ -995,7 +994,6 @@ def read_config_file(filename, schema=None):
key = next(iter(data))
schema = _ALL_SCHEMAS[key]
validate(data, schema)
return data
except StopIteration:

View File

@@ -272,6 +272,13 @@ def _os_pkg_manager(self):
raise spack.error.SpackError(msg)
return os_pkg_manager
@tengine.context_property
def extra_instructions(self):
Extras = namedtuple("Extra", ["build", "final"])
extras = self.container_config.get("extra_instructions", {})
build, final = extras.get("build", None), extras.get("final", None)
return Extras(build=build, final=final)
@tengine.context_property
def labels(self):
return self.container_config.get("labels", {})

View File

@@ -1,29 +0,0 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""This module provides classes used in user and build environment"""
from enum import Enum
class Context(Enum):
"""Enum used to indicate the context in which an environment has to be setup: build,
run or test."""
BUILD = 1
RUN = 2
TEST = 3
def __str__(self):
return ("build", "run", "test")[self.value - 1]
@classmethod
def from_string(cls, s: str):
if s == "build":
return Context.BUILD
elif s == "run":
return Context.RUN
elif s == "test":
return Context.TEST
raise ValueError(f"context should be one of 'build', 'run', 'test', got {s}")

View File

@@ -1522,18 +1522,14 @@ def _query(
# TODO: like installed and known that can be queried? Or are
# TODO: these really special cases that only belong here?
if query_spec is not any:
if not isinstance(query_spec, spack.spec.Spec):
query_spec = spack.spec.Spec(query_spec)
# Just look up concrete specs with hashes; no fancy search.
if query_spec.concrete:
# TODO: handling of hashes restriction is not particularly elegant.
hash_key = query_spec.dag_hash()
if hash_key in self._data and (not hashes or hash_key in hashes):
return [self._data[hash_key].spec]
else:
return []
# Just look up concrete specs with hashes; no fancy search.
if isinstance(query_spec, spack.spec.Spec) and query_spec.concrete:
# TODO: handling of hashes restriction is not particularly elegant.
hash_key = query_spec.dag_hash()
if hash_key in self._data and (not hashes or hash_key in hashes):
return [self._data[hash_key].spec]
else:
return []
# Abstract specs require more work -- currently we test
# against everything.
@@ -1541,9 +1537,6 @@ def _query(
start_date = start_date or datetime.datetime.min
end_date = end_date or datetime.datetime.max
# save specs whose name doesn't match for last, to avoid a virtual check
deferred = []
for key, rec in self._data.items():
if hashes is not None and rec.spec.dag_hash() not in hashes:
continue
@@ -1568,26 +1561,8 @@ def _query(
if not (start_date < inst_date < end_date):
continue
if query_spec is any:
if query_spec is any or rec.spec.satisfies(query_spec):
results.append(rec.spec)
continue
# check anon specs and exact name matches first
if not query_spec.name or rec.spec.name == query_spec.name:
if rec.spec.satisfies(query_spec):
results.append(rec.spec)
# save potential virtual matches for later, but not if we already found a match
elif not results:
deferred.append(rec.spec)
# Checking for virtuals is expensive, so we save it for last and only if needed.
# If we get here, we didn't find anything in the DB that matched by name.
# If we did find something, the query spec can't be virtual b/c we matched an actual
# package installation, so skip the virtual check entirely. If we *didn't* find anything,
# check all the deferred specs *if* the query is virtual.
if not results and query_spec is not any and deferred and query_spec.virtual:
results = [spec for spec in deferred if spec.satisfies(query_spec)]
return results
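
Editor's note: the deferral above is a generic match-by-name-first pattern: cheap exact-name comparisons run first, and the expensive virtual-provider check only runs when nothing matched. A toy, self-contained version (names hypothetical):

def query(records, name, query_is_virtual, satisfies):
    results, deferred = [], []
    for rec in records:
        # Anonymous queries and exact name matches are cheap; check them first.
        if name is None or rec["name"] == name:
            if satisfies(rec):
                results.append(rec)
        elif not results:
            # Remember potential virtual providers only while we still have no
            # concrete match; checking them is the expensive part.
            deferred.append(rec)
    if not results and deferred and query_is_virtual:
        results = [rec for rec in deferred if satisfies(rec)]
    return results

records = [{"name": "openmpi"}, {"name": "zlib"}]
assert query(records, "mpi", True, lambda r: r["name"] == "openmpi") == [{"name": "openmpi"}]
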

View File

@@ -269,7 +269,7 @@ def find_windows_compiler_root_paths() -> List[str]:
At the moment this simply returns the locations of VS install paths from VSWhere,
but it should be extended to include more information as relevant"""
return list(winOs.WindowsOs().vs_install_paths)
return list(winOs.WindowsOs.vs_install_paths)
@staticmethod
def find_windows_compiler_cmake_paths() -> List[str]:
@@ -299,36 +299,36 @@ def find_windows_compiler_bundled_packages() -> List[str]:
class WindowsKitExternalPaths:
plat_major_ver = None
if sys.platform == "win32":
plat_major_ver = str(winOs.windows_version()[0])
@staticmethod
def find_windows_kit_roots() -> List[str]:
def find_windows_kit_roots() -> Optional[str]:
"""Return Windows kit root, typically %programfiles%\\Windows Kits\\10|11\\"""
if sys.platform != "win32":
return []
return None
program_files = os.environ["PROGRAMFILES(x86)"]
kit_base = os.path.join(program_files, "Windows Kits", "**")
return glob.glob(kit_base)
kit_base = os.path.join(
program_files, "Windows Kits", WindowsKitExternalPaths.plat_major_ver
)
return kit_base
@staticmethod
def find_windows_kit_bin_paths(kit_base: Optional[str] = None) -> List[str]:
"""Returns Windows kit bin directory per version"""
kit_base = WindowsKitExternalPaths.find_windows_kit_roots() if not kit_base else kit_base
assert kit_base, "Unexpectedly empty value for Windows kit base path"
kit_paths = []
for kit in kit_base:
kit_bin = os.path.join(kit, "bin")
kit_paths.extend(glob.glob(os.path.join(kit_bin, "[0-9]*", "*\\")))
return kit_paths
assert kit_base is not None, "unexpected value for kit_base"
kit_bin = os.path.join(kit_base, "bin")
return glob.glob(os.path.join(kit_bin, "[0-9]*", "*\\"))
@staticmethod
def find_windows_kit_lib_paths(kit_base: Optional[str] = None) -> List[str]:
"""Returns Windows kit lib directory per version"""
kit_base = WindowsKitExternalPaths.find_windows_kit_roots() if not kit_base else kit_base
assert kit_base, "Unexpectedly empty value for Windows kit base path"
kit_paths = []
for kit in kit_base:
kit_lib = os.path.join(kit, "Lib")
kit_paths.extend(glob.glob(os.path.join(kit_lib, "[0-9]*", "*", "*\\")))
return kit_paths
assert kit_base is not None, "unexpected value for kit_base"
kit_lib = os.path.join(kit_base, "Lib")
return glob.glob(os.path.join(kit_lib, "[0-9]*", "*", "*\\"))
@staticmethod
def find_windows_driver_development_kit_paths() -> List[str]:
@@ -347,30 +347,23 @@ def find_windows_kit_reg_installed_roots_paths() -> List[str]:
if not reg:
# couldn't find key, return empty list
return []
kit_root_reg = re.compile(r"KitsRoot[0-9]+")
root_paths = []
for kit_root in filter(kit_root_reg.match, reg.get_values().keys()):
root_paths.extend(
WindowsKitExternalPaths.find_windows_kit_lib_paths(reg.get_value(kit_root).value)
)
return root_paths
return WindowsKitExternalPaths.find_windows_kit_lib_paths(
reg.get_value("KitsRoot%s" % WindowsKitExternalPaths.plat_major_ver).value
)
@staticmethod
def find_windows_kit_reg_sdk_paths() -> List[str]:
sdk_paths = []
sdk_regex = re.compile(r"v[0-9]+.[0-9]+")
windows_reg = spack.util.windows_registry.WindowsRegistryView(
"SOFTWARE\\WOW6432Node\\Microsoft\\Microsoft SDKs\\Windows",
reg = spack.util.windows_registry.WindowsRegistryView(
"SOFTWARE\\WOW6432Node\\Microsoft\\Microsoft SDKs\\Windows\\v%s.0"
% WindowsKitExternalPaths.plat_major_ver,
root_key=spack.util.windows_registry.HKEY.HKEY_LOCAL_MACHINE,
)
for key in filter(sdk_regex.match, [x.name for x in windows_reg.get_subkeys()]):
reg = windows_reg.get_subkey(key)
sdk_paths.extend(
WindowsKitExternalPaths.find_windows_kit_lib_paths(
reg.get_value("InstallationFolder").value
)
)
return sdk_paths
if not reg:
# couldn't find key, return empty list
return []
return WindowsKitExternalPaths.find_windows_kit_lib_paths(
reg.get_value("InstallationFolder").value
)
def find_win32_additional_install_paths() -> List[str]:

View File

@@ -15,12 +15,9 @@
from typing import Dict, List, Optional, Set, Tuple
import llnl.util.filesystem
import llnl.util.lang
import llnl.util.tty
import spack.util.elf as elf_utils
import spack.util.environment
import spack.util.environment as environment
import spack.util.ld_so_conf
from .common import (
@@ -42,29 +39,15 @@
DETECTION_TIMEOUT = 120
def common_windows_package_paths(pkg_cls=None) -> List[str]:
"""Get the paths for common package installation location on Windows
that are outside the PATH
Returns [] on unix
"""
if sys.platform != "win32":
return []
def common_windows_package_paths() -> List[str]:
paths = WindowsCompilerExternalPaths.find_windows_compiler_bundled_packages()
paths.extend(find_win32_additional_install_paths())
paths.extend(WindowsKitExternalPaths.find_windows_kit_bin_paths())
paths.extend(WindowsKitExternalPaths.find_windows_kit_reg_installed_roots_paths())
paths.extend(WindowsKitExternalPaths.find_windows_kit_reg_sdk_paths())
if pkg_cls:
paths.extend(compute_windows_user_path_for_package(pkg_cls))
paths.extend(compute_windows_program_path_for_package(pkg_cls))
return paths
def file_identifier(path):
s = os.stat(path)
return (s.st_dev, s.st_ino)
def executables_in_path(path_hints: List[str]) -> Dict[str, str]:
"""Get the paths of all executables available from the current PATH.
@@ -79,44 +62,18 @@ def executables_in_path(path_hints: List[str]) -> Dict[str, str]:
path_hints: list of paths to be searched. If None the list will be
constructed based on the PATH environment variable.
"""
if sys.platform == "win32":
path_hints.extend(common_windows_package_paths())
search_paths = llnl.util.filesystem.search_paths_for_executables(*path_hints)
return path_to_dict(search_paths)
def get_elf_compat(path):
"""For ELF files, get a triplet (EI_CLASS, EI_DATA, e_machine) and see if
it is host-compatible."""
# On ELF-supporting platforms, we try to be a bit smarter when it comes to shared
# libraries, by dropping those that are not host compatible.
with open(path, "rb") as f:
elf = elf_utils.parse_elf(f, only_header=True)
return (elf.is_64_bit, elf.is_little_endian, elf.elf_hdr.e_machine)
def accept_elf(path, host_compat):
"""Accept an ELF file if the header matches the given compat triplet,
obtained with :py:func:`get_elf_compat`. If it's not an ELF file (e.g. a
static library, or some arbitrary file), fall back to is_readable_file.
# Fast path: assume libraries at least have .so in their basename.
# Note: don't replace with splitext, because of libsmth.so.1.2.3 file names.
if ".so" not in os.path.basename(path):
return llnl.util.filesystem.is_readable_file(path)
try:
return host_compat == get_elf_compat(path)
except (OSError, elf_utils.ElfParsingError):
return llnl.util.filesystem.is_readable_file(path)
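
Editor's note: the compat triplet can be read straight off the first 20 bytes of the ELF header; a standalone sketch using only the standard library (Spack itself uses its vendored spack.util.elf parser):

import struct

def elf_compat_triplet(path):
    # e_ident[4] is EI_CLASS (1 = 32-bit, 2 = 64-bit), e_ident[5] is EI_DATA
    # (1 = little-endian, 2 = big-endian); e_machine is the half-word at
    # offset 18, encoded in the file's own byte order.
    with open(path, "rb") as f:
        header = f.read(20)
    if len(header) < 20 or header[:4] != b"\x7fELF":
        raise ValueError(f"{path} is not an ELF file")
    is_64_bit = header[4] == 2
    is_little_endian = header[5] == 1
    (e_machine,) = struct.unpack("<H" if is_little_endian else ">H", header[18:20])
    return (is_64_bit, is_little_endian, e_machine)

# Compare a candidate library against the running interpreter, as above:
# elf_compat_triplet("/usr/lib/libz.so") == elf_compat_triplet(sys.executable)
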
def libraries_in_ld_and_system_library_path(
path_hints: Optional[List[str]] = None,
) -> Dict[str, str]:
"""Get the paths of all libraries available from ``path_hints`` or the
following defaults:
- Environment variables (Linux: ``LD_LIBRARY_PATH``, Darwin: ``DYLD_LIBRARY_PATH``,
and ``DYLD_FALLBACK_LIBRARY_PATH``)
- Dynamic linker default paths (glibc: ld.so.conf, musl: ld-musl-<arch>.path)
- Default system library paths.
"""Get the paths of all libraries available from LD_LIBRARY_PATH,
LIBRARY_PATH, DYLD_LIBRARY_PATH, DYLD_FALLBACK_LIBRARY_PATH, and
standard system library paths.
For convenience, this is constructed as a dictionary where the keys are
the library paths and the values are the names of the libraries
@@ -130,71 +87,31 @@ def libraries_in_ld_and_system_library_path(
constructed based on the set of LD_LIBRARY_PATH, LIBRARY_PATH,
DYLD_LIBRARY_PATH, and DYLD_FALLBACK_LIBRARY_PATH environment
variables as well as the standard system library paths.
path_hints (list): list of paths to be searched. If ``None``, the default
system paths are used.
"""
if path_hints:
search_paths = llnl.util.filesystem.search_paths_for_libraries(*path_hints)
else:
search_paths = []
# Environment variables
if sys.platform == "darwin":
search_paths.extend(environment.get_path("DYLD_LIBRARY_PATH"))
search_paths.extend(environment.get_path("DYLD_FALLBACK_LIBRARY_PATH"))
elif sys.platform.startswith("linux"):
search_paths.extend(environment.get_path("LD_LIBRARY_PATH"))
# Dynamic linker paths
search_paths.extend(spack.util.ld_so_conf.host_dynamic_linker_search_paths())
# Drop redundant paths
search_paths = list(filter(os.path.isdir, search_paths))
# Make sure we don't doubly list /usr/lib and /lib etc.
search_paths = list(llnl.util.lang.dedupe(search_paths, key=file_identifier))
try:
host_compat = get_elf_compat(sys.executable)
accept = lambda path: accept_elf(path, host_compat)
except (OSError, elf_utils.ElfParsingError):
accept = llnl.util.filesystem.is_readable_file
path_to_lib = {}
# Reverse order of search directories so that a lib in the first
# search path entry overrides later entries
for search_path in reversed(search_paths):
for lib in os.listdir(search_path):
lib_path = os.path.join(search_path, lib)
if accept(lib_path):
path_to_lib[lib_path] = lib
return path_to_lib
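
Editor's note: two details above are easy to miss: directories are deduplicated by (st_dev, st_ino) so symlinked duplicates such as /lib -> /usr/lib are scanned once, and the scan runs in reverse priority order so earlier search paths win. A simplified sketch (keyed by library name rather than full path, to make the precedence visible):

import os

def dedupe_dirs(paths):
    seen, result = set(), []
    for p in filter(os.path.isdir, paths):
        st = os.stat(p)
        if (st.st_dev, st.st_ino) not in seen:  # same inode => same directory
            seen.add((st.st_dev, st.st_ino))
            result.append(p)
    return result

def libraries_first_path_wins(search_paths):
    lib_to_path = {}
    # Lowest-priority directory first: a later (higher-priority) directory
    # overwrites the entry, so the first search path ultimately wins.
    for directory in reversed(dedupe_dirs(search_paths)):
        for name in os.listdir(directory):
            lib_to_path[name] = os.path.join(directory, name)
    return lib_to_path
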
def libraries_in_windows_paths(path_hints: Optional[List[str]] = None) -> Dict[str, str]:
"""Get the paths of all libraries available from the system PATH paths.
For more details, see `libraries_in_ld_and_system_library_path` regarding
return type and contents.
Args:
path_hints: list of paths to be searched. If None the list will be
constructed based on the set of PATH environment
variables as well as the standard system library paths.
"""
search_hints = (
path_hints if path_hints is not None else spack.util.environment.get_path("PATH")
path_hints = (
path_hints
or spack.util.environment.get_path("LD_LIBRARY_PATH")
+ spack.util.environment.get_path("DYLD_LIBRARY_PATH")
+ spack.util.environment.get_path("DYLD_FALLBACK_LIBRARY_PATH")
+ spack.util.ld_so_conf.host_dynamic_linker_search_paths()
)
search_paths = llnl.util.filesystem.search_paths_for_libraries(*search_hints)
search_paths = llnl.util.filesystem.search_paths_for_libraries(*path_hints)
return path_to_dict(search_paths)
def libraries_in_windows_paths(path_hints: List[str]) -> Dict[str, str]:
path_hints.extend(spack.util.environment.get_path("PATH"))
search_paths = llnl.util.filesystem.search_paths_for_libraries(*path_hints)
# on Windows, some libraries (.dlls) are found in the bin directory or sometimes
# at the search root. Add both of those options to the search scheme
search_paths.extend(llnl.util.filesystem.search_paths_for_executables(*search_hints))
if path_hints is None:
# if no user provided path was given, add defaults to the search
search_paths.extend(WindowsKitExternalPaths.find_windows_kit_lib_paths())
# SDK and WGL should be handled by above, however on occasion the WDK is in an atypical
# location, so we handle that case specifically.
search_paths.extend(WindowsKitExternalPaths.find_windows_driver_development_kit_paths())
search_paths.extend(llnl.util.filesystem.search_paths_for_executables(*path_hints))
search_paths.extend(WindowsKitExternalPaths.find_windows_kit_lib_paths())
search_paths.extend(WindowsKitExternalPaths.find_windows_kit_bin_paths())
search_paths.extend(WindowsKitExternalPaths.find_windows_kit_reg_installed_roots_paths())
search_paths.extend(WindowsKitExternalPaths.find_windows_kit_reg_sdk_paths())
# SDK and WGL should be handled by above, however on occasion the WDK is in an atypical
# location, so we handle that case specifically.
search_paths.extend(WindowsKitExternalPaths.find_windows_driver_development_kit_paths())
return path_to_dict(search_paths)
@@ -208,8 +125,19 @@ def _group_by_prefix(paths: Set[str]) -> Dict[str, Set[str]]:
class Finder:
"""Inspects the file-system looking for packages. Guesses places where to look using PATH."""
def default_path_hints(self) -> List[str]:
return []
def path_hints(
self, *, pkg: "spack.package_base.PackageBase", initial_guess: Optional[List[str]] = None
) -> List[str]:
"""Returns the list of paths to be searched.
Args:
pkg: package being detected
initial_guess: initial list of paths from caller
"""
result = initial_guess or []
result.extend(compute_windows_user_path_for_package(pkg))
result.extend(compute_windows_program_path_for_package(pkg))
return result
def search_patterns(self, *, pkg: "spack.package_base.PackageBase") -> List[str]:
"""Returns the list of patterns used to match candidate files.
@@ -317,8 +245,6 @@ def find(
Args:
pkg_name: package being detected
initial_guess: initial list of paths to search from the caller.
If None, the default paths are searched; if this
is an empty list, nothing will be searched.
"""
import spack.repo
@@ -326,18 +252,13 @@ def find(
patterns = self.search_patterns(pkg=pkg_cls)
if not patterns:
return []
if initial_guess is None:
initial_guess = self.default_path_hints()
initial_guess.extend(common_windows_package_paths(pkg_cls))
candidates = self.candidate_files(patterns=patterns, paths=initial_guess)
path_hints = self.path_hints(pkg=pkg_cls, initial_guess=initial_guess)
candidates = self.candidate_files(patterns=patterns, paths=path_hints)
result = self.detect_specs(pkg=pkg_cls, paths=candidates)
return result
class ExecutablesFinder(Finder):
def default_path_hints(self) -> List[str]:
return spack.util.environment.get_path("PATH")
def search_patterns(self, *, pkg: "spack.package_base.PackageBase") -> List[str]:
result = []
if hasattr(pkg, "executables") and hasattr(pkg, "platform_executables"):
@@ -377,7 +298,7 @@ def candidate_files(self, *, patterns: List[str], paths: List[str]) -> List[str]
libraries_by_path = (
libraries_in_ld_and_system_library_path(path_hints=paths)
if sys.platform != "win32"
else libraries_in_windows_paths(path_hints=paths)
else libraries_in_windows_paths(paths)
)
patterns = [re.compile(x) for x in patterns]
result = []
@@ -413,16 +334,21 @@ def by_path(
# TODO: Packages should be able to define both .libraries and .executables in the future
# TODO: determine_spec_details should get all relevant libraries and executables in one call
executables_finder, libraries_finder = ExecutablesFinder(), LibrariesFinder()
executables_path_guess = (
spack.util.environment.get_path("PATH") if path_hints is None else path_hints
)
libraries_path_guess = [] if path_hints is None else path_hints
detected_specs_by_package: Dict[str, Tuple[concurrent.futures.Future, ...]] = {}
result = collections.defaultdict(list)
with concurrent.futures.ProcessPoolExecutor(max_workers=max_workers) as executor:
for pkg in packages_to_search:
executable_future = executor.submit(
executables_finder.find, pkg_name=pkg, initial_guess=path_hints
executables_finder.find, pkg_name=pkg, initial_guess=executables_path_guess
)
library_future = executor.submit(
libraries_finder.find, pkg_name=pkg, initial_guess=path_hints
libraries_finder.find, pkg_name=pkg, initial_guess=libraries_path_guess
)
detected_specs_by_package[pkg] = executable_future, library_future
@@ -433,13 +359,9 @@ def by_path(
if detected:
_, unqualified_name = spack.repo.partition_package_name(pkg_name)
result[unqualified_name].extend(detected)
except concurrent.futures.TimeoutError:
except Exception:
llnl.util.tty.debug(
f"[EXTERNAL DETECTION] Skipping {pkg_name}: timeout reached"
)
except Exception as e:
llnl.util.tty.debug(
f"[EXTERNAL DETECTION] Skipping {pkg_name}: exception occured {e}"
)
return result

View File

@@ -64,7 +64,6 @@ class OpenMpi(Package):
"depends_on",
"extends",
"maintainers",
"license",
"provides",
"patch",
"variant",
@@ -137,7 +136,6 @@ class DirectiveMeta(type):
_directive_dict_names: Set[str] = set()
_directives_to_be_executed: List[str] = []
_when_constraints_from_context: List[str] = []
_default_args: List[dict] = []
def __new__(cls, name, bases, attr_dict):
# Initialize the attribute containing the list of directives
@@ -200,16 +198,6 @@ def pop_from_context():
"""Pop the last constraint from the context"""
return DirectiveMeta._when_constraints_from_context.pop()
@staticmethod
def push_default_args(default_args):
"""Push default arguments"""
DirectiveMeta._default_args.append(default_args)
@staticmethod
def pop_default_args():
"""Pop default arguments"""
return DirectiveMeta._default_args.pop()
@staticmethod
def directive(dicts=None):
"""Decorator for Spack directives.
@@ -270,13 +258,7 @@ def _decorator(decorated_function):
directive_names.append(decorated_function.__name__)
@functools.wraps(decorated_function)
def _wrapper(*args, **_kwargs):
# First merge default args with kwargs
kwargs = dict()
for default_args in DirectiveMeta._default_args:
kwargs.update(default_args)
kwargs.update(_kwargs)
def _wrapper(*args, **kwargs):
# Inject when arguments from the context
if DirectiveMeta._when_constraints_from_context:
# Check that directives not yet supporting the when= argument
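
Editor's note: the default-args stack above composes like nested context managers: later pushes override earlier ones, and explicit kwargs override everything. A condensed, standalone sketch of the same mechanics (names hypothetical):

import contextlib
import functools

_DEFAULT_ARGS = []  # stack of default-kwarg dicts

@contextlib.contextmanager
def default_args(**defaults):
    _DEFAULT_ARGS.append(defaults)
    try:
        yield
    finally:
        _DEFAULT_ARGS.pop()

def with_default_args(fn):
    @functools.wraps(fn)
    def wrapper(*args, **explicit):
        merged = {}
        for defaults in _DEFAULT_ARGS:  # later pushes override earlier ones...
            merged.update(defaults)
        merged.update(explicit)  # ...and explicit kwargs override everything
        return fn(*args, **merged)
    return wrapper

@with_default_args
def depends_on(spec, type="build"):
    return spec, type

with default_args(type="run"):
    assert depends_on("zlib") == ("zlib", "run")
assert depends_on("zlib") == ("zlib", "build")
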
@@ -590,21 +572,17 @@ def _execute_extends(pkg):
return _execute_extends
@directive(dicts=("provided", "provided_together"))
def provides(*specs, when: Optional[str] = None):
"""Allows packages to provide a virtual dependency.
If a package provides "mpi", other packages can declare that they depend on "mpi",
and spack can use the providing package to satisfy the dependency.
Args:
*specs: virtual specs provided by this package
when: condition when this provides clause needs to be considered
@directive("provided")
def provides(*specs, **kwargs):
"""Allows packages to provide a virtual dependency. If a package provides
'mpi', other packages can declare that they depend on "mpi", and spack
can use the providing package to satisfy the dependency.
"""
def _execute_provides(pkg):
import spack.parser # Avoid circular dependency
when = kwargs.get("when")
when_spec = make_when_spec(when)
if not when_spec:
return
@@ -612,18 +590,15 @@ def _execute_provides(pkg):
# ``when`` specs for ``provides()`` need a name, as they are used
# to build the ProviderIndex.
when_spec.name = pkg.name
spec_objs = [spack.spec.Spec(x) for x in specs]
spec_names = [x.name for x in spec_objs]
if len(spec_names) > 1:
pkg.provided_together.setdefault(when_spec, []).append(set(spec_names))
for provided_spec in spec_objs:
if pkg.name == provided_spec.name:
raise CircularReferenceError("Package '%s' cannot provide itself." % pkg.name)
for string in specs:
for provided_spec in spack.parser.parse(string):
if pkg.name == provided_spec.name:
raise CircularReferenceError("Package '%s' cannot provide itself." % pkg.name)
if provided_spec not in pkg.provided:
pkg.provided[provided_spec] = set()
pkg.provided[provided_spec].add(when_spec)
if provided_spec not in pkg.provided:
pkg.provided[provided_spec] = set()
pkg.provided[provided_spec].add(when_spec)
return _execute_provides
@@ -887,44 +862,6 @@ def _execute_maintainer(pkg):
return _execute_maintainer
def _execute_license(pkg, license_identifier: str, when):
# If when is not specified the license always holds
when_spec = make_when_spec(when)
if not when_spec:
return
for other_when_spec in pkg.licenses:
if when_spec.intersects(other_when_spec):
when_message = ""
if when_spec != make_when_spec(None):
when_message = f"when {when_spec}"
other_when_message = ""
if other_when_spec != make_when_spec(None):
other_when_message = f"when {other_when_spec}"
err_msg = (
f"{pkg.name} is specified as being licensed as {license_identifier} "
f"{when_message}, but it is also specified as being licensed under "
f"{pkg.licenses[other_when_spec]} {other_when_message}, which conflict."
)
raise OverlappingLicenseError(err_msg)
pkg.licenses[when_spec] = license_identifier
@directive("licenses")
def license(license_identifier: str, when=None):
"""Add a new license directive, to specify the SPDX identifier the software is
distributed under.
Args:
license_identifier: the SPDX identifier of the license the software
is distributed under.
when: A spec specifying when the license applies.
"""
return lambda pkg: _execute_license(pkg, license_identifier, when)
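
Editor's note: the overlap check in `_execute_license` raises whenever two declarations could apply to the same spec. A toy version with sets standing in for spec intersection (Spack uses `Spec.intersects()` for the real check; the version-range strings are purely illustrative):

def add_license(licenses, identifier, when):
    # `when` is a set of conditions; two declarations conflict when their
    # condition sets intersect, mirroring overlapping when-specs.
    for other_when, other_id in licenses.items():
        if when & other_when:
            raise ValueError(
                f"licensed as {identifier} when {sorted(when)}, but also as "
                f"{other_id} when {sorted(other_when)}"
            )
    licenses[frozenset(when)] = identifier

licenses = {}
add_license(licenses, "MIT", frozenset({"@:1"}))
add_license(licenses, "Apache-2.0", frozenset({"@2:"}))  # fine: disjoint conditions
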
@directive("requirements")
def requires(*requirement_specs, policy="one_of", when=None, msg=None):
"""Allows a package to request a configuration to be present in all valid solutions.
@@ -983,7 +920,3 @@ class DependencyPatchError(DirectiveError):
class UnsupportedPackageDirective(DirectiveError):
"""Raised when an invalid or unsupported package directive is specified."""
class OverlappingLicenseError(DirectiveError):
"""Raised when two licenses are declared that apply on overlapping specs."""

View File

@@ -104,7 +104,7 @@ def relative_path_for_spec(self, spec):
_check_concrete(spec)
projection = spack.projections.get_projection(self.projections, spec)
path = spec.format_path(projection)
path = spec.format(projection)
return str(Path(path))
def write_spec(self, spec, path):

View File

@@ -339,7 +339,6 @@
from .environment import (
TOP_LEVEL_KEY,
Environment,
SpackEnvironmentConfigError,
SpackEnvironmentError,
SpackEnvironmentViewError,
activate,
@@ -366,14 +365,12 @@
read,
root,
spack_env_var,
spack_env_view_var,
update_yaml,
)
__all__ = [
"TOP_LEVEL_KEY",
"Environment",
"SpackEnvironmentConfigError",
"SpackEnvironmentError",
"SpackEnvironmentViewError",
"activate",
@@ -400,6 +397,5 @@
"read",
"root",
"spack_env_var",
"spack_env_view_var",
"update_yaml",
]

View File

@@ -232,10 +232,6 @@ def to_dict(self):
"pkg_ids": " ".join(self.all_pkg_identifiers),
}
@property
def empty(self):
return len(self.roots) == 0
@staticmethod
def from_env(
env: ev.Environment,
@@ -258,10 +254,15 @@ def from_env(
jobserver: when enabled, make will invoke Spack with jobserver support. For
dry-run this should be disabled.
"""
roots = env.all_matching_specs(*filter_specs) if filter_specs else env.concrete_roots()
# If no specs are provided as a filter, build all the specs in the environment.
if filter_specs:
entrypoints = [env.matching_spec(s) for s in filter_specs]
else:
entrypoints = [s for _, s in env.concretized_specs()]
visitor = DepfileSpecVisitor(pkg_buildcache, dep_buildcache)
traverse.traverse_breadth_first_with_visitor(
roots, traverse.CoverNodesVisitor(visitor, key=lambda s: s.dag_hash())
entrypoints, traverse.CoverNodesVisitor(visitor, key=lambda s: s.dag_hash())
)
return MakefileModel(env, roots, visitor.adjacency_list, make_prefix, jobserver)
return MakefileModel(env, entrypoints, visitor.adjacency_list, make_prefix, jobserver)

View File

@@ -64,8 +64,6 @@
#: environment variable used to indicate the active environment
spack_env_var = "SPACK_ENV"
#: environment variable used to indicate the active environment view
spack_env_view_var = "SPACK_ENV_VIEW"
#: currently activated environment
_active_environment: Optional["Environment"] = None
@@ -330,21 +328,16 @@ def create_in_dir(
if with_view is None and keep_relative:
return Environment(manifest_dir)
try:
manifest = EnvironmentManifestFile(manifest_dir)
manifest = EnvironmentManifestFile(manifest_dir)
if with_view is not None:
manifest.set_default_view(with_view)
if with_view is not None:
manifest.set_default_view(with_view)
if not keep_relative and init_file is not None and str(init_file).endswith(manifest_name):
init_file = pathlib.Path(init_file)
manifest.absolutify_dev_paths(init_file.parent)
if not keep_relative and init_file is not None and str(init_file).endswith(manifest_name):
init_file = pathlib.Path(init_file)
manifest.absolutify_dev_paths(init_file.parent)
manifest.flush()
except (spack.config.ConfigFormatError, SpackEnvironmentConfigError) as e:
shutil.rmtree(manifest_dir)
raise e
manifest.flush()
return Environment(manifest_dir)
@@ -396,13 +389,7 @@ def all_environments():
def _read_yaml(str_or_file):
"""Read YAML from a file for round-trip parsing."""
try:
data = syaml.load_config(str_or_file)
except syaml.SpackYAMLError as e:
raise SpackEnvironmentConfigError(
f"Invalid environment configuration detected: {e.message}"
)
data = syaml.load_config(str_or_file)
filename = getattr(str_or_file, "name", None)
default_data = spack.config.validate(data, spack.schema.env.schema, filename)
return data, default_data
@@ -787,18 +774,10 @@ def _re_read(self):
"""Reinitialize the environment object."""
self.clear(re_read=True)
self.manifest = EnvironmentManifestFile(self.path)
self._read(re_read=True)
self._read()
def _read(self, re_read=False):
# If the manifest has included files, then some of the information
# (e.g., definitions) MAY be in those files. So we need to ensure
# the config is populated with any associated spec lists in order
# to fully construct the manifest state.
includes = self.manifest[TOP_LEVEL_KEY].get("include", [])
if includes and not re_read:
prepare_config_scope(self)
self._construct_state_from_manifest(re_read)
def _read(self):
self._construct_state_from_manifest()
if os.path.exists(self.lock_path):
with open(self.lock_path) as f:
@@ -812,30 +791,21 @@ def write_transaction(self):
"""Get a write lock context manager for use in a `with` block."""
return lk.WriteTransaction(self.txlock, acquire=self._re_read)
def _process_definition(self, item):
"""Process a single spec definition item."""
entry = copy.deepcopy(item)
when = _eval_conditional(entry.pop("when", "True"))
assert len(entry) == 1
if when:
name, spec_list = next(iter(entry.items()))
user_specs = SpecList(name, spec_list, self.spec_lists.copy())
if name in self.spec_lists:
self.spec_lists[name].extend(user_specs)
else:
self.spec_lists[name] = user_specs
def _construct_state_from_manifest(self, re_read=False):
def _construct_state_from_manifest(self):
"""Read manifest file and set up user specs."""
self.spec_lists = collections.OrderedDict()
if not re_read:
for item in spack.config.get("definitions", []):
self._process_definition(item)
env_configuration = self.manifest[TOP_LEVEL_KEY]
for item in env_configuration.get("definitions", []):
self._process_definition(item)
entry = copy.deepcopy(item)
when = _eval_conditional(entry.pop("when", "True"))
assert len(entry) == 1
if when:
name, spec_list = next(iter(entry.items()))
user_specs = SpecList(name, spec_list, self.spec_lists.copy())
if name in self.spec_lists:
self.spec_lists[name].extend(user_specs)
else:
self.spec_lists[name] = user_specs
spec_list = env_configuration.get(user_speclist_name, [])
user_specs = SpecList(
@@ -880,9 +850,7 @@ def clear(self, re_read=False):
yaml, and need to be maintained when re-reading an existing
environment.
"""
self.spec_lists = collections.OrderedDict()
self.spec_lists[user_speclist_name] = SpecList()
self.spec_lists = {user_speclist_name: SpecList()} # specs from yaml
self.dev_specs = {} # dev-build specs from yaml
self.concretized_user_specs = [] # user specs from last concretize
self.concretized_order = [] # roots of last concretize, in order
@@ -1031,8 +999,7 @@ def included_config_scopes(self):
elif include_url.scheme:
raise ValueError(
f"Unsupported URL scheme ({include_url.scheme}) for "
f"environment include: {config_path}"
"Unsupported URL scheme for environment include: {}".format(config_path)
)
# treat relative paths as relative to the environment
@@ -1094,10 +1061,8 @@ def update_stale_references(self, from_list=None):
from_list = next(iter(self.spec_lists.keys()))
index = list(self.spec_lists.keys()).index(from_list)
# spec_lists is an OrderedDict to ensure lists read from the manifest
# are maintained in order; hence, all list entries after the modified
# list may refer to the modified list requiring stale references to be
# updated.
# spec_lists is an OrderedDict, all list entries after the modified
# list may refer to the modified list. Update stale references
for i, (name, speclist) in enumerate(
list(self.spec_lists.items())[index + 1 :], index + 1
):
@@ -1195,7 +1160,7 @@ def change_existing_spec(
def remove(self, query_spec, list_name=user_speclist_name, force=False):
"""Remove specs from an environment that match a query_spec"""
err_msg_header = (
f"Cannot remove '{query_spec}' from '{list_name}' definition "
f"cannot remove {query_spec} from '{list_name}' definition "
f"in {self.manifest.manifest_file}"
)
query_spec = Spec(query_spec)
@@ -1226,10 +1191,11 @@ def remove(self, query_spec, list_name=user_speclist_name, force=False):
list_to_change.remove(spec)
self.update_stale_references(list_name)
new_specs = set(self.user_specs)
except spack.spec_list.SpecListError as e:
except spack.spec_list.SpecListError:
# define new specs list
new_specs = set(self.user_specs)
msg = str(e)
msg = f"Spec '{spec}' is part of a spec matrix and "
msg += f"cannot be removed from list '{list_to_change}'."
if force:
msg += " It will be removed from the concrete specs."
# Mock new specs, so we can remove this spec from concrete spec lists
@@ -1512,12 +1478,11 @@ def _concretize_separately(self, tests=False):
self._add_concrete_spec(s, concrete, new=False)
# Concretize any new user specs that we haven't concretized yet
args, root_specs, i = [], [], 0
arguments, root_specs = [], []
for uspec, uspec_constraints in zip(self.user_specs, self.user_specs.specs_as_constraints):
if uspec not in old_concretized_user_specs:
root_specs.append(uspec)
args.append((i, [str(x) for x in uspec_constraints], tests))
i += 1
arguments.append((uspec_constraints, tests))
# Ensure we don't try to bootstrap clingo in parallel
if spack.config.get("config:concretizer", "clingo") == "clingo":
@@ -1536,46 +1501,34 @@ def _concretize_separately(self, tests=False):
_ = spack.compilers.get_compiler_config()
# Early return if there is nothing to do
if len(args) == 0:
if len(arguments) == 0:
return []
# Solve the environment in parallel on Linux
start = time.time()
num_procs = min(len(args), spack.util.cpus.determine_number_of_jobs(parallel=True))
max_processes = min(
len(arguments), # Number of specs
spack.util.cpus.determine_number_of_jobs(parallel=True),
)
# TODO: support parallel concretization on macOS and Windows
# TODO: revisit this print as soon as darwin is parallel too
msg = "Starting concretization"
if sys.platform not in ("darwin", "win32") and num_procs > 1:
msg += f" pool with {num_procs} processes"
if sys.platform != "darwin":
pool_size = spack.util.parallel.num_processes(max_processes=max_processes)
if pool_size > 1:
msg = msg + " pool with {0} processes".format(pool_size)
tty.msg(msg)
batch = []
for j, (i, concrete, duration) in enumerate(
spack.util.parallel.imap_unordered(
_concretize_task,
args,
processes=num_procs,
debug=tty.is_debug(),
maxtaskperchild=1,
)
):
batch.append((i, concrete))
percentage = (j + 1) / len(args) * 100
tty.verbose(
f"{duration:6.1f}s [{percentage:3.0f}%] {concrete.cformat('{hash:7}')} "
f"{root_specs[i].colored_str}"
)
sys.stdout.flush()
# Add specs in original order
batch.sort(key=lambda x: x[0])
by_hash = {} # for attaching information on test dependencies
for root, (_, concrete) in zip(root_specs, batch):
self._add_concrete_spec(root, concrete)
by_hash[concrete.dag_hash()] = concrete
concretized_root_specs = spack.util.parallel.parallel_map(
_concretize_task, arguments, max_processes=max_processes, debug=tty.is_debug()
)
finish = time.time()
tty.msg(f"Environment concretized in {finish - start:.2f} seconds")
tty.msg("Environment concretized in %.2f seconds." % (finish - start))
by_hash = {}
for abstract, concrete in zip(root_specs, concretized_root_specs):
self._add_concrete_spec(abstract, concrete)
by_hash[concrete.dag_hash()] = concrete
# Unify the specs objects, so we get correct references to all parents
self._read_lockfile_dict(self._to_lockfile_dict())
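One side of this hunk tags every task with its submission index so that results arriving out of order from `imap_unordered` can be matched back to their root specs and re-sorted. The same pattern with only the standard library (the payload function is a placeholder for `_concretize_from_constraints`):

```python
import time
from multiprocessing import Pool

def _task(packed):
    # Each task carries its submission index so out-of-order results can be
    # matched back to their root specs, mirroring _concretize_task above.
    index, payload = packed
    start = time.time()
    result = payload.upper()  # placeholder for _concretize_from_constraints
    return index, result, time.time() - start

if __name__ == "__main__":
    args = list(enumerate(["zlib", "hdf5", "openmpi"]))
    batch = []
    with Pool(processes=min(len(args), 2), maxtasksperchild=1) as pool:
        for j, (i, result, duration) in enumerate(pool.imap_unordered(_task, args)):
            batch.append((i, result))
            print(f"{duration:6.1f}s [{(j + 1) / len(args) * 100:3.0f}%] {result}")
    batch.sort(key=lambda x: x[0])  # restore submission order
```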
@@ -1642,14 +1595,16 @@ def concretize_and_add(self, user_spec, concrete_spec=None, tests=False):
@property
def default_view(self):
if not self.has_view(default_view_name):
raise SpackEnvironmentError(f"{self.name} does not have a default view enabled")
if not self.views:
raise SpackEnvironmentError("{0} does not have a view enabled".format(self.name))
if default_view_name not in self.views:
raise SpackEnvironmentError(
"{0} does not have a default view enabled".format(self.name)
)
return self.views[default_view_name]
def has_view(self, view_name: str) -> bool:
return view_name in self.views
def update_default_view(self, path_or_bool: Union[str, bool]) -> None:
"""Updates the path of the default view.
@@ -1735,34 +1690,62 @@ def check_views(self):
"Loading the environment view will require reconcretization." % self.name
)
def _env_modifications_for_view(
self, view: ViewDescriptor, reverse: bool = False
) -> spack.util.environment.EnvironmentModifications:
try:
mods = uenv.environment_modifications_for_specs(*self.concrete_roots(), view=view)
except Exception as e:
# Failing to set up spec-specific changes shouldn't be a hard error.
tty.warn(
"couldn't load runtime environment due to {}: {}".format(e.__class__.__name__, e)
)
return spack.util.environment.EnvironmentModifications()
return mods.reversed() if reverse else mods
def _env_modifications_for_default_view(self, reverse=False):
all_mods = spack.util.environment.EnvironmentModifications()
def add_view_to_env(
self, env_mod: spack.util.environment.EnvironmentModifications, view: str
) -> spack.util.environment.EnvironmentModifications:
"""Collect the environment modifications to activate an environment using the provided
view. Removes duplicate paths.
visited = set()
errors = []
for root_spec in self.concrete_roots():
if root_spec in self.default_view and root_spec.installed and root_spec.package:
for spec in root_spec.traverse(deptype="run", root=True):
if spec.name in visited:
# It is expected that only one instance of the package
# can be added to the environment - do not attempt to
# add multiple.
tty.debug(
"Not adding {0} to shell modifications: "
"this package has already been added".format(
spec.format("{name}/{hash:7}")
)
)
continue
else:
visited.add(spec.name)
try:
mods = uenv.environment_modifications_for_spec(spec, self.default_view)
except Exception as e:
msg = "couldn't get environment settings for %s" % spec.format(
"{name}@{version} /{hash:7}"
)
errors.append((msg, str(e)))
continue
all_mods.extend(mods.reversed() if reverse else mods)
return all_mods, errors
def add_default_view_to_env(self, env_mod):
"""
Collect the environment modifications to activate an environment using the
default view. Removes duplicate paths.
Args:
env_mod: the environment modifications object that is modified.
view: the name of the view to activate."""
descriptor = self.views.get(view)
if not descriptor:
env_mod (spack.util.environment.EnvironmentModifications): the environment
modifications object that is modified.
"""
if default_view_name not in self.views:
# No default view to add to shell
return env_mod
env_mod.extend(uenv.unconditional_environment_modifications(descriptor))
env_mod.extend(self._env_modifications_for_view(descriptor))
env_mod.extend(uenv.unconditional_environment_modifications(self.default_view))
mods, errors = self._env_modifications_for_default_view()
env_mod.extend(mods)
if errors:
for err in errors:
tty.warn(*err)
# deduplicate paths from specs mapped to the same location
for env_var in env_mod.group_by_name():
@@ -1770,21 +1753,23 @@ def add_view_to_env(
return env_mod
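The trailing `group_by_name()` loop (elided by the hunk boundary above) removes duplicate path entries when several specs map to the same view location. An order-preserving de-duplication can be as small as the following; the dict-of-lists shape is an assumption, the real `EnvironmentModifications` records are richer:

```python
def dedupe_paths(env_vars):
    # Keep the first occurrence of each path entry; dict.fromkeys preserves
    # insertion order in Python 3.7+.
    return {name: list(dict.fromkeys(paths)) for name, paths in env_vars.items()}

print(dedupe_paths({"PATH": ["/view/bin", "/usr/bin", "/view/bin"]}))
# {'PATH': ['/view/bin', '/usr/bin']}
```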
def rm_view_from_env(
self, env_mod: spack.util.environment.EnvironmentModifications, view: str
) -> spack.util.environment.EnvironmentModifications:
"""Collect the environment modifications to deactivate an environment using the provided
view. Reverses the action of ``add_view_to_env``.
def rm_default_view_from_env(self, env_mod):
"""
Collect the environment modifications to deactivate an environment using the
default view. Reverses the action of ``add_default_view_to_env``.
Args:
env_mod: the environment modifications object that is modified.
view: the name of the view to deactivate."""
descriptor = self.views.get(view)
if not descriptor:
env_mod (spack.util.environment.EnvironmentModifications): the environment
modifications object that is modified.
"""
if default_view_name not in self.views:
# No default view to add to shell
return env_mod
env_mod.extend(uenv.unconditional_environment_modifications(descriptor).reversed())
env_mod.extend(self._env_modifications_for_view(descriptor, reverse=True))
env_mod.extend(uenv.unconditional_environment_modifications(self.default_view).reversed())
mods, _ = self._env_modifications_for_default_view(reverse=True)
env_mod.extend(mods)
return env_mod
@@ -2094,7 +2079,7 @@ def matching_spec(self, spec):
def removed_specs(self):
"""Tuples of (user spec, concrete spec) for all specs that will be
removed on next concretize."""
removed on nexg concretize."""
needed = set()
for s, c in self.concretized_specs():
if s in self.user_specs:
@@ -2437,13 +2422,10 @@ def _concretize_from_constraints(spec_constraints, tests=False):
invalid_constraints.extend(inv_variant_constraints)
def _concretize_task(packed_arguments) -> Tuple[int, Spec, float]:
index, spec_constraints, tests = packed_arguments
spec_constraints = [Spec(x) for x in spec_constraints]
def _concretize_task(packed_arguments):
spec_constraints, tests = packed_arguments
with tty.SuppressOutput(msg_enabled=False):
start = time.time()
spec = _concretize_from_constraints(spec_constraints, tests)
return index, spec, time.time() - start
return _concretize_from_constraints(spec_constraints, tests)
def make_repo_path(root):
@@ -2753,7 +2735,7 @@ def override_user_spec(self, user_spec: str, idx: int) -> None:
self.changed = True
def add_definition(self, user_spec: str, list_name: str) -> None:
"""Appends a user spec to the first active definition matching the name passed as argument.
"""Appends a user spec to the first active definition mathing the name passed as argument.
Args:
user_spec: user spec to be appended
@@ -2966,7 +2948,3 @@ class SpackEnvironmentError(spack.error.SpackError):
class SpackEnvironmentViewError(SpackEnvironmentError):
"""Class for errors regarding view generation."""
class SpackEnvironmentConfigError(SpackEnvironmentError):
"""Class for Spack environment-specific configuration errors."""

View File

@@ -3,7 +3,6 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
from typing import Optional
import llnl.util.tty as tty
from llnl.util.tty.color import colorize
@@ -14,14 +13,12 @@
from spack.util.environment import EnvironmentModifications
def activate_header(env, shell, prompt=None, view: Optional[str] = None):
def activate_header(env, shell, prompt=None):
# Construct the commands to run
cmds = ""
if shell == "csh":
# TODO: figure out how to make color work for csh
cmds += "setenv SPACK_ENV %s;\n" % env.path
if view:
cmds += "setenv SPACK_ENV_VIEW %s;\n" % view
cmds += 'alias despacktivate "spack env deactivate";\n'
if prompt:
cmds += "if (! $?SPACK_OLD_PROMPT ) "
@@ -32,8 +29,6 @@ def activate_header(env, shell, prompt=None, view: Optional[str] = None):
prompt = colorize("@G{%s} " % prompt, color=True)
cmds += "set -gx SPACK_ENV %s;\n" % env.path
if view:
cmds += "set -gx SPACK_ENV_VIEW %s;\n" % view
cmds += "function despacktivate;\n"
cmds += " spack env deactivate;\n"
cmds += "end;\n"
@@ -45,21 +40,15 @@ def activate_header(env, shell, prompt=None, view: Optional[str] = None):
elif shell == "bat":
# TODO: Color
cmds += 'set "SPACK_ENV=%s"\n' % env.path
if view:
cmds += 'set "SPACK_ENV_VIEW=%s"\n' % view
# TODO: despacktivate
# TODO: prompt
elif shell == "pwsh":
cmds += "$Env:SPACK_ENV='%s'\n" % env.path
if view:
cmds += "$Env:SPACK_ENV_VIEW='%s'\n" % view
else:
if "color" in os.getenv("TERM", "") and prompt:
prompt = colorize("@G{%s}" % prompt, color=True, enclose=True)
cmds += "export SPACK_ENV=%s;\n" % env.path
if view:
cmds += "export SPACK_ENV_VIEW=%s;\n" % view
cmds += "alias despacktivate='spack env deactivate';\n"
if prompt:
cmds += "if [ -z ${SPACK_OLD_PS1+x} ]; then\n"
@@ -77,14 +66,12 @@ def deactivate_header(shell):
cmds = ""
if shell == "csh":
cmds += "unsetenv SPACK_ENV;\n"
cmds += "unsetenv SPACK_ENV_VIEW;\n"
cmds += "if ( $?SPACK_OLD_PROMPT ) "
cmds += ' eval \'set prompt="$SPACK_OLD_PROMPT" &&'
cmds += " unsetenv SPACK_OLD_PROMPT';\n"
cmds += "unalias despacktivate;\n"
elif shell == "fish":
cmds += "set -e SPACK_ENV;\n"
cmds += "set -e SPACK_ENV_VIEW;\n"
cmds += "functions -e despacktivate;\n"
#
# NOTE: Not changing fish_prompt (above) => no need to restore it here.
@@ -92,19 +79,14 @@ def deactivate_header(shell):
elif shell == "bat":
# TODO: Color
cmds += 'set "SPACK_ENV="\n'
cmds += 'set "SPACK_ENV_VIEW="\n'
# TODO: despacktivate
# TODO: prompt
elif shell == "pwsh":
cmds += "Set-Item -Path Env:SPACK_ENV\n"
cmds += "Set-Item -Path Env:SPACK_ENV_VIEW\n"
else:
cmds += "if [ ! -z ${SPACK_ENV+x} ]; then\n"
cmds += "unset SPACK_ENV; export SPACK_ENV;\n"
cmds += "fi;\n"
cmds += "if [ ! -z ${SPACK_ENV_VIEW+x} ]; then\n"
cmds += "unset SPACK_ENV_VIEW; export SPACK_ENV_VIEW;\n"
cmds += "fi;\n"
cmds += "alias despacktivate > /dev/null 2>&1 && unalias despacktivate;\n"
cmds += "if [ ! -z ${SPACK_OLD_PS1+x} ]; then\n"
cmds += " if [ \"$SPACK_OLD_PS1\" = '$$$$' ]; then\n"
@@ -118,23 +100,24 @@ def deactivate_header(shell):
return cmds
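Each shell gets its own syntax for setting and clearing the same variables, so the per-shell branches in `activate_header`/`deactivate_header` reduce to a lookup of templates. A trimmed sketch covering three of the shells above (bat and pwsh omitted):

```python
SET = {
    "sh":   "export {name}={value};\n",
    "csh":  "setenv {name} {value};\n",
    "fish": "set -gx {name} {value};\n",
}
UNSET = {
    "sh":   "unset {name}; export {name};\n",
    "csh":  "unsetenv {name};\n",
    "fish": "set -e {name};\n",
}

def set_var(shell: str, name: str, value: str) -> str:
    return SET[shell].format(name=name, value=value)

def unset_var(shell: str, name: str) -> str:
    return UNSET[shell].format(name=name)

print(set_var("fish", "SPACK_ENV", "/path/to/env"), end="")
# set -gx SPACK_ENV /path/to/env;
```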
def activate(
env: ev.Environment, use_env_repo=False, view: Optional[str] = "default"
) -> EnvironmentModifications:
"""Activate an environment and append environment modifications
def activate(env, use_env_repo=False, add_view=True):
"""
Activate an environment and append environment modifications
To activate an environment, we add its configuration scope to the
existing Spack configuration, and we set active to the current
environment.
Arguments:
env: the environment to activate
use_env_repo: use the packages exactly as they appear in the environment's repository
view: generate commands to add runtime environment variables for named view
env (spack.environment.Environment): the environment to activate
use_env_repo (bool): use the packages exactly as they appear in the
environment's repository
add_view (bool): generate commands to add view to path variables
Returns:
spack.util.environment.EnvironmentModifications: Environment variables
modifications to activate environment."""
modifications to activate environment.
"""
ev.activate(env, use_env_repo=use_env_repo)
env_mods = EnvironmentModifications()
@@ -146,9 +129,9 @@ def activate(
# become PATH variables.
#
try:
if view and env.has_view(view):
if add_view and ev.default_view_name in env.views:
with spack.store.STORE.db.read_transaction():
env.add_view_to_env(env_mods, view)
env.add_default_view_to_env(env_mods)
except (spack.repo.UnknownPackageError, spack.repo.UnknownNamespaceError) as e:
tty.error(e)
tty.die(
@@ -162,15 +145,17 @@ def activate(
return env_mods
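A hypothetical driver for the view-aware signature, assuming `shell_modifications` serializes an `EnvironmentModifications` object to shell syntax and that the environment path shown exists:

```python
import spack.environment as ev
import spack.environment.shell as ev_shell

# Hypothetical environment path and view name; activate() returns the
# accumulated modifications rather than applying them itself.
env = ev.Environment("/path/to/env")
mods = ev_shell.activate(env, view="default")
print(mods.shell_modifications(shell="sh"))
```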
def deactivate() -> EnvironmentModifications:
"""Deactivate an environment and collect corresponding environment modifications.
def deactivate():
"""
Deactivate an environment and collect corresponding environment modifications.
Note: unloads the environment in its current state, not in the state it was
loaded in, meaning that specs that were removed from the spack environment
after activation are not unloaded.
Returns:
        Environment variables modifications to deactivate the environment.
        spack.util.environment.EnvironmentModifications: Environment variables
        modifications to deactivate the environment.
"""
env_mods = EnvironmentModifications()
active = ev.active_environment()
@@ -178,12 +163,10 @@ def deactivate() -> EnvironmentModifications:
if active is None:
return env_mods
active_view = os.getenv(ev.spack_env_view_var)
if active_view and active.has_view(active_view):
if ev.default_view_name in active.views:
try:
with spack.store.STORE.db.read_transaction():
active.rm_view_from_env(env_mods, active_view)
active.rm_default_view_from_env(env_mods)
except (spack.repo.UnknownPackageError, spack.repo.UnknownNamespaceError) as e:
tty.warn(e)
tty.warn(

View File

@@ -5,7 +5,6 @@
"""Service functions and classes to implement the hooks
for Spack's command extensions.
"""
import difflib
import importlib
import os
import re
@@ -177,19 +176,10 @@ class CommandNotFoundError(spack.error.SpackError):
"""
def __init__(self, cmd_name):
msg = (
super().__init__(
"{0} is not a recognized Spack command or extension command;"
" check with `spack commands`.".format(cmd_name)
)
long_msg = None
similar = difflib.get_close_matches(cmd_name, spack.cmd.all_commands())
if 1 <= len(similar) <= 5:
long_msg = "\nDid you mean one of the following commands?\n "
long_msg += "\n ".join(similar)
super().__init__(msg, long_msg)
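The suggestion logic is plain `difflib` from the standard library; a self-contained demonstration with a made-up command list (`spack.cmd.all_commands()` supplies the real one):

```python
import difflib

all_commands = ["install", "uninstall", "info", "find", "load", "unload"]
similar = difflib.get_close_matches("instal", all_commands)
if 1 <= len(similar) <= 5:
    print("Did you mean one of the following commands?\n  " + "\n  ".join(similar))
```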
class ExtensionNamingError(spack.error.SpackError):

View File

@@ -28,7 +28,6 @@
import os.path
import re
import shutil
import urllib.error
import urllib.parse
from typing import List, Optional
@@ -42,7 +41,6 @@
import spack.config
import spack.error
import spack.oci.opener
import spack.url
import spack.util.crypto as crypto
import spack.util.git
@@ -539,34 +537,6 @@ def fetch(self):
tty.msg("Using cached archive: {0}".format(path))
class OCIRegistryFetchStrategy(URLFetchStrategy):
def __init__(self, url=None, checksum=None, **kwargs):
super().__init__(url, checksum, **kwargs)
self._urlopen = kwargs.get("_urlopen", spack.oci.opener.urlopen)
@_needs_stage
def fetch(self):
file = self.stage.save_filename
tty.msg(f"Fetching {self.url}")
try:
response = self._urlopen(self.url)
except urllib.error.URLError as e:
# clean up archive on failure.
if self.archive_file:
os.remove(self.archive_file)
if os.path.lexists(file):
os.remove(file)
raise FailedDownloadError(self.url, f"Failed to fetch {self.url}: {e}") from e
if os.path.lexists(file):
os.remove(file)
with open(file, "wb") as f:
shutil.copyfileobj(response, f)
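Stripped of the staging machinery, the fetch method above follows a common download-with-cleanup shape. A minimal sketch using only the standard library; `fetch_to` and the bare `RuntimeError` are stand-ins for the strategy's own plumbing:

```python
import os
import shutil
import urllib.error
import urllib.request

def fetch_to(url: str, destination: str) -> None:
    """Download url to destination, removing the partial file on failure."""
    try:
        response = urllib.request.urlopen(url)
    except urllib.error.URLError as e:
        # Clean up any partial download before propagating the error.
        if os.path.lexists(destination):
            os.remove(destination)
        raise RuntimeError(f"Failed to fetch {url}: {e}") from e
    with open(destination, "wb") as f:
        shutil.copyfileobj(response, f)
```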
class VCSFetchStrategy(FetchStrategy):
"""Superclass for version control system fetch strategies.
@@ -773,7 +743,8 @@ def git(self):
# Disable advice for a quieter fetch
# https://github.com/git/git/blob/master/Documentation/RelNotes/1.7.2.txt
if self.git_version >= spack.version.Version("1.7.2"):
self._git.add_default_arg("-c", "advice.detachedHead=false")
self._git.add_default_arg("-c")
self._git.add_default_arg("advice.detachedHead=false")
# If the user asked for insecure fetching, make that work
# with git as well.
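Either form ends up passing `-c advice.detachedHead=false` ahead of the subcommand, which is how git takes per-invocation config overrides. The equivalent effect with a bare subprocess call:

```python
import subprocess

def quiet_clone(url: str, dest: str) -> None:
    # "-c key=value" must precede the subcommand, exactly what the
    # default-argument plumbing above arranges for Spack's git wrapper.
    subprocess.run(
        ["git", "-c", "advice.detachedHead=false", "clone", url, dest],
        check=True,
    )
```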

Some files were not shown because too many files have changed in this diff.