Compare commits: e4s-23.08 ... features/e
16 commits:

1b4ff30665
c10797718a
81ea29b007
d8666a7fdf
7c41bba6f8
20e9fe3785
401218b4f1
adfc1c0896
f4402c1cde
c1d6d93388
e9012c7781
59acfe4f0b
004ff9d4e2
9d20be5fe5
edc07dab27
acde8ef104
.github/dependabot.yml (vendored), 5 changed lines

@@ -5,8 +5,3 @@ updates:
     directory: "/"
     schedule:
       interval: "daily"
-  # Requirements to build documentation
-  - package-ecosystem: "pip"
-    directory: "/lib/spack/docs"
-    schedule:
-      interval: "daily"
.github/workflows/audit.yaml (vendored), 11 changed lines

@@ -17,13 +17,10 @@ concurrency:
 jobs:
   # Run audits on all the packages in the built-in repository
   package-audits:
-    runs-on: ${{ matrix.operating_system }}
-    strategy:
-      matrix:
-        operating_system: ["ubuntu-latest", "macos-latest"]
+    runs-on: ubuntu-latest
     steps:
-    - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
-    - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # @v2
+    - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
+    - uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 # @v2
       with:
         python-version: ${{inputs.python_version}}
     - name: Install Python packages

@@ -44,4 +41,4 @@ jobs:
     - uses: codecov/codecov-action@eaaf4bedf32dbdc6b720b63067d99c4d77d6047d # @v2.1.0
       if: ${{ inputs.with_coverage == 'true' }}
       with:
-        flags: unittests,audits
+        flags: unittests,linux,audits
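For reference, a pinned action SHA like the ones swapped above can be mapped back to its release tag with plain git; this is a sketch, using the ``actions/checkout`` pin from this hunk, that prints the matching ``refs/tags`` entry if the commit is tagged:

.. code-block:: console

   $ git ls-remote --tags https://github.com/actions/checkout | grep ^c85c95e3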
.github/workflows/bootstrap.yml (vendored), 22 changed lines

@@ -24,7 +24,7 @@ jobs:
           make patch unzip which xz python3 python3-devel tree \
           cmake bison bison-devel libstdc++-static
     - name: Checkout
-      uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
+      uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
       with:
         fetch-depth: 0
     - name: Setup non-root user

@@ -62,7 +62,7 @@ jobs:
           make patch unzip xz-utils python3 python3-dev tree \
           cmake bison
     - name: Checkout
-      uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
+      uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
       with:
         fetch-depth: 0
     - name: Setup non-root user

@@ -99,7 +99,7 @@ jobs:
           bzip2 curl file g++ gcc gfortran git gnupg2 gzip \
           make patch unzip xz-utils python3 python3-dev tree
     - name: Checkout
-      uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
+      uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
       with:
         fetch-depth: 0
     - name: Setup non-root user

@@ -133,7 +133,7 @@ jobs:
           make patch unzip which xz python3 python3-devel tree \
           cmake bison
     - name: Checkout
-      uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
+      uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
       with:
         fetch-depth: 0
     - name: Setup repo

@@ -158,7 +158,7 @@ jobs:
       run: |
         brew install cmake bison@2.7 tree
     - name: Checkout
-      uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
+      uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
     - name: Bootstrap clingo
       run: |
         source share/spack/setup-env.sh

@@ -179,7 +179,7 @@ jobs:
       run: |
         brew install tree
     - name: Checkout
-      uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
+      uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
     - name: Bootstrap clingo
       run: |
         set -ex

@@ -204,7 +204,7 @@ jobs:
     runs-on: ubuntu-20.04
     steps:
     - name: Checkout
-      uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
+      uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
       with:
         fetch-depth: 0
     - name: Setup repo

@@ -247,7 +247,7 @@ jobs:
           bzip2 curl file g++ gcc patchelf gfortran git gzip \
           make patch unzip xz-utils python3 python3-dev tree
     - name: Checkout
-      uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
+      uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
       with:
         fetch-depth: 0
     - name: Setup non-root user

@@ -283,7 +283,7 @@ jobs:
           make patch unzip xz-utils python3 python3-dev tree \
           gawk
     - name: Checkout
-      uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
+      uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
       with:
         fetch-depth: 0
     - name: Setup non-root user

@@ -316,7 +316,7 @@ jobs:
         # Remove GnuPG since we want to bootstrap it
         sudo rm -rf /usr/local/bin/gpg
     - name: Checkout
-      uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
+      uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
     - name: Bootstrap GnuPG
       run: |
         source share/spack/setup-env.sh

@@ -333,7 +333,7 @@ jobs:
         # Remove GnuPG since we want to bootstrap it
         sudo rm -rf /usr/local/bin/gpg
     - name: Checkout
-      uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
+      uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
     - name: Bootstrap GnuPG
       run: |
         source share/spack/setup-env.sh
.github/workflows/build-containers.yml (vendored), 14 changed lines

@@ -49,14 +49,14 @@ jobs:
           [almalinux8, 'linux/amd64,linux/arm64,linux/ppc64le', 'almalinux:8'],
           [almalinux9, 'linux/amd64,linux/arm64,linux/ppc64le', 'almalinux:9'],
           [rockylinux8, 'linux/amd64,linux/arm64', 'rockylinux:8'],
-          [rockylinux9, 'linux/amd64,linux/arm64', 'rockylinux:9'],
+          [rockylinux9, 'linux/amd64,linux/arm64,linux/ppc64le', 'rockylinux:9'],
           [fedora37, 'linux/amd64,linux/arm64,linux/ppc64le', 'fedora:37'],
           [fedora38, 'linux/amd64,linux/arm64,linux/ppc64le', 'fedora:38']]
     name: Build ${{ matrix.dockerfile[0] }}
     if: github.repository == 'spack/spack'
     steps:
       - name: Checkout
-        uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
+        uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2

       - name: Set Container Tag Normal (Nightly)
         run: |

@@ -92,13 +92,13 @@ jobs:
           path: dockerfiles

       - name: Set up QEMU
-        uses: docker/setup-qemu-action@2b82ce82d56a2a04d2637cd93a637ae1b359c0a7 # @v1
+        uses: docker/setup-qemu-action@e81a89b1732b9c48d79cd809d8d81d79c4647a18 # @v1

       - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@4c0219f9ac95b02789c1075625400b2acbff50b1 # @v1
+        uses: docker/setup-buildx-action@4b4e9c3e2d4531116a6f8ba8e71fc6e2cb6e6c8c # @v1

       - name: Log in to GitHub Container Registry
-        uses: docker/login-action@465a07811f14bebb1938fbed4728c6a1ff8901fc # @v1
+        uses: docker/login-action@f4ef78c080cd8ba55a85445d5b36e214a81df20a # @v1
         with:
           registry: ghcr.io
           username: ${{ github.actor }}

@@ -106,13 +106,13 @@ jobs:

       - name: Log in to DockerHub
         if: github.event_name != 'pull_request'
-        uses: docker/login-action@465a07811f14bebb1938fbed4728c6a1ff8901fc # @v1
+        uses: docker/login-action@f4ef78c080cd8ba55a85445d5b36e214a81df20a # @v1
         with:
           username: ${{ secrets.DOCKERHUB_USERNAME }}
           password: ${{ secrets.DOCKERHUB_TOKEN }}

       - name: Build & Deploy ${{ matrix.dockerfile[0] }}
-        uses: docker/build-push-action@2eb1c1961a95fc15694676618e422e8ba1d63825 # @v2
+        uses: docker/build-push-action@3b5e8027fcad23fda98b2e3ac259d8d67585f671 # @v2
         with:
           context: dockerfiles/${{ matrix.dockerfile[0] }}
           platforms: ${{ matrix.dockerfile[1] }}
.github/workflows/ci.yaml (vendored), 2 changed lines

@@ -35,7 +35,7 @@ jobs:
       core: ${{ steps.filter.outputs.core }}
       packages: ${{ steps.filter.outputs.packages }}
     steps:
-    - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
+    - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
       if: ${{ github.event_name == 'push' }}
      with:
        fetch-depth: 0
.github/workflows/nightly-win-builds.yml (vendored), 4 changed lines

@@ -14,10 +14,10 @@ jobs:
   build-paraview-deps:
     runs-on: windows-latest
     steps:
-    - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
+    - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
       with:
         fetch-depth: 0
-    - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1
+    - uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0
       with:
         python-version: 3.9
     - name: Install Python packages
.github/workflows/unit_tests.yaml (vendored), 19 changed lines

@@ -47,10 +47,10 @@ jobs:
         on_develop: false

     steps:
-    - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
+    - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
       with:
         fetch-depth: 0
-    - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # @v2
+    - uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 # @v2
       with:
         python-version: ${{ matrix.python-version }}
     - name: Install System packages

@@ -94,10 +94,10 @@ jobs:
   shell:
     runs-on: ubuntu-latest
     steps:
-    - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
+    - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
       with:
         fetch-depth: 0
-    - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # @v2
+    - uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 # @v2
       with:
         python-version: '3.11'
     - name: Install System packages

@@ -133,7 +133,7 @@ jobs:
         dnf install -y \
             bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
             make patch tcl unzip which xz
-    - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
+    - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
     - name: Setup repo and non-root user
       run: |
         git --version

@@ -152,10 +152,10 @@ jobs:
   clingo-cffi:
     runs-on: ubuntu-latest
     steps:
-    - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
+    - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
       with:
         fetch-depth: 0
-    - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # @v2
+    - uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 # @v2
       with:
         python-version: '3.11'
     - name: Install System packages

@@ -165,7 +165,6 @@ jobs:
     - name: Install Python packages
       run: |
         pip install --upgrade pip setuptools pytest coverage[toml] pytest-cov clingo pytest-xdist
-        pip install --upgrade flake8 "isort>=4.3.5" "mypy>=0.900" "click" "black"
     - name: Setup git configuration
       run: |
         # Need this for the git tests to succeed.

@@ -187,10 +186,10 @@ jobs:
     matrix:
       python-version: ["3.10"]
     steps:
-    - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
+    - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
       with:
         fetch-depth: 0
-    - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # @v2
+    - uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 # @v2
       with:
         python-version: ${{ matrix.python-version }}
     - name: Install Python packages
.github/workflows/valid-style.yml (vendored), 11 changed lines

@@ -18,8 +18,8 @@ jobs:
   validate:
     runs-on: ubuntu-latest
     steps:
-    - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
-    - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # @v2
+    - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
+    - uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 # @v2
       with:
         python-version: '3.11'
         cache: 'pip'

@@ -35,10 +35,10 @@ jobs:
   style:
     runs-on: ubuntu-latest
     steps:
-    - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
+    - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
       with:
         fetch-depth: 0
-    - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1 # @v2
+    - uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 # @v2
       with:
         python-version: '3.11'
         cache: 'pip'

@@ -68,7 +68,7 @@ jobs:
         dnf install -y \
             bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
             make patch tcl unzip which xz
-    - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9 # @v2
+    - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
     - name: Setup repo and non-root user
       run: |
         git --version

@@ -81,7 +81,6 @@ jobs:
       shell: runuser -u spack-test -- bash {0}
       run: |
         source share/spack/setup-env.sh
         spack debug report
-        spack -d bootstrap now --dev
         spack style -t black
         spack unit-test -V
.github/workflows/windows_python.yml (vendored), 12 changed lines

@@ -15,10 +15,10 @@ jobs:
   unit-tests:
     runs-on: windows-latest
     steps:
-    - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
+    - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
       with:
         fetch-depth: 0
-    - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1
+    - uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0
       with:
         python-version: 3.9
     - name: Install Python packages

@@ -39,10 +39,10 @@ jobs:
   unit-tests-cmd:
     runs-on: windows-latest
     steps:
-    - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
+    - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
       with:
         fetch-depth: 0
-    - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1
+    - uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0
       with:
         python-version: 3.9
     - name: Install Python packages

@@ -63,10 +63,10 @@ jobs:
   build-abseil:
     runs-on: windows-latest
     steps:
-    - uses: actions/checkout@c85c95e3d7251135ab7dc9ce3241c5835cc595a9
+    - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
       with:
         fetch-depth: 0
-    - uses: actions/setup-python@61a6322f88396a6271a6ee3565807d608ecaddd1
+    - uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0
       with:
         python-version: 3.9
     - name: Install Python packages
@@ -1,16 +1,10 @@
 version: 2

-build:
-  os: "ubuntu-22.04"
-  apt_packages:
-    - graphviz
-  tools:
-    python: "3.11"
-
 sphinx:
   configuration: lib/spack/docs/conf.py
   fail_on_warning: true

 python:
+  version: 3.7
   install:
     - requirements: lib/spack/docs/requirements.txt
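For orientation, the settings above mirror what a local documentation build needs; a sketch, assuming the standard Sphinx Makefile that ships with the docs under ``lib/spack/docs``:

.. code-block:: console

   $ pip install -r lib/spack/docs/requirements.txt
   $ cd lib/spack/docs && make html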
CHANGELOG.md, 18 changed lines

@@ -1,21 +1,3 @@
-# v0.20.1 (2023-07-10)
-
-## Spack Bugfixes
-
-- Spec removed from an environment where not actually removed if `--force` was not given (#37877)
-- Speed-up module file generation (#37739)
-- Hotfix for a few recipes that treat CMake as a link dependency (#35816)
-- Fix re-running stand-alone test a second time, which was getting a trailing spurious failure (#37840)
-- Fixed reading JSON manifest on Cray, reporting non-concrete specs (#37909)
-- Fixed a few bugs when generating Dockerfiles from Spack (#37766,#37769)
-- Fixed a few long-standing bugs when generating module files (#36678,#38347,#38465,#38455)
-- Fixed issues with building Python extensions using an external Python (#38186)
-- Fixed compiler removal from command line (#38057)
-- Show external status as [e] (#33792)
-- Backported `archspec` fixes (#37793)
-- Improved a few error messages (#37791)
-
-
 # v0.20.0 (2023-05-21)

 `v0.20.0` is a major feature release.
@@ -25,6 +25,8 @@ exit 1
 # Line above is a shell no-op, and ends a python multi-line comment.
 # The code above runs this file with our preferred python interpreter.

+from __future__ import print_function
+
 import os
 import os.path
 import sys
@@ -214,7 +214,7 @@ goto :end_switch
 if defined _sp_args (
   if NOT "%_sp_args%"=="%_sp_args:--help=%" (
     goto :default_case
-  ) else if NOT "%_sp_args%"=="%_sp_args:-h=%" (
+  ) else if NOT "%_sp_args%"=="%_sp_args: -h=%" (
     goto :default_case
   ) else if NOT "%_sp_args%"=="%_sp_args:--bat=%" (
     goto :default_case
bin/spack.ps1, 132 changed lines (file removed)

@@ -1,132 +0,0 @@
-# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
-# Spack Project Developers. See the top-level COPYRIGHT file for details.
-
-# SPDX-License-Identifier: (Apache-2.0 OR MIT)
-# #######################################################################
-
-function Compare-CommonArgs {
-    $CMDArgs = $args[0]
-    # These aruments take precedence and call for no futher parsing of arguments
-    # invoke actual Spack entrypoint with that context and exit after
-    "--help", "-h", "--version", "-V" | ForEach-Object {
-        $arg_opt = $_
-        if(($CMDArgs) -and ([bool]($CMDArgs.Where({$_ -eq $arg_opt})))) {
-            return $true
-        }
-    }
-    return $false
-}
-
-function Read-SpackArgs {
-    $SpackCMD_params = @()
-    $SpackSubCommand = $NULL
-    $SpackSubCommandArgs = @()
-    $args_ = $args[0]
-    $args_ | ForEach-Object {
-        if (!$SpackSubCommand) {
-            if($_.SubString(0,1) -eq "-")
-            {
-                $SpackCMD_params += $_
-            }
-            else{
-                $SpackSubCommand = $_
-            }
-        }
-        else{
-            $SpackSubCommandArgs += $_
-        }
-    }
-    return $SpackCMD_params, $SpackSubCommand, $SpackSubCommandArgs
-}
-
-function Invoke-SpackCD {
-    if (Compare-CommonArgs $SpackSubCommandArgs) {
-        python $Env:SPACK_ROOT/bin/spack cd -h
-    }
-    else {
-        $LOC = $(python $Env:SPACK_ROOT/bin/spack location $SpackSubCommandArgs)
-        if (($NULL -ne $LOC)){
-            if ( Test-Path -Path $LOC){
-                Set-Location $LOC
-            }
-            else{
-                exit 1
-            }
-        }
-        else {
-            exit 1
-        }
-    }
-}
-
-function Invoke-SpackEnv {
-    if (Compare-CommonArgs $SpackSubCommandArgs[0]) {
-        python $Env:SPACK_ROOT/bin/spack env -h
-    }
-    else {
-        $SubCommandSubCommand = $SpackSubCommandArgs[0]
-        $SubCommandSubCommandArgs = $SpackSubCommandArgs[1..$SpackSubCommandArgs.Count]
-        switch ($SubCommandSubCommand) {
-            "activate" {
-                if (Compare-CommonArgs $SubCommandSubCommandArgs) {
-                    python $Env:SPACK_ROOT/bin/spack env activate $SubCommandSubCommandArgs
-                }
-                elseif ([bool]($SubCommandSubCommandArgs.Where({$_ -eq "--pwsh"}))) {
-                    python $Env:SPACK_ROOT/bin/spack env activate $SubCommandSubCommandArgs
-                }
-                elseif (!$SubCommandSubCommandArgs) {
-                    python $Env:SPACK_ROOT/bin/spack env activate $SubCommandSubCommandArgs
-                }
-                else {
-                    $SpackEnv = $(python $Env:SPACK_ROOT/bin/spack $SpackCMD_params env activate "--pwsh" $SubCommandSubCommandArgs)
-                    $ExecutionContext.InvokeCommand($SpackEnv)
-                }
-            }
-            "deactivate" {
-                if ([bool]($SubCommandSubCommandArgs.Where({$_ -eq "--pwsh"}))) {
-                    python $Env:SPACK_ROOT/bin/spack env deactivate $SubCommandSubCommandArgs
-                }
-                elseif($SubCommandSubCommandArgs) {
-                    python $Env:SPACK_ROOT/bin/spack env deactivate -h
-                }
-                else {
-                    $SpackEnv = $(python $Env:SPACK_ROOT/bin/spack $SpackCMD_params env deactivate --pwsh)
-                    $ExecutionContext.InvokeCommand($SpackEnv)
-                }
-            }
-            default {python $Env:SPACK_ROOT/bin/spack $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs}
-        }
-    }
-}
-
-function Invoke-SpackLoad {
-    if (Compare-CommonArgs $SpackSubCommandArgs) {
-        python $Env:SPACK_ROOT/bin/spack $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs
-    }
-    elseif ([bool]($SpackSubCommandArgs.Where({($_ -eq "--pwsh") -or ($_ -eq "--list")}))) {
-        python $Env:SPACK_ROOT/bin/spack $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs
-    }
-    else {
-        $SpackEnv = $(python $Env:SPACK_ROOT/bin/spack $SpackCMD_params $SpackSubCommand "--pwsh" $SpackSubCommandArgs)
-        $ExecutionContext.InvokeCommand($SpackEnv)
-    }
-}
-
-
-$SpackCMD_params, $SpackSubCommand, $SpackSubCommandArgs = Read-SpackArgs $args
-
-if (Compare-CommonArgs $SpackCMD_params) {
-    python $Env:SPACK_ROOT/bin/spack $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs
-    exit $LASTEXITCODE
-}
-
-# Process Spack commands with special conditions
-# all other commands are piped directly to Spack
-switch($SpackSubCommand)
-{
-    "cd"     {Invoke-SpackCD}
-    "env"    {Invoke-SpackEnv}
-    "load"   {Invoke-SpackLoad}
-    "unload" {Invoke-SpackLoad}
-    default  {python $Env:SPACK_ROOT/bin/spack $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs}
-}
@@ -36,9 +36,3 @@ concretizer:
   # on each root spec, allowing different versions and variants of the same package in
   # an environment.
   unify: true
-  # Option to deal with possible duplicate nodes (i.e. different nodes from the same package) in the DAG.
-  duplicates:
-    # "none": allows a single node for any package in the DAG.
-    # "minimal": allows the duplication of 'build-tools' nodes only (e.g. py-setuptools, cmake etc.)
-    # "full" (experimental): allows separation of the entire build-tool stack (e.g. the entire "cmake" subDAG)
-    strategy: none
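As a quick check, the effective value of concretizer settings like the ``duplicates:strategy`` key removed here can be inspected from the command line; a sketch:

.. code-block:: console

   $ spack config get concretizer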
@@ -216,11 +216,10 @@ config:
   # manipulation by unprivileged user (e.g. AFS)
   allow_sgid: true

-  # Whether to show status information during building and installing packages.
-  # This gives information about Spack's current progress as well as the current
-  # and total number of packages. Information is shown both in the terminal
-  # title and inline.
-  install_status: true
+  # Whether to set the terminal title to display status information during
+  # building and installing packages. This gives information about Spack's
+  # current progress as well as the current and total number of packages.
+  terminal_title: false

   # Number of seconds a buildcache's index.json is cached locally before probing
   # for updates, within a single Spack invocation. Defaults to 10 minutes.
@@ -1,4 +1,2 @@
 mirrors:
-  spack-public:
-    binary: false
-    url: https://mirror.spack.io
+  spack-public: https://mirror.spack.io
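Either spelling of the default mirror shows up the same way at the command line; a sketch (output abbreviated):

.. code-block:: console

   $ spack mirror list
   spack-public    https://mirror.spack.io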
@@ -49,7 +49,6 @@ packages:
     pbs: [openpbs, torque]
     pil: [py-pillow]
     pkgconfig: [pkgconf, pkg-config]
-    qmake: [qt-base, qt]
     rpc: [libtirpc]
     scalapack: [netlib-scalapack, amdscalapack]
     sycl: [hipsycl]

@@ -60,7 +59,6 @@ packages:
     xxd: [xxd-standalone, vim]
     yacc: [bison, byacc]
     ziglang: [zig]
-    zlib-api: [zlib, zlib-ng+compat]
   permissions:
     read: world
     write: user
@@ -1,16 +0,0 @@
-# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
-# Spack Project Developers. See the top-level COPYRIGHT file for details.
-#
-# SPDX-License-Identifier: (Apache-2.0 OR MIT)
-
-# The name of the Pygments (syntax highlighting) style to use.
-# We use our own extension of the default style with a few modifications
-from pygments.styles.default import DefaultStyle
-from pygments.token import Generic
-
-
-class SpackStyle(DefaultStyle):
-    styles = DefaultStyle.styles.copy()
-    background_color = "#f4f4f8"
-    styles[Generic.Output] = "#355"
-    styles[Generic.Prompt] = "bold #346ec9"
@@ -48,10 +48,14 @@ Here is an example where a build cache is created in a local directory named

 .. code-block:: console

-   $ spack buildcache push ./spack-cache ninja
+   $ spack buildcache push --allow-root ./spack-cache ninja
    ==> Pushing binary packages to file:///home/spackuser/spack/spack-cache/build_cache

-Note that ``ninja`` must be installed locally for this to work.
+Not that ``ninja`` must be installed locally for this to work.
+
+We're using the ``--allow-root`` flag to tell Spack that is OK when any of
+the binaries we're pushing contain references to the local Spack install
+directory.

 Once you have a build cache, you can add it as a mirror, discussed next.

@@ -143,7 +147,7 @@ and then install from it exclusively, you would do:

    $ spack mirror add E4S https://cache.e4s.io
    $ spack buildcache keys --install --trust
-   $ spack install --use-buildcache only <package>
+   $ spack install --use-buildache only <package>

 We use ``--install`` and ``--trust`` to say that we are installing keys to our
 keyring, and trusting all downloaded keys.
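Putting the two hunks together, consuming the local cache from the first example follows the same pattern as the E4S mirror; a sketch using the newer (``-`` side) syntax, where ``local-cache`` is a hypothetical mirror name:

.. code-block:: console

   $ spack mirror add local-cache ./spack-cache
   $ spack install --use-buildcache only ninja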
@@ -32,14 +32,9 @@ can't be found. You can readily check if any prerequisite for using Spack is mis

 Spack will take care of bootstrapping any missing dependency marked as [B]. Dependencies marked as [-] are instead required to be found on the system.

-   % echo $?
-   1
-
 In the case of the output shown above Spack detected that both ``clingo`` and ``gnupg``
 are missing and it's giving detailed information on why they are needed and whether
-they can be bootstrapped. The return code of this command summarizes the results, if any
-dependencies are missing the return code is ``1``, otherwise ``0``. Running a command that
-concretizes a spec, like:
+they can be bootstrapped. Running a command that concretize a spec, like:

 .. code-block:: console

@@ -49,7 +44,7 @@ concretizes a spec, like:
    ==> Installing "clingo-bootstrap@spack%apple-clang@12.0.0~docs~ipo+python build_type=Release arch=darwin-catalina-x86_64" from a buildcache
    [ ... ]

-automatically triggers the bootstrapping of clingo from pre-built binaries as expected.
+triggers the bootstrapping of clingo from pre-built binaries as expected.

 Users can also bootstrap all the dependencies needed by Spack in a single command, which
 might be useful to setup containers or other similar environments:
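The single command alluded to above is ``spack bootstrap now`` in newer Spack; a sketch of checking prerequisites and then bootstrapping everything at once:

.. code-block:: console

   $ spack bootstrap status
   $ spack bootstrap now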
@@ -76,55 +76,6 @@ To build with with ``icx``, do ::

    spack install patchelf%oneapi

-
-Using oneAPI Spack environment
--------------------------------
-
-In this example, we build lammps with ``icx`` using Spack environment for oneAPI packages created by Intel. The
-compilers are installed with Spack like in example above.
-
-Install the oneAPI compilers::
-
-   spack install intel-oneapi-compilers
-
-Add the compilers to your ``compilers.yaml`` so Spack can use them::
-
-   spack compiler add `spack location -i intel-oneapi-compilers`/compiler/latest/linux/bin/intel64
-   spack compiler add `spack location -i intel-oneapi-compilers`/compiler/latest/linux/bin
-
-Verify that the compilers are available::
-
-   spack compiler list
-
-Clone `spack-configs <https://github.com/spack/spack-configs>`_ repo and activate Intel oneAPI CPU environment::
-
-   git clone https://github.com/spack/spack-configs
-   spack env activate spack-configs/INTEL/CPU
-   spack concretize -f
-
-`Intel oneAPI CPU environment <https://github.com/spack/spack-configs/blob/main/INTEL/CPU/spack.yaml>`_ contains applications tested and validated by Intel, this list is constantly extended. And currently it supports:
-
-- `Devito <https://www.devitoproject.org/>`_
-- `GROMACS <https://www.gromacs.org/>`_
-- `HPCG <https://www.hpcg-benchmark.org/>`_
-- `HPL <https://netlib.org/benchmark/hpl/>`_
-- `LAMMPS <https://www.lammps.org/#gsc.tab=0>`_
-- `OpenFOAM <https://www.openfoam.com/>`_
-- `Quantum Espresso <https://www.quantum-espresso.org/>`_
-- `STREAM <https://www.cs.virginia.edu/stream/>`_
-- `WRF <https://github.com/wrf-model/WRF>`_
-
-To build lammps with oneAPI compiler from this environment just run::
-
-   spack install lammps
-
-Compiled binaries can be find using::
-
-   spack cd -i lammps
-
-You can do the same for all other applications from this environment.
-
-
 Using oneAPI MPI to Satisfy a Virtual Dependence
 ------------------------------------------------------
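Even without the removed environment, requesting the oneAPI compilers on any spec follows the pattern already shown for ``patchelf``; a sketch, with ``lammps`` standing in for any package:

.. code-block:: console

   $ spack install lammps%oneapi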
@@ -32,7 +32,7 @@ By default, these phases run:

 .. code-block:: console

-   $ sip-build --verbose --target-dir ...
+   $ python configure.py --bindir ... --destdir ...
    $ make
    $ make install
@@ -41,30 +41,30 @@ By default, these phases run:
 Important files
 ^^^^^^^^^^^^^^^

-Each SIP package comes with a custom configuration file written in Python.
-For newer packages, this is called ``project.py``, while in older packages,
-it may be called ``configure.py``. This script contains instructions to build
-the project.
+Each SIP package comes with a custom ``configure.py`` build script,
+written in Python. This script contains instructions to build the project.

 ^^^^^^^^^^^^^^^^^^^^^^^^^
 Build system dependencies
 ^^^^^^^^^^^^^^^^^^^^^^^^^

-``SIPPackage`` requires several dependencies. Python and SIP are needed at build-time
-to run the aforementioned configure script. Python is also needed at run-time to
-actually use the installed Python library. And as we are building Python bindings
-for C/C++ libraries, Python is also needed as a link dependency. All of these
-dependencies are automatically added via the base class.
+``SIPPackage`` requires several dependencies. Python is needed to run
+the ``configure.py`` build script, and to run the resulting Python
+libraries. Qt is needed to provide the ``qmake`` command. SIP is also
+needed to build the package. All of these dependencies are automatically
+added via the base class

 .. code-block:: python

-   extends("python", type=("build", "link", "run"))
-   depends_on("py-sip", type="build")
+   extends('python')
+
+   depends_on('qt', type='build')

-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-Passing arguments to ``sip-build``
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+   depends_on('py-sip', type='build')
+
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Passing arguments to ``configure.py``
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

 Each phase comes with a ``<phase_args>`` function that can be used to pass
 arguments to that particular phase. For example, if you need to pass
@@ -72,11 +72,11 @@ arguments to the configure phase, you can use:

 .. code-block:: python

-   def configure_args(self):
-       return ["--no-python-dbus"]
+   def configure_args(self, spec, prefix):
+       return ['--no-python-dbus']


-A list of valid options can be found by running ``sip-build --help``.
+A list of valid options can be found by running ``python configure.py --help``.

 ^^^^^^^
 Testing
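For a concrete instance of a ``SIPPackage`` exercising the phases described above, installing one of the SIP-based recipes is enough; a sketch, with ``py-pyqt5`` as one such package in the built-in repository:

.. code-block:: console

   $ spack install py-pyqt5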
@@ -97,7 +97,9 @@ class PatchedPythonDomain(PythonDomain):
     def resolve_xref(self, env, fromdocname, builder, typ, target, node, contnode):
         if "refspecific" in node:
             del node["refspecific"]
-        return super().resolve_xref(env, fromdocname, builder, typ, target, node, contnode)
+        return super(PatchedPythonDomain, self).resolve_xref(
+            env, fromdocname, builder, typ, target, node, contnode
+        )


 #

@@ -147,6 +149,7 @@ def setup(sphinx):
 # Get nice vector graphics
 graphviz_output_format = "svg"

+
 # Add any paths that contain templates here, relative to this directory.
 templates_path = ["_templates"]

@@ -214,7 +217,6 @@ def setup(sphinx):
     # Spack classes that intersphinx is unable to resolve
     ("py:class", "spack.version.StandardVersion"),
     ("py:class", "spack.spec.DependencySpec"),
-    ("py:class", "spack.spec.SpecfileReaderBase"),
     ("py:class", "spack.install_test.Pb"),
 ]

@@ -231,8 +233,30 @@ def setup(sphinx):
 # If true, sectionauthor and moduleauthor directives will be shown in the
 # output. They are ignored by default.
 # show_authors = False
-sys.path.append("./_pygments")
-pygments_style = "style.SpackStyle"
+
+# The name of the Pygments (syntax highlighting) style to use.
+# We use our own extension of the default style with a few modifications
+from pygments.style import Style
+from pygments.styles.default import DefaultStyle
+from pygments.token import Comment, Generic, Text
+
+
+class SpackStyle(DefaultStyle):
+    styles = DefaultStyle.styles.copy()
+    background_color = "#f4f4f8"
+    styles[Generic.Output] = "#355"
+    styles[Generic.Prompt] = "bold #346ec9"
+
+
+import pkg_resources
+
+dist = pkg_resources.Distribution(__file__)
+sys.path.append(".")  # make 'conf' module findable
+ep = pkg_resources.EntryPoint.parse("spack = conf:SpackStyle", dist=dist)
+dist._ep_map = {"pygments.styles": {"plugin1": ep}}
+pkg_resources.working_set.add(dist)
+
+pygments_style = "spack"

 # A list of ignored prefixes for module index sorting.
 # modindex_common_prefix = []

@@ -317,15 +341,16 @@ def setup(sphinx):
 # Output file base name for HTML help builder.
 htmlhelp_basename = "Spackdoc"

+
 # -- Options for LaTeX output --------------------------------------------------

 latex_elements = {
     # The paper size ('letterpaper' or 'a4paper').
-    # 'papersize': 'letterpaper',
+    #'papersize': 'letterpaper',
     # The font size ('10pt', '11pt' or '12pt').
-    # 'pointsize': '10pt',
+    #'pointsize': '10pt',
     # Additional stuff for the LaTeX preamble.
-    # 'preamble': '',
+    #'preamble': '',
 }

 # Grouping the document tree into LaTeX files. List of tuples
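Whichever registration mechanism is in effect, the custom style can be sanity-checked outside Sphinx; a sketch assuming the newer (``-`` side) layout with ``_pygments/style.py``, run from ``lib/spack/docs``:

.. code-block:: console

   $ python -c "import sys; sys.path.append('_pygments'); from style import SpackStyle; print(SpackStyle.background_color)"
   #f4f4f8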
@@ -292,13 +292,12 @@ It is also worth noting that:
    non_bindable_shared_objects = ["libinterface.so"]

 ----------------------
-``install_status``
+``terminal_title``
 ----------------------

-When set to ``true``, Spack will show information about its current progress
-as well as the current and total package numbers. Progress is shown both
-in the terminal title and inline. Setting it to ``false`` will not show any
-progress information.
+By setting this option to ``true``, Spack will update the terminal's title to
+provide information about its current progress as well as the current and
+total package numbers.

 To work properly, this requires your terminal to reset its title after
 Spack has finished its work, otherwise Spack's status information will
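Either option can be toggled without editing the file by hand; a sketch using ``spack config add``, which writes the scoped setting shown:

.. code-block:: console

   $ spack config add config:terminal_title:true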
@@ -636,7 +636,7 @@ to customize the generation of container recipes:
      - No
    * - ``os_packages:command``
      - Tool used to manage system packages
-     - ``apt``, ``yum``, ``dnf``, ``dnf_epel``, ``zypper``, ``apk``, ``yum_amazon``
+     - ``apt``, ``yum``, ``zypper``, ``apk``, ``yum_amazon``
      - Only with custom base images
    * - ``os_packages:update``
      - Whether or not to update the list of available packages
@@ -916,9 +916,9 @@ function, as shown in the example below:
 .. code-block:: yaml

    projections:
-     zlib: "{name}-{version}"
-     ^mpi: "{name}-{version}/{^mpi.name}-{^mpi.version}-{compiler.name}-{compiler.version}"
-     all: "{name}-{version}/{compiler.name}-{compiler.version}"
+     zlib: {name}-{version}
+     ^mpi: {name}-{version}/{^mpi.name}-{^mpi.version}-{compiler.name}-{compiler.version}
+     all: {name}-{version}/{compiler.name}-{compiler.version}

 The entries in the projections configuration file must all be either
 specs or the keyword ``all``. For each spec, the projection used will
@@ -1132,11 +1132,11 @@ index once every package is pushed. Note how this target uses the generated
 example/push/%: example/install/%
 	@mkdir -p $(dir $@)
 	$(info About to push $(SPEC) to a buildcache)
-	$(SPACK) -e . buildcache push --allow-root --only=package $(BUILDCACHE_DIR) /$(HASH)
+	$(SPACK) -e . buildcache create --allow-root --only=package --directory $(BUILDCACHE_DIR) /$(HASH)
 	@touch $@

 push: $(addprefix example/push/,$(example/SPACK_PACKAGE_IDS))
 	$(info Updating the buildcache index)
-	$(SPACK) -e . buildcache update-index $(BUILDCACHE_DIR)
+	$(SPACK) -e . buildcache update-index --directory $(BUILDCACHE_DIR)
 	$(info Done!)
 	@touch $@
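Outside the generated Makefile, the same index refresh can be run by hand; the flag spelling is exactly what this hunk changes. A sketch using the newer (``-`` side) syntax, where ``./mirror`` is a hypothetical mirror path:

.. code-block:: console

   $ spack -e . buildcache update-index ./mirror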
@@ -76,7 +76,6 @@ or refer to the full manual below.
    chain
    extensions
    pipelines
-   signing

 .. toctree::
    :maxdepth: 2
@@ -275,12 +275,10 @@ of the installed software. For instance, in the snippet below:
       set:
         BAR: 'bar'
     # This anonymous spec selects any package that
-    # depends on mpi. The double colon at the
+    # depends on openmpi. The double colon at the
     # end clears the set of rules that matched so far.
-    ^mpi::
+    ^openmpi::
       environment:
-        prepend_path:
-          PATH: '{^mpi.prefix}/bin'
         set:
           BAR: 'baz'
     # Selects any zlib package

@@ -295,9 +293,7 @@ of the installed software. For instance, in the snippet below:
   - FOOBAR

 you are instructing Spack to set the environment variable ``BAR=bar`` for every module,
-unless the associated spec satisfies the abstract dependency ``^mpi`` in which case
-``BAR=baz``, and the directory containing the respective MPI executables is prepended
-to the ``PATH`` variable.
+unless the associated spec satisfies ``^openmpi`` in which case ``BAR=baz``.
 In addition in any spec that satisfies ``zlib`` the value ``foo`` will be
 prepended to ``LD_LIBRARY_PATH`` and in any spec that satisfies ``zlib%gcc@4.8``
 the variable ``FOOBAR`` will be unset.
@@ -400,30 +396,28 @@ that are already in the Lmod hierarchy.


 .. note::
-   Tcl and Lua modules also allow for explicit conflicts between modulefiles.
+   Tcl modules
+   Tcl modules also allow for explicit conflicts between modulefiles.

-   .. code-block:: yaml
+  .. code-block:: yaml

-      modules:
-        default:
-          enable:
-            - tcl
-          tcl:
-            projections:
-              all: '{name}/{version}-{compiler.name}-{compiler.version}'
-            all:
-              conflict:
-                - '{name}'
-                - 'intel/14.0.1'
+     modules:
+       default:
+         enable:
+           - tcl
+         tcl:
+           projections:
+             all: '{name}/{version}-{compiler.name}-{compiler.version}'
+           all:
+             conflict:
+               - '{name}'
+               - 'intel/14.0.1'

-   will create module files that will conflict with ``intel/14.0.1`` and with the
-   base directory of the same module, effectively preventing the possibility to
-   load two or more versions of the same software at the same time. The tokens
-   that are available for use in this directive are the same understood by the
-   :meth:`~spack.spec.Spec.format` method.
-
-   For Lmod and Environment Modules versions prior 4.2, it is important to
-   express the conflict on both modulefiles conflicting with each other.
+  will create module files that will conflict with ``intel/14.0.1`` and with the
+  base directory of the same module, effectively preventing the possibility to
+  load two or more versions of the same software at the same time. The tokens
+  that are available for use in this directive are the same understood by
+  the :meth:`~spack.spec.Spec.format` method.


 .. note::
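After editing ``modules.yaml`` along either of these lines, the module tree has to be regenerated for the change to take effect; a sketch:

.. code-block:: console

   $ spack module tcl refresh --delete-tree -y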
@@ -121,7 +121,7 @@ Since v0.19, Spack supports two ways of writing a package recipe. The most comm

        def url_for_version(self, version):
            if version >= Version("2.1.1"):
-               return super().url_for_version(version)
+               return super(Openjpeg, self).url_for_version(version)
            url_fmt = "https://github.com/uclouvain/openjpeg/archive/version.{0}.tar.gz"
            return url_fmt.format(version)

@@ -155,7 +155,7 @@ builder class explicitly. Using the same example as above, this reads:

        def url_for_version(self, version):
            if version >= Version("2.1.1"):
-               return super().url_for_version(version)
+               return super(Openjpeg, self).url_for_version(version)
            url_fmt = "https://github.com/uclouvain/openjpeg/archive/version.{0}.tar.gz"
            return url_fmt.format(version)
@@ -2243,7 +2243,7 @@ looks like this:
        url = "http://www.openssl.org/source/openssl-1.0.1h.tar.gz"

        version("1.0.1h", md5="8d6d684a9430d5cc98a62a5d8fbda8cf")
-       depends_on("zlib-api")
+       depends_on("zlib")

        parallel = False
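The ``zlib-api`` spelling on the ``-`` side is a virtual package; the recipes that can satisfy it (including the ``zlib-api: [zlib, zlib-ng+compat]`` default visible earlier in this compare) can be listed with a sketch like:

.. code-block:: console

   $ spack providers zlib-api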
@@ -3071,7 +3071,7 @@ follows:
        # The library provided by the bar virtual package
        @property
        def bar_libs(self):
-           return find_libraries("libFooBar", root=self.home, recursive=True)
+           return find_libraries("libFooBar", root=sef.home, recursive=True)

        # The baz virtual package home
        @property
@@ -1,13 +1,13 @@
-sphinx==6.2.1
-sphinxcontrib-programoutput==0.17
-sphinx_design==0.5.0
-sphinx-rtd-theme==1.2.2
-python-levenshtein==0.21.1
-docutils==0.18.1
-pygments==2.16.1
-urllib3==2.0.4
-pytest==7.4.0
-isort==5.12.0
-black==23.7.0
-flake8==6.1.0
-mypy==1.5.0
+# These dependencies should be installed using pip in order
+# to build the documentation.
+
+sphinx>=3.4,!=4.1.2,!=5.1.0
+sphinxcontrib-programoutput
+sphinx-design
+sphinx-rtd-theme
+python-levenshtein
+# Restrict to docutils <0.17 to workaround a list rendering issue in sphinx.
+# https://stackoverflow.com/questions/67542699
+docutils <0.17
+pygments <2.13
+urllib3 <2
@@ -1,478 +0,0 @@
|
||||
.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
|
||||
Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
|
||||
SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
.. _signing:
|
||||
|
||||
=====================
|
||||
Spack Package Signing
|
||||
=====================
|
||||
|
||||
The goal of package signing in Spack is to provide data integrity
|
||||
assurances around official packages produced by the automated Spack CI
|
||||
pipelines. These assurances directly address the security of Spack’s
|
||||
software supply chain by explaining why a security-conscious user can
|
||||
be reasonably justified in the belief that packages installed via Spack
|
||||
have an uninterrupted auditable trail back to change management
|
||||
decisions judged to be appropriate by the Spack maintainers. This is
|
||||
achieved through cryptographic signing of packages built by Spack CI
|
||||
pipelines based on code that has been transparently reviewed and
|
||||
approved on GitHub. This document describes the signing process for
|
||||
interested users.
|
||||
|
||||
.. _risks:
|
||||
|
||||
------------------------------
|
||||
Risks, Impact and Threat Model
|
||||
------------------------------
|
||||
|
||||
This document addresses the approach taken to safeguard Spack’s
|
||||
reputation with regard to the integrity of the package data produced by
|
||||
Spack’s CI pipelines. It does not address issues of data confidentiality
|
||||
(Spack is intended to be largely open source) or availability (efforts
|
||||
are described elsewhere). With that said the main reputational risk can
|
||||
be broadly categorized as a loss of faith in the data integrity due to a
|
||||
breach of the private key used to sign packages. Remediation of a
|
||||
private key breach would require republishing the public key with a
|
||||
revocation certificate, generating a new signing key, an assessment and
|
||||
potential rebuild/resigning of all packages since the key was breached,
|
||||
and finally direct intervention by every spack user to update their copy
|
||||
of Spack’s public keys used for local verification.
|
||||
|
||||
The primary threat model used in mitigating the risks of these stated
|
||||
impacts is one of individual error not malicious intent or insider
|
||||
threat. The primary objective is to avoid the above impacts by making a
|
||||
private key breach nearly impossible due to oversight or configuration
|
||||
error. Obvious and straightforward measures are taken to mitigate issues
|
||||
of malicious interference in data integrity and insider threats but
|
||||
these attack vectors are not systematically addressed. It should be hard
|
||||
to exfiltrate the private key intentionally, and almost impossible to
|
||||
leak the key by accident.
|
||||
|
||||
.. _overview:
|
||||
|
||||
-----------------
|
||||
Pipeline Overview
|
||||
-----------------
|
||||
|
||||
Spack pipelines build software through progressive stages where packages
|
||||
in later stages nominally depend on packages built in earlier stages.
|
||||
For both technical and design reasons these dependencies are not
|
||||
implemented through the default GitLab artifacts mechanism; instead
|
||||
built packages are uploaded to AWS S3 mirrors (buckets) where they are
|
||||
retrieved by subsequent stages in the pipeline. Two broad categories of
|
||||
pipelines exist: Pull Request (PR) pipelines and Develop/Release
|
||||
pipelines.
|
||||
|
||||
- PR pipelines are launched in response to pull requests made by
|
||||
trusted and untrusted users. Packages built on these pipelines upload
|
||||
code to quarantined AWS S3 locations which cache the built packages
|
||||
for the purposes of review and iteration on the changes proposed in
|
||||
the pull request. Packages built on PR pipelines can come from
|
||||
untrusted users so signing of these pipelines is not implemented.
|
||||
Jobs in these pipelines are executed via normal GitLab runners both
|
||||
within the AWS GitLab infrastructure and at affiliated institutions.
|
||||
- Develop and Release pipelines **sign** the packages they produce and carry
|
||||
strong integrity assurances that trace back to auditable change management
|
||||
decisions. These pipelines only run after members from a trusted group of
|
||||
reviewers verify that the proposed changes in a pull request are appropriate.
|
||||
Once the PR is merged, or a release is cut, a pipeline is run on protected
|
||||
GitLab runners which provide access to the required signing keys within the
|
||||
job. Intermediary keys are used to sign packages in each stage of the
|
||||
pipeline as they are built and a final job officially signs each package
|
||||
external to any specific packages’ build environment. An intermediate key
|
||||
exists in the AWS infrastructure and for each affiliated instritution that
|
||||
maintains protected runners. The runners that execute these pipelines
|
||||
exclusively accept jobs from protected branches meaning the intermediate keys
|
||||
are never exposed to unreviewed code and the official keys are never exposed
|
||||
to any specific build environment.
|
||||
|
||||
.. _key_architecture:
|
||||
|
||||
----------------
|
||||
Key Architecture
|
||||
----------------
|
||||
|
||||
Spack’s CI process uses public-key infrastructure (PKI) based on GNU Privacy
|
||||
Guard (gpg) keypairs to sign public releases of spack package metadata, also
|
||||
called specs. Two classes of GPG keys are involved in the process to reduce the
|
||||
impact of an individual private key compromise, these key classes are the
|
||||
*Intermediate CI Key* and *Reputational Key*. Each of these keys has signing
|
||||
sub-keys that are used exclusively for signing packages. This can be confusing
|
||||
so for the purpose of this explanation we’ll refer to Root and Signing keys.
|
||||
Each key has a private and a public component as well as one or more identities
|
||||
and zero or more signatures.
|
||||
|
||||
-------------------
|
||||
Intermediate CI Key
|
||||
-------------------
|
||||
|
||||
The Intermediate key class is used to sign and verify packages between stages
|
||||
within a develop or release pipeline. An intermediate key exists for the AWS
|
||||
infrastructure as well as each affiliated institution that maintains protected
|
||||
runners. These intermediate keys are made available to the GitLab execution
|
||||
environment building the package so that the package’s dependencies may be
|
||||
verified by the Signing Intermediate CI Public Key and the final package may be
|
||||
signed by the Signing Intermediate CI Private Key.
|
||||
|
||||
|
||||
+---------------------------------------------------------------------------------------------------------+
|
||||
| **Intermediate CI Key (GPG)** |
|
||||
+==================================================+======================================================+
|
||||
| Root Intermediate CI Private Key (RSA 4096)# | Root Intermediate CI Public Key (RSA 4096) |
|
||||
+--------------------------------------------------+------------------------------------------------------+
|
||||
| Signing Intermediate CI Private Key (RSA 4096) | Signing Intermediate CI Public Key (RSA 4096) |
|
||||
+--------------------------------------------------+------------------------------------------------------+
|
||||
| Identity: “Intermediate CI Key <maintainers@spack.io>” |
|
||||
+---------------------------------------------------------------------------------------------------------+
|
||||
| Signatures: None |
|
||||
+---------------------------------------------------------------------------------------------------------+
|
||||
|
||||
|
||||
The *Root intermediate CI Private Key*\ Is stripped out of the GPG key and
|
||||
stored offline completely separate from Spack’s infrastructure. This allows the
|
||||
core development team to append revocation certificates to the GPG key and
|
||||
issue new sub-keys for use in the pipeline. It is our expectation that this
|
||||
will happen on a semi regular basis. A corollary of this is that *this key
|
||||
should not be used to verify package integrity outside the internal CI process.*
|
||||
|
||||
----------------
|
||||
Reputational Key
|
||||
----------------
|
||||
|
||||
The Reputational Key is the public facing key used to sign complete groups of
|
||||
development and release packages. Only one key pair exsits in this class of
|
||||
keys. In contrast to the Intermediate CI Key the Reputational Key *should* be
|
||||
used to verify package integrity. At the end of develop and release pipeline a
|
||||
final pipeline job pulls down all signed package metadata built by the pipeline,
|
||||
verifies they were signed with an Intermediate CI Key, then strips the
|
||||
Intermediate CI Key signature from the package and re-signs them with the
|
||||
Signing Reputational Private Key. The officially signed packages are then
|
||||
uploaded back to the AWS S3 mirror. Please note that separating use of the
|
||||
reputational key into this final job is done to prevent leakage of the key in a
|
||||
spack package. Because the Signing Reputational Private Key is never exposed to
|
||||
a build job it cannot accidentally end up in any built package.
|
||||
|
||||
|
||||
+---------------------------------------------------------------------------------------------------------+
|
||||
| **Reputational Key (GPG)** |
|
||||
+==================================================+======================================================+
|
||||
| Root Reputational Private Key (RSA 4096)# | Root Reputational Public Key (RSA 4096) |
|
||||
+--------------------------------------------------+------------------------------------------------------+
|
||||
| Signing Reputational Private Key (RSA 4096) | Signing Reputational Public Key (RSA 4096) |
|
||||
+--------------------------------------------------+------------------------------------------------------+
|
||||
| Identity: “Spack Project <maintainers@spack.io>” |
|
||||
+---------------------------------------------------------------------------------------------------------+
|
||||
| Signatures: Signed by core development team [#f1]_ |
|
||||
+---------------------------------------------------------------------------------------------------------+
|
||||
|
||||
The Root Reputational Private Key is stripped out of the GPG key and stored
|
||||
offline completely separate from Spack’s infrastructure. This allows the core
|
||||
development team to append revocation certificates to the GPG key in the
|
||||
unlikely event that the Signing Reputation Private Key is compromised. In
|
||||
general it is the expectation that rotating this key will happen infrequently if
|
||||
at all. This should allow relatively transparent verification for the end-user
|
||||
community without needing deep familiarity with GnuPG or Public Key
|
||||
Infrastructure.
|
||||
|
||||
|
||||
.. _build_cache_format:
|
||||
|
||||
------------------
|
||||
Build Cache Format
|
||||
------------------
|
||||
|
||||
A binary package consists of a metadata file unambiguously defining the
built package (and including other details such as how to relocate it)
and the installation directory of the package stored as a compressed
archive file. The metadata files can either be unsigned, in which case
the contents are simply the json-serialized concrete spec plus metadata,
or they can be signed, in which case the json-serialized concrete spec
plus metadata is wrapped in a gpg cleartext signature. Built package
metadata files are named to indicate the operating system and
architecture for which the package was built as well as the compiler
used to build it and the package's name and version. For example::

   linux-ubuntu18.04-haswell-gcc-7.5.0-zlib-1.2.12-llv2ysfdxnppzjrt5ldybb5c52qbmoow.spec.json.sig

would contain the concrete spec and binary metadata for a binary package
of ``zlib@1.2.12``, built for the ``ubuntu`` operating system and ``haswell``
architecture. The id of the built package exists in the name of the file
as well (after the package name and version) and in this case begins
with ``llv2ys``. The id distinguishes a particular built package from all
other built packages with the same os/arch, compiler, name, and version.
Below is an example of a signed binary package metadata file. Such a
file would live in the ``build_cache`` directory of a binary mirror::

   -----BEGIN PGP SIGNED MESSAGE-----
   Hash: SHA512

   {
       "spec": {
           <concrete-spec-contents-omitted>
       },

       "buildcache_layout_version": 1,
       "binary_cache_checksum": {
           "hash_algorithm": "sha256",
           "hash": "4f1e46452c35a5e61bcacca205bae1bfcd60a83a399af201a29c95b7cc3e1423"
       }
   }

   -----BEGIN PGP SIGNATURE-----

   iQGzBAEBCgAdFiEETZn0sLle8jIrdAPLx/P+voVcifMFAmKAGvwACgkQx/P+voVc
   ifNoVgv/VrhA+wurVs5GB9PhmMA1m5U/AfXZb4BElDRwpT8ZcTPIv5X8xtv60eyn
   4EOneGVbZoMThVxgev/NKARorGmhFXRqhWf+jknJZ1dicpqn/qpv34rELKUpgXU+
   QDQ4d1P64AIdTczXe2GI9ZvhOo6+bPvK7LIsTkBbtWmopkomVxF0LcMuxAVIbA6b
   887yBvVO0VGlqRnkDW7nXx49r3AG2+wDcoU1f8ep8QtjOcMNaPTPJ0UnjD0VQGW6
   4ZFaGZWzdo45MY6tF3o5mqM7zJkVobpoW3iUz6J5tjz7H/nMlGgMkUwY9Kxp2PVH
   qoj6Zip3LWplnl2OZyAY+vflPFdFh12Xpk4FG7Sxm/ux0r+l8tCAPvtw+G38a5P7
   QEk2JBr8qMGKASmnRlJUkm1vwz0a95IF3S9YDfTAA2vz6HH3PtsNLFhtorfx8eBi
   Wn5aPJAGEPOawEOvXGGbsH4cDEKPeN0n6cy1k92uPEmBLDVsdnur8q42jk5c2Qyx
   j3DXty57
   =3gvm
   -----END PGP SIGNATURE-----

If a user has trusted the public key associated with the private key
used to sign the above spec file, the signature can be verified with
gpg as follows::

   $ gpg --verify linux-ubuntu18.04-haswell-gcc-7.5.0-zlib-1.2.12-llv2ysfdxnppzjrt5ldybb5c52qbmoow.spec.json.sig

The metadata (regardless of whether it is signed or unsigned) contains the
checksum of the ``.spack`` file containing the actual installation. The
checksum should be compared to a checksum computed locally on the ``.spack``
file to ensure the contents have not changed since the binary spec plus
metadata were signed. The ``.spack`` files are actually tarballs containing
the compressed archive of the install tree. These files, along with the
metadata files, live within the ``build_cache`` directory of the mirror,
and together are organized as follows::

   build_cache/
      # unsigned metadata (for indexing, contains sha256 of .spack file)
      <arch>-<compiler>-<name>-<ver>-24zvipcqgg2wyjpvdq2ajy5jnm564hen.spec.json
      # clearsigned metadata (same as above, but signed)
      <arch>-<compiler>-<name>-<ver>-24zvipcqgg2wyjpvdq2ajy5jnm564hen.spec.json.sig
      <arch>/
         <compiler>/
            <name>-<ver>/
               # tar.gz-compressed prefix (may support more compression formats later)
               <arch>-<compiler>-<name>-<ver>-24zvipcqgg2wyjpvdq2ajy5jnm564hen.spack

Uncompressing and extracting the ``.spack`` file results in the install tree.
This is in contrast to previous versions of Spack, where the ``.spack`` file
contained a (duplicated) metadata file, a signature file, and a nested
tarball containing the install tree.
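
As a quick illustration, the checksum comparison and extraction can be done
by hand. This is a minimal sketch using the generic placeholder file names
from the layout above; the ``jq`` query follows the unsigned metadata shown
earlier::

   # compute the sha256 of the tarball and compare it to the "hash"
   # value recorded in the metadata
   $ sha256sum <arch>-<compiler>-<name>-<ver>-<hash>.spack
   $ jq -r .binary_cache_checksum.hash <arch>-<compiler>-<name>-<ver>-<hash>.spec.json

   # if the two values match, the install tree can be extracted directly
   $ tar -xzf <arch>-<compiler>-<name>-<ver>-<hash>.spack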

.. _internal_implementation:

-----------------------
Internal Implementation
-----------------------

The technical implementation of the pipeline signing process includes
components defined in Amazon Web Services, the Kubernetes cluster, at
affiliated institutions, and the GitLab/GitLab Runner deployment. We present
the technical implementation in two interdependent sections. The first
addresses how secrets are managed through the lifecycle of a develop or
release pipeline. The second section describes how GitLab Runner and
pipelines are configured and managed to support secure automated signing.

Secrets Management
^^^^^^^^^^^^^^^^^^

As stated above, the Root Private Keys (intermediate and reputational)
are stripped from the GPG keys and stored outside Spack's
infrastructure.

.. warning::

   **TODO**

   - Explanation here about where and how access is handled for these keys.
   - Both Root private keys are protected with strong passwords
   - Who has access to these and how?

**Intermediate CI Key**
-----------------------

Multiple intermediate CI signing keys exist: one Intermediate CI Key for jobs
run in AWS, and one key for each affiliated institution (e.g. University of
Oregon). Here we describe how the Intermediate CI Key is managed in AWS.

The Intermediate CI Key (including the Signing Intermediate CI Private Key)
is exported as an ASCII-armored file and stored in a Kubernetes secret called
``spack-intermediate-ci-signing-key``. For convenience, this same secret
contains an ASCII-armored export of just the *public* components of the
Reputational Key. This secret also contains the *public* components of each
of the affiliated institutions' Intermediate CI Keys. These are potentially
needed to verify dependent packages which may have been found in the public
mirror or built by a protected job running on an affiliated institution's
infrastructure in an earlier stage of the pipeline.

Procedurally, the ``spack-intermediate-ci-signing-key`` secret is used in
the following way:

1. A ``large-arm-prot`` or ``large-x86-prot`` protected runner picks up
   a job tagged ``protected`` from a protected GitLab branch. (See
   `Protected Runners and Reserved Tags <#_8bawjmgykv0b>`__.)
2. Based on its configuration, the runner creates a job Pod in the
   pipeline namespace and mounts the ``spack-intermediate-ci-signing-key``
   Kubernetes secret into the build container.
3. The Intermediate CI Key, the affiliated institutions' public keys, and
   the Reputational Public Key are imported into a keyring by the ``spack
   gpg …`` sub-command (a sketch of this step follows the list). This is
   initiated by the job's build script, which is created by the generate
   job at the beginning of the pipeline.
4. Assuming the package has dependencies, those specs are verified using
   the keyring.
5. The package is built and the spec.json is generated.
6. The spec.json is signed by the keyring and uploaded to the mirror's
   build cache.
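
A minimal sketch of the import step in (3); the mount paths and file names
here are hypothetical, not the actual paths used by the pipeline::

   # import the private Intermediate CI Key so built packages can be signed
   $ spack gpg trust /mnt/keys/intermediate-ci-signing-key.asc

   # import *public* keys only, used to verify packages built earlier
   $ spack gpg trust /mnt/keys/reputational-key.pub.asc
   $ spack gpg trust /mnt/keys/affiliated-institutions.pub.asc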

**Reputational Key**
--------------------

Because of the increased impact to end users in the case of a private
key breach, the Reputational Key is managed separately from the
Intermediate CI Keys and has additional controls. First, the Reputational
Key was generated outside of Spack's infrastructure and has been signed
by the core development team. The Reputational Key (along with the
Signing Reputational Private Key) was then ASCII-armor exported to a
file. Unlike the Intermediate CI Key, this exported file is not stored as
a base64-encoded secret in Kubernetes. Instead, *the key file itself* is
encrypted and stored in Kubernetes as the ``spack-signing-key-encrypted``
secret in the pipeline namespace.

The encryption of the exported Reputational Key (including the Signing
Reputational Private Key) is handled by `AWS Key Management Store (KMS) data
keys
<https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#data-keys>`__.
The private key material is decrypted and imported at the time of signing into
a memory-mounted temporary directory holding the keychain. The signing job
uses the `AWS Encryption SDK
<https://docs.aws.amazon.com/encryption-sdk/latest/developer-guide/crypto-cli.html>`__
(i.e. ``aws-encryption-cli``) to decrypt the Reputational Key. Permission to
decrypt the key is granted to the job Pod through a Kubernetes service account
used specifically for this, and only this, function. Finally, for convenience,
this same secret contains an ASCII-armored export of the *public* components
of the Intermediate CI Keys and the Reputational Key. This allows the signing
script to verify that packages were built by the pipeline (either on AWS or at
affiliated institutions), or signed previously as a part of a different
pipeline. This is done *before* decrypting and importing the Signing
Reputational Private Key material and officially signing the packages.

Procedurally, the ``spack-signing-key-encrypted`` secret is used in the
following way:

1. The ``spack-package-signing-gitlab-runner`` protected runner picks
   up a job tagged ``notary`` from a protected GitLab branch (see
   `Protected Runners and Reserved Tags <#_8bawjmgykv0b>`__).
2. Based on its configuration, the runner creates a job Pod in the
   pipeline namespace. The job is run in a stripped-down, purpose-built
   Docker image, ``ghcr.io/spack/notary:latest``. The runner is
   configured to only allow running jobs with this image.
3. The runner also mounts the ``spack-signing-key-encrypted`` secret to
   a path on disk. Note that this becomes several files on disk: the
   public components of the Intermediate CI Keys, the public components
   of the Reputational Key, and an AWS KMS encrypted file containing the
   Signing Reputational Private Key.
4. In addition to the secret, the runner creates a tmpfs memory-mounted
   directory where the GnuPG keyring will be created to verify, and
   then re-sign, the package specs.
5. The job script syncs all spec.json.sig files from the build cache to
   a working directory in the job's execution environment.
6. The job script then runs the ``sign.sh`` script built into the
   notary Docker image.
7. The ``sign.sh`` script imports the public components of the
   Reputational and Intermediate CI Keys and uses them to verify good
   signatures on the spec.json.sig files. If any signed spec does not
   verify, the job immediately fails.
8. Assuming all specs are verified, the ``sign.sh`` script then unpacks
   the spec json data from the signed file in preparation for being
   re-signed with the Reputational Key.
9. The private components of the Reputational Key are decrypted to
   standard out using ``aws-encryption-cli`` directly into a ``gpg
   --import …`` statement which imports the key into the keyring mounted
   in-memory (a sketch of this step follows the list).
10. The private key is then used to sign each of the json specs and the
    keyring is removed from disk.
11. The re-signed json specs are resynced to the AWS S3 Mirror and the
    public signing of the packages for the develop or release pipeline
    that created them is complete.
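
A minimal sketch of the decrypt-and-import step in (9). The file and keyring
paths are hypothetical, and the KMS wrapping-key arguments that
``aws-encryption-cli`` requires are omitted; the actual ``sign.sh`` logic may
differ::

   # decrypt the KMS-encrypted private key to stdout and pipe it straight
   # into gpg so the key material never touches persistent storage
   $ aws-encryption-cli --decrypt \
         --input /mnt/keys/reputational-key.kms-encrypted \
         --output - | gpg --homedir /tmp/gpg-keyring --import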

Non-service-account access to the private components of the Reputational
Key is managed through access to the symmetric secret in KMS used to
encrypt the data key (which in turn is used to encrypt the GnuPG key; see
the `Encryption SDK Documentation
<https://docs.aws.amazon.com/encryption-sdk/latest/developer-guide/crypto-cli-examples.html#cli-example-encrypt-file>`__).
A small, trusted subset of the core development team are the only
individuals with access to this symmetric key.

.. _protected_runners:

Protected Runners and Reserved Tags
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

Spack has a large number of GitLab Runners operating in its build farm.
These include runners deployed in the AWS Kubernetes cluster as well as
runners deployed at affiliated institutions. The majority of runners are
shared runners that operate across projects in gitlab.spack.io. These
runners pick up jobs primarily from the spack/spack project and execute
them in PR pipelines.

A small number of runners operating on AWS and at affiliated institutions are
registered as specific *protected* runners on the spack/spack project. In
addition to protected runners there are protected branches on the spack/spack
project. These are the ``develop`` branch, any release branch (i.e. managed
with the ``releases/v*`` wildcard), and any tag branch (managed with the
``v*`` wildcard). Finally, Spack's pipeline generation code reserves certain
tags to make sure jobs are routed to the correct runners; these tags are
``public``, ``protected``, and ``notary``. Understanding how all this works
together to protect secrets and provide integrity assurances can be a little
confusing, so let's break these down:

- **Protected Branches** - Protected branches in Spack prevent anyone
  other than Maintainers in GitLab from pushing code. In the case of
  Spack, the only Maintainer-level entity pushing code to protected
  branches is Spack bot. Protecting branches also marks them in such a
  way that Protected Runners will only run jobs from those branches.
- **Protected Runners** - Protected Runners only run jobs from protected
  branches. Because protected runners have access to secrets, it's critical
  that they not run jobs from untrusted code (i.e. PR branches). If they
  did, it would be possible for a PR branch to tag a job in such a way that
  a protected runner executed that job and mounted secrets into a code
  execution environment that had not been reviewed by Spack maintainers.
  Note, however, that in the absence of the tagging used to route jobs,
  public runners *could* run jobs from protected branches. No secrets would
  be at risk of being breached because non-protected runners do not have
  access to those secrets; lack of secrets would, however, cause the jobs
  to fail.
- **Reserved Tags** - To mitigate the issue of public runners picking up
  protected jobs, Spack uses a small set of "reserved" job tags (note that
  these are *job* tags, not git tags). These tags are ``public``,
  ``protected``, and ``notary``. The majority of jobs executed in Spack's
  GitLab instance are executed via a ``generate`` job. The generate job
  code systematically ensures that no user-defined configuration sets these
  tags. Instead, the ``generate`` job sets these tags based on rules
  related to the branch where the pipeline originated. If the job is part
  of a pipeline on a PR branch, it sets the ``public`` tag. If the job is
  part of a pipeline on a protected branch, it sets the ``protected`` tag.
  Finally, if the job is the package signing job and it is running in a
  pipeline that is part of a protected branch, then it sets the ``notary``
  tag.

Protected Runners are configured to only run jobs from protected branches.
Only jobs running in pipelines on protected branches are tagged with the
``protected`` or ``notary`` tags. This tightly couples jobs on protected
branches to protected runners that provide access to the secrets required
to sign the built packages. The secrets can **only** be accessed via:

1. Runners under direct control of the core development team.
2. Runners under direct control of trusted maintainers at affiliated
   institutions.
3. Code running in the automated pipeline that has been reviewed by the
   Spack maintainers and judged to be appropriate.

Other attempts (whether through malicious intent or incompetence) can at
worst grab jobs intended for protected runners, which will cause those
jobs to fail, alerting both Spack maintainers and the core development
team.

.. [#f1]
   The Reputational Key has also cross-signed core development team
   keys.

428
lib/spack/env/cc
vendored
@@ -416,14 +416,30 @@ input_command="$*"
# The lists are all bell-separated to be as flexible as possible, as their
# contents may come from the command line, from ' '-separated lists,
# ':'-separated lists, etc.
include_dirs_list=""
lib_dirs_list=""
rpath_dirs_list=""
system_include_dirs_list=""
system_lib_dirs_list=""
system_rpath_dirs_list=""
isystem_system_include_dirs_list=""
isystem_include_dirs_list=""
libs_list=""
other_args_list=""

# Global state for keeping track of -Wl,-rpath -Wl,/path
wl_expect_rpath=no

# Same, but for -Xlinker -rpath -Xlinker /path
xlinker_expect_rpath=no

parse_Wl() {
while [ $# -ne 0 ]; do
if [ "$wl_expect_rpath" = yes ]; then
if system_dir "$1"; then
append return_system_rpath_dirs_list "$1"
append system_rpath_dirs_list "$1"
else
append return_rpath_dirs_list "$1"
append rpath_dirs_list "$1"
fi
wl_expect_rpath=no
else
@@ -433,9 +449,9 @@ parse_Wl() {
if [ -z "$arg" ]; then
shift; continue
elif system_dir "$arg"; then
append return_system_rpath_dirs_list "$arg"
append system_rpath_dirs_list "$arg"
else
append return_rpath_dirs_list "$arg"
append rpath_dirs_list "$arg"
fi
;;
--rpath=*)
@@ -443,9 +459,9 @@ parse_Wl() {
if [ -z "$arg" ]; then
shift; continue
elif system_dir "$arg"; then
append return_system_rpath_dirs_list "$arg"
append system_rpath_dirs_list "$arg"
else
append return_rpath_dirs_list "$arg"
append rpath_dirs_list "$arg"
fi
;;
-rpath|--rpath)
@@ -459,7 +475,7 @@ parse_Wl() {
return 1
;;
*)
append return_other_args_list "-Wl,$1"
append other_args_list "-Wl,$1"
;;
esac
fi
@@ -467,210 +483,177 @@ parse_Wl() {
done
}

categorize_arguments() {

unset IFS
while [ $# -ne 0 ]; do

return_other_args_list=""
return_isystem_was_used=""
return_isystem_system_include_dirs_list=""
return_isystem_include_dirs_list=""
return_system_include_dirs_list=""
return_include_dirs_list=""
return_system_lib_dirs_list=""
return_lib_dirs_list=""
return_system_rpath_dirs_list=""
return_rpath_dirs_list=""
# an RPATH to be added after the case statement.
rp=""

# Global state for keeping track of -Wl,-rpath -Wl,/path
wl_expect_rpath=no
# Multiple consecutive spaces in the command line can
# result in blank arguments
if [ -z "$1" ]; then
shift
continue
fi

# Same, but for -Xlinker -rpath -Xlinker /path
xlinker_expect_rpath=no

while [ $# -ne 0 ]; do

# an RPATH to be added after the case statement.
rp=""

# Multiple consecutive spaces in the command line can
# result in blank arguments
if [ -z "$1" ]; then
shift
continue
fi

if [ -n "${SPACK_COMPILER_FLAGS_KEEP}" ] ; then
# NOTE: the eval is required to allow `|` alternatives inside the variable
eval "\
case \"\$1\" in
$SPACK_COMPILER_FLAGS_KEEP)
append return_other_args_list \"\$1\"
shift
continue
;;
esac
"
fi
# the replace list is a space-separated list of pipe-separated pairs,
# the first in each pair is the original prefix to be matched, the
# second is the replacement prefix
if [ -n "${SPACK_COMPILER_FLAGS_REPLACE}" ] ; then
for rep in ${SPACK_COMPILER_FLAGS_REPLACE} ; do
before=${rep%|*}
after=${rep#*|}
eval "\
stripped=\"\${1##$before}\"
"
if [ "$stripped" = "$1" ] ; then
continue
fi

replaced="$after$stripped"

# it matched, remove it
if [ -n "${SPACK_COMPILER_FLAGS_KEEP}" ] ; then
# NOTE: the eval is required to allow `|` alternatives inside the variable
eval "\
case \"\$1\" in
$SPACK_COMPILER_FLAGS_KEEP)
append other_args_list \"\$1\"
shift

if [ -z "$replaced" ] ; then
# completely removed, continue OUTER loop
continue 2
fi

# re-build argument list with replacement
set -- "$replaced" "$@"
done
fi

case "$1" in
-isystem*)
arg="${1#-isystem}"
return_isystem_was_used=true
if [ -z "$arg" ]; then shift; arg="$1"; fi
if system_dir "$arg"; then
append return_isystem_system_include_dirs_list "$arg"
else
append return_isystem_include_dirs_list "$arg"
fi
;;
-I*)
arg="${1#-I}"
if [ -z "$arg" ]; then shift; arg="$1"; fi
if system_dir "$arg"; then
append return_system_include_dirs_list "$arg"
else
append return_include_dirs_list "$arg"
fi
;;
-L*)
arg="${1#-L}"
if [ -z "$arg" ]; then shift; arg="$1"; fi
if system_dir "$arg"; then
append return_system_lib_dirs_list "$arg"
else
append return_lib_dirs_list "$arg"
fi
;;
-l*)
# -loopopt=0 is generated erroneously in autoconf <= 2.69,
# and passed by ifx to the linker, which confuses it with a
# library. Filter it out.
# TODO: generalize filtering of args with an env var, so that
# TODO: we do not have to special case this here.
if { [ "$mode" = "ccld" ] || [ $mode = "ld" ]; } \
&& [ "$1" != "${1#-loopopt}" ]; then
shift
continue
fi
arg="${1#-l}"
if [ -z "$arg" ]; then shift; arg="$1"; fi
append return_other_args_list "-l$arg"
;;
-Wl,*)
IFS=,
if ! parse_Wl ${1#-Wl,}; then
append return_other_args_list "$1"
fi
unset IFS
;;
-Xlinker)
shift
if [ $# -eq 0 ]; then
# -Xlinker without value: let the compiler error about it.
append return_other_args_list -Xlinker
xlinker_expect_rpath=no
break
elif [ "$xlinker_expect_rpath" = yes ]; then
# Register the path of -Xlinker -rpath <other args> -Xlinker <path>
if system_dir "$1"; then
append return_system_rpath_dirs_list "$1"
else
append return_rpath_dirs_list "$1"
fi
xlinker_expect_rpath=no
else
case "$1" in
-rpath=*)
arg="${1#-rpath=}"
if system_dir "$arg"; then
append return_system_rpath_dirs_list "$arg"
else
append return_rpath_dirs_list "$arg"
fi
;;
--rpath=*)
arg="${1#--rpath=}"
if system_dir "$arg"; then
append return_system_rpath_dirs_list "$arg"
else
append return_rpath_dirs_list "$arg"
fi
;;
-rpath|--rpath)
xlinker_expect_rpath=yes
;;
"$dtags_to_strip")
;;
*)
append return_other_args_list -Xlinker
append return_other_args_list "$1"
;;
esac
fi
;;
"$dtags_to_strip")
;;
*)
append return_other_args_list "$1"
continue
;;
esac
shift
done
"
fi
# the replace list is a space-separated list of pipe-separated pairs,
# the first in each pair is the original prefix to be matched, the
# second is the replacement prefix
if [ -n "${SPACK_COMPILER_FLAGS_REPLACE}" ] ; then
for rep in ${SPACK_COMPILER_FLAGS_REPLACE} ; do
before=${rep%|*}
after=${rep#*|}
eval "\
stripped=\"\${1##$before}\"
"
if [ "$stripped" = "$1" ] ; then
continue
fi

# We found `-Xlinker -rpath` but no matching value `-Xlinker /path`. Just append
# `-Xlinker -rpath` again and let the compiler or linker handle the error during arg
# parsing.
if [ "$xlinker_expect_rpath" = yes ]; then
append return_other_args_list -Xlinker
append return_other_args_list -rpath
replaced="$after$stripped"

# it matched, remove it
shift

if [ -z "$replaced" ] ; then
# completely removed, continue OUTER loop
continue 2
fi

# re-build argument list with replacement
set -- "$replaced" "$@"
done
fi

# Same, but for -Wl flags.
if [ "$wl_expect_rpath" = yes ]; then
append return_other_args_list -Wl,-rpath
fi
}
case "$1" in
-isystem*)
arg="${1#-isystem}"
isystem_was_used=true
if [ -z "$arg" ]; then shift; arg="$1"; fi
if system_dir "$arg"; then
append isystem_system_include_dirs_list "$arg"
else
append isystem_include_dirs_list "$arg"
fi
;;
-I*)
arg="${1#-I}"
if [ -z "$arg" ]; then shift; arg="$1"; fi
if system_dir "$arg"; then
append system_include_dirs_list "$arg"
else
append include_dirs_list "$arg"
fi
;;
-L*)
arg="${1#-L}"
if [ -z "$arg" ]; then shift; arg="$1"; fi
if system_dir "$arg"; then
append system_lib_dirs_list "$arg"
else
append lib_dirs_list "$arg"
fi
;;
-l*)
# -loopopt=0 is generated erroneously in autoconf <= 2.69,
# and passed by ifx to the linker, which confuses it with a
# library. Filter it out.
# TODO: generalize filtering of args with an env var, so that
# TODO: we do not have to special case this here.
if { [ "$mode" = "ccld" ] || [ $mode = "ld" ]; } \
&& [ "$1" != "${1#-loopopt}" ]; then
shift
continue
fi
arg="${1#-l}"
if [ -z "$arg" ]; then shift; arg="$1"; fi
append other_args_list "-l$arg"
;;
-Wl,*)
IFS=,
if ! parse_Wl ${1#-Wl,}; then
append other_args_list "$1"
fi
unset IFS
;;
-Xlinker)
shift
if [ $# -eq 0 ]; then
# -Xlinker without value: let the compiler error about it.
append other_args_list -Xlinker
xlinker_expect_rpath=no
break
elif [ "$xlinker_expect_rpath" = yes ]; then
# Register the path of -Xlinker -rpath <other args> -Xlinker <path>
if system_dir "$1"; then
append system_rpath_dirs_list "$1"
else
append rpath_dirs_list "$1"
fi
xlinker_expect_rpath=no
else
case "$1" in
-rpath=*)
arg="${1#-rpath=}"
if system_dir "$arg"; then
append system_rpath_dirs_list "$arg"
else
append rpath_dirs_list "$arg"
fi
;;
--rpath=*)
arg="${1#--rpath=}"
if system_dir "$arg"; then
append system_rpath_dirs_list "$arg"
else
append rpath_dirs_list "$arg"
fi
;;
-rpath|--rpath)
xlinker_expect_rpath=yes
;;
"$dtags_to_strip")
;;
*)
append other_args_list -Xlinker
append other_args_list "$1"
;;
esac
fi
;;
"$dtags_to_strip")
;;
*)
append other_args_list "$1"
;;
esac
shift
done

categorize_arguments "$@"
include_dirs_list="$return_include_dirs_list"
lib_dirs_list="$return_lib_dirs_list"
rpath_dirs_list="$return_rpath_dirs_list"
system_include_dirs_list="$return_system_include_dirs_list"
system_lib_dirs_list="$return_system_lib_dirs_list"
system_rpath_dirs_list="$return_system_rpath_dirs_list"
isystem_was_used="$return_isystem_was_used"
isystem_system_include_dirs_list="$return_isystem_system_include_dirs_list"
isystem_include_dirs_list="$return_isystem_include_dirs_list"
other_args_list="$return_other_args_list"
# We found `-Xlinker -rpath` but no matching value `-Xlinker /path`. Just append
# `-Xlinker -rpath` again and let the compiler or linker handle the error during arg
# parsing.
if [ "$xlinker_expect_rpath" = yes ]; then
append other_args_list -Xlinker
append other_args_list -rpath
fi

# Same, but for -Wl flags.
if [ "$wl_expect_rpath" = yes ]; then
append other_args_list -Wl,-rpath
fi

#
# Add flags from Spack's cppflags, cflags, cxxflags, fcflags, fflags, and
@@ -690,14 +673,12 @@ elif [ "$SPACK_ADD_DEBUG_FLAGS" = "custom" ]; then
extend flags_list SPACK_DEBUG_FLAGS
fi

spack_flags_list=""

# Fortran flags come before CPPFLAGS
case "$mode" in
cc|ccld)
case $lang_flags in
F)
extend spack_flags_list SPACK_FFLAGS
extend flags_list SPACK_FFLAGS
;;
esac
;;
@@ -706,7 +687,7 @@ esac
# C preprocessor flags come before any C/CXX flags
case "$mode" in
cpp|as|cc|ccld)
extend spack_flags_list SPACK_CPPFLAGS
extend flags_list SPACK_CPPFLAGS
;;
esac

@@ -716,10 +697,10 @@ case "$mode" in
cc|ccld)
case $lang_flags in
C)
extend spack_flags_list SPACK_CFLAGS
extend flags_list SPACK_CFLAGS
;;
CXX)
extend spack_flags_list SPACK_CXXFLAGS
extend flags_list SPACK_CXXFLAGS
;;
esac

@@ -731,25 +712,10 @@ esac
# Linker flags
case "$mode" in
ld|ccld)
extend spack_flags_list SPACK_LDFLAGS
extend flags_list SPACK_LDFLAGS
;;
esac

IFS="$lsep"
categorize_arguments $spack_flags_list
unset IFS
spack_flags_include_dirs_list="$return_include_dirs_list"
spack_flags_lib_dirs_list="$return_lib_dirs_list"
spack_flags_rpath_dirs_list="$return_rpath_dirs_list"
spack_flags_system_include_dirs_list="$return_system_include_dirs_list"
spack_flags_system_lib_dirs_list="$return_system_lib_dirs_list"
spack_flags_system_rpath_dirs_list="$return_system_rpath_dirs_list"
spack_flags_isystem_was_used="$return_isystem_was_used"
spack_flags_isystem_system_include_dirs_list="$return_isystem_system_include_dirs_list"
spack_flags_isystem_include_dirs_list="$return_isystem_include_dirs_list"
spack_flags_other_args_list="$return_other_args_list"


# On macOS insert headerpad_max_install_names linker flag
if [ "$mode" = ld ] || [ "$mode" = ccld ]; then
if [ "${SPACK_SHORT_SPEC#*darwin}" != "${SPACK_SHORT_SPEC}" ]; then
@@ -775,8 +741,6 @@ if [ "$mode" = ccld ] || [ "$mode" = ld ]; then
extend lib_dirs_list SPACK_LINK_DIRS
fi

libs_list=""

# add RPATHs if we're in in any linking mode
case "$mode" in
ld|ccld)
@@ -805,16 +769,12 @@ args_list="$flags_list"

# Insert include directories just prior to any system include directories
# NOTE: adding ${lsep} to the prefix here turns every added element into two
extend args_list spack_flags_include_dirs_list "-I"
extend args_list include_dirs_list "-I"
extend args_list spack_flags_isystem_include_dirs_list "-isystem${lsep}"
extend args_list isystem_include_dirs_list "-isystem${lsep}"

case "$mode" in
cpp|cc|as|ccld)
if [ "$spack_flags_isystem_was_used" = "true" ]; then
extend args_list SPACK_INCLUDE_DIRS "-isystem${lsep}"
elif [ "$isystem_was_used" = "true" ]; then
if [ "$isystem_was_used" = "true" ]; then
extend args_list SPACK_INCLUDE_DIRS "-isystem${lsep}"
else
extend args_list SPACK_INCLUDE_DIRS "-I"
@@ -822,15 +782,11 @@ case "$mode" in
;;
esac

extend args_list spack_flags_system_include_dirs_list -I
extend args_list system_include_dirs_list -I
extend args_list spack_flags_isystem_system_include_dirs_list "-isystem${lsep}"
extend args_list isystem_system_include_dirs_list "-isystem${lsep}"

# Library search paths
extend args_list spack_flags_lib_dirs_list "-L"
extend args_list lib_dirs_list "-L"
extend args_list spack_flags_system_lib_dirs_list "-L"
extend args_list system_lib_dirs_list "-L"

# RPATHs arguments
@@ -839,25 +795,20 @@ case "$mode" in
if [ -n "$dtags_to_add" ] ; then
append args_list "$linker_arg$dtags_to_add"
fi
extend args_list spack_flags_rpath_dirs_list "$rpath"
extend args_list rpath_dirs_list "$rpath"
extend args_list spack_flags_system_rpath_dirs_list "$rpath"
extend args_list system_rpath_dirs_list "$rpath"
;;
ld)
if [ -n "$dtags_to_add" ] ; then
append args_list "$dtags_to_add"
fi
extend args_list spack_flags_rpath_dirs_list "-rpath${lsep}"
extend args_list rpath_dirs_list "-rpath${lsep}"
extend args_list spack_flags_system_rpath_dirs_list "-rpath${lsep}"
extend args_list system_rpath_dirs_list "-rpath${lsep}"
;;
esac

# Other arguments from the input command
extend args_list other_args_list
extend args_list spack_flags_other_args_list

# Inject SPACK_LDLIBS, if supplied
extend args_list libs_list "-l"
@@ -913,4 +864,3 @@ fi
# Execute the full command, preserving spaces with IFS set
# to the alarm bell separator.
IFS="$lsep"; exec $full_command_list
7
lib/spack/external/ctest_log_parser.py
vendored
@@ -65,6 +65,9 @@
up to date with CTest, just make sure the ``*_matches`` and
``*_exceptions`` lists are kept up to date with CTest's build handler.
"""
from __future__ import print_function
from __future__ import division

import re
import math
import multiprocessing
@@ -208,7 +211,7 @@
]


class LogEvent:
class LogEvent(object):
"""Class representing interesting events (e.g., errors) in a build log."""
def __init__(self, text, line_no,
source_file=None, source_line_no=None,
@@ -345,7 +348,7 @@ def _parse_unpack(args):
return _parse(*args)


class CTestLogParser:
class CTestLogParser(object):
"""Log file parser that extracts errors and warnings."""
def __init__(self, profile=False):
# whether to record timing information
@@ -3,42 +3,33 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import abc
from __future__ import print_function

import argparse
import errno
import io
import re
import sys
from argparse import ArgumentParser
from typing import IO, Any, Iterable, List, Optional, Sequence, Tuple, Union


class Command:
class Command(object):
"""Parsed representation of a command from argparse.

This is a single command from an argparse parser. ``ArgparseWriter`` creates these and returns
them from ``parse()``, and it passes one of these to each call to ``format()`` so that we can
take an action for a single command.
This is a single command from an argparse parser. ``ArgparseWriter``
creates these and returns them from ``parse()``, and it passes one of
these to each call to ``format()`` so that we can take an action for
a single command.

Parts of a Command:
- prog: command name (str)
- description: command description (str)
- usage: command usage (str)
- positionals: list of positional arguments (list)
- optionals: list of optional arguments (list)
- subcommands: list of subcommand parsers (list)
"""

def __init__(
self,
prog: str,
description: Optional[str],
usage: str,
positionals: List[Tuple[str, Optional[Iterable[Any]], Union[int, str, None], str]],
optionals: List[Tuple[Sequence[str], List[str], str, Union[int, str, None], str]],
subcommands: List[Tuple[ArgumentParser, str, str]],
) -> None:
"""Initialize a new Command instance.

Args:
prog: Program name.
description: Command description.
usage: Command usage.
positionals: List of positional arguments.
optionals: List of optional arguments.
subcommands: List of subcommand parsers.
"""
def __init__(self, prog, description, usage, positionals, optionals, subcommands):
self.prog = prog
self.description = description
self.usage = usage
@@ -47,34 +38,35 @@ def __init__(
self.subcommands = subcommands


# NOTE: The only reason we subclass argparse.HelpFormatter is to get access to self._expand_help(),
# ArgparseWriter is not intended to be used as a formatter_class.
class ArgparseWriter(argparse.HelpFormatter, abc.ABC):
"""Analyze an argparse ArgumentParser for easy generation of help."""
# NOTE: The only reason we subclass argparse.HelpFormatter is to get access
# to self._expand_help(), ArgparseWriter is not intended to be used as a
# formatter_class.
class ArgparseWriter(argparse.HelpFormatter):
"""Analyzes an argparse ArgumentParser for easy generation of help."""

def __init__(self, prog: str, out: IO = sys.stdout, aliases: bool = False) -> None:
"""Initialize a new ArgparseWriter instance.
def __init__(self, prog, out=None, aliases=False):
"""Initializes a new ArgparseWriter instance.

Args:
prog: Program name.
out: File object to write to.
aliases: Whether or not to include subparsers for aliases.
Parameters:
prog (str): the program name
out (file object): the file to write to (default sys.stdout)
aliases (bool): whether or not to include subparsers for aliases
"""
super().__init__(prog)
super(ArgparseWriter, self).__init__(prog)
self.level = 0
self.prog = prog
self.out = out
self.out = sys.stdout if out is None else out
self.aliases = aliases

def parse(self, parser: ArgumentParser, prog: str) -> Command:
"""Parse the parser object and return the relavent components.
def parse(self, parser, prog):
"""Parses the parser object and returns the relavent components.

Args:
parser: Command parser.
prog: Program name.
Parameters:
parser (argparse.ArgumentParser): the parser
prog (str): the command name

Returns:
Information about the command from the parser.
(Command) information about the command from the parser
"""
self.parser = parser

@@ -88,7 +80,8 @@ def parse(self, parser: ArgumentParser, prog: str) -> Command:
groups = parser._mutually_exclusive_groups
usage = fmt._format_usage(None, actions, groups, "").strip()

# Go through actions and split them into optionals, positionals, and subcommands
# Go through actions and split them into optionals, positionals,
# and subcommands
optionals = []
positionals = []
subcommands = []
@@ -96,97 +89,74 @@ def parse(self, parser: ArgumentParser, prog: str) -> Command:
if action.option_strings:
flags = action.option_strings
dest_flags = fmt._format_action_invocation(action)
nargs = action.nargs
help = (
self._expand_help(action)
if action.help and action.help != argparse.SUPPRESS
else ""
)
help = help.split("\n")[0]

if action.choices is not None:
dest = [str(choice) for choice in action.choices]
else:
dest = [action.dest]

optionals.append((flags, dest, dest_flags, nargs, help))
help = self._expand_help(action) if action.help else ""
help = help.replace("\n", " ")
optionals.append((flags, dest_flags, help))
elif isinstance(action, argparse._SubParsersAction):
for subaction in action._choices_actions:
subparser = action._name_parser_map[subaction.dest]
help = (
self._expand_help(subaction)
if subaction.help and action.help != argparse.SUPPRESS
else ""
)
help = help.split("\n")[0]
subcommands.append((subparser, subaction.dest, help))
subcommands.append((subparser, subaction.dest))

# Look for aliases of the form 'name (alias, ...)'
if self.aliases and isinstance(subaction.metavar, str):
if self.aliases:
match = re.match(r"(.*) \((.*)\)", subaction.metavar)
if match:
aliases = match.group(2).split(", ")
for alias in aliases:
subparser = action._name_parser_map[alias]
help = (
self._expand_help(subaction)
if subaction.help and action.help != argparse.SUPPRESS
else ""
)
help = help.split("\n")[0]
subcommands.append((subparser, alias, help))
subcommands.append((subparser, alias))
else:
args = fmt._format_action_invocation(action)
help = (
self._expand_help(action)
if action.help and action.help != argparse.SUPPRESS
else ""
)
help = help.split("\n")[0]
positionals.append((args, action.choices, action.nargs, help))
help = self._expand_help(action) if action.help else ""
help = help.replace("\n", " ")
positionals.append((args, help))

return Command(prog, description, usage, positionals, optionals, subcommands)

@abc.abstractmethod
def format(self, cmd: Command) -> str:
"""Return the string representation of a single node in the parser tree.
def format(self, cmd):
"""Returns the string representation of a single node in the
parser tree.

Override this in subclasses to define how each subcommand should be displayed.
Override this in subclasses to define how each subcommand
should be displayed.

Args:
cmd: Parsed information about a command or subcommand.
Parameters:
(Command): parsed information about a command or subcommand

Returns:
String representation of this subcommand.
str: the string representation of this subcommand
"""
raise NotImplementedError

def _write(self, parser: ArgumentParser, prog: str, level: int = 0) -> None:
"""Recursively write a parser.
def _write(self, parser, prog, level=0):
"""Recursively writes a parser.

Args:
parser: Command parser.
prog: Program name.
level: Current level.
Parameters:
parser (argparse.ArgumentParser): the parser
prog (str): the command name
level (int): the current level
"""
self.level = level

cmd = self.parse(parser, prog)
self.out.write(self.format(cmd))

for subparser, prog, help in cmd.subcommands:
for subparser, prog in cmd.subcommands:
self._write(subparser, prog, level=level + 1)

def write(self, parser: ArgumentParser) -> None:
def write(self, parser):
"""Write out details about an ArgumentParser.

Args:
parser: Command parser.
parser (argparse.ArgumentParser): the parser
"""
try:
self._write(parser, self.prog)
except BrokenPipeError:
except IOError as e:
# Swallow pipe errors
pass
# Raises IOError in Python 2 and BrokenPipeError in Python 3
if e.errno != errno.EPIPE:
raise


_rst_levels = ["=", "-", "^", "~", ":", "`"]
@@ -195,33 +165,21 @@ def write(self, parser: ArgumentParser) -> None:
class ArgparseRstWriter(ArgparseWriter):
"""Write argparse output as rst sections."""

def __init__(
self,
prog: str,
out: IO = sys.stdout,
aliases: bool = False,
rst_levels: Sequence[str] = _rst_levels,
) -> None:
"""Initialize a new ArgparseRstWriter instance.
def __init__(self, prog, out=None, aliases=False, rst_levels=_rst_levels):
"""Create a new ArgparseRstWriter.

Args:
prog: Program name.
out: File object to write to.
aliases: Whether or not to include subparsers for aliases.
rst_levels: List of characters for rst section headings.
Parameters:
prog (str): program name
out (file object): file to write to
aliases (bool): whether or not to include subparsers for aliases
rst_levels (list of str): list of characters
for rst section headings
"""
super().__init__(prog, out, aliases)
out = sys.stdout if out is None else out
super(ArgparseRstWriter, self).__init__(prog, out, aliases)
self.rst_levels = rst_levels

def format(self, cmd: Command) -> str:
"""Return the string representation of a single node in the parser tree.

Args:
cmd: Parsed information about a command or subcommand.

Returns:
String representation of a node.
"""
def format(self, cmd):
string = io.StringIO()
string.write(self.begin_command(cmd.prog))

@@ -232,13 +190,13 @@ def format(self, cmd: Command) -> str:

if cmd.positionals:
string.write(self.begin_positionals())
for args, choices, nargs, help in cmd.positionals:
for args, help in cmd.positionals:
string.write(self.positional(args, help))
string.write(self.end_positionals())

if cmd.optionals:
string.write(self.begin_optionals())
for flags, dest, dest_flags, nargs, help in cmd.optionals:
for flags, dest_flags, help in cmd.optionals:
string.write(self.optional(dest_flags, help))
string.write(self.end_optionals())

@@ -247,15 +205,7 @@ def format(self, cmd: Command) -> str:

return string.getvalue()

def begin_command(self, prog: str) -> str:
"""Text to print before a command.

Args:
prog: Program name.

Returns:
Text before a command.
"""
def begin_command(self, prog):
return """
----

@@ -268,26 +218,10 @@ def begin_command(self, prog: str) -> str:
prog.replace(" ", "-"), prog, self.rst_levels[self.level] * len(prog)
)

def description(self, description: str) -> str:
"""Description of a command.

Args:
description: Command description.

Returns:
Description of a command.
"""
def description(self, description):
return description + "\n\n"

def usage(self, usage: str) -> str:
"""Example usage of a command.

Args:
usage: Command usage.

Returns:
Usage of a command.
"""
def usage(self, usage):
return """\
.. code-block:: console

@@ -297,24 +231,10 @@ def usage(self, usage: str) -> str:
usage
)

def begin_positionals(self) -> str:
"""Text to print before positional arguments.

Returns:
Positional arguments header.
"""
def begin_positionals(self):
return "\n**Positional arguments**\n\n"

def positional(self, name: str, help: str) -> str:
"""Description of a positional argument.

Args:
name: Argument name.
help: Help text.

Returns:
Positional argument description.
"""
def positional(self, name, help):
return """\
{0}
{1}
@@ -323,32 +243,13 @@ def positional(self, name: str, help: str) -> str:
name, help
)

def end_positionals(self) -> str:
"""Text to print after positional arguments.

Returns:
Positional arguments footer.
"""
def end_positionals(self):
return ""

def begin_optionals(self) -> str:
"""Text to print before optional arguments.

Returns:
Optional arguments header.
"""
def begin_optionals(self):
return "\n**Optional arguments**\n\n"

def optional(self, opts: str, help: str) -> str:
"""Description of an optional argument.

Args:
opts: Optional argument.
help: Help text.

Returns:
Optional argument description.
"""
def optional(self, opts, help):
return """\
``{0}``
{1}
@@ -357,23 +258,10 @@ def optional(self, opts: str, help: str) -> str:
opts, help
)

def end_optionals(self) -> str:
"""Text to print after optional arguments.

Returns:
Optional arguments footer.
"""
def end_optionals(self):
return ""

def begin_subcommands(self, subcommands: List[Tuple[ArgumentParser, str, str]]) -> str:
"""Table with links to other subcommands.

Arguments:
subcommands: List of subcommands.

Returns:
Subcommand linking text.
"""
def begin_subcommands(self, subcommands):
string = """
**Subcommands**

@@ -382,8 +270,116 @@ def begin_subcommands(self, subcommands: List[Tuple[ArgumentParser, str, str]])

"""

for cmd, _, _ in subcommands:
for cmd, _ in subcommands:
prog = re.sub(r"^[^ ]* ", "", cmd.prog)
string += " * :ref:`{0} <{1}>`\n".format(prog, cmd.prog.replace(" ", "-"))

return string + "\n"


class ArgparseCompletionWriter(ArgparseWriter):
"""Write argparse output as shell programmable tab completion functions."""

def format(self, cmd):
"""Returns the string representation of a single node in the
parser tree.

Override this in subclasses to define how each subcommand
should be displayed.

Parameters:
(Command): parsed information about a command or subcommand

Returns:
str: the string representation of this subcommand
"""

assert cmd.optionals  # we should always at least have -h, --help
assert not (cmd.positionals and cmd.subcommands)  # one or the other

# We only care about the arguments/flags, not the help messages
positionals = []
if cmd.positionals:
positionals, _ = zip(*cmd.positionals)
optionals, _, _ = zip(*cmd.optionals)
subcommands = []
if cmd.subcommands:
_, subcommands = zip(*cmd.subcommands)

# Flatten lists of lists
optionals = [x for xx in optionals for x in xx]

return (
self.start_function(cmd.prog)
+ self.body(positionals, optionals, subcommands)
+ self.end_function(cmd.prog)
)

def start_function(self, prog):
"""Returns the syntax needed to begin a function definition.

Parameters:
prog (str): the command name

Returns:
str: the function definition beginning
"""
name = prog.replace("-", "_").replace(" ", "_")
return "\n_{0}() {{".format(name)

def end_function(self, prog=None):
"""Returns the syntax needed to end a function definition.

Parameters:
prog (str or None): the command name

Returns:
str: the function definition ending
"""
return "}\n"

def body(self, positionals, optionals, subcommands):
"""Returns the body of the function.

Parameters:
positionals (list): list of positional arguments
optionals (list): list of optional arguments
subcommands (list): list of subcommand parsers

Returns:
str: the function body
"""
return ""

def positionals(self, positionals):
"""Returns the syntax for reporting positional arguments.

Parameters:
positionals (list): list of positional arguments

Returns:
str: the syntax for positional arguments
"""
return ""

def optionals(self, optionals):
"""Returns the syntax for reporting optional flags.

Parameters:
optionals (list): list of optional arguments

Returns:
str: the syntax for optional flags
"""
return ""

def subcommands(self, subcommands):
"""Returns the syntax for reporting subcommands.

Parameters:
subcommands (list): list of subcommand parsers

Returns:
str: the syntax for subcommand parsers
"""
return ""
@@ -402,7 +402,7 @@ def groupid_to_group(x):
os.remove(backup_filename)


class FileFilter:
class FileFilter(object):
"""Convenience class for calling ``filter_file`` a lot."""

def __init__(self, *filenames):
@@ -610,8 +610,6 @@ def chgrp(path, group, follow_symlinks=True):
gid = grp.getgrnam(group).gr_gid
else:
gid = group
if os.stat(path).st_gid == gid:
return
if follow_symlinks:
os.chown(path, -1, gid)
else:
@@ -1338,7 +1336,7 @@ def lexists_islink_isdir(path):
return True, is_link, is_dir


class BaseDirectoryVisitor:
class BaseDirectoryVisitor(object):
"""Base class and interface for :py:func:`visit_directory_tree`."""

def visit_file(self, root, rel_path, depth):
@@ -1754,14 +1752,9 @@ def find(root, files, recursive=True):
files = [files]

if recursive:
tty.debug(f"Find (recursive): {root} {str(files)}")
result = _find_recursive(root, files)
return _find_recursive(root, files)
else:
tty.debug(f"Find (not recursive): {root} {str(files)}")
result = _find_non_recursive(root, files)

tty.debug(f"Find complete: {root} {str(files)}")
return result
return _find_non_recursive(root, files)


@system_path_filter
@@ -1897,7 +1890,7 @@ class HeaderList(FileList):
include_regex = re.compile(r"(.*?)(\binclude\b)(.*)")

def __init__(self, files):
super().__init__(files)
super(HeaderList, self).__init__(files)

self._macro_definitions = []
self._directories = None
@@ -1923,7 +1916,7 @@ def _default_directories(self):
"""Default computation of directories based on the list of
header files.
"""
dir_list = super().directories
dir_list = super(HeaderList, self).directories
values = []
for d in dir_list:
# If the path contains a subdirectory named 'include' then stop
@@ -2359,7 +2352,7 @@ def find_all_libraries(root, recursive=False):
)


class WindowsSimulatedRPath:
class WindowsSimulatedRPath(object):
"""Class representing Windows filesystem rpath analog

One instance of this class is associated with a package (only on Windows)
lib/spack/llnl/util/lang.py

@@ -3,6 +3,8 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from __future__ import division

import collections.abc
import contextlib
import functools
@@ -766,10 +768,10 @@ def pretty_seconds(seconds):

class RequiredAttributeError(ValueError):
    def __init__(self, message):
        super().__init__(message)
        super(RequiredAttributeError, self).__init__(message)


class ObjectWrapper:
class ObjectWrapper(object):
    """Base class that wraps an object. Derived classes can add new behavior
    while staying undercover.

@@ -796,7 +798,7 @@ def __init__(self, wrapped_object):
        self.__dict__ = wrapped_object.__dict__


class Singleton:
class Singleton(object):
    """Simple wrapper for lazily initialized singleton objects."""

    def __init__(self, factory):
@@ -821,7 +823,7 @@ def __getattr__(self, name):
        # 'instance'/'_instance' to be defined or it will enter an infinite
        # loop, so protect against that here.
        if name in ["_instance", "instance"]:
            raise AttributeError(f"cannot create {name}")
            raise AttributeError()
        return getattr(self.instance, name)

    def __getitem__(self, name):
@@ -843,6 +845,27 @@ def __repr__(self):
        return repr(self.instance)


class LazyReference(object):
    """Lazily evaluated reference to part of a singleton."""

    def __init__(self, ref_function):
        self.ref_function = ref_function

    def __getattr__(self, name):
        if name == "ref_function":
            raise AttributeError()
        return getattr(self.ref_function(), name)

    def __getitem__(self, name):
        return self.ref_function()[name]

    def __str__(self):
        return str(self.ref_function())

    def __repr__(self):
        return repr(self.ref_function())
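
A minimal usage sketch for the lazy wrappers above (the factory and names are illustrative, not from the diff): a `Singleton` builds nothing until its first attribute or item access, while `LazyReference` — present only on the side of the diff that defines it — re-resolves its target on every access.

    import llnl.util.lang as lang

    def _make_config():
        print("building config")  # runs once, on first use
        return {"verbose": False}

    CONFIG = lang.Singleton(_make_config)  # no call to _make_config yet
    print(CONFIG["verbose"])               # the factory fires here

    # LazyReference re-evaluates its function on each access:
    VERBOSE = lang.LazyReference(lambda: CONFIG["verbose"])
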
def load_module_from_file(module_name, module_path):
    """Loads a python module from the path of the corresponding file.

@@ -920,7 +943,7 @@ def _wrapper(args):
    return _wrapper


class Devnull:
class Devnull(object):
    """Null stream with less overhead than ``os.devnull``.

    See https://stackoverflow.com/a/2929954.
@@ -1037,7 +1060,7 @@ def __str__(self):
        return str(self.data)


class GroupedExceptionHandler:
class GroupedExceptionHandler(object):
    """A generic mechanism to coalesce multiple exceptions and preserve tracebacks."""

    def __init__(self):
@@ -1068,7 +1091,7 @@ def grouped_message(self, with_tracebacks: bool = True) -> str:
        return "due to the following failures:\n{0}".format("\n".join(each_exception_message))


class GroupedExceptionForwarder:
class GroupedExceptionForwarder(object):
    """A contextmanager to capture exceptions and forward them to a
    GroupedExceptionHandler."""

@@ -1088,7 +1111,7 @@ def __exit__(self, exc_type, exc_value, tb):
        return True


class classproperty:
class classproperty(object):
    """Non-data descriptor to evaluate a class-level property. The function that performs
    the evaluation is injected at creation time and take an instance (could be None) and
    an owner (i.e. the class that originated the instance)
lib/spack/llnl/util/link_tree.py

@@ -5,6 +5,8 @@

"""LinkTree class for setting up trees of symbolic links."""

from __future__ import print_function

import filecmp
import os
import shutil
@@ -285,7 +287,7 @@ def visit_symlinked_file(self, root, rel_path, depth):
        self.visit_file(root, rel_path, depth)


class LinkTree:
class LinkTree(object):
    """Class to create trees of symbolic links from a source directory.

    LinkTree objects are constructed with a source root. Their
@@ -430,12 +432,12 @@ class MergeConflictError(Exception):

class ConflictingSpecsError(MergeConflictError):
    def __init__(self, spec_1, spec_2):
        super().__init__(spec_1, spec_2)
        super(MergeConflictError, self).__init__(spec_1, spec_2)


class SingleMergeConflictError(MergeConflictError):
    def __init__(self, path):
        super().__init__("Package merge blocked by file: %s" % path)
        super(MergeConflictError, self).__init__("Package merge blocked by file: %s" % path)


class MergeConflictSummary(MergeConflictError):
@@ -450,4 +452,4 @@ def __init__(self, conflicts):
        msg += "\n `{0}` and `{1}` both project to `{2}`".format(
            conflict.src_a, conflict.src_b, conflict.dst
        )
        super().__init__(msg)
        super(MergeConflictSummary, self).__init__(msg)
lib/spack/llnl/util/lock.py

@@ -9,10 +9,9 @@
import sys
import time
from datetime import datetime
from types import TracebackType
from typing import IO, Any, Callable, ContextManager, Dict, Generator, Optional, Tuple, Type, Union

from llnl.util import lang, tty
import llnl.util.tty as tty
from llnl.util.lang import pretty_seconds

import spack.util.string

@@ -35,15 +34,12 @@
]


ReleaseFnType = Optional[Callable[[], bool]]
#: A useful replacement for functions that should return True when not provided
#: for example.
true_fn = lambda: True


def true_fn() -> bool:
    """A function that always returns True."""
    return True
class OpenFile:
class OpenFile(object):
    """Record for keeping track of open lockfiles (with reference counting).

    There's really only one ``OpenFile`` per inode, per process, but we record the
@@ -52,12 +48,12 @@ class OpenFile:
    file descriptors as well in the future.
    """

    def __init__(self, fh: IO) -> None:
    def __init__(self, fh):
        self.fh = fh
        self.refs = 0


class OpenFileTracker:
class OpenFileTracker(object):
    """Track open lockfiles, to minimize number of open file descriptors.

    The ``fcntl`` locks that Spack uses are associated with an inode and a process.
@@ -82,11 +78,11 @@ class OpenFileTracker:
    work in Python and assume the GIL.
    """

    def __init__(self) -> None:
    def __init__(self):
        """Create a new ``OpenFileTracker``."""
        self._descriptors: Dict[Any, OpenFile] = {}
        self._descriptors = {}

    def get_fh(self, path: str) -> IO:
    def get_fh(self, path):
        """Get a filehandle for a lockfile.

        This routine will open writable files for read/write even if you're asking
@@ -94,7 +90,7 @@ def get_fh(self, path: str) -> IO:
        (write) lock later if requested.

        Arguments:
            path: path to lock file we want a filehandle for
            path (str): path to lock file we want a filehandle for
        """
        # Open writable files as 'r+' so we can upgrade to write later
        os_mode, fh_mode = (os.O_RDWR | os.O_CREAT), "r+"
@@ -143,7 +139,7 @@ def get_fh(self, path: str) -> IO:
    def release_by_stat(self, stat):
        key = (stat.st_dev, stat.st_ino, os.getpid())
        open_file = self._descriptors.get(key)
        assert open_file, "Attempted to close non-existing inode: %s" % stat.st_ino
        assert open_file, "Attempted to close non-existing inode: %s" % stat.st_inode

        open_file.refs -= 1
        if not open_file.refs:
@@ -161,7 +157,7 @@ def purge(self):

#: Open file descriptors for locks in this process. Used to prevent one process
#: from opening the same file many times for different byte range locks
FILE_TRACKER = OpenFileTracker()
file_tracker = OpenFileTracker()
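
The tracker above keys descriptors by device, inode and process id — that is why two `Lock` objects on the same path within one process share a single file handle. Roughly, the same triple `release_by_stat` uses above:

    import os

    def _descriptor_key(path):
        # Same (st_dev, st_ino, pid) triple used by release_by_stat().
        s = os.stat(path)
        return (s.st_dev, s.st_ino, os.getpid())
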
def _attempts_str(wait_time, nattempts):
@@ -170,10 +166,10 @@ def _attempts_str(wait_time, nattempts):
        return ""

    attempts = spack.util.string.plural(nattempts, "attempt")
    return " after {} and {}".format(lang.pretty_seconds(wait_time), attempts)
    return " after {} and {}".format(pretty_seconds(wait_time), attempts)


class LockType:
class LockType(object):
    READ = 0
    WRITE = 1

@@ -192,11 +188,11 @@ def to_module(tid):
        return lock

    @staticmethod
    def is_valid(op: int) -> bool:
    def is_valid(op):
        return op == LockType.READ or op == LockType.WRITE


class Lock:
class Lock(object):
    """This is an implementation of a filesystem lock using Python's lockf.

    In Python, ``lockf`` actually calls ``fcntl``, so this should work with
@@ -211,16 +207,7 @@ class Lock:
    overlapping byte ranges in the same file).
    """

    def __init__(
        self,
        path: str,
        *,
        start: int = 0,
        length: int = 0,
        default_timeout: Optional[float] = None,
        debug: bool = False,
        desc: str = "",
    ) -> None:
    def __init__(self, path, start=0, length=0, default_timeout=None, debug=False, desc=""):
        """Construct a new lock on the file at ``path``.

        By default, the lock applies to the whole file. Optionally,
@@ -233,17 +220,17 @@ def __init__(
            beginning of the file.

        Args:
            path: path to the lock
            start: optional byte offset at which the lock starts
            length: optional number of bytes to lock
            default_timeout: seconds to wait for lock attempts,
            path (str): path to the lock
            start (int): optional byte offset at which the lock starts
            length (int): optional number of bytes to lock
            default_timeout (int): number of seconds to wait for lock attempts,
                where None means to wait indefinitely
            debug: debug mode specific to locking
            desc: optional debug message lock description, which is
            debug (bool): debug mode specific to locking
            desc (str): optional debug message lock description, which is
                helpful for distinguishing between different Spack locks.
        """
        self.path = path
        self._file: Optional[IO] = None
        self._file = None
        self._reads = 0
        self._writes = 0

@@ -255,7 +242,7 @@ def __init__(
        self.debug = debug

        # optional debug description
        self.desc = f" ({desc})" if desc else ""
        self.desc = " ({0})".format(desc) if desc else ""

        # If the user doesn't set a default timeout, or if they choose
        # None, 0, etc. then lock attempts will not time out (unless the
@@ -263,15 +250,11 @@ def __init__(
        self.default_timeout = default_timeout or None

        # PID and host of lock holder (only used in debug mode)
        self.pid: Optional[int] = None
        self.old_pid: Optional[int] = None
        self.host: Optional[str] = None
        self.old_host: Optional[str] = None
        self.pid = self.old_pid = None
        self.host = self.old_host = None
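
A sketch of constructing a byte-range lock per the two calling conventions shown in this hunk (keyword-only arguments on one side, positional on the other — keywords work with both); the path is hypothetical:

    from llnl.util.lock import Lock

    # Lock byte 0 of the file, waiting at most 10 seconds for attempts.
    lock = Lock("/tmp/example.lock", start=0, length=1, default_timeout=10, desc="example")
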
    @staticmethod
    def _poll_interval_generator(
        _wait_times: Optional[Tuple[float, float, float]] = None
    ) -> Generator[float, None, None]:
    def _poll_interval_generator(_wait_times=None):
        """This implements a backoff scheme for polling a contended resource
        by suggesting a succession of wait times between polls.

@@ -294,21 +277,21 @@ def _poll_interval_generator(
            num_requests += 1
            yield wait_time

    def __repr__(self) -> str:
    def __repr__(self):
        """Formal representation of the lock."""
        rep = "{0}(".format(self.__class__.__name__)
        for attr, value in self.__dict__.items():
            rep += "{0}={1}, ".format(attr, value.__repr__())
        return "{0})".format(rep.strip(", "))

    def __str__(self) -> str:
    def __str__(self):
        """Readable string (with key fields) of the lock."""
        location = "{0}[{1}:{2}]".format(self.path, self._start, self._length)
        timeout = "timeout={0}".format(self.default_timeout)
        activity = "#reads={0}, #writes={1}".format(self._reads, self._writes)
        return "({0}, {1}, {2})".format(location, timeout, activity)

    def _lock(self, op: int, timeout: Optional[float] = None) -> Tuple[float, int]:
    def _lock(self, op, timeout=None):
        """This takes a lock using POSIX locks (``fcntl.lockf``).

        The lock is implemented as a spin lock using a nonblocking call
@@ -327,7 +310,7 @@ def _lock(self, op: int, timeout: Optional[float] = None) -> Tuple[float, int]:
        # Create file and parent directories if they don't exist.
        if self._file is None:
            self._ensure_parent_directory()
            self._file = FILE_TRACKER.get_fh(self.path)
            self._file = file_tracker.get_fh(self.path)

        if LockType.to_module(op) == fcntl.LOCK_EX and self._file.mode == "r":
            # Attempt to upgrade to write lock w/a read-only file.
@@ -336,7 +319,7 @@ def _lock(self, op: int, timeout: Optional[float] = None) -> Tuple[float, int]:

        self._log_debug(
            "{} locking [{}:{}]: timeout {}".format(
                op_str.lower(), self._start, self._length, lang.pretty_seconds(timeout or 0)
                op_str.lower(), self._start, self._length, pretty_seconds(timeout or 0)
            )
        )

@@ -360,20 +343,15 @@ def _lock(self, op: int, timeout: Optional[float] = None) -> Tuple[float, int]:
        total_wait_time = time.time() - start_time
        raise LockTimeoutError(op_str.lower(), self.path, total_wait_time, num_attempts)

    def _poll_lock(self, op: int) -> bool:
    def _poll_lock(self, op):
        """Attempt to acquire the lock in a non-blocking manner. Return whether
        the locking attempt succeeds
        """
        assert self._file is not None, "cannot poll a lock without the file being set"
        module_op = LockType.to_module(op)
        try:
            # Try to get the lock (will raise if not available.)
            fcntl.lockf(
                self._file.fileno(),
                module_op | fcntl.LOCK_NB,
                self._length,
                self._start,
                os.SEEK_SET,
                self._file, module_op | fcntl.LOCK_NB, self._length, self._start, os.SEEK_SET
            )

            # help for debugging distributed locking
@@ -399,7 +377,7 @@ def _poll_lock(self, op: int) -> bool:

        return False

    def _ensure_parent_directory(self) -> str:
    def _ensure_parent_directory(self):
        parent = os.path.dirname(self.path)

        # relative paths to lockfiles in the current directory have no parent
@@ -418,22 +396,20 @@ def _ensure_parent_directory(self) -> str:
            raise
        return parent

    def _read_log_debug_data(self) -> None:
    def _read_log_debug_data(self):
        """Read PID and host data out of the file if it is there."""
        assert self._file is not None, "cannot read debug log without the file being set"
        self.old_pid = self.pid
        self.old_host = self.host

        line = self._file.read()
        if line:
            pid, host = line.strip().split(",")
            _, _, pid = pid.rpartition("=")
            _, _, self.pid = pid.rpartition("=")
            _, _, self.host = host.rpartition("=")
            self.pid = int(pid)
            self.pid = int(self.pid)

    def _write_log_debug_data(self) -> None:
    def _write_log_debug_data(self):
        """Write PID and host data to the file, recording old values."""
        assert self._file is not None, "cannot write debug log without the file being set"
        self.old_pid = self.pid
        self.old_host = self.host

@@ -447,21 +423,20 @@ def _write_log_debug_data(self) -> None:
        self._file.flush()
        os.fsync(self._file.fileno())

    def _unlock(self) -> None:
    def _unlock(self):
        """Releases a lock using POSIX locks (``fcntl.lockf``)

        Releases the lock regardless of mode. Note that read locks may
        be masquerading as write locks, but this removes either.

        """
        assert self._file is not None, "cannot unlock without the file being set"
        fcntl.lockf(self._file.fileno(), fcntl.LOCK_UN, self._length, self._start, os.SEEK_SET)
        FILE_TRACKER.release_by_fh(self._file)
        fcntl.lockf(self._file, fcntl.LOCK_UN, self._length, self._start, os.SEEK_SET)
        file_tracker.release_by_fh(self._file)
        self._file = None
        self._reads = 0
        self._writes = 0

    def acquire_read(self, timeout: Optional[float] = None) -> bool:
    def acquire_read(self, timeout=None):
        """Acquires a recursive, shared lock for reading.

        Read and write locks can be acquired and released in arbitrary
@@ -486,7 +461,7 @@ def acquire_read(self, timeout: Optional[float] = None) -> bool:
            self._reads += 1
            return False

    def acquire_write(self, timeout: Optional[float] = None) -> bool:
    def acquire_write(self, timeout=None):
        """Acquires a recursive, exclusive lock for writing.

        Read and write locks can be acquired and released in arbitrary
@@ -516,7 +491,7 @@ def acquire_write(self, timeout: Optional[float] = None) -> bool:
            self._writes += 1
            return False

    def is_write_locked(self) -> bool:
    def is_write_locked(self):
        """Check if the file is write locked

        Return:
@@ -533,7 +508,7 @@ def is_write_locked(self) -> bool:

        return False

    def downgrade_write_to_read(self, timeout: Optional[float] = None) -> None:
    def downgrade_write_to_read(self, timeout=None):
        """
        Downgrade from an exclusive write lock to a shared read.

@@ -552,7 +527,7 @@ def downgrade_write_to_read(self, timeout: Optional[float] = None) -> None:
        else:
            raise LockDowngradeError(self.path)

    def upgrade_read_to_write(self, timeout: Optional[float] = None) -> None:
    def upgrade_read_to_write(self, timeout=None):
        """
        Attempts to upgrade from a shared read lock to an exclusive write.

@@ -571,7 +546,7 @@ def upgrade_read_to_write(self, timeout: Optional[float] = None) -> None:
        else:
            raise LockUpgradeError(self.path)

    def release_read(self, release_fn: ReleaseFnType = None) -> bool:
    def release_read(self, release_fn=None):
        """Releases a read lock.

        Arguments:
@@ -607,7 +582,7 @@ def release_read(self, release_fn: ReleaseFnType = None) -> bool:
            self._reads -= 1
            return False

    def release_write(self, release_fn: ReleaseFnType = None) -> bool:
    def release_write(self, release_fn=None):
        """Releases a write lock.

        Arguments:
@@ -648,65 +623,65 @@ def release_write(self, release_fn: ReleaseFnType = None) -> bool:
        else:
            return False

    def cleanup(self) -> None:
    def cleanup(self):
        if self._reads == 0 and self._writes == 0:
            os.unlink(self.path)
        else:
            raise LockError("Attempting to cleanup active lock.")

    def _get_counts_desc(self) -> str:
    def _get_counts_desc(self):
        return (
            "(reads {0}, writes {1})".format(self._reads, self._writes) if tty.is_verbose() else ""
        )

    def _log_acquired(self, locktype, wait_time, nattempts) -> None:
    def _log_acquired(self, locktype, wait_time, nattempts):
        attempts_part = _attempts_str(wait_time, nattempts)
        now = datetime.now()
        desc = "Acquired at %s" % now.strftime("%H:%M:%S.%f")
        self._log_debug(self._status_msg(locktype, "{0}{1}".format(desc, attempts_part)))

    def _log_acquiring(self, locktype) -> None:
    def _log_acquiring(self, locktype):
        self._log_debug(self._status_msg(locktype, "Acquiring"), level=3)

    def _log_debug(self, *args, **kwargs) -> None:
    def _log_debug(self, *args, **kwargs):
        """Output lock debug messages."""
        kwargs["level"] = kwargs.get("level", 2)
        tty.debug(*args, **kwargs)

    def _log_downgraded(self, wait_time, nattempts) -> None:
    def _log_downgraded(self, wait_time, nattempts):
        attempts_part = _attempts_str(wait_time, nattempts)
        now = datetime.now()
        desc = "Downgraded at %s" % now.strftime("%H:%M:%S.%f")
        self._log_debug(self._status_msg("READ LOCK", "{0}{1}".format(desc, attempts_part)))

    def _log_downgrading(self) -> None:
    def _log_downgrading(self):
        self._log_debug(self._status_msg("WRITE LOCK", "Downgrading"), level=3)

    def _log_released(self, locktype) -> None:
    def _log_released(self, locktype):
        now = datetime.now()
        desc = "Released at %s" % now.strftime("%H:%M:%S.%f")
        self._log_debug(self._status_msg(locktype, desc))

    def _log_releasing(self, locktype) -> None:
    def _log_releasing(self, locktype):
        self._log_debug(self._status_msg(locktype, "Releasing"), level=3)

    def _log_upgraded(self, wait_time, nattempts) -> None:
    def _log_upgraded(self, wait_time, nattempts):
        attempts_part = _attempts_str(wait_time, nattempts)
        now = datetime.now()
        desc = "Upgraded at %s" % now.strftime("%H:%M:%S.%f")
        self._log_debug(self._status_msg("WRITE LOCK", "{0}{1}".format(desc, attempts_part)))

    def _log_upgrading(self) -> None:
    def _log_upgrading(self):
        self._log_debug(self._status_msg("READ LOCK", "Upgrading"), level=3)

    def _status_msg(self, locktype: str, status: str) -> str:
    def _status_msg(self, locktype, status):
        status_desc = "[{0}] {1}".format(status, self._get_counts_desc())
        return "{0}{1.desc}: {1.path}[{1._start}:{1._length}] {2}".format(
            locktype, self, status_desc
        )


class LockTransaction:
class LockTransaction(object):
    """Simple nested transaction context manager that uses a file lock.

    Arguments:
@@ -734,13 +709,7 @@ class LockTransaction:

    """

    def __init__(
        self,
        lock: Lock,
        acquire: Union[ReleaseFnType, ContextManager] = None,
        release: Union[ReleaseFnType, ContextManager] = None,
        timeout: Optional[float] = None,
    ) -> None:
    def __init__(self, lock, acquire=None, release=None, timeout=None):
        self._lock = lock
        self._timeout = timeout
        self._acquire_fn = acquire
@@ -755,20 +724,15 @@ def __enter__(self):
        else:
            return self._as

    def __exit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc_value: Optional[BaseException],
        traceback: Optional[TracebackType],
    ) -> bool:
    def __exit__(self, type, value, traceback):
        suppress = False

        def release_fn():
            if self._release_fn is not None:
                return self._release_fn(exc_type, exc_value, traceback)
                return self._release_fn(type, value, traceback)

        if self._as and hasattr(self._as, "__exit__"):
            if self._as.__exit__(exc_type, exc_value, traceback):
            if self._as.__exit__(type, value, traceback):
                suppress = True

        if self._exit(release_fn):
@@ -776,12 +740,6 @@ def release_fn():

        return suppress

    def _enter(self) -> bool:
        return NotImplemented

    def _exit(self, release_fn: ReleaseFnType) -> bool:
        return NotImplemented


class ReadTransaction(LockTransaction):
    """LockTransaction context manager that does a read and releases it."""
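
For context, a sketch of how these transaction wrappers are consumed; `ReadTransaction` acquires the read lock on entry and releases it on exit (the file path is illustrative):

    from llnl.util.lock import Lock, ReadTransaction

    lock = Lock("/tmp/example.lock")
    with ReadTransaction(lock, timeout=5):
        pass  # read shared state while holding the shared lock
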
@@ -812,7 +770,7 @@ class LockDowngradeError(LockError):

    def __init__(self, path):
        msg = "Cannot downgrade lock from write to read on file: %s" % path
        super().__init__(msg)
        super(LockDowngradeError, self).__init__(msg)


class LockLimitError(LockError):
@@ -824,10 +782,10 @@ class LockTimeoutError(LockError):

    def __init__(self, lock_type, path, time, attempts):
        fmt = "Timed out waiting for a {} lock after {}.\n    Made {} {} on file: {}"
        super().__init__(
        super(LockTimeoutError, self).__init__(
            fmt.format(
                lock_type,
                lang.pretty_seconds(time),
                pretty_seconds(time),
                attempts,
                "attempt" if attempts == 1 else "attempts",
                path,
@@ -840,7 +798,7 @@ class LockUpgradeError(LockError):

    def __init__(self, path):
        msg = "Cannot upgrade lock from read to write on file: %s" % path
        super().__init__(msg)
        super(LockUpgradeError, self).__init__(msg)


class LockPermissionError(LockError):
@@ -852,7 +810,7 @@ class LockROFileError(LockPermissionError):

    def __init__(self, path):
        msg = "Can't take write lock on read-only file: %s" % path
        super().__init__(msg)
        super(LockROFileError, self).__init__(msg)


class CantCreateLockError(LockPermissionError):
@@ -861,4 +819,4 @@ class CantCreateLockError(LockPermissionError):
    def __init__(self, path):
        msg = "cannot create lock '%s': " % path
        msg += "file does not exist and location is not writable"
        super().__init__(msg)
        super(LockError, self).__init__(msg)
lib/spack/llnl/util/tty/__init__.py

@@ -3,6 +3,8 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from __future__ import unicode_literals

import contextlib
import io
import os
@@ -12,7 +14,6 @@
import traceback
from datetime import datetime
from sys import platform as _platform
from typing import NoReturn

if _platform != "win32":
    import fcntl
@@ -245,7 +246,7 @@ def warn(message, *args, **kwargs):
    info("Warning: " + str(message), *args, **kwargs)


def die(message, *args, **kwargs) -> NoReturn:
def die(message, *args, **kwargs):
    kwargs.setdefault("countback", 4)
    error(message, *args, **kwargs)
    sys.exit(1)
lib/spack/llnl/util/tty/colify.py

@@ -6,6 +6,8 @@
"""
Routines for printing columnar output. See ``colify()`` for more information.
"""
from __future__ import division, unicode_literals

import io
import os
import sys
lib/spack/llnl/util/tty/color.py

@@ -59,6 +59,8 @@

To output an @, use '@@'. To output a } inside braces, use '}}'.
"""
from __future__ import unicode_literals

import re
import sys
from contextlib import contextmanager
@@ -68,7 +70,7 @@ class ColorParseError(Exception):
    """Raised when a color format fails to parse."""

    def __init__(self, message):
        super().__init__(message)
        super(ColorParseError, self).__init__(message)


# Text styles for ansi codes
@@ -203,7 +205,7 @@ def color_when(value):
    set_color_when(old_value)


class match_to_ansi:
class match_to_ansi(object):
    def __init__(self, color=True, enclose=False):
        self.color = _color_when_value(color)
        self.enclose = enclose
@@ -319,7 +321,7 @@ def cescape(string):
    return string


class ColorStream:
class ColorStream(object):
    def __init__(self, stream, color=None):
        self._stream = stream
        self._color = color
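
A minimal sketch of the color format documented above, using this module's `colorize` and `cescape` helpers (assuming both are unchanged by this diff; the message text is made up):

    from llnl.util.tty.color import cescape, colorize

    # '@R{...}' colors the braced text red; cescape() quotes user input so
    # stray '@' or '}' characters are not parsed as format codes.
    msg = colorize("@R{error:} %s" % cescape("50% done @ stage }1{"))
    print(msg)
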
lib/spack/llnl/util/tty/log.py

@@ -5,6 +5,8 @@

"""Utility classes for logging the output of blocks of code.
"""
from __future__ import unicode_literals

import atexit
import ctypes
import errno
@@ -65,7 +67,7 @@ def _strip(line):
    return _escape.sub("", line)


class keyboard_input:
class keyboard_input(object):
    """Context manager to disable line editing and echoing.

    Use this with ``sys.stdin`` for keyboard input, e.g.::

@@ -242,7 +244,7 @@ def __exit__(self, exc_type, exception, traceback):
                signal.signal(signum, old_handler)


class Unbuffered:
class Unbuffered(object):
    """Wrapper for Python streams that forces them to be unbuffered.

    This is implemented by forcing a flush after each write.
@@ -287,7 +289,7 @@ def _file_descriptors_work(*streams):
    return False


class FileWrapper:
class FileWrapper(object):
    """Represents a file. Can be an open stream, a path to a file (not opened
    yet), or neither. When unwrapped, it returns an open file (or file-like)
    object.
@@ -329,7 +331,7 @@ def close(self):
            self.file.close()


class MultiProcessFd:
class MultiProcessFd(object):
    """Return an object which stores a file descriptor and can be passed as an
    argument to a function run with ``multiprocessing.Process``, such that
    the file descriptor is available in the subprocess."""
@@ -429,7 +431,7 @@ def log_output(*args, **kwargs):
        return nixlog(*args, **kwargs)


class nixlog:
class nixlog(object):
    """
    Under the hood, we spawn a daemon and set up a pipe between this
    process and the daemon. The daemon writes our output to both the
@@ -750,7 +752,7 @@ def close(self):
        os.close(self.saved_stream)


class winlog:
class winlog(object):
    """
    Similar to nixlog, with underlying
    functionality ported to support Windows.
@@ -780,7 +782,7 @@ def __enter__(self):
            raise RuntimeError("file argument must be set by __init__ ")

        # Open both write and reading on logfile
        if isinstance(self.logfile, io.StringIO):
        if type(self.logfile) == io.StringIO:
            self._ioflag = True
            # cannot have two streams on tempfile, so we must make our own
            sys.stdout = self.logfile
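
For context, `nixlog` and `winlog` are normally reached through the `log_output` dispatcher shown in the hunk above; a sketch, assuming `echo` is still an accepted keyword and with an illustrative log file name:

    from llnl.util.tty.log import log_output

    with log_output("build.out", echo=True):
        print("captured in build.out and, with echo=True, shown on the terminal")
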
lib/spack/llnl/util/tty/pty.py

@@ -13,6 +13,8 @@

Note: The functionality in this module is unsupported on Windows
"""
from __future__ import print_function

import multiprocessing
import os
import re
@@ -34,7 +36,7 @@
    pass


class ProcessController:
class ProcessController(object):
    """Wrapper around some fundamental process control operations.

    This allows one process (the controller) to drive another (the
@@ -155,7 +157,7 @@ def wait_running(self):
        self.wait(lambda: "T" not in self.proc_status())


class PseudoShell:
class PseudoShell(object):
    """Sets up controller and minion processes with a PTY.

    You can create a ``PseudoShell`` if you want to test how some
lib/spack/spack/abi.py

@@ -13,7 +13,7 @@
from spack.util.executable import Executable, ProcessError


class ABI:
class ABI(object):
    """This class provides methods to test ABI compatibility between specs.
    The current implementation is rather rough and could be improved."""
lib/spack/spack/audit.py

@@ -60,7 +60,7 @@ def _search_duplicate_compilers(error_cls):
GROUPS = collections.defaultdict(list)


class Error:
class Error(object):
    """Information on an error reported in a test."""

    def __init__(self, summary, details):
@@ -286,7 +286,7 @@ def _check_build_test_callbacks(pkgs, error_cls):
    """Ensure stand-alone test method is not included in build-time callbacks"""
    errors = []
    for pkg_name in pkgs:
        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
        test_callbacks = getattr(pkg_cls, "build_time_test_callbacks", None)

        # TODO (post-34236): "test*"->"test_*" once remove deprecated methods
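
Every audit hook in this file follows the pattern the hunk above shows: resolve a package class from the built-in repository (the renamed `spack.repo.PATH`, formerly `spack.repo.path`) and inspect its attributes. A sketch with a hypothetical package name:

    import spack.repo

    pkg_cls = spack.repo.PATH.get_pkg_class("zlib")
    callbacks = getattr(pkg_cls, "build_time_test_callbacks", None)
    print(pkg_cls.versions, callbacks)
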
@@ -312,7 +312,7 @@ def _check_patch_urls(pkgs, error_cls):

    errors = []
    for pkg_name in pkgs:
        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
        for condition, patches in pkg_cls.patches.items():
            for patch in patches:
                if not isinstance(patch, spack.patch.UrlPatch):
@@ -342,7 +342,7 @@ def _search_for_reserved_attributes_names_in_packages(pkgs, error_cls):
    errors = []
    for pkg_name in pkgs:
        name_definitions = collections.defaultdict(list)
        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)

        for cls_item in inspect.getmro(pkg_cls):
            for name in RESERVED_NAMES:
@@ -383,7 +383,7 @@ def _ensure_packages_are_pickeleable(pkgs, error_cls):
    """Ensure that package objects are pickleable"""
    errors = []
    for pkg_name in pkgs:
        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
        pkg = pkg_cls(spack.spec.Spec(pkg_name))
        try:
            pickle.dumps(pkg)
@@ -424,7 +424,7 @@ def _ensure_all_versions_can_produce_a_fetcher(pkgs, error_cls):
    """Ensure all versions in a package can produce a fetcher"""
    errors = []
    for pkg_name in pkgs:
        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
        pkg = pkg_cls(spack.spec.Spec(pkg_name))
        try:
            spack.fetch_strategy.check_pkg_attributes(pkg)
@@ -449,7 +449,7 @@ def _ensure_docstring_and_no_fixme(pkgs, error_cls):
    ]
    for pkg_name in pkgs:
        details = []
        filename = spack.repo.PATH.filename_for_package_name(pkg_name)
        filename = spack.repo.path.filename_for_package_name(pkg_name)
        with open(filename, "r") as package_file:
            for i, line in enumerate(package_file):
                pattern = next((r for r in fixme_regexes if r.search(line)), None)
@@ -461,7 +461,7 @@ def _ensure_docstring_and_no_fixme(pkgs, error_cls):
            error_msg = "Package '{}' contains boilerplate that need to be removed"
            errors.append(error_cls(error_msg.format(pkg_name), details))

        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
        if not pkg_cls.__doc__:
            error_msg = "Package '{}' miss a docstring"
            errors.append(error_cls(error_msg.format(pkg_name), []))
@@ -474,7 +474,7 @@ def _ensure_all_packages_use_sha256_checksums(pkgs, error_cls):
    """Ensure no packages use md5 checksums"""
    errors = []
    for pkg_name in pkgs:
        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
        if pkg_cls.manual_download:
            continue

@@ -511,7 +511,7 @@ def _ensure_env_methods_are_ported_to_builders(pkgs, error_cls):
    """Ensure that methods modifying the build environment are ported to builder classes."""
    errors = []
    for pkg_name in pkgs:
        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
        buildsystem_variant, _ = pkg_cls.variants["build_system"]
        buildsystem_names = [getattr(x, "value", x) for x in buildsystem_variant.values]
        builder_cls_names = [spack.builder.BUILDER_CLS[x].__name__ for x in buildsystem_names]
@@ -538,7 +538,7 @@ def _linting_package_file(pkgs, error_cls):
    """Check for correctness of links"""
    errors = []
    for pkg_name in pkgs:
        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)

        # Does the homepage have http, and if so, does https work?
        if pkg_cls.homepage.startswith("http://"):
@@ -562,7 +562,7 @@ def _unknown_variants_in_directives(pkgs, error_cls):
    """Report unknown or wrong variants in directives for this package"""
    errors = []
    for pkg_name in pkgs:
        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)

        # Check "conflicts" directive
        for conflict, triggers in pkg_cls.conflicts.items():
@@ -628,15 +628,15 @@ def _unknown_variants_in_dependencies(pkgs, error_cls):
    """Report unknown dependencies and wrong variants for dependencies"""
    errors = []
    for pkg_name in pkgs:
        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
        filename = spack.repo.PATH.filename_for_package_name(pkg_name)
        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
        filename = spack.repo.path.filename_for_package_name(pkg_name)
        for dependency_name, dependency_data in pkg_cls.dependencies.items():
            # No need to analyze virtual packages
            if spack.repo.PATH.is_virtual(dependency_name):
            if spack.repo.path.is_virtual(dependency_name):
                continue

            try:
                dependency_pkg_cls = spack.repo.PATH.get_pkg_class(dependency_name)
                dependency_pkg_cls = spack.repo.path.get_pkg_class(dependency_name)
            except spack.repo.UnknownPackageError:
                # This dependency is completely missing, so report
                # and continue the analysis
@@ -675,7 +675,7 @@ def _ensure_variant_defaults_are_parsable(pkgs, error_cls):
    """Ensures that variant defaults are present and parsable from cli"""
    errors = []
    for pkg_name in pkgs:
        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
        for variant_name, entry in pkg_cls.variants.items():
            variant, _ = entry
            default_is_parsable = (
@@ -709,53 +709,27 @@ def _ensure_variant_defaults_are_parsable(pkgs, error_cls):
    return errors


@package_directives
def _ensure_variants_have_descriptions(pkgs, error_cls):
    """Ensures that all variants have a description."""
    errors = []
    for pkg_name in pkgs:
        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
        for variant_name, entry in pkg_cls.variants.items():
            variant, _ = entry
            if not variant.description:
                error_msg = "Variant '{}' in package '{}' is missing a description"
                errors.append(error_cls(error_msg.format(variant_name, pkg_name), []))

    return errors


@package_directives
def _version_constraints_are_satisfiable_by_some_version_in_repo(pkgs, error_cls):
    """Report if version constraints used in directives are not satisfiable"""
    errors = []
    for pkg_name in pkgs:
        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
        filename = spack.repo.PATH.filename_for_package_name(pkg_name)
        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
        filename = spack.repo.path.filename_for_package_name(pkg_name)
        dependencies_to_check = []
        for dependency_name, dependency_data in pkg_cls.dependencies.items():
            # Skip virtual dependencies for the time being, check on
            # their versions can be added later
            if spack.repo.PATH.is_virtual(dependency_name):
            if spack.repo.path.is_virtual(dependency_name):
                continue

            dependencies_to_check.extend([edge.spec for edge in dependency_data.values()])

        host_architecture = spack.spec.ArchSpec.default_arch()
        for s in dependencies_to_check:
            dependency_pkg_cls = None
            try:
                dependency_pkg_cls = spack.repo.PATH.get_pkg_class(s.name)
                # Some packages have hacks that might cause failures on some platform
                # Allow to explicitly set conditions to skip version checks in that case
                skip_conditions = getattr(dependency_pkg_cls, "skip_version_audit", [])
                skip_version_check = False
                for condition in skip_conditions:
                    if host_architecture.satisfies(spack.spec.Spec(condition).architecture):
                        skip_version_check = True
                        break
                assert skip_version_check or any(
                    v.intersects(s.versions) for v in list(dependency_pkg_cls.versions)
                )
                dependency_pkg_cls = spack.repo.path.get_pkg_class(s.name)
                assert any(v.intersects(s.versions) for v in list(dependency_pkg_cls.versions))
            except Exception:
                summary = (
                    "{0}: dependency on {1} cannot be satisfied " "by known versions of {1.name}"
@@ -787,7 +761,7 @@ def _analyze_variants_in_directive(pkg, constraint, directive, error_cls):
    except variant_exceptions as e:
        summary = pkg.name + ': wrong variant in "{0}" directive'
        summary = summary.format(directive)
        filename = spack.repo.PATH.filename_for_package_name(pkg.name)
        filename = spack.repo.path.filename_for_package_name(pkg.name)

        error_msg = str(e).strip()
        if isinstance(e, KeyError):
lib/spack/spack/binary_distribution.py

@@ -52,7 +52,6 @@
import spack.util.url as url_util
import spack.util.web as web_util
from spack.caches import misc_cache_location
from spack.package_prefs import get_package_dir_permissions, get_package_group
from spack.relocate_text import utf8_paths_to_single_binary_regex
from spack.spec import Spec
from spack.stage import Stage
@@ -62,22 +61,6 @@
_build_cache_keys_relative_path = "_pgp"


class BuildCacheDatabase(spack_db.Database):
    """A database for binary buildcaches.

    A database supports writing buildcache index files, in which case certain fields are not
    needed in each install record, and no locking is required. To use this feature, it provides
    ``lock_cfg=NO_LOCK``, and override the list of ``record_fields``.
    """

    record_fields = ("spec", "ref_count", "in_buildcache")

    def __init__(self, root):
        super().__init__(root, lock_cfg=spack_db.NO_LOCK)
        self._write_transaction_impl = llnl.util.lang.nullcontext
        self._read_transaction_impl = llnl.util.lang.nullcontext
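
As the later hunks in this file show, the new `BuildCacheDatabase` replaces the hand-built `spack_db.Database(...)` calls; a sketch of its construction (temporary directory illustrative):

    import tempfile

    tmpdir = tempfile.mkdtemp()
    db = BuildCacheDatabase(tmpdir)  # no locking, buildcache record fields only
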
class FetchCacheError(Exception):
|
||||
"""Error thrown when fetching the cache failed, usually a composite error list."""
|
||||
|
||||
@@ -97,14 +80,14 @@ def __init__(self, errors):
|
||||
else:
|
||||
err = errors[0]
|
||||
self.message = "{0}: {1}".format(err.__class__.__name__, str(err))
|
||||
super().__init__(self.message)
|
||||
super(FetchCacheError, self).__init__(self.message)
|
||||
|
||||
|
||||
class ListMirrorSpecsError(spack.error.SpackError):
|
||||
"""Raised when unable to retrieve list of specs from the mirror"""
|
||||
|
||||
|
||||
class BinaryCacheIndex:
|
||||
class BinaryCacheIndex(object):
|
||||
"""
|
||||
The BinaryCacheIndex tracks what specs are available on (usually remote)
|
||||
binary caches.
|
||||
@@ -207,7 +190,8 @@ def _associate_built_specs_with_mirror(self, cache_key, mirror_url):
|
||||
tmpdir = tempfile.mkdtemp()
|
||||
|
||||
try:
|
||||
db = BuildCacheDatabase(tmpdir)
|
||||
db_root_dir = os.path.join(tmpdir, "db_root")
|
||||
db = spack_db.Database(None, db_dir=db_root_dir, enable_transaction_locking=False)
|
||||
|
||||
try:
|
||||
self._index_file_cache.init_entry(cache_key)
|
||||
@@ -333,9 +317,9 @@ def update(self, with_cooldown=False):
|
||||
from each configured mirror and stored locally (both in memory and
|
||||
on disk under ``_index_cache_root``)."""
|
||||
self._init_local_index_cache()
|
||||
configured_mirror_urls = [
|
||||
m.fetch_url for m in spack.mirror.MirrorCollection(binary=True).values()
|
||||
]
|
||||
|
||||
mirrors = spack.mirror.MirrorCollection()
|
||||
configured_mirror_urls = [m.fetch_url for m in mirrors.values()]
|
||||
items_to_remove = []
|
||||
spec_cache_clear_needed = False
|
||||
spec_cache_regenerate_needed = not self._mirrors_for_spec
|
||||
@@ -533,7 +517,9 @@ class NoOverwriteException(spack.error.SpackError):
|
||||
"""Raised when a file would be overwritten"""
|
||||
|
||||
def __init__(self, file_path):
|
||||
super().__init__(f"Refusing to overwrite the following file: {file_path}")
|
||||
super(NoOverwriteException, self).__init__(
|
||||
f"Refusing to overwrite the following file: {file_path}"
|
||||
)
|
||||
|
||||
|
||||
class NoGpgException(spack.error.SpackError):
|
||||
@@ -542,7 +528,7 @@ class NoGpgException(spack.error.SpackError):
|
||||
"""
|
||||
|
||||
def __init__(self, msg):
|
||||
super().__init__(msg)
|
||||
super(NoGpgException, self).__init__(msg)
|
||||
|
||||
|
||||
class NoKeyException(spack.error.SpackError):
|
||||
@@ -551,7 +537,7 @@ class NoKeyException(spack.error.SpackError):
|
||||
"""
|
||||
|
||||
def __init__(self, msg):
|
||||
super().__init__(msg)
|
||||
super(NoKeyException, self).__init__(msg)
|
||||
|
||||
|
||||
class PickKeyException(spack.error.SpackError):
|
||||
@@ -562,7 +548,7 @@ class PickKeyException(spack.error.SpackError):
|
||||
def __init__(self, keys):
|
||||
err_msg = "Multiple keys available for signing\n%s\n" % keys
|
||||
err_msg += "Use spack buildcache create -k <key hash> to pick a key."
|
||||
super().__init__(err_msg)
|
||||
super(PickKeyException, self).__init__(err_msg)
|
||||
|
||||
|
||||
class NoVerifyException(spack.error.SpackError):
|
||||
@@ -579,7 +565,7 @@ class NoChecksumException(spack.error.SpackError):
|
||||
"""
|
||||
|
||||
def __init__(self, path, size, contents, algorithm, expected, computed):
|
||||
super().__init__(
|
||||
super(NoChecksumException, self).__init__(
|
||||
f"{algorithm} checksum failed for {path}",
|
||||
f"Expected {expected} but got {computed}. "
|
||||
f"File size = {size} bytes. Contents = {contents!r}",
|
||||
@@ -592,7 +578,7 @@ class NewLayoutException(spack.error.SpackError):
|
||||
"""
|
||||
|
||||
def __init__(self, msg):
|
||||
super().__init__(msg)
|
||||
super(NewLayoutException, self).__init__(msg)
|
||||
|
||||
|
||||
class UnsignedPackageException(spack.error.SpackError):
|
||||
@@ -719,7 +705,7 @@ def get_buildfile_manifest(spec):
|
||||
# look for them to decide if text file needs to be relocated or not
|
||||
prefixes = [d.prefix for d in spec.traverse(root=True, deptype="all") if not d.external]
|
||||
prefixes.append(spack.hooks.sbang.sbang_install_path())
|
||||
prefixes.append(str(spack.store.STORE.layout.root))
|
||||
prefixes.append(str(spack.store.layout.root))
|
||||
|
||||
# Create a giant regex that matches all prefixes
|
||||
regex = utf8_paths_to_single_binary_regex(prefixes)
|
||||
@@ -732,7 +718,7 @@ def get_buildfile_manifest(spec):
|
||||
for rel_path in visitor.symlinks:
|
||||
abs_path = os.path.join(root, rel_path)
|
||||
link = os.readlink(abs_path)
|
||||
if os.path.isabs(link) and link.startswith(spack.store.STORE.layout.root):
|
||||
if os.path.isabs(link) and link.startswith(spack.store.layout.root):
|
||||
data["link_to_relocate"].append(rel_path)
|
||||
|
||||
# Non-symlinks.
|
||||
@@ -774,15 +760,16 @@ def hashes_to_prefixes(spec):
|
||||
}
|
||||
|
||||
|
||||
def get_buildinfo_dict(spec):
|
||||
def get_buildinfo_dict(spec, rel=False):
|
||||
"""Create metadata for a tarball"""
|
||||
manifest = get_buildfile_manifest(spec)
|
||||
|
||||
return {
|
||||
"sbang_install_path": spack.hooks.sbang.sbang_install_path(),
|
||||
"buildpath": spack.store.STORE.layout.root,
|
||||
"relative_rpaths": rel,
|
||||
"buildpath": spack.store.layout.root,
|
||||
"spackprefix": spack.paths.prefix,
|
||||
"relative_prefix": os.path.relpath(spec.prefix, spack.store.STORE.layout.root),
|
||||
"relative_prefix": os.path.relpath(spec.prefix, spack.store.layout.root),
|
||||
"relocate_textfiles": manifest["text_to_relocate"],
|
||||
"relocate_binaries": manifest["binary_to_relocate"],
|
||||
"relocate_links": manifest["link_to_relocate"],
|
||||
@@ -1075,10 +1062,13 @@ def generate_package_index(cache_prefix, concurrency=32):
|
||||
tty.debug("Retrieving spec descriptor files from {0} to build index".format(cache_prefix))
|
||||
|
||||
tmpdir = tempfile.mkdtemp()
|
||||
|
||||
db = BuildCacheDatabase(tmpdir)
|
||||
db.root = None
|
||||
db_root_dir = db.database_directory
|
||||
db_root_dir = os.path.join(tmpdir, "db_root")
|
||||
db = spack_db.Database(
|
||||
None,
|
||||
db_dir=db_root_dir,
|
||||
enable_transaction_locking=False,
|
||||
record_fields=["spec", "ref_count", "in_buildcache"],
|
||||
)
|
||||
|
||||
try:
|
||||
_read_specs_and_push_index(file_list, read_fn, cache_prefix, db, db_root_dir, concurrency)
|
||||
@@ -1209,17 +1199,9 @@ def tar_add_metadata(tar: tarfile.TarFile, path: str, data: dict):
|
||||
tar.addfile(deterministic_tarinfo(tarinfo), io.BytesIO(bstring))
|
||||
|
||||
|
||||
def deterministic_tarinfo_without_buildinfo(tarinfo: tarfile.TarInfo):
|
||||
"""Skip buildinfo file when creating a tarball, and normalize other tarinfo fields."""
|
||||
if tarinfo.name.endswith("/.spack/binary_distribution"):
|
||||
return None
|
||||
|
||||
return deterministic_tarinfo(tarinfo)
|
||||
|
||||
|
||||
def _do_create_tarball(tarfile_path: str, binaries_dir: str, pkg_dir: str, buildinfo: dict):
|
||||
def _do_create_tarball(tarfile_path, binaries_dir, pkg_dir, buildinfo):
|
||||
with gzip_compressed_tarfile(tarfile_path) as tar:
|
||||
tar.add(name=binaries_dir, arcname=pkg_dir, filter=deterministic_tarinfo_without_buildinfo)
|
||||
tar.add(name=binaries_dir, arcname=pkg_dir, filter=deterministic_tarinfo)
|
||||
tar_add_metadata(tar, buildinfo_file_name(pkg_dir), buildinfo)
|
||||
|
||||
|
||||
@@ -1227,6 +1209,12 @@ class PushOptions(NamedTuple):
|
||||
#: Overwrite existing tarball/metadata files in buildcache
|
||||
force: bool = False
|
||||
|
||||
#: Whether to use relative RPATHs
|
||||
relative: bool = False
|
||||
|
||||
#: Allow absolute paths to package prefixes when creating a tarball
|
||||
allow_root: bool = False
|
||||
|
||||
#: Regenerated indices after pushing
|
||||
regenerate_index: bool = False
|
||||
|
||||
@@ -1271,7 +1259,7 @@ def _build_tarball_in_stage_dir(spec: Spec, out_url: str, stage_dir: str, option
|
||||
# without concretizing with the current spack packages
|
||||
# and preferences
|
||||
|
||||
spec_file = spack.store.STORE.layout.spec_file_path(spec)
|
||||
spec_file = spack.store.layout.spec_file_path(spec)
|
||||
specfile_name = tarball_name(spec, ".spec.json")
|
||||
specfile_path = os.path.realpath(os.path.join(cache_prefix, specfile_name))
|
||||
signed_specfile_path = "{0}.sig".format(specfile_path)
|
||||
@@ -1293,14 +1281,41 @@ def _build_tarball_in_stage_dir(spec: Spec, out_url: str, stage_dir: str, option
|
||||
raise NoOverwriteException(url_util.format(remote_specfile_path))
|
||||
|
||||
pkg_dir = os.path.basename(spec.prefix.rstrip(os.path.sep))
|
||||
workdir = os.path.join(stage_dir, pkg_dir)
|
||||
|
||||
binaries_dir = spec.prefix
|
||||
# TODO: We generally don't want to mutate any files, but when using relative
|
||||
# mode, Spack unfortunately *does* mutate rpaths and links ahead of time.
|
||||
# For now, we only make a full copy of the spec prefix when in relative mode.
|
||||
|
||||
if options.relative:
|
||||
# tarfile is used because it preserves hardlink etc best.
|
||||
binaries_dir = workdir
|
||||
temp_tarfile_name = tarball_name(spec, ".tar")
|
||||
temp_tarfile_path = os.path.join(tarfile_dir, temp_tarfile_name)
|
||||
with closing(tarfile.open(temp_tarfile_path, "w")) as tar:
|
||||
tar.add(name="%s" % spec.prefix, arcname=".")
|
||||
with closing(tarfile.open(temp_tarfile_path, "r")) as tar:
|
||||
tar.extractall(workdir)
|
||||
os.remove(temp_tarfile_path)
|
||||
else:
|
||||
binaries_dir = spec.prefix
|
||||
|
||||
# create info for later relocation and create tar
|
||||
buildinfo = get_buildinfo_dict(spec)
|
||||
buildinfo = get_buildinfo_dict(spec, options.relative)
|
||||
|
||||
# optionally make the paths in the binaries relative to each other
|
||||
# in the spack install tree before creating tarball
|
||||
if options.relative:
|
||||
make_package_relative(workdir, spec, buildinfo, options.allow_root)
|
||||
elif not options.allow_root:
|
||||
ensure_package_relocatable(buildinfo, binaries_dir)
|
||||
|
||||
_do_create_tarball(tarfile_path, binaries_dir, pkg_dir, buildinfo)
|
||||
|
||||
# remove copy of install directory
|
||||
if options.relative:
|
||||
shutil.rmtree(workdir)
|
||||
|
||||
# get the sha256 checksum of the tarball
|
||||
checksum = checksum_tarball(tarfile_path)
|
||||
|
||||
@@ -1313,7 +1328,16 @@ def _build_tarball_in_stage_dir(spec: Spec, out_url: str, stage_dir: str, option
|
||||
else:
|
||||
raise ValueError("{0} not a valid spec file type".format(spec_file))
|
||||
spec_dict["buildcache_layout_version"] = 1
|
||||
spec_dict["binary_cache_checksum"] = {"hash_algorithm": "sha256", "hash": checksum}
|
||||
bchecksum = {}
|
||||
bchecksum["hash_algorithm"] = "sha256"
|
||||
bchecksum["hash"] = checksum
|
||||
spec_dict["binary_cache_checksum"] = bchecksum
|
||||
# Add original install prefix relative to layout root to spec.json.
|
||||
# This will be used to determine is the directory layout has changed.
|
||||
buildinfo = {}
|
||||
buildinfo["relative_prefix"] = os.path.relpath(spec.prefix, spack.store.layout.root)
|
||||
buildinfo["relative_rpaths"] = options.relative
|
||||
spec_dict["buildinfo"] = buildinfo
|
||||
|
||||
with open(specfile_path, "w") as outfile:
|
||||
# Note: when using gpg clear sign, we need to avoid long lines (19995 chars).
|
||||
@@ -1370,7 +1394,7 @@ def specs_to_be_packaged(
|
||||
packageable = lambda n: not n.external and n.installed
|
||||
|
||||
# Mass install check
|
||||
with spack.store.STORE.db.read_transaction():
|
||||
with spack.store.db.read_transaction():
|
||||
return list(filter(packageable, nodes))
|
||||
|
||||
|
||||
@@ -1472,9 +1496,8 @@ def download_tarball(spec, unsigned=False, mirrors_for_spec=None):
|
||||
"signature_verified": "true-if-binary-pkg-was-already-verified"
|
||||
}
|
||||
"""
|
||||
configured_mirrors = spack.mirror.MirrorCollection(binary=True).values()
|
||||
if not configured_mirrors:
|
||||
tty.die("Please add a spack mirror to allow download of pre-compiled packages.")
|
||||
if not spack.mirror.MirrorCollection():
|
||||
tty.die("Please add a spack mirror to allow " + "download of pre-compiled packages.")
|
||||
|
||||
tarball = tarball_path_name(spec, ".spack")
|
||||
specfile_prefix = tarball_name(spec, ".spec")
|
||||
@@ -1491,7 +1514,11 @@ def download_tarball(spec, unsigned=False, mirrors_for_spec=None):
|
||||
# we need was in an un-indexed mirror. No need to check any
|
||||
# mirror for the spec twice though.
|
||||
try_first = [i["mirror_url"] for i in mirrors_for_spec] if mirrors_for_spec else []
|
||||
try_next = [i.fetch_url for i in configured_mirrors if i.fetch_url not in try_first]
|
||||
try_next = [
|
||||
i.fetch_url
|
||||
for i in spack.mirror.MirrorCollection().values()
|
||||
if i.fetch_url not in try_first
|
||||
]

for url in try_first + try_next:
mirrors_to_try.append(
@@ -1569,6 +1596,41 @@ def download_tarball(spec, unsigned=False, mirrors_for_spec=None):
return None


def make_package_relative(workdir, spec, buildinfo, allow_root):
"""
Change paths in binaries to relative paths. Change absolute symlinks
to relative symlinks.
"""
prefix = spec.prefix
old_layout_root = buildinfo["buildpath"]
orig_path_names = list()
cur_path_names = list()
for filename in buildinfo["relocate_binaries"]:
orig_path_names.append(os.path.join(prefix, filename))
cur_path_names.append(os.path.join(workdir, filename))

platform = spack.platforms.by_name(spec.platform)
if "macho" in platform.binary_formats:
relocate.make_macho_binaries_relative(cur_path_names, orig_path_names, old_layout_root)

if "elf" in platform.binary_formats:
relocate.make_elf_binaries_relative(cur_path_names, orig_path_names, old_layout_root)

allow_root or relocate.ensure_binaries_are_relocatable(cur_path_names)
orig_path_names = list()
cur_path_names = list()
for linkname in buildinfo.get("relocate_links", []):
orig_path_names.append(os.path.join(prefix, linkname))
cur_path_names.append(os.path.join(workdir, linkname))
relocate.make_link_relative(cur_path_names, orig_path_names)


def ensure_package_relocatable(buildinfo, binaries_dir):
"""Check if package binaries are relocatable."""
binaries = [os.path.join(binaries_dir, f) for f in buildinfo["relocate_binaries"]]
relocate.ensure_binaries_are_relocatable(binaries)


def dedupe_hardlinks_if_necessary(root, buildinfo):
"""Updates a buildinfo dict for old archives that did
not dedupe hardlinks. De-duping hardlinks is necessary
@@ -1607,7 +1669,7 @@ def relocate_package(spec):
"""
workdir = str(spec.prefix)
buildinfo = read_buildinfo_file(workdir)
new_layout_root = str(spack.store.STORE.layout.root)
new_layout_root = str(spack.store.layout.root)
new_prefix = str(spec.prefix)
new_rel_prefix = str(os.path.relpath(new_prefix, new_layout_root))
new_spack_prefix = str(spack.paths.prefix)
@@ -1792,27 +1854,6 @@ def _extract_inner_tarball(spec, filename, extract_to, unsigned, remote_checksum
return tarfile_path


def _tar_strip_component(tar: tarfile.TarFile, prefix: str):
"""Strip the top-level directory `prefix` from the member names in a tarfile."""
# Including trailing /, otherwise we end up with absolute paths.
regex = re.compile(re.escape(prefix) + "/*")

# Remove the top-level directory from the member (link)names.
# Note: when a tarfile is created, relative in-prefix symlinks are
# expanded to matching member names of tarfile entries. So, we have
# to ensure that those are updated too.
# Absolute symlinks are copied verbatim -- relocation should take care of
# them.
for m in tar.getmembers():
result = regex.match(m.name)
assert result is not None
m.name = m.name[result.end() :]
if m.linkname:
result = regex.match(m.linkname)
if result:
m.linkname = m.linkname[result.end() :]
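
As an editorial aside, a minimal self-contained sketch of the same prefix-stripping idea, using only the standard library (the archive contents and the "pkg-1.0" prefix are hypothetical):

import io
import re
import tarfile

# Build a small in-memory tarball whose members live under "pkg-1.0/".
buf = io.BytesIO()
with tarfile.open(fileobj=buf, mode="w") as tar:
    info = tarfile.TarInfo("pkg-1.0/bin/tool")
    data = b"#!/bin/sh\n"
    info.size = len(data)
    tar.addfile(info, io.BytesIO(data))

buf.seek(0)
with tarfile.open(fileobj=buf, mode="r") as tar:
    # Match the prefix including any trailing slashes, then drop it.
    regex = re.compile(re.escape("pkg-1.0") + "/*")
    for m in tar.getmembers():
        m.name = m.name[regex.match(m.name).end():]
    print([m.name for m in tar.getmembers()])  # ['bin/tool']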


def extract_tarball(spec, download_result, unsigned=False, force=False):
"""
extract binary tarball for given package into install area
@@ -1823,14 +1864,6 @@ def extract_tarball(spec, download_result, unsigned=False, force=False):
else:
raise NoOverwriteException(str(spec.prefix))

# Create the install prefix
fsys.mkdirp(
spec.prefix,
mode=get_package_dir_permissions(spec),
group=get_package_group(spec),
default_perms="parents",
)

specfile_path = download_result["specfile_stage"].save_filename

with open(specfile_path, "r") as inputfile:
@@ -1884,58 +1917,58 @@ def extract_tarball(spec, download_result, unsigned=False, force=False):
tarfile_path, size, contents, "sha256", expected, local_checksum
)

try:
with closing(tarfile.open(tarfile_path, "r")) as tar:
# Remove install prefix from tarfile to extract directly into spec.prefix
_tar_strip_component(tar, prefix=_ensure_common_prefix(tar))
tar.extractall(path=spec.prefix)
except Exception:
shutil.rmtree(spec.prefix, ignore_errors=True)
_delete_staged_downloads(download_result)
raise
new_relative_prefix = str(os.path.relpath(spec.prefix, spack.store.layout.root))
# if the original relative prefix is in the spec file use it
buildinfo = spec_dict.get("buildinfo", {})
old_relative_prefix = buildinfo.get("relative_prefix", new_relative_prefix)
rel = buildinfo.get("relative_rpaths")
info = "old relative prefix %s\nnew relative prefix %s\nrelative rpaths %s"
tty.debug(info % (old_relative_prefix, new_relative_prefix, rel), level=2)

# Extract the tarball into the store root, presumably on the same filesystem.
# The directory created is the base directory name of the old prefix.
# Moving the old prefix name to the new prefix location should preserve
# hard links and symbolic links.
extract_tmp = os.path.join(spack.store.layout.root, ".tmp")
mkdirp(extract_tmp)
extracted_dir = os.path.join(extract_tmp, old_relative_prefix.split(os.path.sep)[-1])

with closing(tarfile.open(tarfile_path, "r")) as tar:
try:
tar.extractall(path=extract_tmp)
except Exception as e:
_delete_staged_downloads(download_result)
shutil.rmtree(extracted_dir)
raise e
try:
shutil.move(extracted_dir, spec.prefix)
except Exception as e:
_delete_staged_downloads(download_result)
shutil.rmtree(extracted_dir)
raise e
os.remove(tarfile_path)
os.remove(specfile_path)

try:
relocate_package(spec)
except Exception as e:
shutil.rmtree(spec.prefix, ignore_errors=True)
shutil.rmtree(spec.prefix)
raise e
else:
manifest_file = os.path.join(
spec.prefix,
spack.store.STORE.layout.metadata_dir,
spack.store.STORE.layout.manifest_file_name,
spec.prefix, spack.store.layout.metadata_dir, spack.store.layout.manifest_file_name
)
if not os.path.exists(manifest_file):
spec_id = spec.format("{name}/{hash:7}")
tty.warn("No manifest file in tarball for spec %s" % spec_id)
finally:
if tmpdir:
shutil.rmtree(tmpdir, ignore_errors=True)
shutil.rmtree(tmpdir)
if os.path.exists(filename):
os.remove(filename)
_delete_staged_downloads(download_result)


def _ensure_common_prefix(tar: tarfile.TarFile) -> str:
# Get the shortest length directory.
common_prefix = min((e.name for e in tar.getmembers() if e.isdir()), key=len, default=None)

if common_prefix is None:
raise ValueError("Tarball does not contain a common prefix")

# Validate that each file starts with the prefix
for member in tar.getmembers():
if not member.name.startswith(common_prefix):
raise ValueError(
f"Tarball contains file {member.name} outside of prefix {common_prefix}"
)

return common_prefix
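
Read together, the two helpers above are meant to compose as in this sketch, which mirrors the extraction path shown earlier in the hunk; `tarfile_path` and the destination directory are hypothetical placeholders:

with closing(tarfile.open(tarfile_path, "r")) as tar:
    top = _ensure_common_prefix(tar)   # raises ValueError on stray members
    _tar_strip_component(tar, prefix=top)
    tar.extractall(path="/tmp/spec-prefix")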


def install_root_node(spec, unsigned=False, force=False, sha256=None):
"""Install the root node of a concrete spec from a buildcache.

@@ -1982,7 +2015,7 @@ def install_root_node(spec, unsigned=False, force=False, sha256=None):
tty.msg('Installing "{0}" from a buildcache'.format(spec.format()))
extract_tarball(spec, download_result, unsigned, force)
spack.hooks.post_install(spec, False)
spack.store.STORE.db.add(spec, spack.store.STORE.layout)
spack.store.db.add(spec, spack.store.layout)


def install_single_spec(spec, unsigned=False, force=False):
@@ -2007,9 +2040,7 @@ def try_direct_fetch(spec, mirrors=None):
specfile_is_signed = False
found_specs = []

binary_mirrors = spack.mirror.MirrorCollection(mirrors=mirrors, binary=True).values()

for mirror in binary_mirrors:
for mirror in spack.mirror.MirrorCollection(mirrors=mirrors).values():
buildcache_fetch_url_json = url_util.join(
mirror.fetch_url, _build_cache_relative_path, specfile_name
)
@@ -2072,7 +2103,7 @@ def get_mirrors_for_spec(spec=None, mirrors_to_check=None, index_only=False):
if spec is None:
return []

if not spack.mirror.MirrorCollection(mirrors=mirrors_to_check, binary=True):
if not spack.mirror.MirrorCollection(mirrors=mirrors_to_check):
tty.debug("No Spack mirrors are currently configured")
return {}

@@ -2111,7 +2142,7 @@ def clear_spec_cache():

def get_keys(install=False, trust=False, force=False, mirrors=None):
"""Get pgp public keys available on mirror with suffix .pub"""
mirror_collection = mirrors or spack.mirror.MirrorCollection(binary=True)
mirror_collection = mirrors or spack.mirror.MirrorCollection()

if not mirror_collection:
tty.die("Please add a spack mirror to allow " + "download of build caches.")
@@ -2272,7 +2303,7 @@ def check_specs_against_mirrors(mirrors, specs, output_file=None):

"""
rebuilds = {}
for mirror in spack.mirror.MirrorCollection(mirrors, binary=True).values():
for mirror in spack.mirror.MirrorCollection(mirrors).values():
tty.debug("Checking for built specs at {0}".format(mirror.fetch_url))

rebuild_list = []
@@ -2316,7 +2347,7 @@ def _download_buildcache_entry(mirror_root, descriptions):


def download_buildcache_entry(file_descriptions, mirror_url=None):
if not mirror_url and not spack.mirror.MirrorCollection(binary=True):
if not mirror_url and not spack.mirror.MirrorCollection():
tty.die(
"Please provide or add a spack mirror to allow " + "download of buildcache entries."
)
@@ -2325,7 +2356,7 @@ def download_buildcache_entry(file_descriptions, mirror_url=None):
mirror_root = os.path.join(mirror_url, _build_cache_relative_path)
return _download_buildcache_entry(mirror_root, file_descriptions)

for mirror in spack.mirror.MirrorCollection(binary=True).values():
for mirror in spack.mirror.MirrorCollection().values():
mirror_root = os.path.join(mirror.fetch_url, _build_cache_relative_path)

if _download_buildcache_entry(mirror_root, file_descriptions):
@@ -2364,7 +2395,7 @@ def download_single_spec(concrete_spec, destination, mirror_url=None):
return download_buildcache_entry(files_to_fetch, mirror_url)


class BinaryCacheQuery:
class BinaryCacheQuery(object):
"""Callable object to query if a spec is in a binary cache"""

def __init__(self, all_architectures):
@@ -2383,12 +2414,22 @@ def __init__(self, all_architectures):

self.possible_specs = specs

def __call__(self, spec: Spec, **kwargs):
def __call__(self, spec, **kwargs):
"""
Args:
spec: The spec being searched for
spec (str): The spec being searched for in its string representation or hash.
"""
return [s for s in self.possible_specs if s.satisfies(spec)]
matches = []
if spec.startswith("/"):
# Matching a DAG hash
query_hash = spec.replace("/", "")
for candidate_spec in self.possible_specs:
if candidate_spec.dag_hash().startswith(query_hash):
matches.append(candidate_spec)
else:
# Matching a spec constraint
matches = [s for s in self.possible_specs if s.satisfies(spec)]
return matches
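
Usage sketch for the string-based signature on this side of the hunk (the spec strings are hypothetical):

query = BinaryCacheQuery(all_architectures=False)
query("/abc1234")    # a leading "/" selects matching by DAG-hash prefix
query("zlib@1.2:")   # anything else is treated as a spec constraint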


class FetchIndexError(Exception):

@@ -4,7 +4,7 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Functions and classes needed to bootstrap Spack itself."""

from .config import ensure_bootstrap_configuration, is_bootstrapping, store_path
from .config import ensure_bootstrap_configuration, is_bootstrapping
from .core import all_core_root_specs, ensure_core_dependencies, ensure_patchelf_in_path_or_raise
from .environment import BootstrapEnvironment, ensure_environment_dependencies
from .status import status_message
@@ -18,5 +18,4 @@
"ensure_environment_dependencies",
"BootstrapEnvironment",
"status_message",
"store_path",
]

@@ -50,7 +50,7 @@ def _try_import_from_store(
# We have to run as part of this python interpreter
query_spec += " ^" + spec_for_current_python()

installed_specs = spack.store.STORE.db.query(query_spec, installed=True)
installed_specs = spack.store.db.query(query_spec, installed=True)

for candidate_spec in installed_specs:
pkg = candidate_spec["python"].package
@@ -183,7 +183,7 @@ def _executables_in_store(
executables_str = ", ".join(executables)
msg = "[BOOTSTRAP EXECUTABLES {0}] Try installed specs with query '{1}'"
tty.debug(msg.format(executables_str, query_spec))
installed_specs = spack.store.STORE.db.query(query_spec, installed=True)
installed_specs = spack.store.db.query(query_spec, installed=True)
if installed_specs:
for concrete_spec in installed_specs:
bin_dir = concrete_spec.prefix.bin

@@ -124,9 +124,9 @@ def _read_and_sanitize_configuration() -> Dict[str, Any]:
def _bootstrap_config_scopes() -> Sequence["spack.config.ConfigScope"]:
tty.debug("[BOOTSTRAP CONFIG SCOPE] name=_builtin")
config_scopes: MutableSequence["spack.config.ConfigScope"] = [
spack.config.InternalConfigScope("_builtin", spack.config.CONFIG_DEFAULTS)
spack.config.InternalConfigScope("_builtin", spack.config.config_defaults)
]
configuration_paths = (spack.config.CONFIGURATION_DEFAULTS_PATH, ("bootstrap", _config_path()))
configuration_paths = (spack.config.configuration_defaults_path, ("bootstrap", _config_path()))
for name, path in configuration_paths:
platform = spack.platforms.host().name
platform_scope = spack.config.ConfigScope(
@@ -150,19 +150,18 @@ def _add_compilers_if_missing() -> None:

@contextlib.contextmanager
def _ensure_bootstrap_configuration() -> Generator:
spack.store.ensure_singleton_created()
bootstrap_store_path = store_path()
user_configuration = _read_and_sanitize_configuration()
with spack.environment.no_active_environment():
with spack.platforms.prevent_cray_detection(), spack.platforms.use_platform(
spack.platforms.real_host()
), spack.repo.use_repositories(spack.paths.packages_path):
), spack.repo.use_repositories(spack.paths.packages_path), spack.store.use_store(
bootstrap_store_path
):
# Default configuration scopes excluding command line
# and builtin but accounting for platform specific scopes
config_scopes = _bootstrap_config_scopes()
with spack.config.use_configuration(*config_scopes), spack.store.use_store(
bootstrap_store_path, extra_data={"padded_length": 0}
):
with spack.config.use_configuration(*config_scopes):
# We may need to compile code from sources, so ensure we
# have compilers for the current platform
_add_compilers_if_missing()

@@ -476,22 +476,15 @@ def ensure_executables_in_path_or_raise(
def _add_externals_if_missing() -> None:
search_list = [
# clingo
spack.repo.PATH.get_pkg_class("cmake"),
spack.repo.PATH.get_pkg_class("bison"),
spack.repo.path.get_pkg_class("cmake"),
spack.repo.path.get_pkg_class("bison"),
# GnuPG
spack.repo.PATH.get_pkg_class("gawk"),
# develop deps
spack.repo.PATH.get_pkg_class("git"),
spack.repo.path.get_pkg_class("gawk"),
]
if IS_WINDOWS:
search_list.append(spack.repo.PATH.get_pkg_class("winbison"))
externals = spack.detection.by_executable(search_list)
# System git is typically deprecated, so mark as non-buildable to force it as external
non_buildable_externals = {k: externals.pop(k) for k in ("git",) if k in externals}
spack.detection.update_configuration(externals, scope="bootstrap", buildable=True)
spack.detection.update_configuration(
non_buildable_externals, scope="bootstrap", buildable=False
)
search_list.append(spack.repo.path.get_pkg_class("winbison"))
detected_packages = spack.detection.by_executable(search_list)
spack.detection.update_configuration(detected_packages, scope="bootstrap")


def clingo_root_spec() -> str:

@@ -23,7 +23,6 @@

from ._common import _root_spec
from .config import root_path, spec_for_current_python, store_path
from .core import _add_externals_if_missing


class BootstrapEnvironment(spack.environment.Environment):
@@ -176,17 +175,16 @@ def black_root_spec() -> str:

def flake8_root_spec() -> str:
"""Return the root spec used to bootstrap flake8"""
return _root_spec("py-flake8@3.8.2:")
return _root_spec("py-flake8")


def pytest_root_spec() -> str:
"""Return the root spec used to bootstrap pytest"""
return _root_spec("py-pytest@6.2.4:")
return _root_spec("py-pytest")


def ensure_environment_dependencies() -> None:
"""Ensure Spack dependencies from the bootstrap environment are installed and ready to use"""
_add_externals_if_missing()
with BootstrapEnvironment() as env:
env.update_installations()
env.update_syspath_and_environ()

@@ -148,7 +148,7 @@ class MakeExecutable(Executable):

def __init__(self, name, jobs, **kwargs):
supports_jobserver = kwargs.pop("supports_jobserver", True)
super().__init__(name, **kwargs)
super(MakeExecutable, self).__init__(name, **kwargs)
self.supports_jobserver = supports_jobserver
self.jobs = jobs

@@ -175,7 +175,7 @@ def __call__(self, *args, **kwargs):
if jobs_env_jobs is not None:
kwargs["extra_env"] = {jobs_env: str(jobs_env_jobs)}

return super().__call__(*args, **kwargs)
return super(MakeExecutable, self).__call__(*args, **kwargs)
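
For context, the two spellings exchanged in this hunk are equivalent at runtime on Python 3; the explicit form is only needed for Python 2 compatibility. A minimal illustration:

class Base:
    def __init__(self, name):
        self.name = name

class Child(Base):
    def __init__(self, name):
        super().__init__(name)               # Python 3 zero-argument form
        # super(Child, self).__init__(name)  # legacy spelling, same effect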


def _on_cray():
@@ -1256,8 +1256,9 @@ def make_stack(tb, stack=None):
func = getattr(obj, tb.tb_frame.f_code.co_name, "")
if func:
typename, *_ = func.__qualname__.partition(".")
if isinstance(obj, CONTEXT_BASES) and typename not in basenames:
break

if isinstance(obj, CONTEXT_BASES) and typename not in basenames:
break
else:
return None

@@ -1331,7 +1332,7 @@ class ChildError(InstallError):
build_errors = [("spack.util.executable", "ProcessError")]

def __init__(self, msg, module, classname, traceback_string, log_name, log_type, context):
super().__init__(msg)
super(ChildError, self).__init__(msg)
self.module = module
self.name = classname
self.traceback = traceback_string

@@ -39,7 +39,7 @@ def check_paths(path_list, filetype, predicate):
check_paths(pkg.sanity_check_is_file, "file", os.path.isfile)
check_paths(pkg.sanity_check_is_dir, "directory", os.path.isdir)

ignore_file = llnl.util.lang.match_predicate(spack.store.STORE.layout.hidden_file_regexes)
ignore_file = llnl.util.lang.match_predicate(spack.store.layout.hidden_file_regexes)
if all(map(ignore_file, os.listdir(pkg.prefix))):
msg = "Install failed for {0}. Nothing was installed!"
raise spack.installer.InstallError(msg.format(pkg.name))

@@ -55,8 +55,7 @@ def flags_to_build_system_args(self, flags):
setattr(self, "configure_flag_args", [])
for flag, values in flags.items():
if values:
var_name = "LIBS" if flag == "ldlibs" else flag.upper()
values_str = "{0}={1}".format(var_name, " ".join(values))
values_str = "{0}={1}".format(flag.upper(), " ".join(values))
self.configure_flag_args.append(values_str)
# Spack's fflags are meant for both F77 and FC, therefore we
# additionally set FCFLAGS if required.
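
Illustrative only: the removed branch mapped Spack's ldlibs flags onto the autoconf LIBS variable rather than a literal LDLIBS, so a flag dict would translate roughly as follows (the flag values are hypothetical):

flags = {"cflags": ["-O2"], "ldlibs": ["-lm"]}
args = []
for flag, values in flags.items():
    if values:
        var_name = "LIBS" if flag == "ldlibs" else flag.upper()
        args.append("{0}={1}".format(var_name, " ".join(values)))
print(args)  # ['CFLAGS=-O2', 'LIBS=-lm']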

@@ -162,6 +162,17 @@ def initconfig_compiler_entries(self):
libs_string = libs_format_string.format(lang)
entries.append(cmake_cache_string(libs_string, libs_flags))

# Set the generator in the cached config
if self.spec.satisfies("generator=make"):
entries.append(cmake_cache_string("CMAKE_GENERATOR", "Unix Makefiles"))
if self.spec.satisfies("generator=ninja"):
entries.append(cmake_cache_string("CMAKE_GENERATOR", "Ninja"))
entries.append(
cmake_cache_string(
"CMAKE_MAKE_PROGRAM", "{0}/ninja".format(spec["ninja"].prefix.bin)
)
)

return entries

def initconfig_mpi_entries(self):
@@ -241,7 +252,7 @@ def initconfig_hardware_entries(self):
entries.append(cmake_cache_path("CUDA_TOOLKIT_ROOT_DIR", cudatoolkitdir))

archs = spec.variants["cuda_arch"].value
if archs[0] != "none":
if archs != "none":
arch_str = ";".join(archs)
entries.append(
cmake_cache_string("CMAKE_CUDA_ARCHITECTURES", "{0}".format(arch_str))
@@ -258,7 +269,7 @@ def initconfig_hardware_entries(self):
cmake_cache_path("HIP_CXX_COMPILER", "{0}".format(self.spec["hip"].hipcc))
)
archs = self.spec.variants["amdgpu_target"].value
if archs[0] != "none":
if archs != "none":
arch_str = ";".join(archs)
entries.append(
cmake_cache_string("CMAKE_HIP_ARCHITECTURES", "{0}".format(arch_str))
@@ -278,7 +289,6 @@ def std_initconfig_entries(self):
"# CMake executable path: {0}".format(self.pkg.spec["cmake"].command.path),
"#------------------{0}\n".format("-" * 60),
cmake_cache_path("CMAKE_PREFIX_PATH", cmake_prefix_path),
self.define_cmake_cache_from_variant("CMAKE_BUILD_TYPE", "build_type"),
]

def initconfig_package_entries(self):
@@ -301,7 +311,7 @@ def initconfig(self, pkg, spec, prefix):

@property
def std_cmake_args(self):
args = super().std_cmake_args
args = super(CachedCMakeBuilder, self).std_cmake_args
args.extend(["-C", self.cache_path])
return args

@@ -5,7 +5,6 @@
import collections.abc
import inspect
import os
import pathlib
import platform
import re
import sys
@@ -16,6 +15,7 @@
import spack.build_environment
import spack.builder
import spack.package_base
import spack.util.path
from spack.directives import build_system, conflicts, depends_on, variant
from spack.multimethod import when

@@ -248,8 +248,7 @@ def std_cmake_args(self):
@staticmethod
def std_args(pkg, generator=None):
"""Computes the standard cmake arguments for a generic package"""
default_generator = "Ninja" if sys.platform == "win32" else "Unix Makefiles"
generator = generator or default_generator
generator = generator or "Unix Makefiles"
valid_primary_generators = ["Unix Makefiles", "Ninja"]
primary_generator = _extract_primary_generator(generator)
if primary_generator not in valid_primary_generators:
@@ -272,7 +271,7 @@ def std_args(pkg, generator=None):
args = [
"-G",
generator,
define("CMAKE_INSTALL_PREFIX", pathlib.Path(pkg.prefix).as_posix()),
define("CMAKE_INSTALL_PREFIX", pkg.prefix),
define("CMAKE_BUILD_TYPE", build_type),
define("BUILD_TESTING", pkg.run_tests),
]
@@ -297,46 +296,8 @@ def std_args(pkg, generator=None):
define("CMAKE_PREFIX_PATH", spack.build_environment.get_cmake_prefix_path(pkg)),
]
)

return args

@staticmethod
def define_cuda_architectures(pkg):
"""Returns the str ``-DCMAKE_CUDA_ARCHITECTURES:STRING=(expanded cuda_arch)``.

``cuda_arch`` is a variant composed of a list of target CUDA architectures and
it is declared in the cuda package.

This method is a no-op for cmake<3.18 and when the ``cuda_arch`` variant is not set.

"""
cmake_flag = str()
if "cuda_arch" in pkg.spec.variants and pkg.spec.satisfies("^cmake@3.18:"):
cmake_flag = CMakeBuilder.define(
"CMAKE_CUDA_ARCHITECTURES", pkg.spec.variants["cuda_arch"].value
)

return cmake_flag

@staticmethod
def define_hip_architectures(pkg):
"""Returns the str ``-DCMAKE_HIP_ARCHITECTURES:STRING=(expanded amdgpu_target)``.

``amdgpu_target`` is a variant composed of a list of the target HIP
architectures and it is declared in the rocm package.

This method is a no-op for cmake<3.21 and when the ``amdgpu_target`` variant is
not set.

"""
cmake_flag = str()
if "amdgpu_target" in pkg.spec.variants and pkg.spec.satisfies("^cmake@3.21:"):
cmake_flag = CMakeBuilder.define(
"CMAKE_HIP_ARCHITECTURES", pkg.spec.variants["amdgpu_target"].value
)

return cmake_flag
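
A rough illustration of the flag these removed helpers produce, matching their docstrings (the architecture values are hypothetical):

# With a spec carrying cuda_arch=70,80 and a new-enough cmake dependency:
flag = CMakeBuilder.define("CMAKE_CUDA_ARCHITECTURES", ["70", "80"])
# flag == "-DCMAKE_CUDA_ARCHITECTURES:STRING=70;80"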

@staticmethod
def define(cmake_var, value):
"""Return a CMake command line argument that defines a variable.

@@ -102,10 +102,11 @@ def cuda_flags(arch_list):

depends_on("cuda@11.0:", when="cuda_arch=80")
depends_on("cuda@11.1:", when="cuda_arch=86")
depends_on("cuda@11.4:", when="cuda_arch=87")
depends_on("cuda@11.8:", when="cuda_arch=89")

depends_on("cuda@12.0:", when="cuda_arch=90")
depends_on("cuda@11.4:", when="cuda_arch=87")

depends_on("cuda@11.8:", when="cuda_arch=89")
depends_on("cuda@11.8:", when="cuda_arch=90")

# From the NVIDIA install guide we know of conflicts for particular
# platforms (linux, darwin), architectures (x86, powerpc) and compilers

@@ -209,5 +209,5 @@ def install(self, pkg, spec, prefix):
def check(self):
"""Search Meson-generated files for the target ``test`` and run it if found."""
with fs.working_dir(self.build_directory):
self.pkg._if_ninja_target_execute("test")
self.pkg._if_ninja_target_execute("check")
self._if_ninja_target_execute("test")
self._if_ninja_target_execute("check")

@@ -121,7 +121,7 @@ def setup_run_environment(self, env):
$ source {prefix}/{component}/{version}/env/vars.sh
"""
# Only if environment modifications are desired (default is +envmods)
if "~envmods" not in self.spec:
if "+envmods" in self.spec:
env.extend(
EnvironmentModifications.from_sourcing_file(
join_path(self.component_prefix, "env", "vars.sh")
@@ -175,7 +175,7 @@ def libs(self):
return find_libraries("*", root=lib_path, shared=True, recursive=True)


class IntelOneApiStaticLibraryList:
class IntelOneApiStaticLibraryList(object):
"""Provides ld_flags when static linking is needed

Oneapi puts static and dynamic libraries in the same directory, so

@@ -23,14 +23,13 @@
import spack.store
from spack.directives import build_system, depends_on, extends, maintainers
from spack.error import NoHeadersError, NoLibrariesError, SpecError
from spack.install_test import test_part
from spack.version import Version

from ._checks import BaseBuilder, execute_install_time_tests


class PythonExtension(spack.package_base.PackageBase):
maintainers("adamjstewart")
maintainers("adamjstewart", "pradyunsg")

@property
def import_modules(self):
@@ -168,65 +167,18 @@ def remove_files_from_view(self, view, merge_map):

view.remove_files(to_remove)

def test_imports(self):
def test(self):
"""Attempts to import modules of the installed package."""

# Make sure we are importing the installed modules,
# not the ones in the source directory
python = inspect.getmodule(self).python
for module in self.import_modules:
with test_part(
self,
f"test_imports_{module}",
purpose=f"checking import of {module}",
self.run_test(
inspect.getmodule(self).python.path,
["-c", "import {0}".format(module)],
purpose="checking import of {0}".format(module),
work_dir="spack-test",
):
python("-c", f"import {module}")
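
The new-style loop gives each module import its own named test part, so one failing import is recorded individually instead of aborting the whole test run; a sketch of the resulting behavior, assuming a package whose import_modules is ["numpy"]:

for module in ["numpy"]:
    with test_part(self, f"test_imports_{module}", purpose=f"checking import of {module}"):
        python("-c", f"import {module}")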

def update_external_dependencies(self, extendee_spec=None):
"""
Ensure all external python packages have a python dependency

If another package in the DAG depends on python, we use that
python for the dependency of the external. If not, we assume
that the external PythonPackage is installed into the same
directory as the python it depends on.
"""
# TODO: Include this in the solve, rather than instantiating post-concretization
if "python" not in self.spec:
if extendee_spec:
python = extendee_spec
elif "python" in self.spec.root:
python = self.spec.root["python"]
else:
python = self.get_external_python_for_prefix()
if not python.concrete:
repo = spack.repo.PATH.repo_for_pkg(python)
python.namespace = repo.namespace

# Ensure architecture information is present
if not python.architecture:
host_platform = spack.platforms.host()
host_os = host_platform.operating_system("default_os")
host_target = host_platform.target("default_target")
python.architecture = spack.spec.ArchSpec(
(str(host_platform), str(host_os), str(host_target))
)
else:
if not python.architecture.platform:
python.architecture.platform = spack.platforms.host()
if not python.architecture.os:
python.architecture.os = "default_os"
if not python.architecture.target:
python.architecture.target = archspec.cpu.host().family.name

# Ensure compiler information is present
if not python.compiler:
python.compiler = self.spec.compiler

python.external_path = self.spec.external_path
python._mark_concrete()
self.spec.add_dependency_edge(python, deptypes=("build", "link", "run"), virtuals=())
)


class PythonPackage(PythonExtension):
@@ -273,6 +225,51 @@ def list_url(cls):
name = cls.pypi.split("/")[0]
return "https://pypi.org/simple/" + name + "/"

def update_external_dependencies(self, extendee_spec=None):
"""
Ensure all external python packages have a python dependency

If another package in the DAG depends on python, we use that
python for the dependency of the external. If not, we assume
that the external PythonPackage is installed into the same
directory as the python it depends on.
"""
# TODO: Include this in the solve, rather than instantiating post-concretization
if "python" not in self.spec:
if extendee_spec:
python = extendee_spec
elif "python" in self.spec.root:
python = self.spec.root["python"]
else:
python = self.get_external_python_for_prefix()
if not python.concrete:
repo = spack.repo.path.repo_for_pkg(python)
python.namespace = repo.namespace

# Ensure architecture information is present
if not python.architecture:
host_platform = spack.platforms.host()
host_os = host_platform.operating_system("default_os")
host_target = host_platform.target("default_target")
python.architecture = spack.spec.ArchSpec(
(str(host_platform), str(host_os), str(host_target))
)
else:
if not python.architecture.platform:
python.architecture.platform = spack.platforms.host()
if not python.architecture.os:
python.architecture.os = "default_os"
if not python.architecture.target:
python.architecture.target = archspec.cpu.host().family.name

# Ensure compiler information is present
if not python.compiler:
python.compiler = self.spec.compiler

python.external_path = self.spec.external_path
python._mark_concrete()
self.spec.add_dependency_edge(python, deptypes=("build", "link", "run"))

def get_external_python_for_prefix(self):
"""
For an external package that extends python, find the most likely spec for the python
@@ -286,7 +283,7 @@ def get_external_python_for_prefix(self):
spack.spec.Spec: The external Spec for python most likely to be compatible with self.spec
"""
python_externals_installed = [
s for s in spack.store.STORE.db.query("python") if s.prefix == self.spec.external_path
s for s in spack.store.db.query("python") if s.prefix == self.spec.external_path
]
if python_externals_installed:
return python_externals_installed[0]
@@ -301,7 +298,7 @@ def get_external_python_for_prefix(self):
return python_externals_configured[0]

python_externals_detection = spack.detection.by_executable(
[spack.repo.PATH.get_pkg_class("python")], path_hints=[self.spec.external_path]
[spack.repo.path.get_pkg_class("python")], path_hints=[self.spec.external_path]
)

python_externals_detected = [
@@ -401,8 +398,7 @@ def build_directory(self):

def config_settings(self, spec, prefix):
"""Configuration settings to be passed to the PEP 517 build backend.

Requires pip 22.1 or newer.
Requires pip 22.1+, which requires Python 3.7+.

Args:
spec (spack.spec.Spec): build spec
@@ -416,8 +412,6 @@ def config_settings(self, spec, prefix):
def install_options(self, spec, prefix):
"""Extra arguments to be supplied to the setup.py install command.

Requires pip 23.0 or older.

Args:
spec (spack.spec.Spec): build spec
prefix (spack.util.prefix.Prefix): installation prefix
@@ -431,8 +425,6 @@ def global_options(self, spec, prefix):
"""Extra global options to be supplied to the setup.py call before the install
or bdist_wheel command.

Deprecated in pip 23.1.

Args:
spec (spack.spec.Spec): build spec
prefix (spack.util.prefix.Prefix): installation prefix

@@ -28,7 +28,7 @@ class QMakePackage(spack.package_base.PackageBase):

build_system("qmake")

depends_on("qmake", type="build", when="build_system=qmake")
depends_on("qt", type="build", when="build_system=qmake")


@spack.builder.builder("qmake")

@@ -140,6 +140,8 @@ class ROCmPackage(PackageBase):
depends_on("hsa-rocr-dev", when="+rocm")
depends_on("hip +rocm", when="+rocm")

conflicts("^blt@:0.3.6", when="+rocm")

# need amd gpu type for rocm builds
conflicts("amdgpu_target=none", when="+rocm")


@@ -7,14 +7,13 @@
import re

import llnl.util.tty as tty
from llnl.util.filesystem import find, working_dir
from llnl.util.filesystem import find, join_path, working_dir

import spack.builder
import spack.install_test
import spack.package_base
from spack.directives import build_system, depends_on, extends
from spack.multimethod import when
from spack.util.executable import Executable

from ._checks import BaseBuilder, execute_install_time_tests

@@ -40,8 +39,9 @@ class SIPPackage(spack.package_base.PackageBase):
build_system("sip")

with when("build_system=sip"):
extends("python", type=("build", "link", "run"))
depends_on("py-sip", type="build")
extends("python")
depends_on("qt")
depends_on("py-sip")

@property
def import_modules(self):
@@ -113,13 +113,13 @@ class SIPBuilder(BaseBuilder):
* install

The configure phase already adds a set of default flags. To see more
options, run ``sip-build --help``.
options, run ``python configure.py --help``.
"""

phases = ("configure", "build", "install")

#: Names associated with package methods in the old build-system format
legacy_methods = ("configure_args", "build_args", "install_args")
legacy_methods = ("configure_file", "configure_args", "build_args", "install_args")

#: Names associated with package attributes in the old build-system format
legacy_attributes = (
@@ -130,17 +130,34 @@ class SIPBuilder(BaseBuilder):
"build_directory",
)

build_directory = "build"
def configure_file(self):
"""Returns the name of the configure file to use."""
return "configure.py"

def configure(self, pkg, spec, prefix):
"""Configure the package."""
configure = self.configure_file()

# https://www.riverbankcomputing.com/static/Docs/sip/command_line_tools.html
args = ["--verbose", "--target-dir", inspect.getmodule(self.pkg).python_platlib]
args.extend(self.configure_args())
args = self.configure_args()

sip_build = Executable(spec["py-sip"].prefix.bin.join("sip-build"))
sip_build(*args)
args.extend(
[
"--verbose",
"--confirm-license",
"--qmake",
spec["qt"].prefix.bin.qmake,
"--sip",
spec["py-sip"].prefix.bin.sip,
"--sip-incdir",
join_path(spec["py-sip"].prefix, spec["python"].package.include),
"--bindir",
prefix.bin,
"--destdir",
inspect.getmodule(self.pkg).python_platlib,
]
)

self.pkg.python(configure, *args)

def configure_args(self):
"""Arguments to pass to configure."""
@@ -150,8 +167,7 @@ def build(self, pkg, spec, prefix):
"""Build the package."""
args = self.build_args()

with working_dir(self.build_directory):
inspect.getmodule(self.pkg).make(*args)
inspect.getmodule(self.pkg).make(*args)

def build_args(self):
"""Arguments to pass to build."""
@@ -161,11 +177,21 @@ def install(self, pkg, spec, prefix):
"""Install the package."""
args = self.install_args()

with working_dir(self.build_directory):
inspect.getmodule(self.pkg).make("install", *args)
inspect.getmodule(self.pkg).make("install", parallel=False, *args)

def install_args(self):
"""Arguments to pass to install."""
return []

spack.builder.run_after("install")(execute_install_time_tests)

@spack.builder.run_after("install")
def extend_path_setup(self):
# See github issue #14121 and PR #15297
module = self.pkg.spec["py-sip"].variants["module"].value
if module != "sip":
module = module.split(".")[0]
with working_dir(inspect.getmodule(self.pkg).python_platlib):
with open(os.path.join(module, "__init__.py"), "a") as f:
f.write("from pkgutil import extend_path\n")
f.write("__path__ = extend_path(__path__, __name__)\n")
@@ -63,7 +63,7 @@ def create(pkg):
return _BUILDERS[id(pkg)]


class _PhaseAdapter:
class _PhaseAdapter(object):
def __init__(self, builder, phase_fn):
self.builder = builder
self.phase_fn = phase_fn
@@ -115,7 +115,7 @@ class hierarchy (look at AspellDictPackage for an example of that)
# package. The semantic should be the same as if the method in the base builder were still
# present in the base class of the package.

class _ForwardToBaseBuilder:
class _ForwardToBaseBuilder(object):
def __init__(self, wrapped_pkg_object, root_builder):
self.wrapped_package_object = wrapped_pkg_object
self.root_builder = root_builder
@@ -188,7 +188,7 @@ def __init__(self, pkg):
# Attribute containing the package wrapped in dispatcher with a `__getattr__`
# method that will forward certain calls to the default builder.
self.pkg_with_dispatcher = _ForwardToBaseBuilder(pkg, root_builder=self)
super().__init__(pkg)
super(Adapter, self).__init__(pkg)

# These two methods don't follow the (self, spec, prefix) signature of phases nor
# the (self) signature of methods, so they are added explicitly to avoid using a
@@ -388,7 +388,7 @@ def __new__(mcs, name, bases, attr_dict):
return super(_PackageAdapterMeta, mcs).__new__(mcs, name, bases, attr_dict)


class InstallationPhase:
class InstallationPhase(object):
"""Manages a single phase of the installation.

This descriptor stores at creation time the name of the method it should
@@ -530,9 +530,9 @@ def setup_build_environment(self, env):
modifications to be applied when the package is built. Package authors
can call methods on it to alter the build environment.
"""
if not hasattr(super(), "setup_build_environment"):
if not hasattr(super(Builder, self), "setup_build_environment"):
return
super().setup_build_environment(env)
super(Builder, self).setup_build_environment(env)

def setup_dependent_build_environment(self, env, dependent_spec):
"""Sets up the build environment of packages that depend on this one.
@@ -563,9 +563,9 @@ def setup_dependent_build_environment(self, env, dependent_spec):
the dependent's state. Note that *this* package's spec is
available as ``self.spec``
"""
if not hasattr(super(), "setup_dependent_build_environment"):
if not hasattr(super(Builder, self), "setup_dependent_build_environment"):
return
super().setup_dependent_build_environment(env, dependent_spec)
super(Builder, self).setup_dependent_build_environment(env, dependent_spec)

def __getitem__(self, idx):
key = self.phases[idx]

@@ -20,9 +20,9 @@


def misc_cache_location():
"""The ``MISC_CACHE`` is Spack's cache for small data.
"""The ``misc_cache`` is Spack's cache for small data.

Currently the ``MISC_CACHE`` stores indexes for virtual dependency
Currently the ``misc_cache`` stores indexes for virtual dependency
providers and for which packages provide which tags.
"""
path = spack.config.get("config:misc_cache", spack.paths.default_misc_cache_path)
@@ -35,7 +35,7 @@ def _misc_cache():


#: Spack's cache for small data
MISC_CACHE: Union[
misc_cache: Union[
spack.util.file_cache.FileCache, llnl.util.lang.Singleton
] = llnl.util.lang.Singleton(_misc_cache)

@@ -58,7 +58,7 @@ def _fetch_cache():
return spack.fetch_strategy.FsCache(path)


class MirrorCache:
class MirrorCache(object):
def __init__(self, root, skip_unstable_versions):
self.root = os.path.abspath(root)
self.skip_unstable_versions = skip_unstable_versions
@@ -91,6 +91,6 @@ def symlink(self, mirror_ref):


#: Spack's local cache for downloaded source archives
FETCH_CACHE: Union[
fetch_cache: Union[
spack.fetch_strategy.FsCache, llnl.util.lang.Singleton
] = llnl.util.lang.Singleton(_fetch_cache)
File diff suppressed because it is too large
@@ -3,6 +3,8 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from __future__ import print_function

import argparse
import os
import re
@@ -147,7 +149,7 @@ def get_command(cmd_name):
return getattr(get_module(cmd_name), pname)


class _UnquotedFlags:
class _UnquotedFlags(object):
"""Use a heuristic in `.extract()` to detect whether the user is trying to set
multiple flags like the docker ENV attribute allows (e.g. 'cflags=-Os -pipe').

@@ -273,9 +275,9 @@ def disambiguate_spec_from_hashes(spec, hashes, local=False, installed=True, fir
See ``spack.database.Database._query`` for details.
"""
if local:
matching_specs = spack.store.STORE.db.query_local(spec, hashes=hashes, installed=installed)
matching_specs = spack.store.db.query_local(spec, hashes=hashes, installed=installed)
else:
matching_specs = spack.store.STORE.db.query(spec, hashes=hashes, installed=installed)
matching_specs = spack.store.db.query(spec, hashes=hashes, installed=installed)
if not matching_specs:
tty.die("Spec '%s' matches no installed packages." % spec)

@@ -291,7 +293,7 @@ def ensure_single_spec_or_die(spec, matching_specs):
if len(matching_specs) <= 1:
return

format_string = "{name}{@version}{%compiler.name}{@compiler.version}{arch=architecture}"
format_string = "{name}{@version}{%compiler}{arch=architecture}"
args = ["%s matches multiple packages." % spec, "Matching packages:"]
args += [
colorize(" @K{%s} " % s.dag_hash(7)) + s.cformat(format_string) for s in matching_specs
@@ -383,7 +385,7 @@ def display_specs(specs, args=None, **kwargs):
deps (bool): Display dependencies with specs
long (bool): Display short hashes with specs
very_long (bool): Display full hashes with specs (supersedes ``long``)
namespaces (bool): Print namespaces along with names
namespace (bool): Print namespaces along with names
show_flags (bool): Show compiler flags with specs
variants (bool): Show variants with specs
indent (int): indent each line this much
@@ -407,7 +409,7 @@ def get_arg(name, default=None):
paths = get_arg("paths", False)
deps = get_arg("deps", False)
hashes = get_arg("long", False)
namespaces = get_arg("namespaces", False)
namespace = get_arg("namespace", False)
flags = get_arg("show_flags", False)
full_compiler = get_arg("show_full_compiler", False)
variants = get_arg("variants", False)
@@ -428,7 +430,7 @@ def get_arg(name, default=None):

format_string = get_arg("format", None)
if format_string is None:
nfmt = "{fullname}" if namespaces else "{name}"
nfmt = "{fullname}" if namespace else "{name}"
ffmt = ""
if full_compiler or flags:
ffmt += "{%compiler.name}"
@@ -473,7 +475,7 @@ def format_list(specs):
out = ""
# getting lots of prefixes requires DB lookups. Ensure
# all spec.prefix calls are in one transaction.
with spack.store.STORE.db.read_transaction():
with spack.store.db.read_transaction():
for string, spec in formatted:
if not string:
# print newline from above
@@ -545,7 +547,7 @@ class PythonNameError(spack.error.SpackError):

def __init__(self, name):
self.name = name
super().__init__("{0} is not a permissible Python name.".format(name))
super(PythonNameError, self).__init__("{0} is not a permissible Python name.".format(name))


class CommandNameError(spack.error.SpackError):
@@ -553,7 +555,9 @@ class CommandNameError(spack.error.SpackError):

def __init__(self, name):
self.name = name
super().__init__("{0} is not a permissible Spack command name.".format(name))
super(CommandNameError, self).__init__(
"{0} is not a permissible Spack command name.".format(name)
)


########################################
@@ -584,14 +588,14 @@ def require_active_env(cmd_name):

if env:
return env

tty.die(
"`spack %s` requires an environment" % cmd_name,
"activate an environment first:",
" spack env activate ENV",
"or use:",
" spack -e ENV %s ..." % cmd_name,
)
else:
tty.die(
"`spack %s` requires an environment" % cmd_name,
"activate an environment first:",
" spack env activate ENV",
"or use:",
" spack -e ENV %s ..." % cmd_name,
)


def find_environment(args):
@@ -3,6 +3,8 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from __future__ import print_function

import collections

import archspec.cpu

@@ -47,7 +47,7 @@ def configs(parser, args):


def packages(parser, args):
pkgs = args.name or spack.repo.PATH.all_package_names()
pkgs = args.name or spack.repo.path.all_package_names()
reports = spack.audit.run_group(args.subcommand, pkgs=pkgs)
_process_reports(reports)

@@ -57,7 +57,7 @@ def packages_https(parser, args):
if not args.check_all and not args.name:
tty.die("Please specify one or more packages to audit, or --all.")

pkgs = args.name or spack.repo.PATH.all_package_names()
pkgs = args.name or spack.repo.path.all_package_names()
reports = spack.audit.run_group(args.subcommand, pkgs=pkgs)
_process_reports(reports)


@@ -59,7 +59,7 @@ def setup_parser(subparser):

subparser.add_argument(
"package_or_file",
help="name of package to show contributions for, or path to a file in the spack repo",
help="name of package to show contributions for, " "or path to a file in the spack repo",
)


@@ -126,7 +126,7 @@ def blame(parser, args):
blame_file = path

if not blame_file:
pkg_cls = spack.repo.PATH.get_pkg_class(args.package_or_file)
pkg_cls = spack.repo.path.get_pkg_class(args.package_or_file)
blame_file = pkg_cls.module.__file__.rstrip("c")  # .pyc -> .py

# get git blame for the package

@@ -2,9 +2,10 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from __future__ import print_function

import os.path
import shutil
import sys
import tempfile

import llnl.util.filesystem
@@ -69,10 +70,11 @@

def _add_scope_option(parser):
scopes = spack.config.scopes()
scopes_metavar = spack.config.scopes_metavar
parser.add_argument(
"--scope",
choices=scopes,
metavar=spack.config.SCOPES_METAVAR,
metavar=scopes_metavar,
help="configuration scope to read/modify",
)

@@ -169,7 +171,7 @@ def _reset(args):
if not ok_to_continue:
raise RuntimeError("Aborting")

for scope in spack.config.CONFIG.file_scopes:
for scope in spack.config.config.file_scopes:
# The default scope should stay untouched
if scope.name == "defaults":
continue
@@ -186,7 +188,7 @@ def _reset(args):
if os.path.exists(bootstrap_yaml):
shutil.move(bootstrap_yaml, backup_file)

spack.config.CONFIG.clear_caches()
spack.config.config.clear_caches()


def _root(args):
@@ -326,7 +328,6 @@ def _status(args):
if missing:
print(llnl.util.tty.color.colorize(legend))
print()
sys.exit(1)


def _add(args):
@@ -2,14 +2,12 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import argparse
import glob
import json
import os
import shutil
import sys
import tempfile
from typing import List

import llnl.util.tty as tty
import llnl.util.tty.color as clr
@@ -20,6 +18,7 @@
import spack.cmd.common.arguments as arguments
import spack.config
import spack.environment as ev
import spack.hash_types as ht
import spack.mirror
import spack.relocate
import spack.repo
@@ -29,6 +28,7 @@
import spack.util.url as url_util
import spack.util.web as web_util
from spack.cmd import display_specs
from spack.error import SpecError
from spack.spec import Spec, save_dependency_specfiles
from spack.stage import Stage
from spack.util.string import plural
@@ -38,55 +38,101 @@
level = "long"


def setup_parser(subparser: argparse.ArgumentParser):
setattr(setup_parser, "parser", subparser)
def setup_parser(subparser):
setup_parser.parser = subparser
subparsers = subparser.add_subparsers(help="buildcache sub-commands")

push = subparsers.add_parser("push", aliases=["create"], help=push_fn.__doc__)
push.add_argument("-f", "--force", action="store_true", help="overwrite tarball if it exists")
# TODO: remove from Spack 0.21
push.add_argument(
"-r",
"--rel",
action="store_true",
help="make all rpaths relative before creating tarballs. (deprecated)",
)
push.add_argument("-f", "--force", action="store_true", help="overwrite tarball if it exists.")
push.add_argument(
"-u", "--unsigned", action="store_true", help="push unsigned buildcache tarballs"
)
push.add_argument(
"--allow-root",
"-a",
"--allow-root",
action="store_true",
help="allow install root string in binary files after RPATH substitution",
)
push_sign = push.add_mutually_exclusive_group(required=False)
push_sign.add_argument(
"--unsigned", "-u", action="store_true", help="push unsigned buildcache tarballs"
push.add_argument(
"-k", "--key", metavar="key", type=str, default=None, help="Key for signing."
)
push_sign.add_argument(
"--key", "-k", metavar="key", type=str, default=None, help="key for signing"
output = push.add_mutually_exclusive_group(required=False)
# TODO: remove from Spack 0.21
output.add_argument(
"-d",
"--directory",
metavar="directory",
dest="mirror_flag",
type=arguments.mirror_directory,
help="local directory where buildcaches will be written. (deprecated)",
)
push.add_argument("mirror", type=str, help="mirror name, path, or URL")
# TODO: remove from Spack 0.21
output.add_argument(
"-m",
"--mirror-name",
metavar="mirror-name",
dest="mirror_flag",
type=arguments.mirror_name,
help="name of the mirror where buildcaches will be written. (deprecated)",
)
# TODO: remove from Spack 0.21
output.add_argument(
"--mirror-url",
metavar="mirror-url",
dest="mirror_flag",
type=arguments.mirror_url,
help="URL of the mirror where buildcaches will be written. (deprecated)",
)
# Unfortunately we cannot add this to the mutually exclusive group above,
# because we have further positional arguments.
# TODO: require from Spack 0.21
push.add_argument("mirror", type=str, help="Mirror name, path, or URL.", nargs="?")
push.add_argument(
"--update-index",
"--rebuild-index",
action="store_true",
default=False,
help="regenerate buildcache index after building package(s)",
help="Regenerate buildcache index after building package(s)",
)
push.add_argument(
"--spec-file", default=None, help="create buildcache entry for spec from json or yaml file"
"--spec-file", default=None, help="Create buildcache entry for spec from json or yaml file"
)
push.add_argument(
"--only",
default="package,dependencies",
dest="things_to_install",
choices=["package", "dependencies"],
help="select the buildcache mode. "
"The default is to build a cache for the package along with all its dependencies. "
"Alternatively, one can decide to build a cache for only the package or only the "
"dependencies",
help=(
"Select the buildcache mode. the default is to"
" build a cache for the package along with all"
" its dependencies. Alternatively, one can"
" decide to build a cache for only the package"
" or only the dependencies"
),
)
arguments.add_common_arguments(push, ["specs"])
push.set_defaults(func=push_fn)

install = subparsers.add_parser("install", help=install_fn.__doc__)
install.add_argument(
"-f", "--force", action="store_true", help="overwrite install directory if it exists"
"-f", "--force", action="store_true", help="overwrite install directory if it exists."
)
install.add_argument(
"-m", "--multiple", action="store_true", help="allow all matching packages"
"-m", "--multiple", action="store_true", help="allow all matching packages "
)
# TODO: remove from Spack 0.21
install.add_argument(
"-a",
"--allow-root",
action="store_true",
help="allow install root string in binary files after RPATH substitution. (deprecated)",
)
install.add_argument(
"-u",
@@ -105,7 +151,7 @@ def setup_parser(subparser: argparse.ArgumentParser):
install.set_defaults(func=install_fn)

listcache = subparsers.add_parser("list", help=list_fn.__doc__)
arguments.add_common_arguments(listcache, ["long", "very_long", "namespaces"])
arguments.add_common_arguments(listcache, ["long", "very_long"])
listcache.add_argument(
"-v",
"--variants",
@@ -140,49 +186,49 @@ def setup_parser(subparser: argparse.ArgumentParser):
"-m",
"--mirror-url",
default=None,
help="override any configured mirrors with this mirror URL",
help="Override any configured mirrors with this mirror URL",
)

check.add_argument(
"-o", "--output-file", default=None, help="file where rebuild info should be written"
"-o", "--output-file", default=None, help="File where rebuild info should be written"
)

# used to construct scope arguments below
scopes = spack.config.scopes()
scopes_metavar = spack.config.scopes_metavar

check.add_argument(
"--scope",
choices=scopes,
metavar=spack.config.SCOPES_METAVAR,
metavar=scopes_metavar,
default=spack.config.default_modify_scope(),
help="configuration scope containing mirrors to check",
)
check_spec_or_specfile = check.add_mutually_exclusive_group(required=True)
check_spec_or_specfile.add_argument(
"-s", "--spec", help="check single spec instead of release specs file"

check.add_argument(
"-s", "--spec", default=None, help="Check single spec instead of release specs file"
)
check_spec_or_specfile.add_argument(

check.add_argument(
"--spec-file",
help="check single spec from json or yaml file instead of release specs file",
default=None,
help=("Check single spec from json or yaml file instead of release specs file"),
)

check.set_defaults(func=check_fn)
# Download tarball and specfile
|
||||
download = subparsers.add_parser("download", help=download_fn.__doc__)
|
||||
download_spec_or_specfile = download.add_mutually_exclusive_group(required=True)
|
||||
download_spec_or_specfile.add_argument(
|
||||
"-s", "--spec", help="download built tarball for spec from mirror"
|
||||
)
|
||||
download_spec_or_specfile.add_argument(
|
||||
"--spec-file", help="download built tarball for spec (from json or yaml file) from mirror"
|
||||
download.add_argument(
|
||||
"-s", "--spec", default=None, help="Download built tarball for spec from mirror"
|
||||
)
|
||||
download.add_argument(
|
||||
"-p",
|
||||
"--path",
|
||||
required=True,
|
||||
"--spec-file",
|
||||
default=None,
|
||||
help="path to directory where tarball should be downloaded",
|
||||
help=("Download built tarball for spec (from json or yaml file) from mirror"),
|
||||
)
|
||||
download.add_argument(
|
||||
"-p", "--path", default=None, help="Path to directory where tarball should be downloaded"
|
||||
)
|
||||
download.set_defaults(func=download_fn)
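
The download/check parsers above pair --spec and --spec-file in a required mutually exclusive group, so callers must supply exactly one source. A small sketch of that argparse pattern (stock library behavior; the argument names are copied from the parser above, the rest is illustrative):

import argparse

p = argparse.ArgumentParser(prog="download-demo")
source = p.add_mutually_exclusive_group(required=True)
source.add_argument("-s", "--spec")
source.add_argument("--spec-file")
p.add_argument("-p", "--path", required=True)

print(p.parse_args(["-s", "zlib", "-p", "/tmp"]))
# Omitting both sources, or passing both, makes argparse exit with an error.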

@@ -190,53 +236,107 @@ def setup_parser(subparser: argparse.ArgumentParser):
getbuildcachename = subparsers.add_parser(
"get-buildcache-name", help=get_buildcache_name_fn.__doc__
)
getbuildcachename_spec_or_specfile = getbuildcachename.add_mutually_exclusive_group(
required=True
getbuildcachename.add_argument(
"-s", "--spec", default=None, help="Spec string for which buildcache name is desired"
)
getbuildcachename_spec_or_specfile.add_argument(
"-s", "--spec", help="spec string for which buildcache name is desired"
)
getbuildcachename_spec_or_specfile.add_argument(
"--spec-file", help="path to spec json or yaml file for which buildcache name is desired"
getbuildcachename.add_argument(
"--spec-file",
default=None,
help=("Path to spec json or yaml file for which buildcache name is desired"),
)
getbuildcachename.set_defaults(func=get_buildcache_name_fn)

# Given the root spec, save the yaml of the dependent spec to a file
savespecfile = subparsers.add_parser("save-specfile", help=save_specfile_fn.__doc__)
savespecfile_spec_or_specfile = savespecfile.add_mutually_exclusive_group(required=True)
savespecfile_spec_or_specfile.add_argument("--root-spec", help="root spec of dependent spec")
savespecfile_spec_or_specfile.add_argument(
"--root-specfile", help="path to json or yaml file containing root spec of dependent spec"
savespecfile.add_argument("--root-spec", default=None, help="Root spec of dependent spec")
savespecfile.add_argument(
"--root-specfile",
default=None,
help="Path to json or yaml file containing root spec of dependent spec",
)
savespecfile.add_argument(
"-s",
"--specs",
required=True,
help="list of dependent specs for which saved yaml is desired",
default=None,
help="List of dependent specs for which saved yaml is desired",
)
savespecfile.add_argument(
"--specfile-dir", required=True, help="path to directory where spec yamls should be saved"
"--specfile-dir", default=None, help="Path to directory where spec yamls should be saved"
)
savespecfile.set_defaults(func=save_specfile_fn)

# Sync buildcache entries from one mirror to another
sync = subparsers.add_parser("sync", help=sync_fn.__doc__)
sync.add_argument(
"--manifest-glob", help="a quoted glob pattern identifying copy manifest files"
"--manifest-glob",
default=None,
help="A quoted glob pattern identifying copy manifest files",
)
sync.add_argument(
source = sync.add_mutually_exclusive_group(required=False)
# TODO: remove in Spack 0.21
source.add_argument(
"--src-directory",
metavar="DIRECTORY",
dest="src_mirror_flag",
type=arguments.mirror_directory,
help="Source mirror as a local file path (deprecated)",
)
# TODO: remove in Spack 0.21
source.add_argument(
"--src-mirror-name",
metavar="MIRROR_NAME",
dest="src_mirror_flag",
type=arguments.mirror_name,
help="Name of the source mirror (deprecated)",
)
# TODO: remove in Spack 0.21
source.add_argument(
"--src-mirror-url",
metavar="MIRROR_URL",
dest="src_mirror_flag",
type=arguments.mirror_url,
help="URL of the source mirror (deprecated)",
)
# TODO: only support this in 0.21
source.add_argument(
"src_mirror",
metavar="source mirror",
type=arguments.mirror_name_or_url,
help="Source mirror name, path, or URL",
nargs="?",
help="source mirror name, path, or URL",
)
sync.add_argument(
dest = sync.add_mutually_exclusive_group(required=False)
# TODO: remove in Spack 0.21
dest.add_argument(
"--dest-directory",
metavar="DIRECTORY",
dest="dest_mirror_flag",
type=arguments.mirror_directory,
help="Destination mirror as a local file path (deprecated)",
)
# TODO: remove in Spack 0.21
dest.add_argument(
"--dest-mirror-name",
metavar="MIRROR_NAME",
type=arguments.mirror_name,
dest="dest_mirror_flag",
help="Name of the destination mirror (deprecated)",
)
# TODO: remove in Spack 0.21
dest.add_argument(
"--dest-mirror-url",
metavar="MIRROR_URL",
dest="dest_mirror_flag",
type=arguments.mirror_url,
help="URL of the destination mirror (deprecated)",
)
# TODO: only support this in 0.21
dest.add_argument(
"dest_mirror",
metavar="destination mirror",
type=arguments.mirror_name_or_url,
help="Destination mirror name, path, or URL",
nargs="?",
help="destination mirror name, path, or URL",
)
sync.set_defaults(func=sync_fn)

@@ -244,48 +344,124 @@ def setup_parser(subparser: argparse.ArgumentParser):
update_index = subparsers.add_parser(
"update-index", aliases=["rebuild-index"], help=update_index_fn.__doc__
)
update_index.add_argument(
"mirror", type=arguments.mirror_name_or_url, help="destination mirror name, path, or URL"
update_index_out = update_index.add_mutually_exclusive_group(required=True)
# TODO: remove in Spack 0.21
update_index_out.add_argument(
"-d",
"--directory",
metavar="directory",
dest="mirror_flag",
type=arguments.mirror_directory,
help="local directory where buildcaches will be written (deprecated)",
)
# TODO: remove in Spack 0.21
update_index_out.add_argument(
"-m",
"--mirror-name",
metavar="mirror-name",
dest="mirror_flag",
type=arguments.mirror_name,
help="name of the mirror where buildcaches will be written (deprecated)",
)
# TODO: remove in Spack 0.21
update_index_out.add_argument(
"--mirror-url",
metavar="mirror-url",
dest="mirror_flag",
type=arguments.mirror_url,
help="URL of the mirror where buildcaches will be written (deprecated)",
)
# TODO: require from Spack 0.21
update_index_out.add_argument(
"mirror",
type=arguments.mirror_name_or_url,
help="Destination mirror name, path, or URL",
nargs="?",
)
update_index.add_argument(
"-k",
"--keys",
default=False,
action="store_true",
help="if provided, key index will be updated as well as package index",
help="If provided, key index will be updated as well as package index",
)
update_index.set_defaults(func=update_index_fn)
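
When both the deprecated flag and the new positional exist, the handler later reconciles them with a simple precedence rule (the flag wins; see update_index_fn below). A standalone sketch of that rule (SimpleNamespace stands in for parsed args):

from types import SimpleNamespace

def pick_mirror(args):
    # deprecated flag takes precedence; otherwise fall back to the positional
    return args.mirror_flag if args.mirror_flag else args.mirror

print(pick_mirror(SimpleNamespace(mirror_flag=None, mirror="my-mirror")))  # my-mirror
print(pick_mirror(SimpleNamespace(mirror_flag="flagged", mirror=None)))    # flagged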


def _matching_specs(specs: List[Spec]) -> List[Spec]:
"""Disambiguate specs and return a list of matching specs"""
return [spack.cmd.disambiguate_spec(s, ev.active_environment(), installed=any) for s in specs]
def _matching_specs(specs, spec_file):
"""Return a list of matching specs read from either a spec file (JSON or YAML),
a query over the store or a query over the active environment.
"""
env = ev.active_environment()
hashes = env.all_hashes() if env else None
if spec_file:
return spack.store.specfile_matches(spec_file, hashes=hashes)

if specs:
constraints = spack.cmd.parse_specs(specs)
return spack.store.find(constraints, hashes=hashes)

if env:
return [concrete for _, concrete in env.concretized_specs()]

tty.die(
"build cache file creation requires at least one"
" installed package spec, an active environment,"
" or else a path to a json or yaml file containing a spec"
" to install"
)


def _concrete_spec_from_args(args):
spec_str, specfile_path = args.spec, args.spec_file

if not spec_str and not specfile_path:
tty.error("must provide either spec string or path to YAML or JSON specfile")
sys.exit(1)

if spec_str:
try:
constraints = spack.cmd.parse_specs(spec_str)
spec = spack.store.find(constraints)[0]
spec.concretize()
except SpecError as spec_error:
tty.error("Unable to concretize spec {0}".format(spec_str))
tty.debug(spec_error)
sys.exit(1)

return spec

return Spec.from_specfile(specfile_path)


def push_fn(args):
"""create a binary package and push it to a mirror"""
if args.spec_file:
tty.warn(
"The flag `--spec-file` is deprecated and will be removed in Spack 0.22. "
"Use positional arguments instead."
)

if args.specs or args.spec_file:
specs = _matching_specs(spack.cmd.parse_specs(args.specs or args.spec_file))
if args.mirror_flag:
mirror = args.mirror_flag
elif not args.mirror:
raise ValueError("No mirror provided")
else:
specs = spack.cmd.require_active_env("buildcache push").all_specs()
mirror = arguments.mirror_name_or_url(args.mirror)

mirror = arguments.mirror_name_or_url(args.mirror)

if args.allow_root:
if args.mirror_flag:
tty.warn(
"The flag `--allow-root` is the default in Spack 0.21, will be removed in Spack 0.22"
"Using flags to specify mirrors is deprecated and will be removed in "
"Spack 0.21, use positional arguments instead."
)

if args.rel:
tty.warn("The --rel flag is deprecated and will be removed in Spack 0.21")

# TODO: remove this in 0.21. If we have mirror_flag, the first
# spec is in the positional mirror arg due to argparse limitations.
input_specs = args.specs
if args.mirror_flag and args.mirror:
input_specs.insert(0, args.mirror)
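
The TODO above points at a genuine argparse quirk: with an optional positional followed by a variadic one, a flag-based invocation lets the first spec slide into the mirror slot. A runnable reproduction (stock argparse; not Spack code):

import argparse

p = argparse.ArgumentParser(prog="push-demo")
p.add_argument("--mirror-name", dest="mirror_flag")
p.add_argument("mirror", nargs="?")
p.add_argument("specs", nargs="*")

args = p.parse_args(["--mirror-name", "m", "zlib", "openssl"])
# "zlib" landed in the mirror slot, which is why the code above re-inserts
# args.mirror at the front of the spec list when mirror_flag is set.
print(args.mirror, args.specs)  # zlib ['openssl']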

url = mirror.push_url

specs = bindist.specs_to_be_packaged(
specs,
_matching_specs(input_specs, args.spec_file),
root="package" in args.things_to_install,
dependencies="dependencies" in args.things_to_install,
)
@@ -310,7 +486,9 @@ def push_fn(args):
url,
bindist.PushOptions(
force=args.force,
relative=args.rel,
unsigned=args.unsigned,
allow_root=args.allow_root,
key=args.key,
regenerate_index=args.update_index,
),
@@ -346,6 +524,9 @@ def install_fn(args):
if not args.specs:
tty.die("a spec argument is required to install from a buildcache")

if args.allow_root:
tty.warn("The --allow-root flag is deprecated and will be removed in Spack 0.21")

query = bindist.BinaryCacheQuery(all_architectures=args.otherarch)
matches = spack.store.find(args.specs, multiple=args.multiple, query_fn=query)
for match in matches:
@@ -383,31 +564,32 @@ def keys_fn(args):


def preview_fn(args):
"""analyze an installed spec and reports whether executables and libraries are relocatable"""
tty.warn(
"`spack buildcache preview` is deprecated since `spack buildcache push --allow-root` is "
"now the default. This command will be removed in Spack 0.22"
)
"""analyze an installed spec and reports whether executables
and libraries are relocatable
"""
constraints = spack.cmd.parse_specs(args.specs)
specs = spack.store.find(constraints, multiple=True)

# Cycle over the specs that match
for spec in specs:
print("Relocatable nodes")
print("--------------------------------")
print(spec.tree(status_fn=spack.relocate.is_relocatable))


def check_fn(args):
"""check specs against remote binary mirror(s) to see if any need to be rebuilt

this command uses the process exit code to indicate its result, specifically, if the
exit code is non-zero, then at least one of the indicated specs needs to be rebuilt
"""Check specs (either a single spec from --spec, or else the full set
of release specs) against remote binary mirror(s) to see if any need
to be rebuilt. This command uses the process exit code to indicate
its result, specifically, if the exit code is non-zero, then at least
one of the indicated specs needs to be rebuilt.
"""
if args.spec_file:
tty.warn(
"The flag `--spec-file` is deprecated and will be removed in Spack 0.22. "
"Use --spec instead."
)

specs = spack.cmd.parse_specs(args.spec or args.spec_file)

if specs:
specs = _matching_specs(specs, specs)
if args.spec or args.spec_file:
specs = [_concrete_spec_from_args(args)]
else:
specs = spack.cmd.require_active_env("buildcache check").all_specs()
env = spack.cmd.require_active_env(cmd_name="buildcache")
env.concretize()
specs = env.all_specs()

if not specs:
tty.msg("No specs provided, exiting.")
@@ -431,61 +613,63 @@ def check_fn(args):


def download_fn(args):
"""download buildcache entry from a remote mirror to local folder
"""Download buildcache entry from a remote mirror to local folder. This
command uses the process exit code to indicate its result, specifically,
a non-zero exit code indicates that the command failed to download at
least one of the required buildcache components."""
if not args.spec and not args.spec_file:
tty.msg("No specs provided, exiting.")
return

this command uses the process exit code to indicate its result, specifically, a non-zero exit
code indicates that the command failed to download at least one of the required buildcache
components
"""
if args.spec_file:
tty.warn(
"The flag `--spec-file` is deprecated and will be removed in Spack 0.22. "
"Use --spec instead."
)
if not args.path:
tty.msg("No download path provided, exiting")
return

specs = _matching_specs(spack.cmd.parse_specs(args.spec or args.spec_file))
spec = _concrete_spec_from_args(args)
result = bindist.download_single_spec(spec, args.path)

if len(specs) != 1:
tty.die("a single spec argument is required to download from a buildcache")

if not bindist.download_single_spec(specs[0], args.path):
if not result:
sys.exit(1)


def get_buildcache_name_fn(args):
"""get name (prefix) of buildcache entries for this spec"""
tty.warn("This command is deprecated and will be removed in Spack 0.22.")
specs = _matching_specs(spack.cmd.parse_specs(args.spec or args.spec_file))
if len(specs) != 1:
tty.die("a single spec argument is required to get buildcache name")
print(bindist.tarball_name(specs[0], ""))
"""Get name (prefix) of buildcache entries for this spec"""
spec = _concrete_spec_from_args(args)
buildcache_name = bindist.tarball_name(spec, "")
print("{0}".format(buildcache_name))


def save_specfile_fn(args):
"""get full spec for dependencies and write them to files in the specified output directory

uses exit code to signal success or failure. an exit code of zero means the command was likely
successful. if any errors or exceptions are encountered, or if expected command-line arguments
are not provided, then the exit code will be non-zero
"""Get full spec for dependencies, relative to root spec, and write them
to files in the specified output directory. Uses exit code to signal
success or failure. An exit code of zero means the command was likely
successful. If any errors or exceptions are encountered, or if expected
command-line arguments are not provided, then the exit code will be
non-zero.
"""
if not args.root_spec and not args.root_specfile:
tty.msg("No root spec provided, exiting.")
sys.exit(1)

if not args.specs:
tty.msg("No dependent specs provided, exiting.")
sys.exit(1)

if not args.specfile_dir:
tty.msg("No yaml directory provided, exiting.")
sys.exit(1)

if args.root_specfile:
tty.warn(
"The flag `--root-specfile` is deprecated and will be removed in Spack 0.22. "
"Use --root-spec instead."
)

specs = spack.cmd.parse_specs(args.root_spec or args.root_specfile)

if len(specs) != 1:
tty.die("a single spec argument is required to save specfile")

root = specs[0]

if not root.concrete:
root.concretize()

with open(args.root_specfile) as fd:
root_spec_as_json = fd.read()
spec_format = "yaml" if args.root_specfile.endswith("yaml") else "json"
else:
root_spec = Spec(args.root_spec)
root_spec.concretize()
root_spec_as_json = root_spec.to_json(hash=ht.dag_hash)
spec_format = "json"
save_dependency_specfiles(
root, args.specfile_dir, dependencies=spack.cmd.parse_specs(args.specs)
root_spec_as_json, args.specfile_dir, args.specs.split(), spec_format
)


@@ -515,19 +699,32 @@ def copy_buildcache_file(src_url, dest_url, local_path=None):


def sync_fn(args):
"""sync binaries (and associated metadata) from one mirror to another
"""Syncs binaries (and associated metadata) from one mirror to another.
Requires an active environment in order to know which specs to sync.

requires an active environment in order to know which specs to sync
Args:
src (str): Source mirror URL
dest (str): Destination mirror URL
"""
if args.manifest_glob:
manifest_copy(glob.glob(args.manifest_glob))
return 0

if args.src_mirror is None or args.dest_mirror is None:
tty.die("Provide mirrors to sync from and to.")
# If no manifest_glob, require a source and dest mirror.
# TODO: Simplify in Spack 0.21
if not (args.src_mirror_flag or args.src_mirror) or not (
args.dest_mirror_flag or args.dest_mirror
):
raise ValueError("Source and destination mirror are required.")

src_mirror = args.src_mirror
dest_mirror = args.dest_mirror
if args.src_mirror_flag or args.dest_mirror_flag:
tty.warn(
"Using flags to specify mirrors is deprecated and will be removed in "
"Spack 0.21, use positional arguments instead."
)

src_mirror = args.src_mirror_flag if args.src_mirror_flag else args.src_mirror
dest_mirror = args.dest_mirror_flag if args.dest_mirror_flag else args.dest_mirror

src_mirror_url = src_mirror.fetch_url
dest_mirror_url = dest_mirror.push_url
@@ -605,8 +802,14 @@ def update_index(mirror: spack.mirror.Mirror, update_keys=False):


def update_index_fn(args):
"""update a buildcache index"""
update_index(args.mirror, update_keys=args.keys)
"""Update a buildcache index."""
if args.mirror_flag:
tty.warn(
"Using flags to specify mirrors is deprecated and will be removed in "
"Spack 0.21, use positional arguments instead."
)
mirror = args.mirror_flag if args.mirror_flag else args.mirror
update_index(mirror, update_keys=args.keys)


def buildcache(parser, args):

@@ -3,22 +3,21 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from __future__ import print_function

import argparse
import re
import sys

import llnl.util.lang
from llnl.util import tty
import llnl.util.tty as tty

import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.repo
import spack.spec
import spack.stage
import spack.util.crypto
from spack.cmd.common import arguments
from spack.package_base import PackageBase, deprecated_version, preferred_version
from spack.package_base import deprecated_version, preferred_version
from spack.util.editor import editor
from spack.util.format import get_version_lines
from spack.util.naming import valid_fully_qualified_module_name
from spack.version import Version

@@ -34,38 +33,35 @@ def setup_parser(subparser):
default=False,
help="don't clean up staging area when command completes",
)
subparser.add_argument(
sp = subparser.add_mutually_exclusive_group()
sp.add_argument(
"-b",
"--batch",
action="store_true",
default=False,
help="don't ask which versions to checksum",
)
subparser.add_argument(
sp.add_argument(
"-l",
"--latest",
action="store_true",
default=False,
help="checksum the latest available version",
help="checksum the latest available version only",
)
subparser.add_argument(
sp.add_argument(
"-p",
"--preferred",
action="store_true",
default=False,
help="checksum the known Spack preferred version",
help="checksum the preferred version only",
)
modes_parser = subparser.add_mutually_exclusive_group()
modes_parser.add_argument(
subparser.add_argument(
"-a",
"--add-to-package",
action="store_true",
default=False,
help="add new versions to package",
)
modes_parser.add_argument(
"--verify", action="store_true", default=False, help="verify known package checksums"
)
arguments.add_common_arguments(subparser, ["package"])
subparser.add_argument(
"versions", nargs=argparse.REMAINDER, help="versions to generate checksums for"
@@ -83,174 +79,89 @@ def checksum(parser, args):
tty.die("`spack checksum` accepts package names, not URLs.")

# Get the package we're going to generate checksums for
pkg_cls = spack.repo.PATH.get_pkg_class(args.package)
pkg_cls = spack.repo.path.get_pkg_class(args.package)
pkg = pkg_cls(spack.spec.Spec(args.package))

# Build a list of versions to checksum
versions = [Version(v) for v in args.versions]

# Define placeholder for remote versions.
# This'll help reduce redundant work if we need to check for the existence
# of remote versions more than once.
remote_versions = None

# Add latest version if requested
if args.latest:
remote_versions = pkg.fetch_remote_versions()
if len(remote_versions) > 0:
latest_version = sorted(remote_versions.keys(), reverse=True)[0]
versions.append(latest_version)

# Add preferred version if requested
if args.preferred:
versions.append(preferred_version(pkg))

# Store a dict of the form version -> URL
url_dict = {}
if not args.versions and args.preferred:
versions = [preferred_version(pkg)]
else:
versions = [Version(v) for v in args.versions]

for version in versions:
if deprecated_version(pkg, version):
tty.warn(f"Version {version} is deprecated")
if versions:
remote_versions = None
for version in versions:
if deprecated_version(pkg, version):
tty.warn("Version {0} is deprecated".format(version))

url = pkg.find_valid_url_for_version(version)
if url is not None:
url_dict[version] = url
continue
# if we get here, it's because no valid url was provided by the package
# do expensive fallback to try to recover
if remote_versions is None:
remote_versions = pkg.fetch_remote_versions()
if version in remote_versions:
url_dict[version] = remote_versions[version]

if len(versions) <= 0:
if remote_versions is None:
remote_versions = pkg.fetch_remote_versions()
url_dict = remote_versions
url = pkg.find_valid_url_for_version(version)
if url is not None:
url_dict[version] = url
continue
# if we get here, it's because no valid url was provided by the package
# do expensive fallback to try to recover
if remote_versions is None:
remote_versions = pkg.fetch_remote_versions()
if version in remote_versions:
url_dict[version] = remote_versions[version]
else:
url_dict = pkg.fetch_remote_versions()

if not url_dict:
tty.die(f"Could not find any remote versions for {pkg.name}")
tty.die("Could not find any remote versions for {0}".format(pkg.name))

# print an empty line to create a new output section block
print()

version_hashes = spack.stage.get_checksums_for_versions(
version_lines = spack.stage.get_checksums_for_versions(
url_dict,
pkg.name,
keep_stage=args.keep_stage,
batch=(args.batch or len(versions) > 0 or len(url_dict) == 1),
batch=(args.batch or len(args.versions) > 0 or len(url_dict) == 1),
latest=args.latest,
fetch_options=pkg.fetch_options,
)

if args.verify:
print_checksum_status(pkg, version_hashes)
sys.exit(0)

# convert dict into package.py version statements
version_lines = get_version_lines(version_hashes, url_dict)
print()
print(version_lines)
print()

if args.add_to_package:
add_versions_to_package(pkg, version_lines)
filename = spack.repo.path.filename_for_package_name(pkg.name)
# Make sure we also have a newline after the last version
versions = [v + "\n" for v in version_lines.splitlines()]
versions.append("\n")
# We need to insert the versions in reversed order
versions.reverse()
versions.append(" # FIXME: Added by `spack checksum`\n")
version_line = None

with open(filename, "r") as f:
lines = f.readlines()
for i in range(len(lines)):
# Black is drunk, so this is what it looks like for now
# See https://github.com/psf/black/issues/2156 for more information
if lines[i].startswith(" # FIXME: Added by `spack checksum`") or lines[
i
].startswith(" version("):
version_line = i
break

def print_checksum_status(pkg: PackageBase, version_hashes: dict):
"""
Verify checksums present in version_hashes against those present
in the package's instructions.
if version_line is not None:
for v in versions:
lines.insert(version_line, v)

Args:
pkg (spack.package_base.PackageBase): A package class for a given package in Spack.
version_hashes (dict): A dictionary of the form: version -> checksum.
with open(filename, "w") as f:
f.writelines(lines)

"""
results = []
num_verified = 0
failed = False
msg = "opening editor to verify"

max_len = max(len(str(v)) for v in version_hashes)
num_total = len(version_hashes)
if not sys.stdout.isatty():
msg = "please verify"

for version, sha in version_hashes.items():
if version not in pkg.versions:
msg = "No previous checksum"
status = "-"

elif sha == pkg.versions[version]["sha256"]:
msg = "Correct"
status = "="
num_verified += 1
tty.info(
"Added {0} new versions to {1}, "
"{2}.".format(len(versions) - 2, args.package, msg)
)

if sys.stdout.isatty():
editor(filename)
else:
msg = sha
status = "x"
failed = True

results.append("{0:{1}} {2} {3}".format(str(version), max_len, f"[{status}]", msg))

# Display table of checksum results.
tty.msg(f"Verified {num_verified} of {num_total}", "", *llnl.util.lang.elide_list(results), "")

# Terminate at the end of function to prevent additional output.
if failed:
print()
tty.die("Invalid checksums found.")


def add_versions_to_package(pkg: PackageBase, version_lines: str):
"""
Add checksummed versions to a package's instructions and open a user's
editor so they may double check the work of the function.

Args:
pkg (spack.package_base.PackageBase): A package class for a given package in Spack.
version_lines (str): A string of rendered version lines.

"""
# Get filename and path for package
filename = spack.repo.PATH.filename_for_package_name(pkg.name)
num_versions_added = 0

version_statement_re = re.compile(r"([\t ]+version\([^\)]*\))")
version_re = re.compile(r'[\t ]+version\(\s*"([^"]+)"[^\)]*\)')

# Split rendered version lines into tuple of (version, version_line)
# We reverse sort here to make sure the versions match the version_lines
new_versions = []
for ver_line in version_lines.split("\n"):
match = version_re.match(ver_line)
if match:
new_versions.append((Version(match.group(1)), ver_line))

with open(filename, "r+") as f:
contents = f.read()
split_contents = version_statement_re.split(contents)

for i, subsection in enumerate(split_contents):
# If there are no more versions to add we should exit
if len(new_versions) <= 0:
break

# Check if the section contains a version
contents_version = version_re.match(subsection)
if contents_version is not None:
parsed_version = Version(contents_version.group(1))

if parsed_version < new_versions[0][0]:
split_contents[i:i] = [new_versions.pop(0)[1], " # FIX ME", "\n"]
num_versions_added += 1

elif parsed_version == new_versions[0][0]:
new_versions.pop(0)

# Seek back to the start of the file so we can rewrite the file contents.
f.seek(0)
f.writelines("".join(split_contents))

tty.msg(f"Added {num_versions_added} new versions to {pkg.name}")
tty.msg(f"Open {filename} to review the additions.")

if sys.stdout.isatty():
editor(filename)
tty.warn("Could not add new versions to {0}.".format(args.package))

@@ -18,7 +18,6 @@
import spack.environment as ev
import spack.hash_types as ht
import spack.mirror
import spack.util.gpg as gpg_util
import spack.util.url as url_util
import spack.util.web as web_util

@@ -48,36 +47,40 @@ def setup_parser(subparser):
generate.add_argument(
"--output-file",
default=None,
help="pathname for the generated gitlab ci yaml file\n\n"
"path to the file where generated jobs file should be written. "
"default is .gitlab-ci.yml in the root of the repository",
help="""pathname for the generated gitlab ci yaml file
Path to the file where generated jobs file should
be written. Default is .gitlab-ci.yml in the root of
the repository.""",
)
generate.add_argument(
"--copy-to",
default=None,
help="path to additional directory for job files\n\n"
"this option provides an absolute path to a directory where the generated "
"jobs yaml file should be copied. default is not to copy",
help="""path to additional directory for job files
This option provides an absolute path to a directory
where the generated jobs yaml file should be copied.
Default is not to copy.""",
)
generate.add_argument(
"--optimize",
action="store_true",
default=False,
help="(experimental) optimize the gitlab yaml file for size\n\n"
"run the generated document through a series of optimization passes "
"designed to reduce the size of the generated file",
help="""(Experimental) optimize the gitlab yaml file for size
Run the generated document through a series of
optimization passes designed to reduce the size
of the generated file.""",
)
generate.add_argument(
"--dependencies",
action="store_true",
default=False,
help="(experimental) disable DAG scheduling (use 'plain' dependencies)",
help="(Experimental) disable DAG scheduling; use " ' "plain" dependencies.',
)
generate.add_argument(
"--buildcache-destination",
default=None,
help="override the mirror configured in the environment\n\n"
"allows for pushing binaries from the generated pipeline to a different location",
help="Override the mirror configured in the environment (spack.yaml) "
+ "in order to push binaries from the generated pipeline to a "
+ "different location.",
)
prune_group = generate.add_mutually_exclusive_group()
prune_group.add_argument(
@@ -85,37 +88,45 @@ def setup_parser(subparser):
action="store_true",
dest="prune_dag",
default=True,
help="skip up-to-date specs\n\n"
"do not generate jobs for specs that are up-to-date on the mirror",
help="""skip up-to-date specs
Do not generate jobs for specs that are up-to-date
on the mirror.""",
)
prune_group.add_argument(
"--no-prune-dag",
action="store_false",
dest="prune_dag",
default=True,
help="process up-to-date specs\n\n"
"generate jobs for specs even when they are up-to-date on the mirror",
help="""process up-to-date specs
Generate jobs for specs even when they are up-to-date
on the mirror.""",
)
generate.add_argument(
"--check-index-only",
action="store_true",
dest="index_only",
default=False,
help="only check spec state from buildcache indices\n\n"
"Spack always checks specs against configured binary mirrors, regardless of the DAG "
"pruning option. if enabled, Spack will assume all remote buildcache indices are "
"up-to-date when assessing whether the spec on the mirror, if present, is up-to-date. "
"this has the benefit of reducing pipeline generation time but at the potential cost of "
"needlessly rebuilding specs when the indices are outdated. if not enabled, Spack will "
"fetch remote spec files directly to assess whether the spec on the mirror is up-to-date",
help="""only check spec state from buildcache indices
Spack always checks specs against configured binary
mirrors, regardless of the DAG pruning option.
If enabled, Spack will assume all remote buildcache
indices are up-to-date when assessing whether the spec
on the mirror, if present, is up-to-date. This has the
benefit of reducing pipeline generation time but at the
potential cost of needlessly rebuilding specs when the
indices are outdated.
If not enabled, Spack will fetch remote spec files
directly to assess whether the spec on the mirror is
up-to-date.""",
)
generate.add_argument(
"--artifacts-root",
default=None,
help="path to the root of the artifacts directory\n\n"
"if provided, concrete environment files (spack.yaml, spack.lock) will be generated under "
"this directory. their location will be passed to generated child jobs through the "
"SPACK_CONCRETE_ENVIRONMENT_PATH variable",
help="""path to the root of the artifacts directory
If provided, concrete environment files (spack.yaml,
spack.lock) will be generated under this directory.
Their location will be passed to generated child jobs
through the SPACK_CONCRETE_ENVIRONMENT_PATH variable.""",
)
generate.set_defaults(func=ci_generate)

@@ -139,13 +150,13 @@ def setup_parser(subparser):
"--tests",
action="store_true",
default=False,
help="run stand-alone tests after the build",
help="""run stand-alone tests after the build""",
)
rebuild.add_argument(
"--fail-fast",
action="store_true",
default=False,
help="stop stand-alone tests after the first failure",
help="""stop stand-alone tests after the first failure""",
)
rebuild.set_defaults(func=ci_rebuild)

@@ -155,39 +166,23 @@ def setup_parser(subparser):
description=deindent(ci_reproduce.__doc__),
help=spack.cmd.first_line(ci_reproduce.__doc__),
)
reproduce.add_argument("job_url", help="URL of job artifacts bundle")
reproduce.add_argument(
"--runtime",
help="Container runtime to use.",
default="docker",
choices=["docker", "podman"],
)
reproduce.add_argument("job_url", help="Url of job artifacts bundle")
reproduce.add_argument(
"--working-dir",
help="where to unpack artifacts",
help="Where to unpack artifacts",
default=os.path.join(os.getcwd(), "ci_reproduction"),
)
reproduce.add_argument(
"-s", "--autostart", help="Run docker reproducer automatically", action="store_true"
)
gpg_group = reproduce.add_mutually_exclusive_group(required=False)
gpg_group.add_argument(
"--gpg-file", help="Path to public GPG key for validating binary cache installs"
)
gpg_group.add_argument(
"--gpg-url", help="URL to public GPG key for validating binary cache installs"
)

reproduce.set_defaults(func=ci_reproduce)


def ci_generate(args):
"""generate jobs file from a CI-aware spack file
"""Generate jobs file from a CI-aware spack file.

if you want to report the results on CDash, you will need to set the SPACK_CDASH_AUTH_TOKEN
before invoking this command. the value must be the CDash authorization token needed to create
a build group and register all generated jobs under it
"""
If you want to report the results on CDash, you will need to set
the SPACK_CDASH_AUTH_TOKEN before invoking this command. The
value must be the CDash authorization token needed to create a
build group and register all generated jobs under it."""
env = spack.cmd.require_active_env(cmd_name="ci generate")

output_file = args.output_file
@@ -228,13 +223,12 @@ def ci_generate(args):


def ci_reindex(args):
"""rebuild the buildcache index for the remote mirror
"""Rebuild the buildcache index for the remote mirror.

use the active, gitlab-enabled environment to rebuild the buildcache index for the associated
mirror
"""
Use the active, gitlab-enabled environment to rebuild the buildcache
index for the associated mirror."""
env = spack.cmd.require_active_env(cmd_name="ci rebuild-index")
yaml_root = env.manifest[ev.TOP_LEVEL_KEY]
yaml_root = ev.config_dict(env.manifest)

if "mirrors" not in yaml_root or len(yaml_root["mirrors"].values()) < 1:
tty.die("spack ci rebuild-index requires an env containing a mirror")
@@ -248,11 +242,10 @@ def ci_reindex(args):


def ci_rebuild(args):
"""rebuild a spec if it is not on the remote mirror
"""Rebuild a spec if it is not on the remote mirror.

check a single spec against the remote mirror, and rebuild it from source if the mirror does
not contain the hash
"""
Check a single spec against the remote mirror, and rebuild it from
source if the mirror does not contain the hash."""
env = spack.cmd.require_active_env(cmd_name="ci rebuild")

# Make sure the environment is "gitlab-enabled", or else there's nothing
@@ -281,23 +274,13 @@ def ci_rebuild(args):
signing_key = os.environ.get("SPACK_SIGNING_KEY")
job_spec_pkg_name = os.environ.get("SPACK_JOB_SPEC_PKG_NAME")
job_spec_dag_hash = os.environ.get("SPACK_JOB_SPEC_DAG_HASH")
compiler_action = os.environ.get("SPACK_COMPILER_ACTION")
spack_pipeline_type = os.environ.get("SPACK_PIPELINE_TYPE")
remote_mirror_override = os.environ.get("SPACK_REMOTE_MIRROR_OVERRIDE")
remote_mirror_url = os.environ.get("SPACK_REMOTE_MIRROR_URL")
spack_ci_stack_name = os.environ.get("SPACK_CI_STACK_NAME")
shared_pr_mirror_url = os.environ.get("SPACK_CI_SHARED_PR_MIRROR_URL")
rebuild_everything = os.environ.get("SPACK_REBUILD_EVERYTHING")
require_signing = os.environ.get("SPACK_REQUIRE_SIGNING")

# If signing key was provided via "SPACK_SIGNING_KEY", then try to import it.
if signing_key:
spack_ci.import_signing_key(signing_key)

# Fail early if signing is required but we don't have a signing key
sign_binaries = require_signing is not None and require_signing.lower() == "true"
if sign_binaries and not spack_ci.can_sign_binaries():
gpg_util.list(False, True)
tty.die("SPACK_REQUIRE_SIGNING=True => spack must have exactly one signing key")

# Construct absolute paths relative to current $CI_PROJECT_DIR
ci_project_dir = os.environ.get("CI_PROJECT_DIR")
@@ -312,6 +295,7 @@ def ci_rebuild(args):
tty.debug("pipeline_artifacts_dir = {0}".format(pipeline_artifacts_dir))
tty.debug("remote_mirror_url = {0}".format(remote_mirror_url))
tty.debug("job_spec_pkg_name = {0}".format(job_spec_pkg_name))
tty.debug("compiler_action = {0}".format(compiler_action))

# Query the environment manifest to find out whether we're reporting to a
# CDash instance, and if so, gather some information from the manifest to
@@ -422,6 +406,19 @@ def ci_rebuild(args):
dst_file = os.path.join(repro_dir, file_name)
shutil.copyfile(src_file, dst_file)

# If signing key was provided via "SPACK_SIGNING_KEY", then try to
# import it.
if signing_key:
spack_ci.import_signing_key(signing_key)

# Depending on the specifics of this job, we might need to turn on the
# "config:install_missing compilers" option (to build this job spec
# with a bootstrapped compiler), or possibly run "spack compiler find"
# (to build a bootstrap compiler or one of its deps in a
# compiler-agnostic way), or maybe do nothing at all (to build a spec
# using a compiler already installed on the target system).
spack_ci.configure_compilers(compiler_action)

# Write this job's spec json into the reproduction directory, and it will
# also be used in the generated "spack install" command to install the spec
tty.debug("job concrete spec path: {0}".format(job_spec_json_path))
@@ -619,7 +616,7 @@ def ci_rebuild(args):
)
reports_dir = fs.join_path(os.getcwd(), "cdash_report")
if args.tests and broken_tests:
tty.warn("Unable to run stand-alone tests since listed in ci's 'broken-tests-packages'")
tty.warn("Unable to run stand-alone tests since listed in " "ci's 'broken-tests-packages'")
if cdash_handler:
msg = "Package is listed in ci's broken-tests-packages"
cdash_handler.report_skipped(job_spec, reports_dir, reason=msg)
@@ -662,7 +659,7 @@ def ci_rebuild(args):
tty.warn("No recognized test results reporting option")

else:
tty.warn("Unable to run stand-alone tests due to unsuccessful installation")
tty.warn("Unable to run stand-alone tests due to unsuccessful " "installation")
if cdash_handler:
msg = "Failed to install the package"
cdash_handler.report_skipped(job_spec, reports_dir, reason=msg)
@@ -678,7 +675,7 @@ def ci_rebuild(args):
input_spec=job_spec,
buildcache_mirror_url=buildcache_mirror_url,
pipeline_mirror_url=pipeline_mirror_url,
sign_binaries=spack_ci.can_sign_binaries(),
pr_pipeline=spack_is_pr_pipeline,
):
msg = tty.msg if result.success else tty.warn
msg(
@@ -722,7 +719,7 @@ def ci_rebuild(args):

\033[34mTo reproduce this build locally, run:

spack ci reproduce-build {0} [--working-dir <dir>] [--autostart]
spack ci reproduce-build {0} [--working-dir <dir>]

If this project does not have public pipelines, you will need to first:

@@ -741,25 +738,14 @@ def ci_rebuild(args):


def ci_reproduce(args):
"""generate instructions for reproducing the spec rebuild job
"""Generate instructions for reproducing the spec rebuild job.

artifacts of the provided gitlab pipeline rebuild job's URL will be used to derive
instructions for reproducing the build locally
"""
Artifacts of the provided gitlab pipeline rebuild job's URL will be
used to derive instructions for reproducing the build locally."""
job_url = args.job_url
work_dir = args.working_dir
autostart = args.autostart
runtime = args.runtime

# Allow passing GPG key for reproducing protected CI jobs
if args.gpg_file:
gpg_key_url = url_util.path_to_file_url(args.gpg_file)
elif args.gpg_url:
gpg_key_url = args.gpg_url
else:
gpg_key_url = None

return spack_ci.reproduce_ci_job(job_url, work_dir, autostart, gpg_key_url, runtime)
return spack_ci.reproduce_ci_job(job_url, work_dir)


def ci(parser, args):

@@ -17,7 +17,6 @@
import spack.config
import spack.repo
import spack.stage
import spack.store
import spack.util.path
from spack.paths import lib_path, var_path

@@ -115,18 +114,22 @@ def clean(parser, args):
if args.stage:
tty.msg("Removing all temporary build stages")
spack.stage.purge()

# Temp directory where buildcaches are extracted
extract_tmp = os.path.join(spack.store.layout.root, ".tmp")
if os.path.exists(extract_tmp):
tty.debug("Removing {0}".format(extract_tmp))
shutil.rmtree(extract_tmp)
if args.downloads:
tty.msg("Removing cached downloads")
spack.caches.FETCH_CACHE.destroy()
spack.caches.fetch_cache.destroy()

if args.failures:
tty.msg("Removing install failure marks")
spack.store.STORE.failure_tracker.clear_all()
spack.installer.clear_failures()

if args.misc_cache:
tty.msg("Removing cached information on repositories")
spack.caches.MISC_CACHE.destroy()
spack.caches.misc_cache.destroy()

if args.python_cache:
tty.msg("Removing python cache files")

@@ -48,7 +48,7 @@ def get_origin_info(remote):
)
except ProcessError:
origin_url = _SPACK_UPSTREAM
tty.warn("No git repository found; using default upstream URL: %s" % origin_url)
tty.warn("No git repository found; " "using default upstream URL: %s" % origin_url)
return (origin_url.strip(), branch.strip())


@@ -69,7 +69,7 @@ def clone(parser, args):
files_in_the_way = os.listdir(prefix)
if files_in_the_way:
tty.die(
"There are already files there! Delete these files before bootstrapping spack.",
"There are already files there! " "Delete these files before bootstrapping spack.",
*files_in_the_way,
)

@@ -3,17 +3,17 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from __future__ import print_function

import argparse
import copy
import os
import re
import sys
from argparse import ArgumentParser, Namespace
from typing import IO, Any, Callable, Dict, Iterable, List, Optional, Sequence, Set, Tuple, Union

import llnl.util.filesystem as fs
import llnl.util.tty as tty
from llnl.util.argparsewriter import ArgparseRstWriter, ArgparseWriter, Command
from llnl.util.argparsewriter import ArgparseCompletionWriter, ArgparseRstWriter, ArgparseWriter
from llnl.util.tty.colify import colify

import spack.cmd
@@ -27,46 +27,28 @@


#: list of command formatters
formatters: Dict[str, Callable[[Namespace, IO], None]] = {}
formatters = {}


#: standard arguments for updating completion scripts
#: we iterate through these when called with --update-completion
update_completion_args: Dict[str, Dict[str, Any]] = {
update_completion_args = {
"bash": {
"aliases": True,
"format": "bash",
"header": os.path.join(spack.paths.share_path, "bash", "spack-completion.bash"),
"header": os.path.join(spack.paths.share_path, "bash", "spack-completion.in"),
"update": os.path.join(spack.paths.share_path, "spack-completion.bash"),
},
"fish": {
"aliases": True,
"format": "fish",
"header": os.path.join(spack.paths.share_path, "fish", "spack-completion.fish"),
"update": os.path.join(spack.paths.share_path, "spack-completion.fish"),
},
}
}


def formatter(func: Callable[[Namespace, IO], None]) -> Callable[[Namespace, IO], None]:
"""Decorator used to register formatters.

Args:
func: Formatting function.

Returns:
The same function.
"""
def formatter(func):
"""Decorator used to register formatters"""
formatters[func.__name__] = func
return func
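
formatter is a plain name-keyed registry: each decorated function is stored under its __name__ and looked up later when dispatching on the requested format. The pattern in isolation (the demo formatter body is arbitrary):

import sys

formatters = {}

def formatter(func):
    """Register a formatter under its function name."""
    formatters[func.__name__] = func
    return func

@formatter
def subcommands(args, out):
    out.write("spack\n  spack buildcache\n")

formatters["subcommands"](None, sys.stdout)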
|
||||
|
||||
|
||||
def setup_parser(subparser: ArgumentParser) -> None:
|
||||
"""Set up the argument parser.
|
||||
|
||||
Args:
|
||||
subparser: Preliminary argument parser.
|
||||
"""
|
||||
def setup_parser(subparser):
|
||||
subparser.add_argument(
|
||||
"--update-completion",
|
||||
action="store_true",
|
||||
@@ -109,34 +91,18 @@ class SpackArgparseRstWriter(ArgparseRstWriter):
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
prog: str,
|
||||
out: IO = sys.stdout,
|
||||
aliases: bool = False,
|
||||
documented_commands: Set[str] = set(),
|
||||
rst_levels: Sequence[str] = ["-", "-", "^", "~", ":", "`"],
|
||||
prog,
|
||||
out=None,
|
||||
aliases=False,
|
||||
documented_commands=[],
|
||||
rst_levels=["-", "-", "^", "~", ":", "`"],
|
||||
):
|
||||
"""Initialize a new SpackArgparseRstWriter instance.
|
||||
|
||||
Args:
|
||||
prog: Program name.
|
||||
out: File object to write to.
|
||||
aliases: Whether or not to include subparsers for aliases.
|
||||
documented_commands: Set of commands with additional documentation.
|
||||
rst_levels: List of characters for rst section headings.
|
||||
"""
|
||||
super().__init__(prog, out, aliases, rst_levels)
|
||||
out = sys.stdout if out is None else out
|
||||
super(SpackArgparseRstWriter, self).__init__(prog, out, aliases, rst_levels)
|
||||
self.documented = documented_commands
|
||||
|
||||
def usage(self, usage: str) -> str:
|
||||
"""Example usage of a command.
|
||||
|
||||
Args:
|
||||
usage: Command usage.
|
||||
|
||||
Returns:
|
||||
Usage of a command.
|
||||
"""
|
||||
string = super().usage(usage)
|
||||
def usage(self, *args):
|
||||
string = super(SpackArgparseRstWriter, self).usage(*args)
|
||||
|
||||
cmd = self.parser.prog.replace(" ", "-")
|
||||
if cmd in self.documented:
|
||||
@@ -146,21 +112,11 @@ def usage(self, usage: str) -> str:
|
||||
|
||||
|
||||
class SubcommandWriter(ArgparseWriter):
|
||||
"""Write argparse output as a list of subcommands."""
|
||||
|
||||
def format(self, cmd: Command) -> str:
|
||||
"""Return the string representation of a single node in the parser tree.
|
||||
|
||||
Args:
|
||||
cmd: Parsed information about a command or subcommand.
|
||||
|
||||
Returns:
|
||||
String representation of this subcommand.
|
||||
"""
|
||||
def format(self, cmd):
|
||||
return " " * self.level + cmd.prog + "\n"
|
||||
|
||||
|
||||
_positional_to_subroutine: Dict[str, str] = {
|
||||
_positional_to_subroutine = {
|
||||
"package": "_all_packages",
|
||||
"spec": "_all_packages",
|
||||
"filter": "_all_packages",
|
||||
@@ -179,76 +135,10 @@ def format(self, cmd: Command) -> str:
|
||||
}
|


class BashCompletionWriter(ArgparseWriter):
class BashCompletionWriter(ArgparseCompletionWriter):
    """Write argparse output as bash programmable tab completion."""

    def format(self, cmd: Command) -> str:
        """Return the string representation of a single node in the parser tree.

        Args:
            cmd: Parsed information about a command or subcommand.

        Returns:
            String representation of this subcommand.
        """

        assert cmd.optionals  # we should always at least have -h, --help
        assert not (cmd.positionals and cmd.subcommands)  # one or the other

        # We only care about the arguments/flags, not the help messages
        positionals: Tuple[str, ...] = ()
        if cmd.positionals:
            positionals, _, _, _ = zip(*cmd.positionals)
        optionals, _, _, _, _ = zip(*cmd.optionals)
        subcommands: Tuple[str, ...] = ()
        if cmd.subcommands:
            _, subcommands, _ = zip(*cmd.subcommands)

        # Flatten lists of lists
        optionals = [x for xx in optionals for x in xx]

        return (
            self.start_function(cmd.prog)
            + self.body(positionals, optionals, subcommands)
            + self.end_function(cmd.prog)
        )

    def start_function(self, prog: str) -> str:
        """Return the syntax needed to begin a function definition.

        Args:
            prog: Program name.

        Returns:
            Function definition beginning.
        """
        name = prog.replace("-", "_").replace(" ", "_")
        return "\n_{0}() {{".format(name)

    def end_function(self, prog: str) -> str:
        """Return the syntax needed to end a function definition.

        Args:
            prog: Program name

        Returns:
            Function definition ending.
        """
        return "}\n"

    def body(
        self, positionals: Sequence[str], optionals: Sequence[str], subcommands: Sequence[str]
    ) -> str:
        """Return the body of the function.

        Args:
            positionals: List of positional arguments.
            optionals: List of optional arguments.
            subcommands: List of subcommand parsers.

        Returns:
            Function body.
        """
    def body(self, positionals, optionals, subcommands):
        if positionals:
            return """
    if $list_options
@@ -278,15 +168,7 @@ def body(

            self.optionals(optionals)
        )

    def positionals(self, positionals: Sequence[str]) -> str:
        """Return the syntax for reporting positional arguments.

        Args:
            positionals: List of positional arguments.

        Returns:
            Syntax for positional arguments.
        """
    def positionals(self, positionals):
        # If match found, return function name
        for positional in positionals:
            for key, value in _positional_to_subroutine.items():
@@ -296,439 +178,22 @@ def positionals(self, positionals: Sequence[str]) -> str:
        # If no matches found, return empty list
        return 'SPACK_COMPREPLY=""'
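The body of the match is elided in the hunk above. As a hedged sketch only — assuming, per the surrounding comments, that the first `_positional_to_subroutine` key found in a positional's name selects the subroutine — the elided part amounts to something like:

```python
# Hedged sketch of the elided lookup; the exact matching test used upstream
# is not shown in this hunk, so `key in positional` is an assumption.
for positional in positionals:
    for key, value in _positional_to_subroutine.items():
        if key in positional:
            return 'SPACK_COMPREPLY="$({0})"'.format(value)
```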

    def optionals(self, optionals: Sequence[str]) -> str:
        """Return the syntax for reporting optional flags.

        Args:
            optionals: List of optional arguments.

        Returns:
            Syntax for optional flags.
        """
    def optionals(self, optionals):
        return 'SPACK_COMPREPLY="{0}"'.format(" ".join(optionals))

    def subcommands(self, subcommands: Sequence[str]) -> str:
        """Return the syntax for reporting subcommands.

        Args:
            subcommands: List of subcommand parsers.

        Returns:
            Syntax for subcommand parsers
        """
    def subcommands(self, subcommands):
        return 'SPACK_COMPREPLY="{0}"'.format(" ".join(subcommands))
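Taken together, `start_function`, the body, and `end_function` wrap each command in a bash function that fills `SPACK_COMPREPLY`. A minimal sketch of how the pieces compose, run inside this module (the construction mirrors the `bash()` formatter further down; the command name is hypothetical):

```python
import sys

# Sketch only: exercise the three building blocks for a toy "spack mirror".
writer = BashCompletionWriter("spack", sys.stdout, False)
print(writer.start_function("spack mirror"))     # -> _spack_mirror() {
print(writer.optionals(["-h", "--help", "-d"]))  # -> SPACK_COMPREPLY="-h --help -d"
print(writer.end_function("spack mirror"))       # -> }
```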


# Map argument destination names to their complete commands
# Earlier items in the list have higher precedence
_dest_to_fish_complete = {
    ("activate", "view"): "-f -a '(__fish_complete_directories)'",
    ("bootstrap root", "path"): "-f -a '(__fish_complete_directories)'",
    ("mirror add", "mirror"): "-f",
    ("repo add", "path"): "-f -a '(__fish_complete_directories)'",
    ("test find", "filter"): "-f -a '(__fish_spack_tests)'",
    ("bootstrap", "name"): "-f -a '(__fish_spack_bootstrap_names)'",
    ("buildcache create", "key"): "-f -a '(__fish_spack_gpg_keys)'",
    ("build-env", r"spec \[--\].*"): "-f -a '(__fish_spack_build_env_spec)'",
    ("checksum", "package"): "-f -a '(__fish_spack_packages)'",
    (
        "checksum",
        "versions",
    ): "-f -a '(__fish_spack_package_versions $__fish_spack_argparse_argv[1])'",
    ("config", "path"): "-f -a '(__fish_spack_colon_path)'",
    ("config", "section"): "-f -a '(__fish_spack_config_sections)'",
    ("develop", "specs?"): "-f -k -a '(__fish_spack_specs_or_id)'",
    ("diff", "specs?"): "-f -a '(__fish_spack_installed_specs)'",
    ("gpg sign", "output"): "-f -a '(__fish_complete_directories)'",
    ("gpg", "keys?"): "-f -a '(__fish_spack_gpg_keys)'",
    ("graph", "specs?"): "-f -k -a '(__fish_spack_specs_or_id)'",
    ("help", "help_command"): "-f -a '(__fish_spack_commands)'",
    ("list", "filter"): "-f -a '(__fish_spack_packages)'",
    ("mirror", "mirror"): "-f -a '(__fish_spack_mirrors)'",
    ("pkg", "package"): "-f -a '(__fish_spack_pkg_packages)'",
    ("remove", "specs?"): "-f -a '(__fish_spack_installed_specs)'",
    ("repo", "namespace_or_path"): "$__fish_spack_force_files -a '(__fish_spack_repos)'",
    ("restage", "specs?"): "-f -k -a '(__fish_spack_specs_or_id)'",
    ("rm", "specs?"): "-f -a '(__fish_spack_installed_specs)'",
    ("solve", "specs?"): "-f -k -a '(__fish_spack_specs_or_id)'",
    ("spec", "specs?"): "-f -k -a '(__fish_spack_specs_or_id)'",
    ("stage", "specs?"): "-f -k -a '(__fish_spack_specs_or_id)'",
    ("test-env", r"spec \[--\].*"): "-f -a '(__fish_spack_build_env_spec)'",
    ("test", r"\[?name.*"): "-f -a '(__fish_spack_tests)'",
    ("undevelop", "specs?"): "-f -k -a '(__fish_spack_specs_or_id)'",
    ("verify", "specs_or_files"): "$__fish_spack_force_files -a '(__fish_spack_installed_specs)'",
    ("view", "path"): "-f -a '(__fish_complete_directories)'",
    ("", "comment"): "-f",
    ("", "compiler_spec"): "-f -a '(__fish_spack_installed_compilers)'",
    ("", "config_scopes"): "-f -a '(__fish_complete_directories)'",
    ("", "extendable"): "-f -a '(__fish_spack_extensions)'",
    ("", "installed_specs?"): "-f -a '(__fish_spack_installed_specs)'",
    ("", "job_url"): "-f",
    ("", "location_env"): "-f -a '(__fish_complete_directories)'",
    ("", "pytest_args"): "-f -a '(__fish_spack_unit_tests)'",
    ("", "package_or_file"): "$__fish_spack_force_files -a '(__fish_spack_packages)'",
    ("", "package_or_user"): "-f -a '(__fish_spack_packages)'",
    ("", "package"): "-f -a '(__fish_spack_packages)'",
    ("", "PKG"): "-f -a '(__fish_spack_packages)'",
    ("", "prefix"): "-f -a '(__fish_complete_directories)'",
    ("", r"rev\d?"): "-f -a '(__fish_spack_git_rev)'",
    ("", "specs?"): "-f -k -a '(__fish_spack_specs)'",
    ("", "tags?"): "-f -a '(__fish_spack_tags)'",
    ("", "virtual_package"): "-f -a '(__fish_spack_providers)'",
    ("", "working_dir"): "-f -a '(__fish_complete_directories)'",
    ("", r"(\w*_)?env"): "-f -a '(__fish_spack_environments)'",
    ("", r"(\w*_)?dir(ectory)?"): "-f -a '(__fish_spack_environments)'",
    ("", r"(\w*_)?mirror_name"): "-f -a '(__fish_spack_mirrors)'",
}


def _fish_dest_get_complete(prog: str, dest: str) -> Optional[str]:
    """Map from subcommand to autocompletion argument.

    Args:
        prog: Program name.
        dest: Destination.

    Returns:
        Autocompletion argument.
    """
    s = prog.split(None, 1)
    subcmd = s[1] if len(s) == 2 else ""

    for (prog_key, pos_key), value in _dest_to_fish_complete.items():
        if subcmd.startswith(prog_key) and re.match("^" + pos_key + "$", dest):
            return value
    return None
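Given the table above, the lookup resolves like this (the command names are only illustrations):

```python
_fish_dest_get_complete("spack checksum", "package")
# -> "-f -a '(__fish_spack_packages)'"   (matches the ("checksum", "package") key)

_fish_dest_get_complete("spack edit", "package")
# -> "-f -a '(__fish_spack_packages)'"   (falls through to the ("", "package") default)

_fish_dest_get_complete("spack install", "unknown_dest")
# -> None                                (no key matches; the caller emits nothing)
```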


class FishCompletionWriter(ArgparseWriter):
    """Write argparse output as bash programmable tab completion."""

    def format(self, cmd: Command) -> str:
        """Return the string representation of a single node in the parser tree.

        Args:
            cmd: Parsed information about a command or subcommand.

        Returns:
            String representation of a node.
        """
        assert cmd.optionals  # we should always at least have -h, --help
        assert not (cmd.positionals and cmd.subcommands)  # one or the other

        # We also need help messages and how arguments are used
        # So we pass everything to completion writer
        positionals = cmd.positionals
        optionals = cmd.optionals
        subcommands = cmd.subcommands

        return (
            self.prog_comment(cmd.prog)
            + self.optspecs(cmd.prog, optionals)
            + self.complete(cmd.prog, positionals, optionals, subcommands)
        )

    def _quote(self, string: str) -> str:
        """Quote string and escape special characters if necessary.

        Args:
            string: Input string.

        Returns:
            Quoted string.
        """
        # Goal here is to match fish_indent behavior

        # Strings without spaces (or other special characters) do not need to be escaped
        if not any([sub in string for sub in [" ", "'", '"']]):
            return string

        string = string.replace("'", r"\'")
        return f"'{string}'"
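A few illustrative cases of the quoting rule above, mirroring fish_indent (passing `None` for the unused `self` to keep the sketch self-contained):

```python
quote = FishCompletionWriter._quote

assert quote(None, "install") == "install"        # bare word: unchanged
assert quote(None, "show help") == "'show help'"  # whitespace: single-quoted
assert quote(None, "don't") == r"'don\'t'"        # embedded quote: escaped
```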

    def optspecs(
        self,
        prog: str,
        optionals: List[Tuple[Sequence[str], List[str], str, Union[int, str, None], str]],
    ) -> str:
        """Read the optionals and return the command to set optspec.

        Args:
            prog: Program name.
            optionals: List of optional arguments.

        Returns:
            Command to set optspec variable.
        """
        # Variables of optspecs
        optspec_var = "__fish_spack_optspecs_" + prog.replace(" ", "_").replace("-", "_")

        if optionals is None:
            return "set -g %s\n" % optspec_var

        # Build optspec by iterating over options
        args = []

        for flags, dest, _, nargs, _ in optionals:
            if len(flags) == 0:
                continue

            required = ""

            # Because nargs '?' is treated differently in fish, we treat it as required.
            # Because multi-argument options are not supported, we treat it like one argument.
            required = "="
            if nargs == 0:
                required = ""

            # Pair short options with long options

            # We need to do this because fish doesn't support multiple short
            # or long options.
            # However, since we are paring options only, this is fine
            short = [f[1:] for f in flags if f.startswith("-") and len(f) == 2]
            long = [f[2:] for f in flags if f.startswith("--")]

            while len(short) > 0 and len(long) > 0:
                arg = "%s/%s%s" % (short.pop(), long.pop(), required)
            while len(short) > 0:
                arg = "%s/%s" % (short.pop(), required)
            while len(long) > 0:
                arg = "%s%s" % (long.pop(), required)

            args.append(arg)

        # Even if there is no option, we still set variable.
        # In fish such variable is an empty array, we use it to
        # indicate that such subcommand exists.
        args = " ".join(args)

        return "set -g %s %s\n" % (optspec_var, args)
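For a hypothetical `spack graph` with just `-h/--help` (a zero-argument flag) and `--deptype` (one argument), the method produces a single fish variable. A sketch, with 5-tuples shaped like the ones unpacked in the loop above:

```python
import sys

writer = FishCompletionWriter("spack", sys.stdout, False)
print(writer.optspecs("spack graph", [
    (["-h", "--help"], "help", None, 0, "show help"),        # nargs 0 -> no "="
    (["--deptype"], "deptype", None, 1, "deptypes to traverse"),  # takes a value -> "="
]))
# -> set -g __fish_spack_optspecs_spack_graph h/help deptype=
```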

    @staticmethod
    def complete_head(
        prog: str, index: Optional[int] = None, nargs: Optional[Union[int, str]] = None
    ) -> str:
        """Return the head of the completion command.

        Args:
            prog: Program name.
            index: Index of positional argument.
            nargs: Number of arguments.

        Returns:
            Head of the completion command.
        """
        # Split command and subcommand
        s = prog.split(None, 1)
        subcmd = s[1] if len(s) == 2 else ""

        if index is None:
            return "complete -c %s -n '__fish_spack_using_command %s'" % (s[0], subcmd)
        elif nargs in [argparse.ZERO_OR_MORE, argparse.ONE_OR_MORE, argparse.REMAINDER]:
            head = "complete -c %s -n '__fish_spack_using_command_pos_remainder %d %s'"
        else:
            head = "complete -c %s -n '__fish_spack_using_command_pos %d %s'"
        return head % (s[0], index, subcmd)
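Because `complete_head` is a staticmethod, the three prefix shapes are easy to see directly (the prog strings are illustrative):

```python
FishCompletionWriter.complete_head("spack mirror add")
# -> "complete -c spack -n '__fish_spack_using_command mirror add'"

FishCompletionWriter.complete_head("spack install", 0, argparse.ZERO_OR_MORE)
# -> "complete -c spack -n '__fish_spack_using_command_pos_remainder 0 install'"

FishCompletionWriter.complete_head("spack config", 0, 1)
# -> "complete -c spack -n '__fish_spack_using_command_pos 0 config'"
```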

    def complete(
        self,
        prog: str,
        positionals: List[Tuple[str, Optional[Iterable[Any]], Union[int, str, None], str]],
        optionals: List[Tuple[Sequence[str], List[str], str, Union[int, str, None], str]],
        subcommands: List[Tuple[ArgumentParser, str, str]],
    ) -> str:
        """Return all the completion commands.

        Args:
            prog: Program name.
            positionals: List of positional arguments.
            optionals: List of optional arguments.
            subcommands: List of subcommand parsers.

        Returns:
            Completion command.
        """
        commands = []

        if positionals:
            commands.append(self.positionals(prog, positionals))

        if subcommands:
            commands.append(self.subcommands(prog, subcommands))

        if optionals:
            commands.append(self.optionals(prog, optionals))

        return "".join(commands)

    def positionals(
        self,
        prog: str,
        positionals: List[Tuple[str, Optional[Iterable[Any]], Union[int, str, None], str]],
    ) -> str:
        """Return the completion for positional arguments.

        Args:
            prog: Program name.
            positionals: List of positional arguments.

        Returns:
            Completion command.
        """
        commands = []

        for idx, (args, choices, nargs, help) in enumerate(positionals):
            # Make sure we always get same order of output
            if isinstance(choices, dict):
                choices = sorted(choices.keys())
            elif isinstance(choices, (set, frozenset)):
                choices = sorted(choices)

            # Remove platform-specific choices to avoid hard-coding the platform.
            if choices is not None:
                valid_choices = []
                for choice in choices:
                    if spack.platforms.host().name not in choice:
                        valid_choices.append(choice)
                choices = valid_choices

            head = self.complete_head(prog, idx, nargs)

            if choices is not None:
                # If there are choices, we provide a completion for all possible values.
                commands.append(head + " -f -a %s" % self._quote(" ".join(choices)))
            else:
                # Otherwise, we try to find a predefined completion for it
                value = _fish_dest_get_complete(prog, args)
                if value is not None:
                    commands.append(head + " " + value)

        return "\n".join(commands) + "\n"

    def prog_comment(self, prog: str) -> str:
        """Return a comment line for the command.

        Args:
            prog: Program name.

        Returns:
            Comment line.
        """
        return "\n# %s\n" % prog

    def optionals(
        self,
        prog: str,
        optionals: List[Tuple[Sequence[str], List[str], str, Union[int, str, None], str]],
    ) -> str:
        """Return the completion for optional arguments.

        Args:
            prog: Program name.
            optionals: List of optional arguments.

        Returns:
            Completion command.
        """
        commands = []
        head = self.complete_head(prog)

        for flags, dest, _, nargs, help in optionals:
            # Make sure we always get same order of output
            if isinstance(dest, dict):
                dest = sorted(dest.keys())
            elif isinstance(dest, (set, frozenset)):
                dest = sorted(dest)

            # Remove platform-specific choices to avoid hard-coding the platform.
            if dest is not None:
                valid_choices = []
                for choice in dest:
                    if spack.platforms.host().name not in choice:
                        valid_choices.append(choice)
                dest = valid_choices

            # To provide description for optionals, and also possible values,
            # we need to use two split completion command.
            # Otherwise, each option will have same description.
            prefix = head

            # Add all flags to the completion
            for f in flags:
                if f.startswith("--"):
                    long = f[2:]
                    prefix += " -l %s" % long
                elif f.startswith("-"):
                    short = f[1:]
                    assert len(short) == 1
                    prefix += " -s %s" % short

            # Check if option require argument.
            # Currently multi-argument options are not supported, so we treat it like one argument.
            if nargs != 0:
                prefix += " -r"

            if dest is not None:
                # If there are choices, we provide a completion for all possible values.
                commands.append(prefix + " -f -a %s" % self._quote(" ".join(dest)))
            else:
                # Otherwise, we try to find a predefined completion for it
                value = _fish_dest_get_complete(prog, dest)
                if value is not None:
                    commands.append(prefix + " " + value)

            if help:
                commands.append(prefix + " -d %s" % self._quote(help))

        return "\n".join(commands) + "\n"
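The "two split completion command" comment above is easiest to see on an example. For a hypothetical `--deptype` option of `spack graph` whose choices are `build` and `run`, the loop emits one `complete` line for the flag and its values and a second one carrying only the description:

```python
import sys

writer = FishCompletionWriter("spack", sys.stdout, False)
print(writer.optionals("spack graph", [
    (["--deptype"], ["build", "run"], None, 1, "deptypes to traverse"),
]))
# -> complete -c spack -n '__fish_spack_using_command graph' -l deptype -r -f -a 'build run'
#    complete -c spack -n '__fish_spack_using_command graph' -l deptype -r -d 'deptypes to traverse'
```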

    def subcommands(self, prog: str, subcommands: List[Tuple[ArgumentParser, str, str]]) -> str:
        """Return the completion for subcommands.

        Args:
            prog: Program name.
            subcommands: List of subcommand parsers.

        Returns:
            Completion command.
        """
        commands = []
        head = self.complete_head(prog, 0)

        for _, subcommand, help in subcommands:
            command = head + " -f -a %s" % self._quote(subcommand)

            if help is not None and len(help) > 0:
                help = help.split("\n")[0]
                command += " -d %s" % self._quote(help)

            commands.append(command)

        return "\n".join(commands) + "\n"


@formatter
def subcommands(args: Namespace, out: IO) -> None:
    """Hierarchical tree of subcommands.

    args:
        args: Command-line arguments.
        out: File object to write to.
    """
def subcommands(args, out):
    parser = spack.main.make_argument_parser()
    spack.main.add_all_commands(parser)
    writer = SubcommandWriter(parser.prog, out, args.aliases)
    writer.write(parser)


def rst_index(out: IO) -> None:
    """Generate an index of all commands.

    Args:
        out: File object to write to.
    """
def rst_index(out):
    out.write("\n")

    index = spack.main.index_commands()
@@ -756,19 +221,13 @@ def rst_index(out: IO) -> None:


@formatter
def rst(args: Namespace, out: IO) -> None:
    """ReStructuredText documentation of subcommands.

    args:
        args: Command-line arguments.
        out: File object to write to.
    """
def rst(args, out):
    # create a parser with all commands
    parser = spack.main.make_argument_parser()
    spack.main.add_all_commands(parser)

    # extract cross-refs of the form `_cmd-spack-<cmd>:` from rst files
    documented_commands: Set[str] = set()
    documented_commands = set()
    for filename in args.rst_files:
        with open(filename) as f:
            for line in f:
@@ -786,13 +245,7 @@ def rst(args: Namespace, out: IO) -> None:


@formatter
def names(args: Namespace, out: IO) -> None:
    """Simple list of top-level commands.

    args:
        args: Command-line arguments.
        out: File object to write to.
    """
def names(args, out):
    commands = copy.copy(spack.cmd.all_commands())

    if args.aliases:
@@ -802,13 +255,7 @@ def names(args: Namespace, out: IO) -> None:


@formatter
def bash(args: Namespace, out: IO) -> None:
    """Bash tab-completion script.

    args:
        args: Command-line arguments.
        out: File object to write to.
    """
def bash(args, out):
    parser = spack.main.make_argument_parser()
    spack.main.add_all_commands(parser)

@@ -816,22 +263,7 @@ def bash(args: Namespace, out: IO) -> None:
    writer.write(parser)


@formatter
def fish(args, out):
    parser = spack.main.make_argument_parser()
    spack.main.add_all_commands(parser)

    writer = FishCompletionWriter(parser.prog, out, args.aliases)
    writer.write(parser)


def prepend_header(args: Namespace, out: IO) -> None:
    """Prepend header text at the beginning of a file.

    Args:
        args: Command-line arguments.
        out: File object to write to.
    """
def prepend_header(args, out):
    if not args.header:
        return

@@ -839,14 +271,10 @@ def prepend_header(args: Namespace, out: IO) -> None:
    out.write(header.read())


def _commands(parser: ArgumentParser, args: Namespace) -> None:
def _commands(parser, args):
    """This is the 'regular' command, which can be called multiple times.

    See ``commands()`` below for ``--update-completion`` handling.

    Args:
        parser: Argument parser.
        args: Command-line arguments.
    """
    formatter = formatters[args.format]

@@ -868,15 +296,12 @@ def _commands(parser: ArgumentParser, args: Namespace) -> None:
    formatter(args, sys.stdout)


def update_completion(parser: ArgumentParser, args: Namespace) -> None:
def update_completion(parser, args):
    """Iterate through the shells and update the standard completion files.

    This is a convenience method to avoid calling this command many
    times, and to simplify completion update for developers.

    Args:
        parser: Argument parser.
        args: Command-line arguments.
    """
    for shell, shell_args in update_completion_args.items():
        for attr, value in shell_args.items():
@@ -884,20 +309,14 @@ def update_completion(parser: ArgumentParser, args: Namespace) -> None:
    _commands(parser, args)


def commands(parser: ArgumentParser, args: Namespace) -> None:
    """Main function that calls formatter functions.

    Args:
        parser: Argument parser.
        args: Command-line arguments.
    """
def commands(parser, args):
    if args.update_completion:
        if args.format != "names" or any([args.aliases, args.update, args.header]):
            tty.die("--update-completion can only be specified alone.")

        # this runs the command multiple times with different arguments
        update_completion(parser, args)
        return update_completion(parser, args)

    else:
        # run commands normally
        _commands(parser, args)
        return _commands(parser, args)
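All of the formatters above share the same `(args, out)` contract and are registered by the `@formatter` decorator, which is defined earlier in this file, outside the hunks shown. Assuming that registration simply keys the `formatters` dict by function name, adding another output format is a one-function sketch:

```python
@formatter
def json_names(args, out):
    # Hypothetical formatter, not part of this diff: emit the top-level
    # command names as a JSON array through the same (args, out) contract.
    import json

    out.write(json.dumps(sorted(spack.cmd.all_commands())))
```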
@@ -36,10 +36,7 @@ def shell_init_instructions(cmd, equivalent):
        " source %s/setup-env.fish" % spack.paths.share_path,
        "",
        color.colorize("@*c{For Windows batch:}"),
        " %s\\spack_cmd.bat" % spack.paths.bin_path,
        "",
        color.colorize("@*c{For PowerShell:}"),
        " %s\\setup-env.ps1" % spack.paths.share_path,
        " source %s/spack_cmd.bat" % spack.paths.share_path,
        "",
        "Or, if you do not want to use shell support, run "
        + ("one of these" if shell_specific else "this")
@@ -53,7 +50,6 @@ def shell_init_instructions(cmd, equivalent):
            equivalent.format(sh_arg="--csh ") + " # csh/tcsh",
            equivalent.format(sh_arg="--fish") + " # fish",
            equivalent.format(sh_arg="--bat ") + " # batch",
            equivalent.format(sh_arg="--pwsh") + " # powershell",
        ]
    else:
        msg += [" " + equivalent]

@@ -82,12 +82,12 @@ def _specs(self, **kwargs):

        # return everything for an empty query.
        if not qspecs:
            return spack.store.STORE.db.query(**kwargs)
            return spack.store.db.query(**kwargs)

        # Return only matching stuff otherwise.
        specs = {}
        for spec in qspecs:
            for s in spack.store.STORE.db.query(spec, **kwargs):
            for s in spack.store.db.query(spec, **kwargs):
                # This is fast for already-concrete specs
                specs[s.dag_hash()] = s

@@ -265,7 +265,7 @@ def recurse_dependents():
        "--dependents",
        action="store_true",
        dest="dependents",
        help="also uninstall any packages that depend on the ones given via command line",
        help="also uninstall any packages that depend on the ones given " "via command line",
    )


@@ -286,7 +286,7 @@ def deptype():
        "--deptype",
        action=DeptypeAction,
        default=dep.all_deptypes,
        help="comma-separated list of deptypes to traverse\n\ndefault=%s"
        help="comma-separated list of deptypes to traverse\ndefault=%s"
        % ",".join(dep.all_deptypes),
    )

@@ -331,17 +331,6 @@ def tags():
    )


@arg
def namespaces():
    return Args(
        "-N",
        "--namespaces",
        action="store_true",
        default=False,
        help="show fully qualified package names",
    )


@arg
def jobs():
    return Args(
@@ -360,25 +349,14 @@ def install_status():
        "-I",
        "--install-status",
        action="store_true",
        default=True,
        help="show install status of packages\n\npackages can be: "
        default=False,
        help="show install status of packages. packages can be: "
        "installed [+], missing and needed by an installed package [-], "
        "installed in an upstream instance [^], "
        "installed in and upstream instance [^], "
        "or not installed (no annotation)",
    )


@arg
def no_install_status():
    return Args(
        "--no-install-status",
        dest="install_status",
        action="store_false",
        default=True,
        help="do not show install status annotations",
    )


@arg
def no_checksum():
    return Args(
@@ -404,23 +382,24 @@ def add_cdash_args(subparser, add_help):
    cdash_help = {}
    if add_help:
        cdash_help["upload-url"] = "CDash URL where reports will be uploaded"
        cdash_help["build"] = (
            "name of the build that will be reported to CDash\n\n"
            "defaults to spec of the package to operate on"
        )
        cdash_help["site"] = (
            "site name that will be reported to CDash\n\n" "defaults to current system hostname"
        )
        cdash_help["track"] = (
            "results will be reported to this group on CDash\n\n" "defaults to Experimental"
        )
        cdash_help["buildstamp"] = (
            "use custom buildstamp\n\n"
            "instead of letting the CDash reporter prepare the "
            "buildstamp which, when combined with build name, site and project, "
            "uniquely identifies the build, provide this argument to identify "
            "the build yourself. format: %%Y%%m%%d-%%H%%M-[cdash-track]"
        )
        cdash_help[
            "build"
        ] = """The name of the build that will be reported to CDash.
Defaults to spec of the package to operate on."""
        cdash_help[
            "site"
        ] = """The site name that will be reported to CDash.
Defaults to current system hostname."""
        cdash_help[
            "track"
        ] = """Results will be reported to this group on CDash.
Defaults to Experimental."""
        cdash_help[
            "buildstamp"
        ] = """Instead of letting the CDash reporter prepare the
buildstamp which, when combined with build name, site and project,
uniquely identifies the build, provide this argument to identify
the build yourself. Format: %%Y%%m%%d-%%H%%M-[cdash-track]"""
    else:
        cdash_help["upload-url"] = argparse.SUPPRESS
        cdash_help["build"] = argparse.SUPPRESS
@@ -489,7 +468,7 @@ def __init__(
        # substituting '_' for ':'.
        dest = dest.replace(":", "_")

        super().__init__(
        super(ConfigSetAction, self).__init__(
            option_strings=option_strings,
            dest=dest,
            nargs=0,
@@ -552,16 +531,16 @@ def add_s3_connection_args(subparser, add_help):
        "--s3-access-key-id", help="ID string to use to connect to this S3 mirror"
    )
    subparser.add_argument(
        "--s3-access-key-secret", help="secret string to use to connect to this S3 mirror"
        "--s3-access-key-secret", help="Secret string to use to connect to this S3 mirror"
    )
    subparser.add_argument(
        "--s3-access-token", help="access token to use to connect to this S3 mirror"
        "--s3-access-token", help="Access Token to use to connect to this S3 mirror"
    )
    subparser.add_argument(
        "--s3-profile", help="S3 profile name to use to connect to this S3 mirror", default=None
    )
    subparser.add_argument(
        "--s3-endpoint-url", help="endpoint URL to use to connect to this S3 mirror"
        "--s3-endpoint-url", help="Endpoint URL to use to connect to this S3 mirror"
    )

@@ -2,6 +2,8 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from __future__ import print_function

import argparse
import os

@@ -106,7 +108,7 @@ def emulate_env_utility(cmd_name, context, args):
    visitor = AreDepsInstalledVisitor(context=context)

    # Mass install check needs read transaction.
    with spack.store.STORE.db.read_transaction():
    with spack.store.db.read_transaction():
        traverse.traverse_breadth_first_with_visitor([spec], traverse.CoverNodesVisitor(visitor))

    if visitor.has_uninstalled_deps:
@@ -3,6 +3,8 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from __future__ import print_function

import argparse
import sys

@@ -24,6 +26,7 @@ def setup_parser(subparser):
    sp = subparser.add_subparsers(metavar="SUBCOMMAND", dest="compiler_command")

    scopes = spack.config.scopes()
    scopes_metavar = spack.config.scopes_metavar

    # Find
    find_parser = sp.add_parser(
@@ -35,7 +38,7 @@ def setup_parser(subparser):
    find_parser.add_argument(
        "--scope",
        choices=scopes,
        metavar=spack.config.SCOPES_METAVAR,
        metavar=scopes_metavar,
        default=spack.config.default_modify_scope("compilers"),
        help="configuration scope to modify",
    )
@@ -49,8 +52,8 @@ def setup_parser(subparser):
    remove_parser.add_argument(
        "--scope",
        choices=scopes,
        metavar=spack.config.SCOPES_METAVAR,
        default=None,
        metavar=scopes_metavar,
        default=spack.config.default_modify_scope("compilers"),
        help="configuration scope to modify",
    )

@@ -59,7 +62,7 @@ def setup_parser(subparser):
    list_parser.add_argument(
        "--scope",
        choices=scopes,
        metavar=spack.config.SCOPES_METAVAR,
        metavar=scopes_metavar,
        default=spack.config.default_list_scope(),
        help="configuration scope to read from",
    )
@@ -70,7 +73,7 @@ def setup_parser(subparser):
    info_parser.add_argument(
        "--scope",
        choices=scopes,
        metavar=spack.config.SCOPES_METAVAR,
        metavar=scopes_metavar,
        default=spack.config.default_list_scope(),
        help="configuration scope to read from",
    )
@@ -92,7 +95,7 @@ def compiler_find(args):
    n = len(new_compilers)
    s = "s" if n > 1 else ""

    config = spack.config.CONFIG
    config = spack.config.config
    filename = config.get_config_filename(args.scope, "compilers")
    tty.msg("Added %d new compiler%s to %s" % (n, s, filename))
    colify(reversed(sorted(c.spec.display_str for c in new_compilers)), indent=4)
@@ -103,21 +106,19 @@ def compiler_find(args):


def compiler_remove(args):
    compiler_spec = spack.spec.CompilerSpec(args.compiler_spec)
    candidate_compilers = spack.compilers.compilers_for_spec(compiler_spec, scope=args.scope)

    if not candidate_compilers:
        tty.die("No compilers match spec %s" % compiler_spec)

    if not args.all and len(candidate_compilers) > 1:
        tty.error(f"Multiple compilers match spec {compiler_spec}. Choose one:")
        colify(reversed(sorted([c.spec.display_str for c in candidate_compilers])), indent=4)
    cspec = spack.spec.CompilerSpec(args.compiler_spec)
    compilers = spack.compilers.compilers_for_spec(cspec, scope=args.scope)
    if not compilers:
        tty.die("No compilers match spec %s" % cspec)
    elif not args.all and len(compilers) > 1:
        tty.error("Multiple compilers match spec %s. Choose one:" % cspec)
        colify(reversed(sorted([c.spec.display_str for c in compilers])), indent=4)
        tty.msg("Or, use `spack compiler remove -a` to remove all of them.")
        sys.exit(1)

    for current_compiler in candidate_compilers:
        spack.compilers.remove_compiler_from_config(current_compiler.spec, scope=args.scope)
        tty.msg(f"{current_compiler.spec.display_str} has been removed")
    for compiler in compilers:
        spack.compilers.remove_compiler_from_config(compiler.spec, scope=args.scope)
        tty.msg("Removed compiler %s" % compiler.spec.display_str)


def compiler_info(args):
@@ -13,11 +13,12 @@

def setup_parser(subparser):
    scopes = spack.config.scopes()
    scopes_metavar = spack.config.scopes_metavar

    subparser.add_argument(
        "--scope",
        choices=scopes,
        metavar=spack.config.SCOPES_METAVAR,
        metavar=scopes_metavar,
        help="configuration scope to read/modify",
    )

@@ -14,16 +14,18 @@

def setup_parser(subparser):
    subparser.add_argument(
        "-f", "--force", action="store_true", help="re-concretize even if already concretized"
        "-f", "--force", action="store_true", help="Re-concretize even if already concretized."
    )
    subparser.add_argument(
        "--test",
        default=None,
        choices=["root", "all"],
        help="concretize with test dependencies of only root packages or all packages",
        help="""Concretize with test dependencies. When 'root' is chosen, test
dependencies are only added for the environment's root specs. When 'all' is
chosen, test dependencies are enabled for all packages in the environment.""",
    )
    subparser.add_argument(
        "-q", "--quiet", action="store_true", help="don't print concretized specs"
        "-q", "--quiet", action="store_true", help="Don't print concretized specs"
    )

    spack.cmd.common.arguments.add_concretizer_args(subparser)
@@ -2,6 +2,8 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from __future__ import print_function

import collections
import os
import shutil
@@ -27,12 +29,13 @@

def setup_parser(subparser):
    scopes = spack.config.scopes()
    scopes_metavar = spack.config.scopes_metavar

    # User can only choose one
    subparser.add_argument(
        "--scope",
        choices=scopes,
        metavar=spack.config.SCOPES_METAVAR,
        metavar=scopes_metavar,
        help="configuration scope to read/modify",
    )

@@ -41,10 +44,10 @@ def setup_parser(subparser):
    get_parser = sp.add_parser("get", help="print configuration values")
    get_parser.add_argument(
        "section",
        help="configuration section to print\n\noptions: %(choices)s",
        help="configuration section to print. " "options: %(choices)s",
        nargs="?",
        metavar="section",
        choices=spack.config.SECTION_SCHEMAS,
        choices=spack.config.section_schemas,
    )

    blame_parser = sp.add_parser(
@@ -52,18 +55,18 @@ def setup_parser(subparser):
    )
    blame_parser.add_argument(
        "section",
        help="configuration section to print\n\noptions: %(choices)s",
        help="configuration section to print. " "options: %(choices)s",
        metavar="section",
        choices=spack.config.SECTION_SCHEMAS,
        choices=spack.config.section_schemas,
    )

    edit_parser = sp.add_parser("edit", help="edit configuration file")
    edit_parser.add_argument(
        "section",
        help="configuration section to edit\n\noptions: %(choices)s",
        help="configuration section to edit. " "options: %(choices)s",
        metavar="section",
        nargs="?",
        choices=spack.config.SECTION_SCHEMAS,
        choices=spack.config.section_schemas,
    )
    edit_parser.add_argument(
        "--print-file", action="store_true", help="print the file name that would be edited"
@@ -75,7 +78,7 @@ def setup_parser(subparser):
    add_parser.add_argument(
        "path",
        nargs="?",
        help="colon-separated path to config that should be added, e.g. 'config:default:true'",
        help="colon-separated path to config that should be added," " e.g. 'config:default:true'",
    )
    add_parser.add_argument("-f", "--file", help="file from which to set all config values")

@@ -87,7 +90,7 @@ def setup_parser(subparser):
        "--local",
        action="store_true",
        default=False,
        help="set packages preferences based on local installs, rather than upstream",
        help="Set packages preferences based on local installs, rather " "than upstream.",
    )

    remove_parser = sp.add_parser("remove", aliases=["rm"], help="remove configuration parameters")
@@ -145,10 +148,10 @@ def config_get(args):
    scope, section = _get_scope_and_section(args)

    if section is not None:
        spack.config.CONFIG.print_section(section)
        spack.config.config.print_section(section)

    elif scope and scope.startswith("env:"):
        config_file = spack.config.CONFIG.get_config_filename(scope, section)
        config_file = spack.config.config.get_config_filename(scope, section)
        if os.path.exists(config_file):
            with open(config_file) as f:
                print(f.read())
@@ -156,12 +159,12 @@ def config_get(args):
        tty.die("environment has no %s file" % ev.manifest_name)

    else:
        tty.die("`spack config get` requires a section argument or an active environment.")
        tty.die("`spack config get` requires a section argument " "or an active environment.")


def config_blame(args):
    """Print out line-by-line blame of merged YAML."""
    spack.config.CONFIG.print_section(args.section, blame=True)
    spack.config.config.print_section(args.section, blame=True)


def config_edit(args):
@@ -179,8 +182,8 @@ def config_edit(args):
    # If we aren't editing a spack.yaml file, get config path from scope.
    scope, section = _get_scope_and_section(args)
    if not scope and not section:
        tty.die("`spack config edit` requires a section argument or an active environment.")
    config_file = spack.config.CONFIG.get_config_filename(scope, section)
        tty.die("`spack config edit` requires a section argument " "or an active environment.")
    config_file = spack.config.config.get_config_filename(scope, section)

    if args.print_file:
        print(config_file)
@@ -193,7 +196,7 @@ def config_list(args):

    Used primarily for shell tab completion scripts.
    """
    print(" ".join(list(spack.config.SECTION_SCHEMAS)))
    print(" ".join(list(spack.config.section_schemas)))


def config_add(args):
@@ -250,19 +253,19 @@ def _can_update_config_file(scope: spack.config.ConfigScope, cfg_file):

def config_update(args):
    # Read the configuration files
    spack.config.CONFIG.get_config(args.section, scope=args.scope)
    spack.config.config.get_config(args.section, scope=args.scope)
    updates: List[spack.config.ConfigScope] = list(
        filter(
            lambda s: not isinstance(
                s, (spack.config.InternalConfigScope, spack.config.ImmutableConfigScope)
            ),
            spack.config.CONFIG.format_updates[args.section],
            spack.config.config.format_updates[args.section],
        )
    )

    cannot_overwrite, skip_system_scope = [], False
    for scope in updates:
        cfg_file = spack.config.CONFIG.get_config_filename(scope.name, args.section)
        cfg_file = spack.config.config.get_config_filename(scope.name, args.section)
        can_be_updated = _can_update_config_file(scope, cfg_file)
        if not can_be_updated:
            if scope.name == "system":
@@ -301,7 +304,7 @@ def config_update(args):
        " the latest schema format:\n\n"
    )
    for scope in updates:
        cfg_file = spack.config.CONFIG.get_config_filename(scope.name, args.section)
        cfg_file = spack.config.config.get_config_filename(scope.name, args.section)
        msg += "\t[scope={0}, file={1}]\n".format(scope.name, cfg_file)
    msg += (
        "\nIf the configuration files are updated, versions of Spack "
@@ -324,7 +327,7 @@ def config_update(args):
    # Make a backup copy and rewrite the file
    bkp_file = cfg_file + ".bkp"
    shutil.copy(cfg_file, bkp_file)
    spack.config.CONFIG.update_config(args.section, data, scope=scope.name, force=True)
    spack.config.config.update_config(args.section, data, scope=scope.name, force=True)
    tty.msg(f'File "{cfg_file}" update [backup={bkp_file}]')
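The `.bkp` convention written above is what ties `spack config update` and `spack config revert` (below) together. A small sketch, with a hypothetical path:

```python
cfg_file = "/etc/spack/packages.yaml"  # hypothetical scope file
bkp_file = cfg_file + ".bkp"           # sibling written by `spack config update`
# `spack config revert` later searches for exactly this sibling file and,
# if the scope can be overwritten safely, copies it back over cfg_file.
```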


@@ -336,13 +339,13 @@ def _can_revert_update(scope_dir, cfg_file, bkp_file):


def config_revert(args):
    scopes = [args.scope] if args.scope else [x.name for x in spack.config.CONFIG.file_scopes]
    scopes = [args.scope] if args.scope else [x.name for x in spack.config.config.file_scopes]

    # Search for backup files in the configuration scopes
    Entry = collections.namedtuple("Entry", ["scope", "cfg", "bkp"])
    to_be_restored, cannot_overwrite = [], []
    for scope in scopes:
        cfg_file = spack.config.CONFIG.get_config_filename(scope, args.section)
        cfg_file = spack.config.config.get_config_filename(scope, args.section)
        bkp_file = cfg_file + ".bkp"

        # If the backup files doesn't exist move to the next scope
@@ -373,7 +376,7 @@ def config_revert(args):

    proceed = True
    if not args.yes_to_all:
        msg = "The following scopes will be restored from the corresponding backup files:\n"
        msg = "The following scopes will be restored from the corresponding" " backup files:\n"
        for entry in to_be_restored:
            msg += "\t[scope={0.scope}, bkp={0.bkp}]\n".format(entry)
        msg += "This operation cannot be undone."
@@ -398,8 +401,8 @@ def config_prefer_upstream(args):
    if scope is None:
        scope = spack.config.default_modify_scope("packages")

    all_specs = set(spack.store.STORE.db.query(installed=True))
    local_specs = set(spack.store.STORE.db.query_local(installed=True))
    all_specs = set(spack.store.db.query(installed=True))
    local_specs = set(spack.store.db.query_local(installed=True))
    pref_specs = local_specs if args.local else all_specs - local_specs

    conflicting_variants = set()
@@ -456,7 +459,7 @@ def config_prefer_upstream(args):
    existing = spack.config.get("packages", scope=scope)
    new = spack.config.merge_yaml(existing, pkgs)
    spack.config.set("packages", new, scope)
    config_file = spack.config.CONFIG.get_config_filename(scope, section)
    config_file = spack.config.config.get_config_filename(scope, section)

    tty.msg("Updated config at {0}".format(config_file))

@@ -10,7 +10,7 @@
import spack.container
import spack.container.images

description = "creates recipes to build images for different container runtimes"
description = "creates recipes to build images for different" " container runtimes"
section = "container"
level = "long"

@@ -3,6 +3,8 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from __future__ import print_function

import os
import re
import urllib.parse
@@ -17,7 +19,6 @@
from spack.url import UndetectableNameError, UndetectableVersionError, parse_name, parse_version
from spack.util.editor import editor
from spack.util.executable import ProcessError, which
from spack.util.format import get_version_lines
from spack.util.naming import mod_to_class, simplify_name, valid_fully_qualified_module_name

description = "create a new package file"
@@ -70,7 +71,7 @@ class {class_name}({base_class_name}):
'''


class BundlePackageTemplate:
class BundlePackageTemplate(object):
    """
    Provides the default values to be used for a bundle package file template.
    """
@@ -121,7 +122,7 @@ def install(self, spec, prefix):
    url_line = ' url = "{url}"'

    def __init__(self, name, url, versions):
        super().__init__(name, versions)
        super(PackageTemplate, self).__init__(name, versions)

        self.url_def = self.url_line.format(url=url)

@@ -199,7 +200,7 @@ def __init__(self, name, url, *args, **kwargs):
        # Make it more obvious that we are renaming the package
        tty.msg("Changing package name from {0} to lua-{0}".format(name))
        name = "lua-{0}".format(name)
        super().__init__(name, url, *args, **kwargs)
        super(LuaPackageTemplate, self).__init__(name, url, *args, **kwargs)


class MesonPackageTemplate(PackageTemplate):
@@ -307,7 +308,7 @@ def __init__(self, name, url, *args, **kwargs):
        tty.msg("Changing package name from {0} to rkt-{0}".format(name))
        name = "rkt-{0}".format(name)
        self.body_def = self.body_def.format(name[4:])
        super().__init__(name, url, *args, **kwargs)
        super(RacketPackageTemplate, self).__init__(name, url, *args, **kwargs)


class PythonPackageTemplate(PackageTemplate):
@@ -326,7 +327,6 @@ class PythonPackageTemplate(PackageTemplate):
    # FIXME: Add a build backend, usually defined in pyproject.toml. If no such file
    # exists, use setuptools.
    # depends_on("py-setuptools", type="build")
    # depends_on("py-hatchling", type="build")
    # depends_on("py-flit-core", type="build")
    # depends_on("py-poetry-core", type="build")

@@ -334,11 +334,17 @@ class PythonPackageTemplate(PackageTemplate):
    # depends_on("py-foo", type=("build", "run"))"""

    body_def = """\
    def config_settings(self, spec, prefix):
        # FIXME: Add configuration settings to be passed to the build backend
    def global_options(self, spec, prefix):
        # FIXME: Add options to pass to setup.py
        # FIXME: If not needed, delete this function
        settings = {}
        return settings"""
        options = []
        return options

    def install_options(self, spec, prefix):
        # FIXME: Add options to pass to setup.py install
        # FIXME: If not needed, delete this function
        options = []
        return options"""

    def __init__(self, name, url, *args, **kwargs):
        # If the user provided `--name py-numpy`, don't rename it py-py-numpy
@@ -394,7 +400,7 @@ def __init__(self, name, url, *args, **kwargs):
            + self.url_line
        )

        super().__init__(name, url, *args, **kwargs)
        super(PythonPackageTemplate, self).__init__(name, url, *args, **kwargs)


class RPackageTemplate(PackageTemplate):
@@ -433,7 +439,7 @@ def __init__(self, name, url, *args, **kwargs):
        if bioc:
            self.url_line = ' url = "{0}"\n' ' bioc = "{1}"'.format(url, r_name)

        super().__init__(name, url, *args, **kwargs)
        super(RPackageTemplate, self).__init__(name, url, *args, **kwargs)


class PerlmakePackageTemplate(PackageTemplate):
@@ -460,7 +466,7 @@ def __init__(self, name, *args, **kwargs):
        tty.msg("Changing package name from {0} to perl-{0}".format(name))
        name = "perl-{0}".format(name)

        super().__init__(name, *args, **kwargs)
        super(PerlmakePackageTemplate, self).__init__(name, *args, **kwargs)


class PerlbuildPackageTemplate(PerlmakePackageTemplate):
@@ -493,7 +499,7 @@ def __init__(self, name, *args, **kwargs):
        tty.msg("Changing package name from {0} to octave-{0}".format(name))
        name = "octave-{0}".format(name)

        super().__init__(name, *args, **kwargs)
        super(OctavePackageTemplate, self).__init__(name, *args, **kwargs)


class RubyPackageTemplate(PackageTemplate):
@@ -521,7 +527,7 @@ def __init__(self, name, *args, **kwargs):
        tty.msg("Changing package name from {0} to ruby-{0}".format(name))
        name = "ruby-{0}".format(name)

        super().__init__(name, *args, **kwargs)
        super(RubyPackageTemplate, self).__init__(name, *args, **kwargs)


class MakefilePackageTemplate(PackageTemplate):
@@ -566,7 +572,7 @@ def __init__(self, name, *args, **kwargs):
        tty.msg("Changing package name from {0} to py-{0}".format(name))
        name = "py-{0}".format(name)

        super().__init__(name, *args, **kwargs)
        super(SIPPackageTemplate, self).__init__(name, *args, **kwargs)


templates = {
@@ -608,7 +614,7 @@ def setup_parser(subparser):
        "--template",
        metavar="TEMPLATE",
        choices=sorted(templates.keys()),
        help="build system template to use\n\noptions: %(choices)s",
        help="build system template to use. options: %(choices)s",
    )
    subparser.add_argument(
        "-r", "--repo", help="path to a repository where the package should be created"
@@ -616,7 +622,7 @@ def setup_parser(subparser):
    subparser.add_argument(
        "-N",
        "--namespace",
        help="specify a namespace for the package\n\nmust be the namespace of "
        help="specify a namespace for the package. must be the namespace of "
        "a repository registered with Spack",
    )
    subparser.add_argument(
@@ -709,7 +715,7 @@ def __call__(self, stage, url):
            output = tar("--exclude=*/*/*", "-tf", stage.archive_file, output=str)
        except ProcessError:
            output = ""
        lines = output.splitlines()
        lines = output.split("\n")

        # Determine the build system based on the files contained
        # in the archive.
@@ -833,15 +839,13 @@ def get_versions(args, name):
        version = parse_version(args.url)
        url_dict = {version: args.url}

        version_hashes = spack.stage.get_checksums_for_versions(
        versions = spack.stage.get_checksums_for_versions(
            url_dict,
            name,
            first_stage_function=guesser,
            keep_stage=args.keep_stage,
            batch=(args.batch or len(url_dict) == 1),
        )

        versions = get_version_lines(version_hashes, url_dict)
    else:
        versions = unhashed_versions

@@ -876,7 +880,7 @@ def get_build_system(template, url, guesser):
        # Use whatever build system the guesser detected
        selected_template = guesser.build_system
        if selected_template == "generic":
            tty.warn("Unable to detect a build system. Using a generic package template.")
            tty.warn("Unable to detect a build system. " "Using a generic package template.")
        else:
            msg = "This package looks like it uses the {0} build system"
            tty.msg(msg.format(selected_template))
@@ -915,11 +919,11 @@ def get_repository(args, name):
        )
    else:
        if spec.namespace:
            repo = spack.repo.PATH.get_repo(spec.namespace, None)
            repo = spack.repo.path.get_repo(spec.namespace, None)
            if not repo:
                tty.die("Unknown namespace: '{0}'".format(spec.namespace))
        else:
            repo = spack.repo.PATH.first_repo()
            repo = spack.repo.path.first_repo()

    # Set the namespace on the spec if it's not there already
    if not spec.namespace:
@@ -3,6 +3,8 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from __future__ import print_function

import os
import platform
import re
@@ -60,16 +62,16 @@ def create_db_tarball(args):
    tarball_name = "spack-db.%s.tar.gz" % _debug_tarball_suffix()
    tarball_path = os.path.abspath(tarball_name)

    base = os.path.basename(str(spack.store.STORE.root))
    base = os.path.basename(str(spack.store.root))
    transform_args = []
    if "GNU" in tar("--version", output=str):
        transform_args = ["--transform", "s/^%s/%s/" % (base, tarball_name)]
    else:
        transform_args = ["-s", "/^%s/%s/" % (base, tarball_name)]

    wd = os.path.dirname(str(spack.store.STORE.root))
    wd = os.path.dirname(str(spack.store.root))
    with working_dir(wd):
        files = [spack.store.STORE.db._index_path]
        files = [spack.store.db._index_path]
        files += glob("%s/*/*/*/.spack/spec.json" % base)
        files += glob("%s/*/*/*/.spack/spec.yaml" % base)
        files = [os.path.relpath(f) for f in files]
@@ -26,8 +26,8 @@ def setup_parser(subparser):
        "--installed",
        action="store_true",
        default=False,
        help="list installed dependencies of an installed spec "
        "instead of possible dependencies of a package",
        help="List installed dependencies of an installed spec, "
        "instead of possible dependencies of a package.",
    )
    subparser.add_argument(
        "-t",
@@ -60,7 +60,7 @@ def dependencies(parser, args):
        format_string = "{name}{@version}{%compiler}{/hash:7}"
        if sys.stdout.isatty():
            tty.msg("Dependencies of %s" % spec.format(format_string, color=True))
        deps = spack.store.STORE.db.installed_relatives(
        deps = spack.store.db.installed_relatives(
            spec, "children", args.transitive, deptype=args.deptype
        )
        if deps:
@@ -25,15 +25,15 @@ def setup_parser(subparser):
        "--installed",
        action="store_true",
        default=False,
        help="list installed dependents of an installed spec "
        "instead of possible dependents of a package",
        help="List installed dependents of an installed spec, "
        "instead of possible dependents of a package.",
    )
    subparser.add_argument(
        "-t",
        "--transitive",
        action="store_true",
        default=False,
        help="show all transitive dependents",
        help="Show all transitive dependents.",
    )
    arguments.add_common_arguments(subparser, ["spec"])

@@ -47,14 +47,14 @@ def inverted_dependencies():
    actual dependents.
    """
    dag = {}
    for pkg_cls in spack.repo.PATH.all_package_classes():
    for pkg_cls in spack.repo.path.all_package_classes():
        dag.setdefault(pkg_cls.name, set())
        for dep in pkg_cls.dependencies:
            deps = [dep]

            # expand virtuals if necessary
            if spack.repo.PATH.is_virtual(dep):
                deps += [s.name for s in spack.repo.PATH.providers_for(dep)]
            if spack.repo.path.is_virtual(dep):
                deps += [s.name for s in spack.repo.path.providers_for(dep)]

            for d in deps:
                dag.setdefault(d, set()).add(pkg_cls.name)
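A toy run of the inversion above, with hypothetical packages: if `hdf5` depends on the virtual `mpi`, provided by `openmpi` and `mpich`, the dependent is recorded under the virtual and every provider:

```python
dag = {}
pkg_name, dep = "hdf5", "mpi"     # hypothetical package and virtual dependency
providers = ["openmpi", "mpich"]  # what providers_for(dep) might return

for d in [dep] + providers:
    dag.setdefault(d, set()).add(pkg_name)

# dag == {"mpi": {"hdf5"}, "openmpi": {"hdf5"}, "mpich": {"hdf5"}}
```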
@@ -96,7 +96,7 @@ def dependents(parser, args):
        format_string = "{name}{@version}{%compiler}{/hash:7}"
        if sys.stdout.isatty():
            tty.msg("Dependents of %s" % spec.cformat(format_string))
        deps = spack.store.STORE.db.installed_relatives(spec, "parents", args.transitive)
        deps = spack.store.db.installed_relatives(spec, "parents", args.transitive)
        if deps:
            spack.cmd.display_specs(deps, long=True)
        else:
lib/spack/spack/cmd/deprecate.py

@@ -13,6 +13,8 @@
 It is up to the user to ensure binary compatibility between the deprecated
 installation and its deprecator.
 """
+from __future__ import print_function
+
 import argparse
 import os

@@ -26,7 +28,7 @@
 from spack.database import InstallStatuses
 from spack.error import SpackError

-description = "replace one package with another via symlinks"
+description = "Replace one package with another via symlinks"
 section = "admin"
 level = "long"

@@ -46,7 +48,7 @@ def setup_parser(sp):
         action="store_true",
         default=True,
         dest="dependencies",
-        help="deprecate dependencies (default)",
+        help="Deprecate dependencies (default)",
     )
     deps.add_argument(
         "-D",
@@ -54,7 +56,7 @@ def setup_parser(sp):
         action="store_false",
         default=True,
         dest="dependencies",
-        help="do not deprecate dependencies",
+        help="Do not deprecate dependencies",
     )

     install = sp.add_mutually_exclusive_group()
@@ -64,7 +66,7 @@ def setup_parser(sp):
         action="store_true",
         default=False,
         dest="install",
-        help="concretize and install deprecator spec",
+        help="Concretize and install deprecator spec",
     )
     install.add_argument(
         "-I",
@@ -72,7 +74,7 @@ def setup_parser(sp):
         action="store_false",
         default=False,
         dest="install",
-        help="deprecator spec must already be installed (default)",
+        help="Deprecator spec must already be installed (default)",
     )

     sp.add_argument(
@@ -81,7 +83,7 @@ def setup_parser(sp):
         type=str,
         default="soft",
         choices=["soft", "hard"],
-        help="type of filesystem link to use for deprecation (default soft)",
+        help="Type of filesystem link to use for deprecation (default soft)",
     )

     sp.add_argument(
@@ -130,7 +132,7 @@ def deprecate(parser, args):
     already_deprecated = []
     already_deprecated_for = []
     for spec in all_deprecate:
-        deprecated_for = spack.store.STORE.db.deprecator(spec)
+        deprecated_for = spack.store.db.deprecator(spec)
         if deprecated_for:
             already_deprecated.append(spec)
             already_deprecated_for.append(deprecated_for)

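The -d/-D and -i/-I pairs above share one argparse pattern: two switches in a mutually exclusive group write opposite booleans into a single destination, with store_true/store_false choosing the direction. A self-contained sketch of that pattern; the program and flag names here are illustrative, not Spack's:

import argparse

parser = argparse.ArgumentParser(prog="deprecate-demo")
deps = parser.add_mutually_exclusive_group()
deps.add_argument(
    "-d", action="store_true", default=True, dest="dependencies",
    help="deprecate dependencies (default)",
)
deps.add_argument(
    "-D", action="store_false", default=True, dest="dependencies",
    help="do not deprecate dependencies",
)

print(parser.parse_args([]).dependencies)      # True: the default
print(parser.parse_args(["-D"]).dependencies)  # False
# Passing both -d and -D is rejected by the exclusive group.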
lib/spack/spack/cmd/dev_build.py

@@ -25,14 +25,14 @@ def setup_parser(subparser):
         "--source-path",
         dest="source_path",
         default=None,
-        help="path to source directory (defaults to the current directory)",
+        help="path to source directory. defaults to the current directory",
     )
     subparser.add_argument(
         "-i",
         "--ignore-dependencies",
         action="store_true",
         dest="ignore_deps",
-        help="do not try to install dependencies of requested packages",
+        help="don't try to install dependencies of requested packages",
     )
     arguments.add_common_arguments(subparser, ["no_checksum", "deprecated"])
     subparser.add_argument(
@@ -55,13 +55,16 @@ def setup_parser(subparser):
         type=str,
         dest="shell",
         default=None,
-        help="drop into a build environment in a new shell, e.g., bash",
+        help="drop into a build environment in a new shell, e.g. bash, zsh",
     )
     subparser.add_argument(
         "--test",
         default=None,
         choices=["root", "all"],
-        help="run tests on only root packages or all packages",
+        help="""If 'root' is chosen, run package tests during
+installation for top-level packages (but skip tests for dependencies).
+if 'all' is chosen, run package tests during installation for all
+packages. If neither are chosen, don't run tests for any packages.""",
     )
     arguments.add_common_arguments(subparser, ["spec"])

@@ -98,7 +101,7 @@ def dev_build(self, args):
         tty.die("spack dev-build only takes one spec.")

     spec = specs[0]
-    if not spack.repo.PATH.exists(spec.name):
+    if not spack.repo.path.exists(spec.name):
         tty.die(
             "No package for '{0}' was found.".format(spec.name),
             " Use `spack create` to create a new package",

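The multi-line help text restored above spells out the --test semantics; condensed into a predicate, as a paraphrase of the documented behavior rather than actual Spack code:

def run_tests_for(choice, is_root):
    # choice is the --test value: "root", "all", or None
    if choice == "all":
        return True
    if choice == "root":
        return is_root  # top-level packages only; dependencies skipped
    return False  # no --test given: no package tests


assert run_tests_for("root", is_root=False) is False
assert run_tests_for("all", is_root=False) is True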
lib/spack/spack/cmd/develop.py

@@ -20,7 +20,7 @@


 def setup_parser(subparser):
-    subparser.add_argument("-p", "--path", help="source location of package")
+    subparser.add_argument("-p", "--path", help="Source location of package")

     clone_group = subparser.add_mutually_exclusive_group()
     clone_group.add_argument(
@@ -28,18 +28,18 @@ def setup_parser(subparser):
         action="store_false",
         dest="clone",
         default=None,
-        help="do not clone, the package already exists at the source path",
+        help="Do not clone. The package already exists at the source path",
     )
     clone_group.add_argument(
         "--clone",
         action="store_true",
         dest="clone",
         default=None,
-        help="clone the package even if the path already exists",
+        help="Clone the package even if the path already exists",
     )

     subparser.add_argument(
-        "-f", "--force", help="remove any files or directories that block cloning source code"
+        "-f", "--force", help="Remove any files or directories that block cloning source code"
     )

     arguments.add_common_arguments(subparser, ["spec"])
@@ -66,7 +66,8 @@ def develop(parser, args):
         # Both old syntax `spack develop pkg@x` and new syntax `spack develop pkg@=x`
         # are currently supported.
         spec = spack.spec.parse_with_version_concrete(entry["spec"])
-        env.develop(spec=spec, path=path, clone=True)
+        pkg_cls = spack.repo.path.get_pkg_class(spec.name)
+        pkg_cls(spec).stage.steal_source(abspath)

     if not env.dev_specs:
         tty.warn("No develop specs to download")

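Besides the help-string churn, develop() changes behavior in the last hunk: the base branch hands cloning to env.develop(spec=spec, path=path, clone=True), while this branch stages the package itself and moves the fetched tree into place with stage.steal_source(abspath). A schematic contrast, simplified and with error handling and path setup omitted:

def develop_base(env, spec, path):
    # base branch: the environment drives the clone
    env.develop(spec=spec, path=path, clone=True)


def develop_branch(repo, spec, abspath):
    # this branch: stage the package, then relocate the source tree
    pkg_cls = repo.get_pkg_class(spec.name)
    pkg_cls(spec).stage.steal_source(abspath)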
lib/spack/spack/cmd/diff.py

@@ -29,7 +29,7 @@ def setup_parser(subparser):
         action="store_true",
         default=False,
         dest="dump_json",
-        help="dump json output instead of pretty printing",
+        help="Dump json output instead of pretty printing.",
     )
     subparser.add_argument(
         "--first",

lib/spack/spack/cmd/edit.py

@@ -31,9 +31,9 @@ def edit_package(name, repo_path, namespace):
     if repo_path:
         repo = spack.repo.Repo(repo_path)
     elif namespace:
-        repo = spack.repo.PATH.get_repo(namespace)
+        repo = spack.repo.path.get_repo(namespace)
     else:
-        repo = spack.repo.PATH
+        repo = spack.repo.path
     path = repo.filename_for_package_name(name)

     spec = Spec(name)
@@ -62,7 +62,7 @@ def setup_parser(subparser):
         dest="path",
         action="store_const",
         const=spack.paths.build_systems_path,
-        help="edit the build system with the supplied name",
+        help="Edit the build system with the supplied name.",
     )
     excl_args.add_argument(
         "-c",

lib/spack/spack/cmd/env.py

@@ -86,13 +86,6 @@ def env_activate_setup_parser(subparser):
         const="bat",
         help="print bat commands to activate the environment",
     )
-    shells.add_argument(
-        "--pwsh",
-        action="store_const",
-        dest="shell",
-        const="pwsh",
-        help="print powershell commands to activate environment",
-    )

     view_options = subparser.add_mutually_exclusive_group()
     view_options.add_argument(
@@ -102,7 +95,7 @@ def env_activate_setup_parser(subparser):
         dest="with_view",
         const=True,
         default=True,
-        help="update PATH, etc., with associated view",
+        help="update PATH etc. with associated view",
     )
     view_options.add_argument(
         "-V",
@@ -111,7 +104,7 @@ def env_activate_setup_parser(subparser):
         dest="with_view",
         const=False,
         default=True,
-        help="do not update PATH, etc., with associated view",
+        help="do not update PATH etc. with associated view",
     )

     subparser.add_argument(
@@ -161,7 +154,7 @@ def env_activate(args):

     # Error out when -e, -E, -D flags are given, cause they are ambiguous.
     if args.env or args.no_env or args.env_dir:
-        tty.die("Calling spack env activate with --env, --env-dir and --no-env is ambiguous")
+        tty.die("Calling spack env activate with --env, --env-dir and --no-env " "is ambiguous")

     env_name_or_dir = args.activate_env or args.dir

@@ -250,7 +243,7 @@ def env_deactivate(args):

     # Error out when -e, -E, -D flags are given, cause they are ambiguous.
     if args.env or args.no_env or args.env_dir:
-        tty.die("Calling spack env deactivate with --env, --env-dir and --no-env is ambiguous")
+        tty.die("Calling spack env deactivate with --env, --env-dir and --no-env " "is ambiguous")

     if ev.active_environment() is None:
         tty.die("No environment is currently active.")
@@ -290,7 +283,7 @@ def env_create_setup_parser(subparser):
         "envfile",
         nargs="?",
         default=None,
-        help="either a lockfile (must end with '.json' or '.lock') or a manifest file",
+        help="either a lockfile (must end with '.json' or '.lock') or a manifest file.",
     )


@@ -418,7 +411,7 @@ def env_list(args):
         colify(color_names, indent=4)


-class ViewAction:
+class ViewAction(object):
     regenerate = "regenerate"
     enable = "enable"
     disable = "disable"
@@ -608,16 +601,16 @@ def env_depfile_setup_parser(subparser):
         "--make-target-prefix",
         default=None,
         metavar="TARGET",
-        help="prefix Makefile targets (and variables) with <TARGET>/<name>\n\nby default "
+        help="prefix Makefile targets (and variables) with <TARGET>/<name>. By default "
         "the absolute path to the directory makedeps under the environment metadata dir is "
-        "used. can be set to an empty string --make-prefix ''",
+        "used. Can be set to an empty string --make-prefix ''.",
     )
     subparser.add_argument(
         "--make-disable-jobserver",
         default=True,
         action="store_false",
         dest="jobserver",
-        help="disable POSIX jobserver support",
+        help="disable POSIX jobserver support.",
     )
     subparser.add_argument(
         "--use-buildcache",
@@ -625,8 +618,8 @@ def env_depfile_setup_parser(subparser):
         type=arguments.use_buildcache,
         default="package:auto,dependencies:auto",
         metavar="[{auto,only,never},][package:{auto,only,never},][dependencies:{auto,only,never}]",
-        help="when using `only`, redundant build dependencies are pruned from the DAG\n\n"
-        "this flag is passed on to the generated spack install commands",
+        help="When using `only`, redundant build dependencies are pruned from the DAG. "
+        "This flag is passed on to the generated spack install commands.",
     )
     subparser.add_argument(
         "-o",
@@ -640,7 +633,7 @@ def env_depfile_setup_parser(subparser):
         "--generator",
         default="make",
         choices=("make",),
-        help="specify the depfile type\n\ncurrently only make is supported",
+        help="specify the depfile type. Currently only make is supported.",
     )
     subparser.add_argument(
         metavar="specs",

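Several hunks on this branch restore Python 2 idioms: the from __future__ import print_function lines added to file headers, and the explicit object base on ViewAction here. Under Python 3 the two class forms are identical, which is why the base branch could drop the base class. A quick demonstration:

class WithBase(object):
    pass


class WithoutBase:
    pass


# Python 3: both are new-style classes rooted at object.
# Python 2: only WithBase would be; WithoutBase would be an old-style class.
assert WithBase.__mro__[-1] is object
assert WithoutBase.__mro__[-1] is object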
lib/spack/spack/cmd/extensions.py

@@ -22,7 +22,7 @@

 def setup_parser(subparser):
     subparser.epilog = (
-        "If called without argument returns the list of all valid extendable packages"
+        "If called without argument returns " "the list of all valid extendable packages"
     )
     arguments.add_common_arguments(subparser, ["long", "very_long"])
     subparser.add_argument(
@@ -58,7 +58,7 @@ def extensions(parser, args):

     extendable_pkgs = []
     for name in spack.repo.all_package_names():
-        pkg_cls = spack.repo.PATH.get_pkg_class(name)
+        pkg_cls = spack.repo.path.get_pkg_class(name)
         if pkg_cls.extendable:
             extendable_pkgs.append(name)

@@ -81,7 +81,7 @@ def extensions(parser, args):

     if args.show in ("packages", "all"):
         # List package names of extensions
-        extensions = spack.repo.PATH.extensions_for(spec)
+        extensions = spack.repo.path.extensions_for(spec)
         if not extensions:
             tty.msg("%s has no extensions." % spec.cshort_spec)
         else:
@@ -91,7 +91,7 @@ def extensions(parser, args):

     if args.show in ("installed", "all"):
         # List specs of installed extensions.
-        installed = [s.spec for s in spack.store.STORE.db.installed_extensions_for(spec)]
+        installed = [s.spec for s in spack.store.db.installed_extensions_for(spec)]

     if args.show == "all":
         print

lib/spack/spack/cmd/external.py

@@ -2,6 +2,8 @@
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
+from __future__ import print_function
+
 import argparse
 import errno
 import os
@@ -13,7 +15,6 @@
 import spack
 import spack.cmd
 import spack.cmd.common.arguments
-import spack.config
 import spack.cray_manifest as cray_manifest
 import spack.detection
 import spack.error
@@ -28,6 +29,7 @@ def setup_parser(subparser):
     sp = subparser.add_subparsers(metavar="SUBCOMMAND", dest="external_command")

     scopes = spack.config.scopes()
+    scopes_metavar = spack.config.scopes_metavar

     find_parser = sp.add_parser("find", help="add external packages to packages.yaml")
     find_parser.add_argument(
@@ -42,12 +44,12 @@ def setup_parser(subparser):
         "--path",
         default=None,
         action="append",
-        help="one or more alternative search paths for finding externals",
+        help="Alternative search paths for finding externals. May be repeated",
     )
     find_parser.add_argument(
         "--scope",
         choices=scopes,
-        metavar=spack.config.SCOPES_METAVAR,
+        metavar=scopes_metavar,
         default=spack.config.default_modify_scope("packages"),
         help="configuration scope to modify",
     )
@@ -66,8 +68,10 @@ def setup_parser(subparser):

     read_cray_manifest = sp.add_parser(
         "read-cray-manifest",
-        help="consume a Spack-compatible description of externally-installed packages, including "
-        "dependency relationships",
+        help=(
+            "consume a Spack-compatible description of externally-installed "
+            "packages, including dependency relationships"
+        ),
     )
     read_cray_manifest.add_argument(
         "--file", default=None, help="specify a location other than the default"
@@ -90,7 +94,7 @@ def setup_parser(subparser):
     read_cray_manifest.add_argument(
         "--fail-on-error",
         action="store_true",
-        help="if a manifest file cannot be parsed, fail and report the full stack trace",
+        help=("if a manifest file cannot be parsed, fail and report the " "full stack trace"),
     )


@@ -109,14 +113,14 @@ def external_find(args):
     # For most exceptions, just print a warning and continue.
     # Note that KeyboardInterrupt does not subclass Exception
     # (so CTRL-C will terminate the program as expected).
-    skip_msg = "Skipping manifest and continuing with other external checks"
+    skip_msg = "Skipping manifest and continuing with other external " "checks"
     if (isinstance(e, IOError) or isinstance(e, OSError)) and e.errno in [
         errno.EPERM,
         errno.EACCES,
     ]:
         # The manifest file does not have sufficient permissions enabled:
         # print a warning and keep going
-        tty.warn("Unable to read manifest due to insufficient permissions.", skip_msg)
+        tty.warn("Unable to read manifest due to insufficient " "permissions.", skip_msg)
     else:
         tty.warn("Unable to read manifest, unexpected error: {0}".format(str(e)), skip_msg)

@@ -133,9 +137,9 @@ def external_find(args):

     # Add the packages that have been required explicitly
     if args.packages:
-        pkg_cls_to_check = [spack.repo.PATH.get_pkg_class(pkg) for pkg in args.packages]
+        pkg_cls_to_check = [spack.repo.path.get_pkg_class(pkg) for pkg in args.packages]
         if args.tags:
-            allowed = set(spack.repo.PATH.packages_with_tags(*args.tags))
+            allowed = set(spack.repo.path.packages_with_tags(*args.tags))
             pkg_cls_to_check = [x for x in pkg_cls_to_check if x.name in allowed]

     if args.tags and not pkg_cls_to_check:
@@ -144,15 +148,15 @@ def external_find(args):
         # Since tags are cached it's much faster to construct what we need
         # to search directly, rather than filtering after the fact
         pkg_cls_to_check = [
-            spack.repo.PATH.get_pkg_class(pkg_name)
+            spack.repo.path.get_pkg_class(pkg_name)
             for tag in args.tags
-            for pkg_name in spack.repo.PATH.packages_with_tags(tag)
+            for pkg_name in spack.repo.path.packages_with_tags(tag)
         ]
         pkg_cls_to_check = list(set(pkg_cls_to_check))

     # If the list of packages is empty, search for every possible package
     if not args.tags and not pkg_cls_to_check:
-        pkg_cls_to_check = list(spack.repo.PATH.all_package_classes())
+        pkg_cls_to_check = list(spack.repo.path.all_package_classes())

     # If the user specified any packages to exclude from external find, add them here
     if args.exclude:
@@ -165,8 +169,8 @@ def external_find(args):
         detected_packages, scope=args.scope, buildable=not args.not_buildable
     )
     if new_entries:
-        path = spack.config.CONFIG.get_config_filename(args.scope, "packages")
-        msg = "The following specs have been detected on this system and added to {0}"
+        path = spack.config.config.get_config_filename(args.scope, "packages")
+        msg = "The following specs have been detected on this system " "and added to {0}"
         tty.msg(msg.format(path))
         spack.cmd.display_specs(new_entries)
     else:
@@ -234,12 +238,12 @@ def _collect_and_consume_cray_manifest_files(
             if fail_on_error:
                 raise
             else:
-                tty.warn("Failure reading manifest file: {0}\n\t{1}".format(path, str(e)))
+                tty.warn("Failure reading manifest file: {0}" "\n\t{1}".format(path, str(e)))


 def external_list(args):
     # Trigger a read of all packages, might take a long time.
-    list(spack.repo.PATH.all_package_classes())
+    list(spack.repo.path.all_package_classes())
     # Print all the detectable packages
     tty.msg("Detectable packages per repository")
     for namespace, pkgs in sorted(spack.package_base.detectable_packages.items()):

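external_find's selection logic above reads: explicit packages first (optionally filtered by tags), otherwise tag lookups alone, deduplicated, otherwise every package class. A toy rendition of that ordering, with a made-up tag index standing in for the repo's cached tags:

tag_index = {
    "build-tools": ["cmake", "ninja"],
    "core-packages": ["cmake", "gcc"],
}


def classes_to_check(packages, tags):
    if packages:
        selected = list(packages)
        if tags:
            allowed = {p for t in tags for p in tag_index.get(t, [])}
            selected = [p for p in selected if p in allowed]
        return selected
    if tags:
        # Tags are cached, so building the list directly beats filtering
        # after the fact (the same reasoning as the comment in the hunk).
        return sorted({p for t in tags for p in tag_index.get(t, [])})
    return sorted({p for ps in tag_index.values() for p in ps})


print(classes_to_check(["cmake", "gcc"], ["build-tools"]))  # ['cmake']
print(classes_to_check([], ["build-tools"]))                # ['cmake', 'ninja']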
lib/spack/spack/cmd/fetch.py

@@ -10,7 +10,6 @@
 import spack.config
 import spack.environment as ev
 import spack.repo
-import spack.traverse

 description = "fetch archives for packages"
 section = "build"
@@ -37,12 +36,6 @@ def setup_parser(subparser):


 def fetch(parser, args):
-    if args.no_checksum:
-        spack.config.set("config:checksum", False, scope="command_line")
-
-    if args.deprecated:
-        spack.config.set("config:deprecated", True, scope="command_line")
-
     if args.specs:
         specs = spack.cmd.parse_specs(args.specs, concretize=True)
     else:
@@ -58,21 +51,24 @@ def fetch(parser, args):
         else:
             specs = env.all_specs()
         if specs == []:
-            tty.die("No uninstalled specs in environment. Did you run `spack concretize` yet?")
+            tty.die(
+                "No uninstalled specs in environment. Did you " "run `spack concretize` yet?"
+            )
     else:
         tty.die("fetch requires at least one spec argument")

-    if args.dependencies or args.missing:
-        to_be_fetched = spack.traverse.traverse_nodes(specs, key=spack.traverse.by_dag_hash)
-    else:
-        to_be_fetched = specs
+    if args.no_checksum:
+        spack.config.set("config:checksum", False, scope="command_line")

-    for spec in to_be_fetched:
-        if args.missing and spec.installed:
-            continue
+    if args.deprecated:
+        spack.config.set("config:deprecated", True, scope="command_line")

-        pkg = spec.package
+    for spec in specs:
+        if args.missing or args.dependencies:
+            for s in spec.traverse(root=False):
+                # Skip already-installed packages with --missing
+                if args.missing and s.installed:
+                    continue

-        pkg.stage.keep = True
-        with pkg.stage:
-            pkg.do_fetch()
+                s.package.do_fetch()
+        spec.package.do_fetch()

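The fetch() rewrite is the one behavioral change in this file: the base branch walks all specs once with spack.traverse.traverse_nodes keyed by DAG hash, while this branch traverses each spec separately, so a dependency shared by two roots is visited (and fetched) twice. A toy DAG makes the difference visible; the traversal helper below is a stand-in, not Spack's:

dag = {"root-a": ["dep"], "root-b": ["dep"], "dep": []}


def traverse_nodes(roots, seen=None):
    # analogue of spack.traverse.traverse_nodes(specs, key=...): one
    # deduplicated walk over all roots
    seen = set() if seen is None else seen
    for node in roots:
        if node in seen:
            continue
        seen.add(node)
        yield node
        yield from traverse_nodes(dag[node], seen)


print(list(traverse_nodes(["root-a", "root-b"])))
# ['root-a', 'dep', 'root-b']: 'dep' visited once

per_spec = [n for root in dag if root.startswith("root")
            for n in traverse_nodes([root])]
print(per_spec)  # ['root-a', 'dep', 'root-b', 'dep']: 'dep' visited twice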
lib/spack/spack/cmd/find.py

@@ -3,6 +3,8 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

+from __future__ import print_function
+
 import copy
 import sys

@@ -30,14 +32,6 @@ def setup_parser(subparser):
         default=None,
         help="output specs with the specified format string",
     )
-    format_group.add_argument(
-        "-H",
-        "--hashes",
-        action="store_const",
-        dest="format",
-        const="{/hash}",
-        help="same as '--format {/hash}'; use with xargs or $()",
-    )
     format_group.add_argument(
         "--json",
         action="store_true",
@@ -67,7 +61,7 @@ def setup_parser(subparser):
         help="do not group specs by arch/compiler",
     )

-    arguments.add_common_arguments(subparser, ["long", "very_long", "tags", "namespaces"])
+    arguments.add_common_arguments(subparser, ["long", "very_long", "tags"])

     subparser.add_argument(
         "-c",
@@ -140,6 +134,9 @@ def setup_parser(subparser):
     subparser.add_argument(
         "--only-deprecated", action="store_true", help="show only deprecated packages"
     )
+    subparser.add_argument(
+        "-N", "--namespace", action="store_true", help="show fully qualified package names"
+    )

     subparser.add_argument("--start-date", help="earliest date of installation [YYYY-MM-DD]")
     subparser.add_argument("--end-date", help="latest date of installation [YYYY-MM-DD]")
@@ -227,7 +224,7 @@ def display_env(env, args, decorator, results):
         env.user_specs,
         root_args,
         decorator=lambda s, f: color.colorize("@*{%s}" % f),
-        namespaces=True,
+        namespace=True,
         show_flags=True,
         show_full_compiler=True,
         variants=True,
@@ -268,7 +265,7 @@ def find(parser, args):

     # If tags have been specified on the command line, filter by tags
     if args.tags:
-        packages_with_tags = spack.repo.PATH.packages_with_tags(*args.tags)
+        packages_with_tags = spack.repo.path.packages_with_tags(*args.tags)
         results = [x for x in results if x.name in packages_with_tags]

     if args.loaded:

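The -H flag removed in the first find.py hunk was shorthand for --format "{/hash}", emitting one slash-prefixed hash per line precisely so the output composes with xargs or $(). A toy formatter showing the idea; the rendering here is a simplified sketch, not Spack's real format language:

specs = [
    {"name": "zlib", "version": "1.2.13", "hash": "abcdef1"},
    {"name": "cmake", "version": "3.26.3", "hash": "1234abc"},
]


def render(spec, fmt):
    # tiny subset of the spec format strings used in these commands
    return (
        fmt.replace("{name}", spec["name"])
        .replace("{@version}", "@" + spec["version"])
        .replace("{/hash}", "/" + spec["hash"])
    )


for s in specs:
    print(render(s, "{/hash}"))          # one /hash token per line
for s in specs:
    print(render(s, "{name}{@version}"))  # e.g. zlib@1.2.13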
Some files were not shown because too many files have changed in this diff.