Compare commits: fix-linux-... → bugfix/env
6 Commits

| Author | SHA1 | Date |
| --- | --- | --- |
| | 5d0bb647a3 | |
| | bfb72c7fd6 | |
| | 08a5a59b21 | |
| | cebd482300 | |
| | 84a1fc415d | |
| | 394f2a58d3 | |

8 changes: .github/ISSUE_TEMPLATE/build_error.yml (vendored)

@@ -9,7 +9,7 @@ body:
        Thanks for taking the time to report this build failure. To proceed with the report please:

        1. Title the issue `Installation issue: <name-of-the-package>`.
        2. Provide the information required below.

        We encourage you to try, as much as possible, to reduce your problem to the minimal example that still reproduces the issue. That would help us a lot in fixing it quickly and effectively!
  - type: textarea
    id: reproduce
@@ -29,9 +29,7 @@ body:
      description: |
        Please post the error message from spack inside the `<details>` tag below:
      value: |
-       <details><summary>Error message</summary>
-
-       <pre>
+       <details><summary>Error message</summary><pre>
        ...
        </pre></details>
    validations:
@@ -55,7 +53,7 @@ body:
        Please upload the following files:

        * **`spack-build-out.txt`**
        * **`spack-build-env.txt`**

        They should be present in the stage directory of the failing build. Also upload any `config.log` or similar file if one exists.
  - type: markdown
    attributes:

4 changes: .github/ISSUE_TEMPLATE/test_error.yml (vendored)

@@ -21,9 +21,7 @@ body:
      description: |
        Please post the error message from spack inside the `<details>` tag below:
      value: |
-       <details><summary>Error message</summary>
-
-       <pre>
+       <details><summary>Error message</summary><pre>
        ...
        </pre></details>
    validations:

10 changes: .github/dependabot.yml (vendored)

@@ -5,13 +5,3 @@ updates:
    directory: "/"
    schedule:
      interval: "daily"
-  # Requirements to build documentation
-  - package-ecosystem: "pip"
-    directory: "/lib/spack/docs"
-    schedule:
-      interval: "daily"
-  # Requirements to run style checks
-  - package-ecosystem: "pip"
-    directory: "/.github/workflows/style"
-    schedule:
-      interval: "daily"

17 changes: .github/workflows/audit.yaml (vendored)

@@ -17,24 +17,20 @@ concurrency:
jobs:
  # Run audits on all the packages in the built-in repository
  package-audits:
-    runs-on: ${{ matrix.operating_system }}
-    strategy:
-      matrix:
-        operating_system: ["ubuntu-latest", "macos-latest"]
+    runs-on: ubuntu-latest
    steps:
-    - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # @v2
-    - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # @v2
+    - uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3 # @v2
+    - uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435 # @v2
      with:
        python-version: ${{inputs.python_version}}
    - name: Install Python packages
      run: |
-        pip install --upgrade pip setuptools pytest coverage[toml]
+        pip install --upgrade pip six setuptools pytest codecov coverage[toml]
    - name: Package audits (with coverage)
      if: ${{ inputs.with_coverage == 'true' }}
      run: |
        . share/spack/setup-env.sh
        coverage run $(which spack) audit packages
-        coverage run $(which spack) audit externals
        coverage combine
        coverage xml
    - name: Package audits (without coverage)
@@ -42,8 +38,7 @@ jobs:
      run: |
        . share/spack/setup-env.sh
        $(which spack) audit packages
-        $(which spack) audit externals
-    - uses: codecov/codecov-action@eaaf4bedf32dbdc6b720b63067d99c4d77d6047d # @v2.1.0
+    - uses: codecov/codecov-action@d9f34f8cd5cb3b3eb79b3e4b5dae3a16df499a70 # @v2.1.0
      if: ${{ inputs.with_coverage == 'true' }}
      with:
-        flags: unittests,audits
+        flags: unittests,linux,audits

26 changes: .github/workflows/bootstrap.yml (vendored)

@@ -24,7 +24,7 @@ jobs:
          make patch unzip which xz python3 python3-devel tree \
          cmake bison bison-devel libstdc++-static
    - name: Checkout
-      uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
+      uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3
      with:
        fetch-depth: 0
    - name: Setup non-root user
@@ -62,7 +62,7 @@ jobs:
          make patch unzip xz-utils python3 python3-dev tree \
          cmake bison
    - name: Checkout
-      uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
+      uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3
      with:
        fetch-depth: 0
    - name: Setup non-root user
@@ -99,7 +99,7 @@ jobs:
          bzip2 curl file g++ gcc gfortran git gnupg2 gzip \
          make patch unzip xz-utils python3 python3-dev tree
    - name: Checkout
-      uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
+      uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3
      with:
        fetch-depth: 0
    - name: Setup non-root user
@@ -133,7 +133,7 @@ jobs:
          make patch unzip which xz python3 python3-devel tree \
          cmake bison
    - name: Checkout
-      uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
+      uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3
      with:
        fetch-depth: 0
    - name: Setup repo
@@ -158,7 +158,7 @@ jobs:
      run: |
        brew install cmake bison@2.7 tree
    - name: Checkout
-      uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
+      uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3
    - name: Bootstrap clingo
      run: |
        source share/spack/setup-env.sh
@@ -179,11 +179,11 @@ jobs:
      run: |
        brew install tree
    - name: Checkout
-      uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
+      uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3
    - name: Bootstrap clingo
      run: |
        set -ex
-        for ver in '3.7' '3.8' '3.9' '3.10' '3.11' ; do
+        for ver in '3.6' '3.7' '3.8' '3.9' '3.10' ; do
          not_found=1
          ver_dir="$(find $RUNNER_TOOL_CACHE/Python -wholename "*/${ver}.*/*/bin" | grep . || true)"
          echo "Testing $ver_dir"
@@ -204,7 +204,7 @@ jobs:
    runs-on: ubuntu-20.04
    steps:
    - name: Checkout
-      uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
+      uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3
      with:
        fetch-depth: 0
    - name: Setup repo
@@ -214,7 +214,7 @@ jobs:
    - name: Bootstrap clingo
      run: |
        set -ex
-        for ver in '3.7' '3.8' '3.9' '3.10' '3.11' ; do
+        for ver in '3.6' '3.7' '3.8' '3.9' '3.10' ; do
          not_found=1
          ver_dir="$(find $RUNNER_TOOL_CACHE/Python -wholename "*/${ver}.*/*/bin" | grep . || true)"
          echo "Testing $ver_dir"
@@ -247,7 +247,7 @@ jobs:
          bzip2 curl file g++ gcc patchelf gfortran git gzip \
          make patch unzip xz-utils python3 python3-dev tree
    - name: Checkout
-      uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
+      uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3
      with:
        fetch-depth: 0
    - name: Setup non-root user
@@ -283,7 +283,7 @@ jobs:
          make patch unzip xz-utils python3 python3-dev tree \
          gawk
    - name: Checkout
-      uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
+      uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3
      with:
        fetch-depth: 0
    - name: Setup non-root user
@@ -316,7 +316,7 @@ jobs:
        # Remove GnuPG since we want to bootstrap it
        sudo rm -rf /usr/local/bin/gpg
    - name: Checkout
-      uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
+      uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3
    - name: Bootstrap GnuPG
      run: |
        source share/spack/setup-env.sh
@@ -333,7 +333,7 @@ jobs:
        # Remove GnuPG since we want to bootstrap it
        sudo rm -rf /usr/local/bin/gpg
    - name: Checkout
-      uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
+      uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3
    - name: Bootstrap GnuPG
      run: |
        source share/spack/setup-env.sh

22 changes: .github/workflows/build-containers.yml (vendored)

@@ -45,18 +45,12 @@ jobs:
          [leap15, 'linux/amd64,linux/arm64,linux/ppc64le', 'opensuse/leap:15'],
          [ubuntu-bionic, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:18.04'],
          [ubuntu-focal, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:20.04'],
-          [ubuntu-jammy, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:22.04'],
-          [almalinux8, 'linux/amd64,linux/arm64,linux/ppc64le', 'almalinux:8'],
-          [almalinux9, 'linux/amd64,linux/arm64,linux/ppc64le', 'almalinux:9'],
-          [rockylinux8, 'linux/amd64,linux/arm64', 'rockylinux:8'],
-          [rockylinux9, 'linux/amd64,linux/arm64', 'rockylinux:9'],
-          [fedora37, 'linux/amd64,linux/arm64,linux/ppc64le', 'fedora:37'],
-          [fedora38, 'linux/amd64,linux/arm64,linux/ppc64le', 'fedora:38']]
+          [ubuntu-jammy, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:22.04']]
    name: Build ${{ matrix.dockerfile[0] }}
    if: github.repository == 'spack/spack'
    steps:
    - name: Checkout
-      uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # @v2
+      uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3 # @v2

    - name: Set Container Tag Normal (Nightly)
      run: |
@@ -86,19 +80,19 @@ jobs:
        fi

    - name: Upload Dockerfile
-      uses: actions/upload-artifact@a8a3f3ad30e3422c9c7b888a15615d19a852ae32
+      uses: actions/upload-artifact@0b7f8abb1508181956e8e162db84b466c27e18ce
      with:
        name: dockerfiles
        path: dockerfiles

    - name: Set up QEMU
-      uses: docker/setup-qemu-action@68827325e0b33c7199eb31dd4e31fbe9023e06e3 # @v1
+      uses: docker/setup-qemu-action@e81a89b1732b9c48d79cd809d8d81d79c4647a18 # @v1

    - name: Set up Docker Buildx
-      uses: docker/setup-buildx-action@f95db51fddba0c2d1ec667646a06c2ce06100226 # @v1
+      uses: docker/setup-buildx-action@4b4e9c3e2d4531116a6f8ba8e71fc6e2cb6e6c8c # @v1

    - name: Log in to GitHub Container Registry
-      uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d # @v1
+      uses: docker/login-action@f4ef78c080cd8ba55a85445d5b36e214a81df20a # @v1
      with:
        registry: ghcr.io
        username: ${{ github.actor }}
@@ -106,13 +100,13 @@ jobs:

    - name: Log in to DockerHub
      if: github.event_name != 'pull_request'
-      uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d # @v1
+      uses: docker/login-action@f4ef78c080cd8ba55a85445d5b36e214a81df20a # @v1
      with:
        username: ${{ secrets.DOCKERHUB_USERNAME }}
        password: ${{ secrets.DOCKERHUB_TOKEN }}

    - name: Build & Deploy ${{ matrix.dockerfile[0] }}
-      uses: docker/build-push-action@0565240e2d4ab88bba5387d719585280857ece09 # @v2
+      uses: docker/build-push-action@3b5e8027fcad23fda98b2e3ac259d8d67585f671 # @v2
      with:
        context: dockerfiles/${{ matrix.dockerfile[0] }}
        platforms: ${{ matrix.dockerfile[1] }}

2 changes: .github/workflows/ci.yaml (vendored)

@@ -35,7 +35,7 @@ jobs:
      core: ${{ steps.filter.outputs.core }}
      packages: ${{ steps.filter.outputs.packages }}
    steps:
-    - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # @v2
+    - uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3 # @v2
      if: ${{ github.event_name == 'push' }}
      with:
        fetch-depth: 0

31 changes: .github/workflows/nightly-win-builds.yml (vendored)

@@ -1,31 +0,0 @@
-name: Windows Paraview Nightly
-
-on:
-  schedule:
-    - cron: '0 2 * * *' # Run at 2 am
-
-defaults:
-  run:
-    shell:
-      powershell Invoke-Expression -Command "./share/spack/qa/windows_test_setup.ps1"; {0}
-
-
-jobs:
-  build-paraview-deps:
-    runs-on: windows-latest
-    steps:
-    - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
-      with:
-        fetch-depth: 0
-    - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236
-      with:
-        python-version: 3.9
-    - name: Install Python packages
-      run: |
-        python -m pip install --upgrade pip six pywin32 setuptools coverage
-    - name: Build Test
-      run: |
-        spack compiler find
-        spack external find cmake ninja win-sdk win-wdk wgl msmpi
-        spack -d install -y --cdash-upload-url https://cdash.spack.io/submit.php?project=Spack+on+Windows --cdash-track Nightly --only dependencies paraview
-        exit 0

7 changes: .github/workflows/style/requirements.txt (vendored)

@@ -1,7 +0,0 @@
-black==23.9.1
-clingo==5.6.2
-flake8==6.1.0
-isort==5.12.0
-mypy==1.5.1
-types-six==1.16.21.9
-vermin==1.5.2

38 changes: .github/workflows/unit_tests.yaml (vendored)

@@ -47,10 +47,10 @@ jobs:
            on_develop: false

    steps:
-    - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # @v2
+    - uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3 # @v2
      with:
        fetch-depth: 0
-    - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # @v2
+    - uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435 # @v2
      with:
        python-version: ${{ matrix.python-version }}
    - name: Install System packages
@@ -62,7 +62,7 @@ jobs:
          cmake bison libbison-dev kcov
    - name: Install Python packages
      run: |
-        pip install --upgrade pip setuptools pytest pytest-xdist pytest-cov
+        pip install --upgrade pip six setuptools pytest codecov[toml] pytest-xdist pytest-cov
        pip install --upgrade flake8 "isort>=4.3.5" "mypy>=0.900" "click" "black"
    - name: Setup git configuration
      run: |
@@ -87,17 +87,17 @@ jobs:
        UNIT_TEST_COVERAGE: ${{ matrix.python-version == '3.11' }}
      run: |
        share/spack/qa/run-unit-tests
-    - uses: codecov/codecov-action@eaaf4bedf32dbdc6b720b63067d99c4d77d6047d
+    - uses: codecov/codecov-action@d9f34f8cd5cb3b3eb79b3e4b5dae3a16df499a70
      with:
        flags: unittests,linux,${{ matrix.concretizer }}
  # Test shell integration
  shell:
    runs-on: ubuntu-latest
    steps:
-    - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # @v2
+    - uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3 # @v2
      with:
        fetch-depth: 0
-    - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # @v2
+    - uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435 # @v2
      with:
        python-version: '3.11'
    - name: Install System packages
@@ -107,7 +107,7 @@ jobs:
        sudo apt-get install -y coreutils kcov csh zsh tcsh fish dash bash
    - name: Install Python packages
      run: |
-        pip install --upgrade pip setuptools pytest coverage[toml] pytest-xdist
+        pip install --upgrade pip six setuptools pytest codecov coverage[toml] pytest-xdist
    - name: Setup git configuration
      run: |
        # Need this for the git tests to succeed.
@@ -118,7 +118,7 @@ jobs:
        COVERAGE: true
      run: |
        share/spack/qa/run-shell-tests
-    - uses: codecov/codecov-action@eaaf4bedf32dbdc6b720b63067d99c4d77d6047d
+    - uses: codecov/codecov-action@d9f34f8cd5cb3b3eb79b3e4b5dae3a16df499a70
      with:
        flags: shelltests,linux

@@ -133,11 +133,10 @@ jobs:
        dnf install -y \
            bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
            make patch tcl unzip which xz
-    - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # @v2
+    - uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3 # @v2
    - name: Setup repo and non-root user
      run: |
        git --version
-        git config --global --add safe.directory /__w/spack/spack
        git fetch --unshallow
        . .github/workflows/setup_git.sh
        useradd spack-test
@@ -152,10 +151,10 @@ jobs:
  clingo-cffi:
    runs-on: ubuntu-latest
    steps:
-    - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # @v2
+    - uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3 # @v2
      with:
        fetch-depth: 0
-    - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # @v2
+    - uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435 # @v2
      with:
        python-version: '3.11'
    - name: Install System packages
@@ -164,8 +163,7 @@ jobs:
        sudo apt-get -y install coreutils cvs gfortran graphviz gnupg2 mercurial ninja-build kcov
    - name: Install Python packages
      run: |
-        pip install --upgrade pip setuptools pytest coverage[toml] pytest-cov clingo pytest-xdist
-        pip install --upgrade flake8 "isort>=4.3.5" "mypy>=0.900" "click" "black"
+        pip install --upgrade pip six setuptools pytest codecov coverage[toml] pytest-cov clingo pytest-xdist
    - name: Setup git configuration
      run: |
        # Need this for the git tests to succeed.
@@ -177,7 +175,7 @@ jobs:
        SPACK_TEST_SOLVER: clingo
      run: |
        share/spack/qa/run-unit-tests
-    - uses: codecov/codecov-action@eaaf4bedf32dbdc6b720b63067d99c4d77d6047d # @v2.1.0
+    - uses: codecov/codecov-action@d9f34f8cd5cb3b3eb79b3e4b5dae3a16df499a70 # @v2.1.0
      with:
        flags: unittests,linux,clingo
  # Run unit tests on MacOS
@@ -187,16 +185,16 @@ jobs:
      matrix:
        python-version: ["3.10"]
    steps:
-    - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # @v2
+    - uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3 # @v2
      with:
        fetch-depth: 0
-    - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # @v2
+    - uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435 # @v2
      with:
        python-version: ${{ matrix.python-version }}
    - name: Install Python packages
      run: |
-        pip install --upgrade pip setuptools
-        pip install --upgrade pytest coverage[toml] pytest-xdist pytest-cov
+        pip install --upgrade pip six setuptools
+        pip install --upgrade pytest codecov coverage[toml] pytest-xdist pytest-cov
    - name: Setup Homebrew packages
      run: |
        brew install dash fish gcc gnupg2 kcov
@@ -212,6 +210,6 @@ jobs:
        $(which spack) solve zlib
        common_args=(--dist loadfile --tx '4*popen//python=./bin/spack-tmpconfig python -u ./bin/spack python' -x)
        $(which spack) unit-test --cov --cov-config=pyproject.toml --cov-report=xml:coverage.xml "${common_args[@]}"
-    - uses: codecov/codecov-action@eaaf4bedf32dbdc6b720b63067d99c4d77d6047d
+    - uses: codecov/codecov-action@d9f34f8cd5cb3b3eb79b3e4b5dae3a16df499a70
      with:
        flags: unittests,macos

19 changes: .github/workflows/valid-style.yml (vendored)

@@ -18,15 +18,15 @@ jobs:
  validate:
    runs-on: ubuntu-latest
    steps:
-    - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
-    - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236
+    - uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3 # @v2
+    - uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435 # @v2
      with:
        python-version: '3.11'
        cache: 'pip'
    - name: Install Python Packages
      run: |
-        pip install --upgrade pip setuptools
-        pip install -r .github/workflows/style/requirements.txt
+        pip install --upgrade pip
+        pip install --upgrade vermin
    - name: vermin (Spack's Core)
      run: vermin --backport importlib --backport argparse --violations --backport typing -t=3.6- -vvv lib/spack/spack/ lib/spack/llnl/ bin/
    - name: vermin (Repositories)
@@ -35,17 +35,16 @@ jobs:
  style:
    runs-on: ubuntu-latest
    steps:
-    - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
+    - uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3 # @v2
      with:
        fetch-depth: 0
-    - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236
+    - uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435 # @v2
      with:
        python-version: '3.11'
        cache: 'pip'
    - name: Install Python packages
      run: |
-        pip install --upgrade pip setuptools
-        pip install -r .github/workflows/style/requirements.txt
+        python3 -m pip install --upgrade pip six setuptools types-six black==23.1.0 mypy isort clingo flake8
    - name: Setup git configuration
      run: |
        # Need this for the git tests to succeed.
@@ -69,11 +68,10 @@ jobs:
        dnf install -y \
            bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
            make patch tcl unzip which xz
-    - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # @v2
+    - uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3 # @v2
    - name: Setup repo and non-root user
      run: |
        git --version
-        git config --global --add safe.directory /__w/spack/spack
        git fetch --unshallow
        . .github/workflows/setup_git.sh
        useradd spack-test
@@ -82,7 +80,6 @@ jobs:
      shell: runuser -u spack-test -- bash {0}
      run: |
        source share/spack/setup-env.sh
        spack debug report
-        spack -d bootstrap now --dev
        spack style -t black
        spack unit-test -V

100 changes: .github/workflows/windows_python.yml (vendored)

@@ -15,15 +15,15 @@ jobs:
  unit-tests:
    runs-on: windows-latest
    steps:
-    - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
+    - uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3
      with:
        fetch-depth: 0
-    - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236
+    - uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435
      with:
        python-version: 3.9
    - name: Install Python packages
      run: |
-        python -m pip install --upgrade pip pywin32 setuptools pytest-cov clingo
+        python -m pip install --upgrade pip six pywin32 setuptools codecov pytest-cov clingo
    - name: Create local develop
      run: |
        ./.github/workflows/setup_git.ps1
@@ -33,21 +33,21 @@ jobs:
        ./share/spack/qa/validate_last_exit.ps1
        coverage combine -a
        coverage xml
-    - uses: codecov/codecov-action@eaaf4bedf32dbdc6b720b63067d99c4d77d6047d
+    - uses: codecov/codecov-action@d9f34f8cd5cb3b3eb79b3e4b5dae3a16df499a70
      with:
        flags: unittests,windows
  unit-tests-cmd:
    runs-on: windows-latest
    steps:
-    - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
+    - uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3
      with:
        fetch-depth: 0
-    - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236
+    - uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435
      with:
        python-version: 3.9
    - name: Install Python packages
      run: |
-        python -m pip install --upgrade pip pywin32 setuptools coverage pytest-cov clingo
+        python -m pip install --upgrade pip six pywin32 setuptools codecov coverage pytest-cov clingo
    - name: Create local develop
      run: |
        ./.github/workflows/setup_git.ps1
@@ -57,23 +57,99 @@ jobs:
        ./share/spack/qa/validate_last_exit.ps1
        coverage combine -a
        coverage xml
-    - uses: codecov/codecov-action@eaaf4bedf32dbdc6b720b63067d99c4d77d6047d
+    - uses: codecov/codecov-action@d9f34f8cd5cb3b3eb79b3e4b5dae3a16df499a70
      with:
        flags: unittests,windows
  build-abseil:
    runs-on: windows-latest
    steps:
-    - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
+    - uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3
      with:
        fetch-depth: 0
-    - uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236
+    - uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435
      with:
        python-version: 3.9
    - name: Install Python packages
      run: |
-        python -m pip install --upgrade pip pywin32 setuptools coverage
+        python -m pip install --upgrade pip six pywin32 setuptools codecov coverage
    - name: Build Test
      run: |
        spack compiler find
-        spack -d external find cmake ninja
+        spack external find cmake
+        spack external find ninja
        spack -d install abseil-cpp
+  # TODO: johnwparent - reduce the size of the installer operations
+  # make-installer:
+  #   runs-on: windows-latest
+  #   steps:
+  #   - name: Disable Windows Symlinks
+  #     run: |
+  #       git config --global core.symlinks false
+  #     shell:
+  #       powershell
+  #   - uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3
+  #     with:
+  #       fetch-depth: 0
+  #   - uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435
+  #     with:
+  #       python-version: 3.9
+  #   - name: Install Python packages
+  #     run: |
+  #       python -m pip install --upgrade pip six pywin32 setuptools
+  #   - name: Add Light and Candle to Path
+  #     run: |
+  #       $env:WIX >> $GITHUB_PATH
+  #   - name: Run Installer
+  #     run: |
+  #       ./share/spack/qa/setup_spack_installer.ps1
+  #       spack make-installer -s . -g SILENT pkg
+  #       echo "installer_root=$((pwd).Path)" | Out-File -FilePath $Env:GITHUB_ENV -Encoding utf8 -Append
+  #     env:
+  #       ProgressPreference: SilentlyContinue
+  #   - uses: actions/upload-artifact@83fd05a356d7e2593de66fc9913b3002723633cb
+  #     with:
+  #       name: Windows Spack Installer Bundle
+  #       path: ${{ env.installer_root }}\pkg\Spack.exe
+  #   - uses: actions/upload-artifact@83fd05a356d7e2593de66fc9913b3002723633cb
+  #     with:
+  #       name: Windows Spack Installer
+  #       path: ${{ env.installer_root}}\pkg\Spack.msi
+  # execute-installer:
+  #   needs: make-installer
+  #   runs-on: windows-latest
+  #   defaults:
+  #     run:
+  #       shell: pwsh
+  #   steps:
+  #   - uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435
+  #     with:
+  #       python-version: 3.9
+  #   - name: Install Python packages
+  #     run: |
+  #       python -m pip install --upgrade pip six pywin32 setuptools
+  #   - name: Setup installer directory
+  #     run: |
+  #       mkdir -p spack_installer
+  #       echo "spack_installer=$((pwd).Path)\spack_installer" | Out-File -FilePath $Env:GITHUB_ENV -Encoding utf8 -Append
+  #   - uses: actions/download-artifact@v3
+  #     with:
+  #       name: Windows Spack Installer Bundle
+  #       path: ${{ env.spack_installer }}
+  #   - name: Execute Bundled Installer
+  #     run: |
+  #       $proc = Start-Process ${{ env.spack_installer }}\spack.exe "/install /quiet" -Passthru
+  #       $handle = $proc.Handle # cache proc.Handle
+  #       $proc.WaitForExit();
+  #       $LASTEXITCODE
+  #     env:
+  #       ProgressPreference: SilentlyContinue
+  #   - uses: actions/download-artifact@v3
+  #     with:
+  #       name: Windows Spack Installer
+  #       path: ${{ env.spack_installer }}
+  #   - name: Execute MSI
+  #     run: |
+  #       $proc = Start-Process ${{ env.spack_installer }}\spack.msi "/quiet" -Passthru
+  #       $handle = $proc.Handle # cache proc.Handle
+  #       $proc.WaitForExit();
+  #       $LASTEXITCODE

@@ -1,16 +1,10 @@
version: 2

-build:
-  os: "ubuntu-22.04"
-  apt_packages:
-    - graphviz
-  tools:
-    python: "3.11"
-
sphinx:
  configuration: lib/spack/docs/conf.py
  fail_on_warning: true

python:
+  version: 3.7
  install:
    - requirements: lib/spack/docs/requirements.txt

236 changes: CHANGELOG.md

@@ -1,239 +1,3 @@
-# v0.20.1 (2023-07-10)
-
-## Spack Bugfixes
-
-- Specs removed from an environment were not actually removed if `--force` was not given (#37877)
-- Speed-up module file generation (#37739)
-- Hotfix for a few recipes that treat CMake as a link dependency (#35816)
-- Fix re-running stand-alone test a second time, which was getting a trailing spurious failure (#37840)
-- Fixed reading JSON manifest on Cray, reporting non-concrete specs (#37909)
-- Fixed a few bugs when generating Dockerfiles from Spack (#37766, #37769)
-- Fixed a few long-standing bugs when generating module files (#36678, #38347, #38465, #38455)
-- Fixed issues with building Python extensions using an external Python (#38186)
-- Fixed compiler removal from command line (#38057)
-- Show external status as [e] (#33792)
-- Backported `archspec` fixes (#37793)
-- Improved a few error messages (#37791)
-
-
-# v0.20.0 (2023-05-21)
-
-`v0.20.0` is a major feature release.
-
-## Features in this release
-
-1. **`requires()` directive and enhanced package requirements**
-
-   We've added some more enhancements to requirements in Spack (#36286).
-
-   There is a new `requires()` directive for packages. `requires()` is the opposite of
-   `conflicts()`. You can use it to impose constraints on this package when certain
-   conditions are met:
-
-   ```python
-   requires(
-       "%apple-clang",
-       when="platform=darwin",
-       msg="This package builds only with clang on macOS"
-   )
-   ```
-
-   More on this in [the docs](
-   https://spack.rtfd.io/en/latest/packaging_guide.html#conflicts-and-requirements).
-
-   You can also now add a `when:` clause to `requires:` in your `packages.yaml`
-   configuration or in an environment:
-
-   ```yaml
-   packages:
-     openmpi:
-       require:
-       - any_of: ["%gcc"]
-         when: "@:4.1.4"
-         message: "Only OpenMPI 4.1.5 and up can build with fancy compilers"
-   ```
-
-   More details can be found [here](
-   https://spack.readthedocs.io/en/latest/build_settings.html#package-requirements)
-
-2. **Exact versions**
-
-   Spack did not previously have a way to distinguish a version if it was a prefix of
-   some other version. For example, `@3.2` would match `3.2`, `3.2.1`, `3.2.2`, etc. You
-   can now match *exactly* `3.2` with `@=3.2`. This is useful, for example, if you need
-   to patch *only* the `3.2` version of a package. The new syntax is described in [the docs](
-   https://spack.readthedocs.io/en/latest/basic_usage.html#version-specifier).
-
-   Generally, when writing packages, you should prefer to use ranges like `@3.2` over
-   the specific versions, as this allows the concretizer more leeway when selecting
-   versions of dependencies. More details and recommendations are in the [packaging guide](
-   https://spack.readthedocs.io/en/latest/packaging_guide.html#ranges-versus-specific-versions).
-
-   See #36273 for full details on the version refactor.
-
-3. **New testing interface**
-
-   Writing package tests is now much simpler with a new [test interface](
-   https://spack.readthedocs.io/en/latest/packaging_guide.html#stand-alone-tests).
-
-   Writing a test is now as easy as adding a method that starts with `test_`:
-
-   ```python
-   class MyPackage(Package):
-       ...
-
-       def test_always_fails(self):
-           """use assert to always fail"""
-           assert False
-
-       def test_example(self):
-           """run installed example"""
-           example = which(self.prefix.bin.example)
-           example()
-   ```
-
-   You can use Python's native `assert` statement to implement your checks -- no more
-   need to fiddle with `run_test` or other test framework methods. Spack will
-   introspect the class and run `test_*` methods when you run `spack test`.
-
-4. **More stable concretization**
-
-   * Now, `spack concretize` will *only* concretize the new portions of the environment
-     and will not change existing parts of an environment unless you specify `--force`.
-     This has always been true for `unify:false`, but not for `unify:true` and
-     `unify:when_possible` environments. Now it is true for all of them (#37438, #37681).
-
-   * The concretizer has a new `--reuse-deps` argument that *only* reuses dependencies.
-     That is, it will always treat the *roots* of your environment as it would with
-     `--fresh`. This allows you to upgrade just the roots of your environment while
-     keeping everything else stable (#30990).
-
-5. **Weekly develop snapshot releases**
-
-   Since last year, we have maintained a buildcache of `develop` at
-   https://binaries.spack.io/develop, but the cache can grow to contain so many builds
-   as to be unwieldy. When we get a stable `develop` build, we snapshot the release and
-   add a corresponding tag to the Spack repository. So, you can use a stack from a specific
-   day. There are now tags in the spack repository like:
-
-   * `develop-2023-05-14`
-   * `develop-2023-05-18`
-
-   that correspond to build caches like:
-
-   * https://binaries.spack.io/develop-2023-05-14/e4s
-   * https://binaries.spack.io/develop-2023-05-18/e4s
-
-   We plan to store these snapshot releases weekly.
-
-6. **Specs in buildcaches can be referenced by hash.**
-
-   * Previously, you could run `spack buildcache list` and see the hashes in
-     buildcaches, but referring to them by hash would fail.
-   * You can now run commands like `spack spec` and `spack install` and refer to
-     buildcache hashes directly, e.g. `spack install /abc123` (#35042)
-
-7. **New package and buildcache index websites**
-
-   Our public websites for searching packages have been completely revamped and updated.
-   You can check them out here:
-
-   * *Package Index*: https://packages.spack.io
-   * *Buildcache Index*: https://cache.spack.io
-
-   Both are searchable and more interactive than before. Currently major releases are
-   shown; UI for browsing `develop` snapshots is coming soon.
-
-8. **Default CMake and Meson build types are now Release**
-
-   Spack has historically defaulted to building with optimization and debugging, but
-   packages like `llvm` can be enormous with debug turned on. Our default build type for
-   all Spack packages is now `Release` (#36679, #37436). This has a number of benefits:
-
-   * much smaller binaries;
-   * higher default optimization level; and
-   * defining `NDEBUG` disables assertions, which may lead to further speedups.
-
-   You can still get the old behavior back through requirements and package preferences.
-
-## Other new commands and directives
-
-* `spack checksum` can automatically add new versions to packages (#24532)
-* new command: `spack pkg grep` to easily search package files (#34388)
-* New `maintainers` directive (#35083)
-* Add `spack buildcache push` (alias to `buildcache create`) (#34861)
-* Allow using `-j` to control the parallelism of concretization (#37608)
-* Add `--exclude` option to 'spack external find' (#35013)
-
-## Other new features of note
-
-* editing: add higher-precedence `SPACK_EDITOR` environment variable
-* Many YAML formatting improvements from updating `ruamel.yaml` to the latest version
-  supporting Python 3.6. (#31091, #24885, #37008).
-* Requirements and preferences should not define (non-git) versions (#37687, #37747)
-* Environments now store spack version/commit in `spack.lock` (#32801)
-* User can specify the name of the `packages` subdirectory in repositories (#36643)
-* Add container images supporting RHEL alternatives (#36713)
-* make version(...) kwargs explicit (#36998)
-
-## Notable refactors
-
-* buildcache create: reproducible tarballs (#35623)
-* Bootstrap most of Spack dependencies using environments (#34029)
-* Split `satisfies(..., strict=True/False)` into two functions (#35681)
-* spack install: simplify behavior when inside environments (#35206)
-
-## Binary cache and stack updates
-
-* Major simplification of CI boilerplate in stacks (#34272, #36045)
-* Many improvements to our CI pipeline's reliability
-
-## Removals, Deprecations, and disablements
-
-* Module file generation is disabled by default; you'll need to enable it to use it (#37258)
-* Support for Python 2 was deprecated in `v0.19.0` and has been removed. `v0.20.0` only
-  supports Python 3.6 and higher.
-* Deprecated target names are no longer recognized by Spack. Use generic names instead:
-  * `graviton` is now `cortex_a72`
-  * `graviton2` is now `neoverse_n1`
-  * `graviton3` is now `neoverse_v1`
-* `blacklist` and `whitelist` in module configuration were deprecated in `v0.19.0` and are
-  removed in this release. Use `exclude` and `include` instead.
-* The `ignore=` parameter of the `extends()` directive has been removed. It was not used by
-  any builtin packages and is no longer needed to avoid conflicts in environment views (#35588).
-* Support for the old YAML buildcache format has been removed. It was deprecated in `v0.19.0` (#34347).
-* `spack find --bootstrap` has been removed. It was deprecated in `v0.19.0`. Use `spack
-  --bootstrap find` instead (#33964).
-* `spack bootstrap trust` and `spack bootstrap untrust` are now removed, having been
-  deprecated in `v0.19.0`. Use `spack bootstrap enable` and `spack bootstrap disable`.
-* The `--mirror-name`, `--mirror-url`, and `--directory` options to buildcache and
-  mirror commands were deprecated in `v0.19.0` and have now been removed. They have been
-  replaced by positional arguments (#37457).
-* Deprecate `env:` as top level environment key (#37424)
-* deprecate buildcache create --rel, buildcache install --allow-root (#37285)
-* Support for very old perl-like spec format strings (e.g., `$_$@$%@+$+$=`) has been
-  removed (#37425). This was deprecated in `v0.15` (#10556).
-
-## Notable Bugfixes
-
-* bugfix: don't fetch package metadata for unknown concrete specs (#36990)
-* Improve package source code context display on error (#37655)
-* Relax environment manifest filename requirements and lockfile identification criteria (#37413)
-* `installer.py`: drop build edges of installed packages by default (#36707)
-* Bugfix: package requirements with git commits (#35057, #36347)
-* Package requirements: allow single specs in requirement lists (#36258)
-* conditional variant values: allow boolean (#33939)
-* spack uninstall: follow run/link edges on --dependents (#34058)
-
-## Spack community stats
-
-* 7,179 total packages, 499 new since `v0.19.0`
-* 329 new Python packages
-* 31 new R packages
-* 336 people contributed to this release
-* 317 committers to packages
-* 62 committers to core
-
-
# v0.19.1 (2023-02-07)

### Spack Bugfixes

54 changes: CITATION.cff

@@ -27,53 +27,12 @@
# And here's the CITATION.cff format:
#
cff-version: 1.2.0
-type: software
message: "If you are referencing Spack in a publication, please cite the paper below."
-title: "The Spack Package Manager: Bringing Order to HPC Software Chaos"
-abstract: >-
-  Large HPC centers spend considerable time supporting software for thousands of users, but the complexity of HPC software is quickly outpacing the capabilities of existing software management tools.
-  Scientific applications require specific versions of compilers, MPI, and other dependency libraries, so using a single, standard software stack is infeasible.
-  However, managing many configurations is difficult because the configuration space is combinatorial in size.
-  We introduce Spack, a tool used at Lawrence Livermore National Laboratory to manage this complexity.
-  Spack provides a novel, recursive specification syntax to invoke parametric builds of packages and dependencies.
-  It allows any number of builds to coexist on the same system, and it ensures that installed packages can find their dependencies, regardless of the environment.
-  We show through real-world use cases that Spack supports diverse and demanding applications, bringing order to HPC software chaos.
preferred-citation:
-  title: "The Spack Package Manager: Bringing Order to HPC Software Chaos"
  type: conference-paper
-  url: "https://tgamblin.github.io/pubs/spack-sc15.pdf"
  doi: "10.1145/2807591.2807623"
+  url: "https://github.com/spack/spack"
  authors:
  - family-names: "Gamblin"
    given-names: "Todd"
  - family-names: "LeGendre"
    given-names: "Matthew"
  - family-names: "Collette"
    given-names: "Michael R."
  - family-names: "Lee"
    given-names: "Gregory L."
  - family-names: "Moody"
    given-names: "Adam"
  - family-names: "de Supinski"
    given-names: "Bronis R."
  - family-names: "Futral"
    given-names: "Scott"
-  conference:
-    name: "Supercomputing 2015 (SC’15)"
-    city: "Austin"
-    region: "Texas"
-    country: "US"
-    date-start: 2015-11-15
-    date-end: 2015-11-20
-  month: 11
-  year: 2015
-  identifiers:
-  - description: "The concept DOI of the work."
-    type: doi
-    value: 10.1145/2807591.2807623
-  - description: "The DOE Document Release Number of the work"
-    type: other
-    value: "LLNL-CONF-669890"
-authors:
@@ -88,3 +47,12 @@ authors:
    given-names: "Bronis R."
  - family-names: "Futral"
    given-names: "Scott"
+  title: "The Spack Package Manager: Bringing Order to HPC Software Chaos"
+  conference:
+    name: "Supercomputing 2015 (SC’15)"
+    city: "Austin"
+    region: "Texas"
+    country: "USA"
+  month: November 15-20
+  year: 2015
+  notes: LLNL-CONF-669890

32 changes: SECURITY.md

@@ -2,26 +2,24 @@

## Supported Versions

-We provide security updates for `develop` and for the last two
-stable (`0.x`) release series of Spack. Security updates will be
-made available as patch (`0.x.1`, `0.x.2`, etc.) releases.
+We provide security updates for the following releases.
+For more on Spack's release structure, see
+[`README.md`](https://github.com/spack/spack#releases).
+
+| Version | Supported          |
+| ------- | ------------------ |
+| develop | :white_check_mark: |
+| 0.19.x  | :white_check_mark: |
+| 0.18.x  | :white_check_mark: |

## Reporting a Vulnerability

-You can report a vulnerability using GitHub's private reporting
-feature:
+To report a vulnerability or other security
+issue, email maintainers@spack.io.

-1. Go to [github.com/spack/spack/security](https://github.com/spack/spack/security).
-2. Click "Report a vulnerability" in the upper right corner of that page.
-3. Fill out the form and submit your draft security advisory.
-
-More details are available in
-[GitHub's docs](https://docs.github.com/en/code-security/security-advisories/guidance-on-reporting-and-writing/privately-reporting-a-security-vulnerability).
-
-You can expect to hear back about security issues within two days.
-If your security issue is accepted, we will do our best to release
-a fix within a week. If fixing the issue will take longer than
-this, we will discuss timeline options with you.
+You can expect to hear back within two days.
+If your security issue is accepted, we will do
+our best to release a fix within a week. If
+fixing the issue will take longer than this,
+we will discuss timeline options with you.

@@ -25,6 +25,8 @@ exit 1
# Line above is a shell no-op, and ends a python multi-line comment.
# The code above runs this file with our preferred python interpreter.

+from __future__ import print_function
+
import os
import os.path
import sys

@@ -14,7 +14,7 @@
::
@echo off

-set spack="%SPACK_ROOT%"\bin\spack
+set spack=%SPACK_ROOT%\bin\spack

::#######################################################################
:: This is a wrapper around the spack command that forwards calls to
@@ -51,43 +51,65 @@ setlocal enabledelayedexpansion
:: subcommands will never start with '-'
:: everything after the subcommand is an arg

:: we cannot allow batch "for" loop to directly process CL args
:: a number of batch reserved characters are commonly passed to
:: spack and allowing batch's "for" method to process the raw inputs
:: results in a large number of formatting issues
:: instead, treat the entire CLI as one string
:: and split by space manually
:: capture cl args in variable named cl_args
set cl_args=%*
:process_cl_args
rem Set first cl argument (denoted by %1) to be processed
set t=%1
rem shift moves all cl positional arguments left by one
rem meaning %2 is now %1, this allows us to iterate over each
rem argument
shift
rem assign next "first" cl argument to cl_args, will be null when
rem there are no further arguments to process
set cl_args=%1
if "!t:~0,1!" == "-" (
    if defined _sp_subcommand (
        rem We already have a subcommand, processing args now
rem tokens=1* returns the first processed token produced
rem by tokenizing the input string cl_args on spaces into
rem the named variable %%g
rem While this may look like a for loop, it only
rem executes a single time for each of the cl args
rem the actual iterative loop is performed by the
rem goto process_cl_args stanza
rem we are simply leveraging the "for" method's string
rem tokenization
for /f "tokens=1*" %%g in ("%cl_args%") do (
    set t=%%~g
    rem remainder of string is composed into %%h
    rem these are the cl args yet to be processed
    rem assign cl_args var to only the args to be processed
    rem effectively discarding the current arg %%g
    rem this will be nul when we have no further tokens to process
    set cl_args=%%h
    rem process the first space delineated cl arg
    rem of this iteration
    if "!t:~0,1!" == "-" (
        if defined _sp_subcommand (
            rem We already have a subcommand, processing args now
            if not defined _sp_args (
                set "_sp_args=!t!"
            ) else (
                set "_sp_args=!_sp_args! !t!"
            )
        ) else (
            if not defined _sp_flags (
                set "_sp_flags=!t!"
                shift
            ) else (
                set "_sp_flags=!_sp_flags! !t!"
                shift
            )
        )
    ) else if not defined _sp_subcommand (
        set "_sp_subcommand=!t!"
        shift
    ) else (
        if not defined _sp_args (
            set "_sp_args=!t!"
            shift
        ) else (
            set "_sp_args=!_sp_args! !t!"
            shift
        )
    ) else (
        if not defined _sp_flags (
            set "_sp_flags=!t!"
        ) else (
            set "_sp_flags=!_sp_flags! !t!"
        )
    )
) else if not defined _sp_subcommand (
    set "_sp_subcommand=!t!"
) else (
    if not defined _sp_args (
        set "_sp_args=!t!"
    ) else (
        set "_sp_args=!_sp_args! !t!"
    )
)

-rem if this is not nu;ll, we have more tokens to process
+rem if this is not nil, we have more tokens to process
rem start above process again with remaining unprocessed cl args
if defined cl_args goto :process_cl_args
@@ -192,7 +214,7 @@ goto :end_switch
if defined _sp_args (
    if NOT "%_sp_args%"=="%_sp_args:--help=%" (
        goto :default_case
-    ) else if NOT "%_sp_args%"=="%_sp_args:-h=%" (
+    ) else if NOT "%_sp_args%"=="%_sp_args: -h=%" (
        goto :default_case
    ) else if NOT "%_sp_args%"=="%_sp_args:--bat=%" (
        goto :default_case

146 changes: bin/spack.ps1

@@ -1,146 +0,0 @@
-# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
-# Spack Project Developers. See the top-level COPYRIGHT file for details.
-
-# SPDX-License-Identifier: (Apache-2.0 OR MIT)
-# #######################################################################
-
-function Compare-CommonArgs {
-    $CMDArgs = $args[0]
-    # These arguments take precedence and call for no further parsing of arguments
-    # invoke actual Spack entrypoint with that context and exit after
-    "--help", "-h", "--version", "-V" | ForEach-Object {
-        $arg_opt = $_
-        if(($CMDArgs) -and ([bool]($CMDArgs.Where({$_ -eq $arg_opt})))) {
-            return $true
-        }
-    }
-    return $false
-}
-
-function Read-SpackArgs {
-    $SpackCMD_params = @()
-    $SpackSubCommand = $NULL
-    $SpackSubCommandArgs = @()
-    $args_ = $args[0]
-    $args_ | ForEach-Object {
-        if (!$SpackSubCommand) {
-            if($_.SubString(0,1) -eq "-")
-            {
-                $SpackCMD_params += $_
-            }
-            else{
-                $SpackSubCommand = $_
-            }
-        }
-        else{
-            $SpackSubCommandArgs += $_
-        }
-    }
-    return $SpackCMD_params, $SpackSubCommand, $SpackSubCommandArgs
-}
-
-function Set-SpackEnv {
-    # This method is responsible
-    # for processing the return from $(spack <command>)
-    # which are returned as System.Object[]'s containing
-    # a list of env commands
-    # Invoke-Expression can only handle one command at a time
-    # so we iterate over the list to invoke the env modification
-    # expressions one at a time
-    foreach($envop in $args[0]){
-        Invoke-Expression $envop
-    }
-}
-
-
-function Invoke-SpackCD {
-    if (Compare-CommonArgs $SpackSubCommandArgs) {
-        python "$Env:SPACK_ROOT/bin/spack" cd -h
-    }
-    else {
-        $LOC = $(python "$Env:SPACK_ROOT/bin/spack" location $SpackSubCommandArgs)
-        if (($NULL -ne $LOC)){
-            if ( Test-Path -Path $LOC){
-                Set-Location $LOC
-            }
-            else{
-                exit 1
-            }
-        }
-        else {
-            exit 1
-        }
-    }
-}
-
-function Invoke-SpackEnv {
-    if (Compare-CommonArgs $SpackSubCommandArgs[0]) {
-        python "$Env:SPACK_ROOT/bin/spack" env -h
-    }
-    else {
-        $SubCommandSubCommand = $SpackSubCommandArgs[0]
-        $SubCommandSubCommandArgs = $SpackSubCommandArgs[1..$SpackSubCommandArgs.Count]
-        switch ($SubCommandSubCommand) {
-            "activate" {
-                if (Compare-CommonArgs $SubCommandSubCommandArgs) {
-                    python "$Env:SPACK_ROOT/bin/spack" env activate $SubCommandSubCommandArgs
-                }
-                elseif ([bool]($SubCommandSubCommandArgs.Where({$_ -eq "--pwsh"}))) {
-                    python "$Env:SPACK_ROOT/bin/spack" env activate $SubCommandSubCommandArgs
-                }
-                elseif (!$SubCommandSubCommandArgs) {
-                    python "$Env:SPACK_ROOT/bin/spack" env activate $SubCommandSubCommandArgs
-                }
-                else {
-                    $SpackEnv = $(python "$Env:SPACK_ROOT/bin/spack" $SpackCMD_params env activate "--pwsh" $SubCommandSubCommandArgs)
-                    Set-SpackEnv $SpackEnv
-                }
-            }
-            "deactivate" {
-                if ([bool]($SubCommandSubCommandArgs.Where({$_ -eq "--pwsh"}))) {
-                    python "$Env:SPACK_ROOT/bin/spack" env deactivate $SubCommandSubCommandArgs
-                }
-                elseif($SubCommandSubCommandArgs) {
-                    python "$Env:SPACK_ROOT/bin/spack" env deactivate -h
-                }
-                else {
-                    $SpackEnv = $(python "$Env:SPACK_ROOT/bin/spack" $SpackCMD_params env deactivate "--pwsh")
-                    Set-SpackEnv $SpackEnv
-                }
-            }
-            default {python "$Env:SPACK_ROOT/bin/spack" $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs}
-        }
-    }
-}
-
-function Invoke-SpackLoad {
-    if (Compare-CommonArgs $SpackSubCommandArgs) {
-        python "$Env:SPACK_ROOT/bin/spack" $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs
-    }
-    elseif ([bool]($SpackSubCommandArgs.Where({($_ -eq "--pwsh") -or ($_ -eq "--list")}))) {
-        python "$Env:SPACK_ROOT/bin/spack" $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs
-    }
-    else {
-        $SpackEnv = $(python "$Env:SPACK_ROOT/bin/spack" $SpackCMD_params $SpackSubCommand "--pwsh" $SpackSubCommandArgs)
-        Set-SpackEnv $SpackEnv
-    }
-}
-
-
-$SpackCMD_params, $SpackSubCommand, $SpackSubCommandArgs = Read-SpackArgs $args
-
-if (Compare-CommonArgs $SpackCMD_params) {
-    python "$Env:SPACK_ROOT/bin/spack" $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs
-    exit $LASTEXITCODE
-}
-
-# Process Spack commands with special conditions
-# all other commands are piped directly to Spack
-switch($SpackSubCommand)
-{
-    "cd" {Invoke-SpackCD}
-    "env" {Invoke-SpackEnv}
-    "load" {Invoke-SpackLoad}
-    "unload" {Invoke-SpackLoad}
-    default {python "$Env:SPACK_ROOT/bin/spack" $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs}
-}

@@ -36,9 +36,3 @@ concretizer:
  # on each root spec, allowing different versions and variants of the same package in
  # an environment.
  unify: true
-  # Option to deal with possible duplicate nodes (i.e. different nodes from the same package) in the DAG.
-  duplicates:
-    # "none": allows a single node for any package in the DAG.
-    # "minimal": allows the duplication of 'build-tools' nodes only (e.g. py-setuptools, cmake etc.)
-    # "full" (experimental): allows separation of the entire build-tool stack (e.g. the entire "cmake" subDAG)
-    strategy: minimal

@@ -216,11 +216,10 @@ config:
  # manipulation by unprivileged user (e.g. AFS)
  allow_sgid: true

-  # Whether to show status information during building and installing packages.
-  # This gives information about Spack's current progress as well as the current
-  # and total number of packages. Information is shown both in the terminal
-  # title and inline.
-  install_status: true
+  # Whether to set the terminal title to display status information during
+  # building and installing packages. This gives information about Spack's
+  # current progress as well as the current and total number of packages.
+  terminal_title: false

  # Number of seconds a buildcache's index.json is cached locally before probing
  # for updates, within a single Spack invocation. Defaults to 10 minutes.

@@ -23,20 +23,8 @@ packages:
  providers:
    elf: [libelf]
    fuse: [macfuse]
-    gl: [apple-gl]
-    glu: [apple-glu]
    unwind: [apple-libunwind]
    uuid: [apple-libuuid]
-  apple-gl:
-    buildable: false
-    externals:
-    - spec: apple-gl@4.1.0
-      prefix: /Library/Developer/CommandLineTools/SDKs/MacOSX.sdk
-  apple-glu:
-    buildable: false
-    externals:
-    - spec: apple-glu@1.3.0
-      prefix: /Library/Developer/CommandLineTools/SDKs/MacOSX.sdk
  apple-libunwind:
    buildable: false
    externals:

@@ -1,4 +1,2 @@
mirrors:
  spack-public:
    binary: false
    url: https://mirror.spack.io
  spack-public: https://mirror.spack.io

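The same default mirror can equivalently be registered from the command line; a minimal sketch, reusing the name and URL from the snippet above:

.. code-block:: console

   $ spack mirror add spack-public https://mirror.spack.io
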
@@ -40,8 +40,9 @@ modules:
    roots:
      tcl: $spack/share/spack/modules
      lmod: $spack/share/spack/lmod
    # What type of modules to use ("tcl" and/or "lmod")
    enable: []
    # What type of modules to use
    enable:
    - tcl

  tcl:
    all:

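After changing which module types are enabled, the module tree has to be regenerated; a sketch of the usual follow-up command (assuming ``tcl`` modules, as in the snippet above):

.. code-block:: console

   $ spack module tcl refresh --delete-tree -y
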
@@ -20,7 +20,7 @@ packages:
    awk: [gawk]
    blas: [openblas, amdblis]
    D: [ldc]
    daal: [intel-oneapi-daal]
    daal: [intel-daal]
    elf: [elfutils]
    fftw-api: [fftw, amdfftw]
    flame: [libflame, amdlibflame]
@@ -30,7 +30,7 @@ packages:
    golang: [go, gcc]
    go-or-gccgo-bootstrap: [go-bootstrap, gcc]
    iconv: [libiconv]
    ipp: [intel-oneapi-ipp]
    ipp: [intel-ipp]
    java: [openjdk, jdk, ibm-java]
    jpeg: [libjpeg-turbo, libjpeg]
    lapack: [openblas, amdlibflame]
@@ -40,7 +40,7 @@ packages:
    lua-lang: [lua, lua-luajit-openresty, lua-luajit]
    luajit: [lua-luajit-openresty, lua-luajit]
    mariadb-client: [mariadb-c-client, mariadb]
    mkl: [intel-oneapi-mkl]
    mkl: [intel-mkl]
    mpe: [mpe2]
    mpi: [openmpi, mpich]
    mysql-client: [mysql, mariadb-c-client]
@@ -49,7 +49,6 @@ packages:
    pbs: [openpbs, torque]
    pil: [py-pillow]
    pkgconfig: [pkgconf, pkg-config]
    qmake: [qt-base, qt]
    rpc: [libtirpc]
    scalapack: [netlib-scalapack, amdscalapack]
    sycl: [hipsycl]
@@ -60,7 +59,6 @@ packages:
    xxd: [xxd-standalone, vim]
    yacc: [bison, byacc]
    ziglang: [zig]
    zlib-api: [zlib-ng+compat, zlib]
  permissions:
    read: world
    write: user

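Each entry above maps a virtual package to an ordered list of preferred providers. The resolution can be inspected from the command line; a small sketch using the ``mpi`` virtual from the list above:

.. code-block:: console

   $ spack providers mpi
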
1
lib/spack/docs/.gitignore
vendored
@@ -1,3 +1,4 @@
package_list.html
command_index.rst
spack*.rst
llnl*.rst

@@ -1,16 +0,0 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

# The name of the Pygments (syntax highlighting) style to use.
# We use our own extension of the default style with a few modifications
from pygments.styles.default import DefaultStyle
from pygments.token import Generic


class SpackStyle(DefaultStyle):
    styles = DefaultStyle.styles.copy()
    background_color = "#f4f4f8"
    styles[Generic.Output] = "#355"
    styles[Generic.Prompt] = "bold #346ec9"

@@ -45,8 +45,7 @@ Listing available packages

To install software with Spack, you need to know what software is
available. You can see a list of available package names at the
`packages.spack.io <https://packages.spack.io>`_ website, or
using the ``spack list`` command.
:ref:`package-list` webpage, or using the ``spack list`` command.

.. _cmd-spack-list:

@@ -61,7 +60,7 @@ can install:
   :ellipsis: 10

There are thousands of them, so we've truncated the output above, but you
can find a `full list here <https://packages.spack.io>`_.
can find a :ref:`full list here <package-list>`.
Packages are listed by name in alphabetical order.
A pattern to match with no wildcards, ``*`` or ``?``,
will be treated as though it started and ended with
@@ -1104,31 +1103,16 @@ Below are more details about the specifiers that you can add to specs.
Version specifier
^^^^^^^^^^^^^^^^^

A version specifier ``pkg@<specifier>`` comes after a package name
and starts with ``@``. It can be something abstract that matches
multiple known versions, or a specific version. During concretization,
Spack will pick the optimal version within the spec's constraints
according to policies set for the particular Spack installation.

The version specifier can be *a specific version*, such as ``@=1.0.0`` or
``@=1.2a7``. Or, it can be *a range of versions*, such as ``@1.0:1.5``.
Version ranges are inclusive, so this example includes both ``1.0``
and any ``1.5.x`` version. Version ranges can be unbounded, e.g. ``@:3``
means any version up to and including ``3``. This would include ``3.4``
and ``3.4.2``. Similarly, ``@4.2:`` means any version above and including
``4.2``. As a short-hand, ``@3`` is equivalent to the range ``@3:3`` and
includes any version with major version ``3``.

Notice that you can distinguish between the specific version ``@=3.2`` and
the range ``@3.2``. This is useful for packages that follow a versioning
scheme that omits the zero patch version number: ``3.2``, ``3.2.1``,
``3.2.2``, etc. In general it is preferable to use the range syntax
``@3.2``, since ranges also match versions with one-off suffixes, such as
``3.2-custom``.

A version specifier can also be a list of ranges and specific versions,
separated by commas. For example, ``@1.0:1.5,=1.7.1`` matches any version
in the range ``1.0:1.5`` and the specific version ``1.7.1``.
A version specifier comes somewhere after a package name and starts
with ``@``. It can be a single version, e.g. ``@1.0``, ``@3``, or
``@1.2a7``. Or, it can be a range of versions, such as ``@1.0:1.5``
(all versions between ``1.0`` and ``1.5``, inclusive). Version ranges
can be open, e.g. ``:3`` means any version up to and including ``3``.
This would include ``3.4`` and ``3.4.2``. ``4.2:`` means any version
above and including ``4.2``. Finally, a version specifier can be a
set of arbitrary versions, such as ``@1.0,1.5,1.7`` (``1.0``, ``1.5``,
or ``1.7``). When you supply such a specifier to ``spack install``,
it constrains the set of versions that Spack will install.

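As a quick illustration of the specifiers described above, here is a sketch using the documentation's placeholder package ``foo`` (any real package name works the same way):

.. code-block:: console

   $ spack spec foo@1.0:1.5          # any version in the inclusive range 1.0:1.5
   $ spack spec foo@=1.2a7           # exactly the specific version 1.2a7
   $ spack spec foo@1.0:1.5,=1.7.1   # the range, plus the specific version 1.7.1
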
For packages with a ``git`` attribute, ``git`` references
may be specified instead of a numerical version i.e. branches, tags
@@ -1137,35 +1121,36 @@ reference provided. Acceptable syntaxes for this are:

.. code-block:: sh

   # commit hashes
   foo@abcdef1234abcdef1234abcdef1234abcdef1234  # 40 character hashes are automatically treated as git commits
   foo@git.abcdef1234abcdef1234abcdef1234abcdef1234

   # branches and tags
   foo@git.develop  # use the develop branch
   foo@git.0.19     # use the 0.19 tag

Spack always needs to associate a Spack version with the git reference,
which is used for version comparison. This Spack version is heuristically
taken from the closest valid git tag among ancestors of the git ref.
   # commit hashes
   foo@abcdef1234abcdef1234abcdef1234abcdef1234  # 40 character hashes are automatically treated as git commits
   foo@git.abcdef1234abcdef1234abcdef1234abcdef1234

Once a Spack version is associated with a git ref, it is always printed with
the git ref. For example, if the commit ``@git.abcdefg`` is tagged
``0.19``, then the spec will be shown as ``@git.abcdefg=0.19``.
Spack versions from git reference either have an associated version supplied by the user,
or infer a relationship to known versions from the structure of the git repository. If an
associated version is supplied by the user, Spack treats the git version as equivalent to that
version for all version comparisons in the package logic (e.g. ``depends_on('foo', when='@1.5')``).

If the git ref is not exactly a tag, then the distance to the nearest tag
is also part of the resolved version. ``@git.abcdefg=0.19.git.8`` means
that the commit is 8 commits away from the ``0.19`` tag.

In cases where Spack cannot resolve a sensible version from a git ref,
users can specify the Spack version to use for the git ref. This is done
by appending ``=`` and the Spack version to the git ref. For example:
The associated version can be assigned with ``[git ref]=[version]`` syntax, with the
caveat that the specified version is known to Spack from either the package definition,
or in the configuration preferences (i.e. ``packages.yaml``).

.. code-block:: sh

   foo@git.my_ref=3.2  # use the my_ref tag or branch, but treat it as version 3.2 for version comparisons
   foo@git.abcdef1234abcdef1234abcdef1234abcdef1234=develop  # use the given commit, but treat it as develop for version comparisons

If an associated version is not supplied then the tags in the git repo are used to determine
the most recent previous version known to Spack. Details about how versions are compared
and how Spack determines if one version is less than another are discussed in the developer guide.

If the version spec is not provided, then Spack will choose one
according to policies set for the particular spack installation. If
the spec is ambiguous, i.e. it could match multiple versions, Spack
will choose a version within the spec's constraints according to
policies set for the particular Spack installation.

Details about how versions are compared and how Spack determines if
one version is less than another are discussed in the developer guide.

@@ -36,7 +36,7 @@ Build caches are created via:

.. code-block:: console

   $ spack buildcache push <path/url/mirror name> <spec>
   $ spack buildcache create <path/url/mirror name> <spec>

This command takes the locally installed spec and its dependencies, and
creates tarballs of their install prefixes. It also generates metadata files,
@@ -48,10 +48,14 @@ Here is an example where a build cache is created in a local directory named

.. code-block:: console

   $ spack buildcache push ./spack-cache ninja
   $ spack buildcache create --allow-root ./spack-cache ninja
   ==> Pushing binary packages to file:///home/spackuser/spack/spack-cache/build_cache

Note that ``ninja`` must be installed locally for this to work.
Not that ``ninja`` must be installed locally for this to work.

We're using the ``--allow-root`` flag to tell Spack that it is OK when any of
the binaries we're pushing contain references to the local Spack install
directory.

Once you have a build cache, you can add it as a mirror, discussed next.

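As a sketch of that next step, the local directory used above can be registered as a mirror (the name ``local-cache`` is made up for this example):

.. code-block:: console

   $ spack mirror add local-cache ./spack-cache
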
@@ -143,7 +147,7 @@ and then install from it exclusively, you would do:

   $ spack mirror add E4S https://cache.e4s.io
   $ spack buildcache keys --install --trust
   $ spack install --use-buildcache only <package>
   $ spack install --use-buildache only <package>

We use ``--install`` and ``--trust`` to say that we are installing keys to our
keyring, and trusting all downloaded keys.
@@ -173,7 +177,7 @@ need to be adjusted for better re-locatability.
--------------------

^^^^^^^^^^^^^^^^^^^^^^^^^^^
``spack buildcache push``
``spack buildcache create``
^^^^^^^^^^^^^^^^^^^^^^^^^^^

Create tarball of installed Spack package and all dependencies.

@@ -32,14 +32,9 @@ can't be found. You can readily check if any prerequisite for using Spack is missing:

Spack will take care of bootstrapping any missing dependency marked as [B]. Dependencies marked as [-] are instead required to be found on the system.

   % echo $?
   1

In the case of the output shown above Spack detected that both ``clingo`` and ``gnupg``
are missing and it's giving detailed information on why they are needed and whether
they can be bootstrapped. The return code of this command summarizes the results: if any
dependencies are missing, the return code is ``1``, otherwise ``0``. Running a command that
concretizes a spec, like:
they can be bootstrapped. Running a command that concretize a spec, like:

.. code-block:: console

@@ -49,7 +44,7 @@ concretizes a spec, like:
   ==> Installing "clingo-bootstrap@spack%apple-clang@12.0.0~docs~ipo+python build_type=Release arch=darwin-catalina-x86_64" from a buildcache
   [ ... ]

automatically triggers the bootstrapping of clingo from pre-built binaries as expected.
triggers the bootstrapping of clingo from pre-built binaries as expected.

Users can also bootstrap all the dependencies needed by Spack in a single command, which
might be useful to set up containers or other similar environments:
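The example block is truncated at this hunk boundary; in current Spack the single command alluded to is presumably ``spack bootstrap now``, sketched here:

.. code-block:: console

   % spack bootstrap now
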
@@ -3,103 +3,6 @@

SPDX-License-Identifier: (Apache-2.0 OR MIT)

.. _concretizer-options:

==========================================
Concretization Settings (concretizer.yaml)
==========================================

The ``concretizer.yaml`` configuration file allows you to customize aspects of the
algorithm used to select the dependencies you install. The default configuration
is the following:

.. literalinclude:: _spack_root/etc/spack/defaults/concretizer.yaml
   :language: yaml

--------------------------------
Reuse already installed packages
--------------------------------

The ``reuse`` attribute controls whether Spack will prefer to use installed packages (``true``), or
whether it will do a "fresh" installation and prefer the latest settings from
``package.py`` files and ``packages.yaml`` (``false``).
You can use:

.. code-block:: console

   % spack install --reuse <spec>

to enable reuse for a single installation, and you can use:

.. code-block:: console

   spack install --fresh <spec>

to do a fresh install if ``reuse`` is enabled by default.
``reuse: true`` is the default.

------------------------------------------
Selection of the target microarchitectures
------------------------------------------

The options under the ``targets`` attribute control which targets are considered during a solve.
Currently the options in this section are only configurable from the ``concretizer.yaml`` file
and there are no corresponding command line arguments to enable them for a single solve.

The ``granularity`` option can take two possible values: ``microarchitectures`` and ``generic``.
If set to:

.. code-block:: yaml

   concretizer:
     targets:
       granularity: microarchitectures

Spack will consider all the microarchitectures known to ``archspec`` to label nodes for
compatibility. If instead the option is set to:

.. code-block:: yaml

   concretizer:
     targets:
       granularity: generic

Spack will consider only generic microarchitectures. For instance, when running on a
Haswell node, Spack will consider ``haswell`` as the best target in the former case and
``x86_64_v3`` as the best target in the latter case.

The ``host_compatible`` option is a Boolean option that determines whether or not the
microarchitectures considered during the solve are constrained to be compatible with the
host Spack is currently running on. For instance, if this option is set to ``true``, a
user cannot concretize for ``target=icelake`` while running on a Haswell node.

---------------
Duplicate nodes
---------------

The ``duplicates`` attribute controls whether the DAG can contain multiple configurations of
the same package. This is mainly relevant for build dependencies, which may have their version
pinned by some nodes, and thus be required at different versions by different nodes in the same
DAG.

The ``strategy`` option controls how the solver deals with duplicates. If the value is ``none``,
then a single configuration per package is allowed in the DAG. This means, for instance, that only
a single ``cmake`` or a single ``py-setuptools`` version is allowed. The result would be a slightly
faster concretization, at the expense of making a few specs unsolvable.

If the value is ``minimal``, Spack will allow packages tagged as ``build-tools`` to have duplicates.
This makes it possible, for instance, to concretize specs whose nodes require different, and incompatible,
ranges of some build tool. For instance, in the figure below the latest ``py-shapely`` requires a newer ``py-setuptools``,
while ``py-numpy`` still needs an older version:

.. figure:: images/shapely_duplicates.svg
   :scale: 70 %
   :align: center

Up to Spack v0.20 ``duplicates:strategy:none`` was the default (and only) behavior. From Spack v0.21 the
default behavior is ``duplicates:strategy:minimal``.
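If the slightly faster ``none`` strategy is preferred over the default, it can also be set from the command line; a minimal sketch using ``spack config add``:

.. code-block:: console

   $ spack config add concretizer:duplicates:strategy:none
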
.. _build-settings:

================================
@@ -329,6 +232,76 @@ Specific limitations include:
  then Spack will not add a new external entry (``spack config blame packages``
  can help locate all external entries).

.. _concretizer-options:

----------------------
Concretizer options
----------------------

``packages.yaml`` gives the concretizer preferences for specific packages,
but you can also use ``concretizer.yaml`` to customize aspects of the
algorithm it uses to select the dependencies you install:

.. literalinclude:: _spack_root/etc/spack/defaults/concretizer.yaml
   :language: yaml

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Reuse already installed packages
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

The ``reuse`` attribute controls whether Spack will prefer to use installed packages (``true``), or
whether it will do a "fresh" installation and prefer the latest settings from
``package.py`` files and ``packages.yaml`` (``false``).
You can use:

.. code-block:: console

   % spack install --reuse <spec>

to enable reuse for a single installation, and you can use:

.. code-block:: console

   spack install --fresh <spec>

to do a fresh install if ``reuse`` is enabled by default.
``reuse: true`` is the default.

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Selection of the target microarchitectures
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

The options under the ``targets`` attribute control which targets are considered during a solve.
Currently the options in this section are only configurable from the ``concretizer.yaml`` file
and there are no corresponding command line arguments to enable them for a single solve.

The ``granularity`` option can take two possible values: ``microarchitectures`` and ``generic``.
If set to:

.. code-block:: yaml

   concretizer:
     targets:
       granularity: microarchitectures

Spack will consider all the microarchitectures known to ``archspec`` to label nodes for
compatibility. If instead the option is set to:

.. code-block:: yaml

   concretizer:
     targets:
       granularity: generic

Spack will consider only generic microarchitectures. For instance, when running on a
Haswell node, Spack will consider ``haswell`` as the best target in the former case and
``x86_64_v3`` as the best target in the latter case.

The ``host_compatible`` option is a Boolean option that determines whether or not the
microarchitectures considered during the solve are constrained to be compatible with the
host Spack is currently running on. For instance, if this option is set to ``true``, a
user cannot concretize for ``target=icelake`` while running on a Haswell node.

.. _package-requirements:

--------------------
@@ -352,99 +325,42 @@ on the command line, because it can specify constraints on packages
is not possible to specify constraints on dependencies while also keeping
those dependencies optional.

^^^^^^^^^^^^^^^^^^^
Requirements syntax
^^^^^^^^^^^^^^^^^^^

The package requirements configuration is specified in ``packages.yaml``,
keyed by package name and expressed using the Spec syntax. In the simplest
case you can specify attributes that you always want the package to have
by providing a single spec string to ``require``:
The package requirements configuration is specified in ``packages.yaml``
keyed by package name:

.. code-block:: yaml

   packages:
     libfabric:
       require: "@1.13.2"

In the above example, ``libfabric`` will always build with version 1.13.2. If you
need to compose multiple configuration scopes, ``require`` accepts a list of
strings:

.. code-block:: yaml

   packages:
     libfabric:
       require:
       - "@1.13.2"
       - "%gcc"

In this case ``libfabric`` will always build with version 1.13.2 **and** using GCC
as a compiler.

For more complex use cases, ``require`` also accepts a list of objects. These objects
must have either an ``any_of`` or a ``one_of`` field, containing a list of spec strings,
and they can optionally have a ``when`` and a ``message`` attribute:

.. code-block:: yaml

   packages:
     openmpi:
       require:
       - any_of: ["@4.1.5", "%gcc"]
         message: "in this example only 4.1.5 can build with other compilers"

``any_of`` is a list of specs. One of those specs must be satisfied
and it is also allowed for the concretized spec to match more than one.
In the above example, that means you could build ``openmpi@4.1.5%gcc``,
``openmpi@4.1.5%clang`` or ``openmpi@3.9%gcc``, but
not ``openmpi@3.9%clang``.

If a custom message is provided, and the requirement is not satisfiable,
Spack will print the custom error message:

.. code-block:: console

   $ spack spec openmpi@3.9%clang
   ==> Error: in this example only 4.1.5 can build with other compilers

We could express a similar requirement using the ``when`` attribute:

.. code-block:: yaml

   packages:
     openmpi:
       require:
       - any_of: ["%gcc"]
         when: "@:4.1.4"
         message: "in this example only 4.1.5 can build with other compilers"

In the example above, if the version turns out to be 4.1.4 or less, we require the compiler to be GCC.
For readability, Spack also allows a ``spec`` key accepting a string when there is only a single
constraint:

.. code-block:: yaml

   packages:
     openmpi:
       require:
       - spec: "%gcc"
         when: "@:4.1.4"
         message: "in this example only 4.1.5 can build with other compilers"

This code snippet and the one before it are semantically equivalent.

Finally, instead of ``any_of`` you can use ``one_of`` which also takes a list of specs. The final
concretized spec must match one and only one of them:

.. code-block:: yaml

   packages:
     openmpi:
       require:
       - any_of: ["~cuda", "%gcc"]
     mpich:
       require:
       - one_of: ["+cuda", "+rocm"]

In the example above, that means you could build ``mpich+cuda`` or ``mpich+rocm`` but not ``mpich+cuda+rocm``.
Requirements are expressed using Spec syntax (the same as what is provided
to ``spack install``). In the simplest case, you can specify attributes
that you always want the package to have by providing a single spec to
``require``; in the above example, ``libfabric`` will always build
with version 1.13.2.

You can provide a more-relaxed constraint and allow the concretizer to
choose between a set of options using ``any_of`` or ``one_of``:

* ``any_of`` is a list of specs. One of those specs must be satisfied
  and it is also allowed for the concretized spec to match more than one.
  In the above example, that means you could build ``openmpi+cuda%gcc``,
  ``openmpi~cuda%clang`` or ``openmpi~cuda%gcc`` (in the last case,
  note that both specs in the ``any_of`` for ``openmpi`` are
  satisfied).
* ``one_of`` is also a list of specs, and the final concretized spec
  must match exactly one of them. In the above example, that means
  you could build ``mpich+cuda`` or ``mpich+rocm`` but not
  ``mpich+cuda+rocm`` (note the current package definition for
  ``mpich`` already includes a conflict, so this is redundant but
  still demonstrates the concept).

.. note::

@@ -452,13 +368,6 @@ In the example above, that means you could build ``mpich+cuda`` or ``mpich+rocm``
   preference: items that appear earlier in the list are preferred
   (note that these preferences can be ignored in favor of others).

.. note::

   When using a conditional requirement, Spack is allowed to actively avoid the triggering
   condition (the ``when=...`` spec) if that leads to a concrete spec with better scores in
   the optimization criteria. To check the current optimization criteria and their
   priorities you can run ``spack solve zlib``.

^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Setting default requirements
^^^^^^^^^^^^^^^^^^^^^^^^^^^^

@@ -9,32 +9,9 @@
Bundle
------

``BundlePackage`` represents a set of packages that are expected to work
well together, such as a collection of commonly used software libraries.
The associated software is specified as dependencies.

If it makes sense, variants, conflicts, and requirements can be added to
the package. :ref:`Variants <variants>` ensure that common build options
are consistent across the packages supporting them. :ref:`Conflicts
and requirements <packaging_conflicts>` prevent attempts to build with known
bugs or limitations.

For example, if ``MyBundlePackage`` is known to only build on ``linux``,
it could use the ``require`` directive as follows:

.. code-block:: python

   require("platform=linux", msg="MyBundlePackage only builds on linux")

Spack has a number of built-in bundle packages, such as:

* `AmdAocl <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/amd-aocl/package.py>`_
* `EcpProxyApps <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/ecp-proxy-apps/package.py>`_
* `Libc <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/libc/package.py>`_
* `Xsdk <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/xsdk/package.py>`_

where ``Xsdk`` also inherits from ``CudaPackage`` and ``RocmPackage`` and
``Libc`` is a virtual bundle package for the C standard library.
``BundlePackage`` represents a set of packages that are expected to work well
together, such as a collection of commonly used software libraries. The
associated software is specified as bundle dependencies.


^^^^^^^^

@@ -25,8 +25,8 @@ use Spack to build packages with the tools.

The Spack Python class ``IntelOneapiPackage`` is a base class that is
used by ``IntelOneapiCompilers``, ``IntelOneapiMkl``,
``IntelOneapiTbb`` and other classes to implement the oneAPI
packages. Search for ``oneAPI`` at `<packages.spack.io>`_ for the full
list of available oneAPI packages, or use::
packages. See the :ref:`package-list` for the full list of available
oneAPI packages or use::

   spack list -d oneAPI

@@ -76,55 +76,6 @@ To build with with ``icx``, do ::

   spack install patchelf%oneapi


Using oneAPI Spack environment
-------------------------------

In this example, we build LAMMPS with ``icx`` using the Spack environment for oneAPI packages created by Intel. The
compilers are installed with Spack as in the example above.

Install the oneAPI compilers::

   spack install intel-oneapi-compilers

Add the compilers to your ``compilers.yaml`` so Spack can use them::

   spack compiler add `spack location -i intel-oneapi-compilers`/compiler/latest/linux/bin/intel64
   spack compiler add `spack location -i intel-oneapi-compilers`/compiler/latest/linux/bin

Verify that the compilers are available::

   spack compiler list

Clone the `spack-configs <https://github.com/spack/spack-configs>`_ repo and activate the Intel oneAPI CPU environment::

   git clone https://github.com/spack/spack-configs
   spack env activate spack-configs/INTEL/CPU
   spack concretize -f

The `Intel oneAPI CPU environment <https://github.com/spack/spack-configs/blob/main/INTEL/CPU/spack.yaml>`_ contains applications tested and validated by Intel; this list is constantly extended, and currently it supports:

- `Devito <https://www.devitoproject.org/>`_
- `GROMACS <https://www.gromacs.org/>`_
- `HPCG <https://www.hpcg-benchmark.org/>`_
- `HPL <https://netlib.org/benchmark/hpl/>`_
- `LAMMPS <https://www.lammps.org/#gsc.tab=0>`_
- `OpenFOAM <https://www.openfoam.com/>`_
- `Quantum Espresso <https://www.quantum-espresso.org/>`_
- `STREAM <https://www.cs.virginia.edu/stream/>`_
- `WRF <https://github.com/wrf-model/WRF>`_

To build LAMMPS with the oneAPI compiler from this environment just run::

   spack install lammps

Compiled binaries can be found using::

   spack cd -i lammps

You can do the same for all other applications from this environment.


Using oneAPI MPI to Satisfy a Virtual Dependence
------------------------------------------------------

@@ -32,7 +32,7 @@ By default, these phases run:

.. code-block:: console

   $ sip-build --verbose --target-dir ...
   $ python configure.py --bindir ... --destdir ...
   $ make
   $ make install

@@ -41,30 +41,30 @@ By default, these phases run:
Important files
^^^^^^^^^^^^^^^

Each SIP package comes with a custom configuration file written in Python.
For newer packages, this is called ``project.py``, while in older packages,
it may be called ``configure.py``. This script contains instructions to build
the project.
Each SIP package comes with a custom ``configure.py`` build script,
written in Python. This script contains instructions to build the project.

^^^^^^^^^^^^^^^^^^^^^^^^^
Build system dependencies
^^^^^^^^^^^^^^^^^^^^^^^^^

``SIPPackage`` requires several dependencies. Python and SIP are needed at build-time
to run the aforementioned configure script. Python is also needed at run-time to
actually use the installed Python library. And as we are building Python bindings
for C/C++ libraries, Python is also needed as a link dependency. All of these
dependencies are automatically added via the base class.
``SIPPackage`` requires several dependencies. Python is needed to run
the ``configure.py`` build script, and to run the resulting Python
libraries. Qt is needed to provide the ``qmake`` command. SIP is also
needed to build the package. All of these dependencies are automatically
added via the base class

.. code-block:: python

   extends("python", type=("build", "link", "run"))
   depends_on("py-sip", type="build")
   extends('python')

   depends_on('qt', type='build')

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Passing arguments to ``sip-build``
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
   depends_on('py-sip', type='build')

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Passing arguments to ``configure.py``
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

Each phase comes with a ``<phase_args>`` function that can be used to pass
arguments to that particular phase. For example, if you need to pass
@@ -72,11 +72,11 @@ arguments to the configure phase, you can use:

.. code-block:: python

   def configure_args(self):
       return ["--no-python-dbus"]
   def configure_args(self, spec, prefix):
       return ['--no-python-dbus']


A list of valid options can be found by running ``sip-build --help``.
A list of valid options can be found by running ``python configure.py --help``.

^^^^^^^
Testing

@@ -48,6 +48,9 @@
os.environ["COLIFY_SIZE"] = "25x120"
os.environ["COLUMNS"] = "120"

# Generate full package list if needed
subprocess.call(["spack", "list", "--format=html", "--update=package_list.html"])

# Generate a command index if an update is needed
subprocess.call(
    [
@@ -94,7 +97,9 @@ class PatchedPythonDomain(PythonDomain):
    def resolve_xref(self, env, fromdocname, builder, typ, target, node, contnode):
        if "refspecific" in node:
            del node["refspecific"]
        return super().resolve_xref(env, fromdocname, builder, typ, target, node, contnode)
        return super(PatchedPythonDomain, self).resolve_xref(
            env, fromdocname, builder, typ, target, node, contnode
        )


#
@@ -144,6 +149,7 @@ def setup(sphinx):
# Get nice vector graphics
graphviz_output_format = "svg"


# Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"]

@@ -209,11 +215,8 @@ def setup(sphinx):
    ("py:class", "spack.repo._PrependFileLoader"),
    ("py:class", "spack.build_systems._checks.BaseBuilder"),
    # Spack classes that intersphinx is unable to resolve
    ("py:class", "spack.version.StandardVersion"),
    ("py:class", "spack.version.VersionBase"),
    ("py:class", "spack.spec.DependencySpec"),
    ("py:class", "spack.spec.InstallStatus"),
    ("py:class", "spack.spec.SpecfileReaderBase"),
    ("py:class", "spack.install_test.Pb"),
]

# The reST default role (used for this markup: `text`) to use for all documents.
@@ -229,8 +232,30 @@ def setup(sphinx):
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
# show_authors = False
sys.path.append("./_pygments")
pygments_style = "style.SpackStyle"

# The name of the Pygments (syntax highlighting) style to use.
# We use our own extension of the default style with a few modifications
from pygments.style import Style
from pygments.styles.default import DefaultStyle
from pygments.token import Comment, Generic, Text


class SpackStyle(DefaultStyle):
    styles = DefaultStyle.styles.copy()
    background_color = "#f4f4f8"
    styles[Generic.Output] = "#355"
    styles[Generic.Prompt] = "bold #346ec9"


import pkg_resources

dist = pkg_resources.Distribution(__file__)
sys.path.append(".")  # make 'conf' module findable
ep = pkg_resources.EntryPoint.parse("spack = conf:SpackStyle", dist=dist)
dist._ep_map = {"pygments.styles": {"plugin1": ep}}
pkg_resources.working_set.add(dist)

pygments_style = "spack"

# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
@@ -315,15 +340,16 @@ def setup(sphinx):
# Output file base name for HTML help builder.
htmlhelp_basename = "Spackdoc"


# -- Options for LaTeX output --------------------------------------------------

latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    # 'papersize': 'letterpaper',
    #'papersize': 'letterpaper',
    # The font size ('10pt', '11pt' or '12pt').
    # 'pointsize': '10pt',
    #'pointsize': '10pt',
    # Additional stuff for the LaTeX preamble.
    # 'preamble': '',
    #'preamble': '',
}

# Grouping the document tree into LaTeX files. List of tuples

@@ -292,13 +292,12 @@ It is also worth noting that:
       non_bindable_shared_objects = ["libinterface.so"]

----------------------
``install_status``
``terminal_title``
----------------------

When set to ``true``, Spack will show information about its current progress
as well as the current and total package numbers. Progress is shown both
in the terminal title and inline. Setting it to ``false`` will not show any
progress information.
By setting this option to ``true``, Spack will update the terminal's title to
provide information about its current progress as well as the current and
total package numbers.

To work properly, this requires your terminal to reset its title after
Spack has finished its work, otherwise Spack's status information will

@@ -20,9 +20,8 @@ case you want to skip directly to specific docs:
* :ref:`packages.yaml <build-settings>`
* :ref:`repos.yaml <repositories>`

You can also add any of these as inline configuration in the YAML
manifest file (``spack.yaml``) describing an :ref:`environment
<environment-configuration>`.
You can also add any of these as inline configuration in ``spack.yaml``
in an :ref:`environment <environment-configuration>`.

-----------
YAML Format

@@ -143,26 +143,6 @@ The OS that are currently supported are summarized in the table below:
   * - Amazon Linux 2
     - ``amazonlinux:2``
     - ``spack/amazon-linux``
   * - AlmaLinux 8
     - ``almalinux:8``
     - ``spack/almalinux8``
   * - AlmaLinux 9
     - ``almalinux:9``
     - ``spack/almalinux9``
   * - Rocky Linux 8
     - ``rockylinux:8``
     - ``spack/rockylinux8``
   * - Rocky Linux 9
     - ``rockylinux:9``
     - ``spack/rockylinux9``
   * - Fedora Linux 37
     - ``fedora:37``
     - ``spack/fedora37``
   * - Fedora Linux 38
     - ``fedora:38``
     - ``spack/fedora38``


All the images are tagged with the corresponding release of Spack:

@@ -636,7 +616,7 @@ to customize the generation of container recipes:
     - No
   * - ``os_packages:command``
     - Tool used to manage system packages
     - ``apt``, ``yum``, ``dnf``, ``dnf_epel``, ``zypper``, ``apk``, ``yum_amazon``
     - ``apt``, ``yum``
     - Only with custom base images
   * - ``os_packages:update``
     - Whether or not to update the list of available packages

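These options drive container recipe generation, which is performed from within an environment directory; a minimal sketch of the typical invocation:

.. code-block:: console

   $ spack containerize > Dockerfile
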
@@ -310,11 +310,53 @@ Once all of the dependencies are installed, you can try building the documentation:

   $ make clean
   $ make

If you see any warning or error messages, you will have to correct those before your PR
is accepted. If you are editing the documentation, you should be running the
documentation tests to make sure there are no errors. Documentation changes can result
in some obfuscated warning messages. If you don't understand what they mean, feel free
to ask when you submit your PR.
If you see any warning or error messages, you will have to correct those before
your PR is accepted.

If you are editing the documentation, you should obviously be running the
documentation tests. But even if you are simply adding a new package, your
changes could cause the documentation tests to fail:

.. code-block:: console

   package_list.rst:8745: WARNING: Block quote ends without a blank line; unexpected unindent.

At first, this error message will mean nothing to you, since you didn't edit
that file. Until you look at line 8745 of the file in question:

.. code-block:: rst

   Description:
      NetCDF is a set of software libraries and self-describing, machine-
      independent data formats that support the creation, access, and sharing
      of array-oriented scientific data.

Our documentation includes :ref:`a list of all Spack packages <package-list>`.
If you add a new package, its docstring is added to this page. The problem in
this case was that the docstring looked like:

.. code-block:: python

   class Netcdf(Package):
       """
       NetCDF is a set of software libraries and self-describing,
       machine-independent data formats that support the creation,
       access, and sharing of array-oriented scientific data.
       """

Docstrings cannot start with a newline character, or else Sphinx will complain.
Instead, they should look like:

.. code-block:: python

   class Netcdf(Package):
       """NetCDF is a set of software libraries and self-describing,
       machine-independent data formats that support the creation,
       access, and sharing of array-oriented scientific data."""

Documentation changes can result in much more obfuscated warning messages.
If you don't understand what they mean, feel free to ask when you submit
your PR.

--------
Coverage

@@ -94,9 +94,9 @@ an Environment, the ``.spack-env`` directory also contains:
  * ``logs/``: A directory containing the build logs for the packages
    in this Environment.

Spack Environments can also be created from either a manifest file
(usually, but not necessarily, named ``spack.yaml``) or a lockfile.
To create an Environment from a manifest:
Spack Environments can also be created from either a ``spack.yaml``
manifest or a ``spack.lock`` lockfile. To create an Environment from a
``spack.yaml`` manifest:

.. code-block:: console
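
   # The block is truncated at this hunk boundary; the usual command is
   # sketched below ("myenv" is a hypothetical environment name).
   $ spack env create myenv spack.yaml
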
@@ -174,7 +174,7 @@ Anonymous specs can be created in place using the command:

   $ spack env create -d .

In this case Spack simply creates a ``spack.yaml`` file in the requested
In this case Spack simply creates a spack.yaml file in the requested
directory.

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -347,7 +347,7 @@ the Environment and then install the concretized specs.
   (see :ref:`build-jobs`). To speed up environment builds further, independent
   packages can be installed in parallel by launching more Spack instances. For
   example, the following will build at most four packages in parallel using
   three background jobs:
   three background jobs:

.. code-block:: console

@@ -395,7 +395,7 @@ version (and other constraints) passed as the spec argument to the

For packages with ``git`` attributes, git branches, tags, and commits can
also be used as valid concrete versions (see :ref:`version-specifier`).
This means that for a package ``foo``, ``spack develop foo@git.main`` will clone
This means that for a package ``foo``, ``spack develop foo@git.main`` will clone
the ``main`` branch of the package, and ``spack install`` will install from
that git clone if ``foo`` is in the environment.
Further development on ``foo`` can be tested by reinstalling the environment,
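Putting the workflow from this passage together, a minimal sketch (``foo`` is the placeholder package from the text):

.. code-block:: console

   $ spack develop foo@git.main   # clone the main branch into the environment
   $ spack install                # build from the local clone
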
@@ -589,11 +589,10 @@ user support groups providing a large software stack for their HPC center.

.. admonition:: Re-concretization of user specs

   The ``spack concretize`` command without additional arguments will *not* change any
   previously concretized specs. This may prevent it from finding a solution when using
   ``unify: true``, and it may prevent it from finding a minimal solution when using
   ``unify: when_possible``. You can force Spack to ignore the existing concrete environment
   with ``spack concretize -f``.
   When using *unified* concretization (when possible), the entire set of specs will be
   re-concretized after any addition of new user specs, to ensure that
   the environment remains consistent / minimal. When instead unified concretization is
   disabled, only the new specs will be concretized after any addition.

^^^^^^^^^^^^^
Spec Matrices
@@ -916,9 +915,9 @@ function, as shown in the example below:

.. code-block:: yaml

   projections:
     zlib: "{name}-{version}"
     ^mpi: "{name}-{version}/{^mpi.name}-{^mpi.version}-{compiler.name}-{compiler.version}"
     all: "{name}-{version}/{compiler.name}-{compiler.version}"
     zlib: {name}-{version}
     ^mpi: {name}-{version}/{^mpi.name}-{^mpi.version}-{compiler.name}-{compiler.version}
     all: {name}-{version}/{compiler.name}-{compiler.version}

The entries in the projections configuration file must all be either
specs or the keyword ``all``. For each spec, the projection used will
@@ -1122,21 +1121,21 @@ index once every package is pushed. Note how this target uses the generated

   SPACK ?= spack
   BUILDCACHE_DIR = $(CURDIR)/tarballs

   .PHONY: all

   all: push

   include env.mk

   example/push/%: example/install/%
   	@mkdir -p $(dir $@)
   	$(info About to push $(SPEC) to a buildcache)
   	$(SPACK) -e . buildcache push --allow-root --only=package $(BUILDCACHE_DIR) /$(HASH)
   	$(SPACK) -e . buildcache create --allow-root --only=package --directory $(BUILDCACHE_DIR) /$(HASH)
   	@touch $@

   push: $(addprefix example/push/,$(example/SPACK_PACKAGE_IDS))
   	$(info Updating the buildcache index)
   	$(SPACK) -e . buildcache update-index $(BUILDCACHE_DIR)
   	$(SPACK) -e . buildcache update-index --directory $(BUILDCACHE_DIR)
   	$(info Done!)
   	@touch $@

@@ -41,9 +41,12 @@ A build matrix showing which packages are working on which systems is shown below:

.. code-block:: console

   dnf install epel-release
   dnf group install "Development Tools"
   dnf install curl findutils gcc-gfortran gnupg2 hostname iproute redhat-lsb-core python3 python3-pip python3-setuptools unzip python3-boto3
   yum update -y
   yum install -y epel-release
   yum update -y
   yum --enablerepo epel groupinstall -y "Development Tools"
   yum --enablerepo epel install -y curl findutils gcc-c++ gcc gcc-gfortran git gnupg2 hostname iproute redhat-lsb-core make patch python3 python3-pip python3-setuptools unzip
   python3 -m pip install boto3

.. tab-item:: macOS Brew

@@ -317,7 +320,7 @@ installed, but you know that new compilers have been added to your

.. code-block:: console

   $ module load gcc/4.9.0
   $ module load gcc-4.9.0
   $ spack compiler find
   ==> Added 1 new compiler to ~/.spack/linux/compilers.yaml
       gcc@4.9.0

@@ -365,8 +368,7 @@ Manual compiler configuration

If auto-detection fails, you can manually configure a compiler by
editing your ``~/.spack/<platform>/compilers.yaml`` file. You can do this by running
``spack config edit compilers``, which will open the file in
:ref:`your favorite editor <controlling-the-editor>`.
``spack config edit compilers``, which will open the file in your ``$EDITOR``.

Each compiler configuration in the file looks like this:

@@ -1,113 +0,0 @@
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
   Spack Project Developers. See the top-level COPYRIGHT file for details.

   SPDX-License-Identifier: (Apache-2.0 OR MIT)

==========================
Using External GPU Support
==========================

Many packages come with a ``+cuda`` or ``+rocm`` variant. With no added
configuration Spack will download and install the needed components.
It may be preferable to use existing system support: the following sections
help with using a system installation of GPU libraries.

-----------------------------------
Using an External ROCm Installation
-----------------------------------

Spack breaks down ROCm into many separate component packages. The following
is an example ``packages.yaml`` that organizes a consistent set of ROCm
components for use by dependent packages:

.. code-block:: yaml

   packages:
     all:
       compiler: [rocmcc@=5.3.0]
       variants: amdgpu_target=gfx90a
     hip:
       buildable: false
       externals:
       - spec: hip@5.3.0
         prefix: /opt/rocm-5.3.0/hip
     hsa-rocr-dev:
       buildable: false
       externals:
       - spec: hsa-rocr-dev@5.3.0
         prefix: /opt/rocm-5.3.0/
     llvm-amdgpu:
       buildable: false
       externals:
       - spec: llvm-amdgpu@5.3.0
         prefix: /opt/rocm-5.3.0/llvm/
     comgr:
       buildable: false
       externals:
       - spec: comgr@5.3.0
         prefix: /opt/rocm-5.3.0/
     hipsparse:
       buildable: false
       externals:
       - spec: hipsparse@5.3.0
         prefix: /opt/rocm-5.3.0/
     hipblas:
       buildable: false
       externals:
       - spec: hipblas@5.3.0
         prefix: /opt/rocm-5.3.0/
     rocblas:
       buildable: false
       externals:
       - spec: rocblas@5.3.0
         prefix: /opt/rocm-5.3.0/
     rocprim:
       buildable: false
       externals:
       - spec: rocprim@5.3.0
         prefix: /opt/rocm-5.3.0/rocprim/

This is in combination with the following compiler definition:

.. code-block:: yaml

   compilers:
   - compiler:
       spec: rocmcc@=5.3.0
       paths:
         cc: /opt/rocm-5.3.0/bin/amdclang
         cxx: /opt/rocm-5.3.0/bin/amdclang++
         f77: null
         fc: /opt/rocm-5.3.0/bin/amdflang
       operating_system: rhel8
       target: x86_64

This includes the following considerations:

- Each of the listed externals specifies ``buildable: false`` to force Spack
  to use only the externals we defined.
- ``spack external find`` can automatically locate some of the ``hip``/``rocm``
  packages, but not all of them, and furthermore not in a manner that
  guarantees a complementary set if multiple ROCm installations are available.
- The ``prefix`` is the same for several components, but note that others
  require listing one of the subdirectories as a prefix.

-----------------------------------
Using an External CUDA Installation
-----------------------------------

CUDA is split into fewer components and is simpler to specify:

.. code-block:: yaml

   packages:
     all:
       variants:
       - cuda_arch=70
     cuda:
       buildable: false
       externals:
       - spec: cuda@11.0.2
         prefix: /opt/cuda/cuda-11.0.2/

where ``/opt/cuda/cuda-11.0.2/lib/`` contains ``libcudart.so``.
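Where detection support exists, such an external entry can be seeded automatically instead of written by hand; a sketch (package coverage varies, so the generated entry should be reviewed):

.. code-block:: console

   $ spack external find cuda
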
File diff suppressed because it is too large
Before Width: | Height: | Size: 108 KiB
@@ -54,16 +54,9 @@ or refer to the full manual below.

   features
   getting_started
   basic_usage
   Tutorial: Spack 101 <https://spack-tutorial.readthedocs.io>
   replace_conda_homebrew

.. toctree::
   :maxdepth: 2
   :caption: Links

   Tutorial (spack-tutorial.rtfd.io) <https://spack-tutorial.readthedocs.io>
   Packages (packages.spack.io) <https://packages.spack.io>
   Binaries (binaries.spack.io) <https://cache.spack.io>

.. toctree::
   :maxdepth: 2
   :caption: Reference
@@ -79,11 +72,10 @@ or refer to the full manual below.

   repositories
   binary_caches
   command_index
   package_list
   chain
   extensions
   pipelines
   signing
   gpu_configuration

.. toctree::
   :maxdepth: 2

@@ -163,7 +163,7 @@ your site.
Mirror environment
^^^^^^^^^^^^^^^^^^

To create a mirror of all packages required by a concrete environment, activate the environment and call ``spack mirror create -a``.
To create a mirror of all packages required by a concerte environment, activate the environment and call ``spack mirror create -a``.
This is especially useful to create a mirror of an environment concretized on another machine.

.. code-block:: console

@@ -35,27 +35,27 @@ showing lots of installed packages:
   $ module avail

   --------------------------------------------------------------- ~/spack/share/spack/modules/linux-ubuntu14-x86_64 ---------------------------------------------------------------
   autoconf/2.69-gcc-4.8-qextxkq hwloc/1.11.6-gcc-6.3.0-akcisez m4/1.4.18-gcc-4.8-ev2znoc openblas/0.2.19-gcc-6.3.0-dhkmed6 py-setuptools/34.2.0-gcc-6.3.0-fadur4s
   automake/1.15-gcc-4.8-maqvukj isl/0.18-gcc-4.8-afi6taq m4/1.4.18-gcc-6.3.0-uppywnz openmpi/2.1.0-gcc-6.3.0-go2s4z5 py-six/1.10.0-gcc-6.3.0-p4dhkaw
   binutils/2.28-gcc-4.8-5s7c6rs libiconv/1.15-gcc-4.8-at46wg3 mawk/1.3.4-gcc-4.8-acjez57 openssl/1.0.2k-gcc-4.8-dkls5tk python/2.7.13-gcc-6.3.0-tyehea7
   bison/3.0.4-gcc-4.8-ek4luo5 libpciaccess/0.13.4-gcc-6.3.0-gmufnvh mawk/1.3.4-gcc-6.3.0-ostdoms openssl/1.0.2k-gcc-6.3.0-gxgr5or readline/7.0-gcc-4.8-xhufqhn
   bzip2/1.0.6-gcc-4.8-iffrxzn libsigsegv/2.11-gcc-4.8-pp2cvte mpc/1.0.3-gcc-4.8-g5mztc5 pcre/8.40-gcc-4.8-r5pbrxb readline/7.0-gcc-6.3.0-zzcyicg
   bzip2/1.0.6-gcc-6.3.0-bequudr libsigsegv/2.11-gcc-6.3.0-7enifnh mpfr/3.1.5-gcc-4.8-o7xm7az perl/5.24.1-gcc-4.8-dg5j65u sqlite/3.8.5-gcc-6.3.0-6zoruzj
   cmake/3.7.2-gcc-6.3.0-fowuuby libtool/2.4.6-gcc-4.8-7a523za mpich/3.2-gcc-6.3.0-dmvd3aw perl/5.24.1-gcc-6.3.0-6uzkpt6 tar/1.29-gcc-4.8-wse2ass
   curl/7.53.1-gcc-4.8-3fz46n6 libtool/2.4.6-gcc-6.3.0-n7zmbzt ncurses/6.0-gcc-4.8-dcpe7ia pkg-config/0.29.2-gcc-4.8-ib33t75 tcl/8.6.6-gcc-4.8-tfxzqbr
   expat/2.2.0-gcc-4.8-mrv6bd4 libxml2/2.9.4-gcc-4.8-ryzxnsu ncurses/6.0-gcc-6.3.0-ucbhcdy pkg-config/0.29.2-gcc-6.3.0-jpgubk3 util-macros/1.19.1-gcc-6.3.0-xorz2x2
   flex/2.6.3-gcc-4.8-yf345oo libxml2/2.9.4-gcc-6.3.0-rltzsdh netlib-lapack/3.6.1-gcc-6.3.0-js33dog py-appdirs/1.4.0-gcc-6.3.0-jxawmw7 xz/5.2.3-gcc-4.8-mew4log
   gcc/6.3.0-gcc-4.8-24puqve lmod/7.4.1-gcc-4.8-je4srhr netlib-scalapack/2.0.2-gcc-6.3.0-5aidk4l py-numpy/1.12.0-gcc-6.3.0-oemmoeu xz/5.2.3-gcc-6.3.0-3vqeuvb
   gettext/0.19.8.1-gcc-4.8-yymghlh lua/5.3.4-gcc-4.8-im75yaz netlib-scalapack/2.0.2-gcc-6.3.0-hjsemcn py-packaging/16.8-gcc-6.3.0-i2n3dtl zip/3.0-gcc-4.8-rwar22d
   gmp/6.1.2-gcc-4.8-5ub2wu5 lua-luafilesystem/1_6_3-gcc-4.8-wkey3nl netlib-scalapack/2.0.2-gcc-6.3.0-jva724b py-pyparsing/2.1.10-gcc-6.3.0-tbo6gmw zlib/1.2.11-gcc-4.8-pgxsxv7
   help2man/1.47.4-gcc-4.8-kcnqmau lua-luaposix/33.4.0-gcc-4.8-mdod2ry netlib-scalapack/2.0.2-gcc-6.3.0-rgqfr6d py-scipy/0.19.0-gcc-6.3.0-kr7nat4 zlib/1.2.11-gcc-6.3.0-7cqp6cj
   autoconf-2.69-gcc-4.8-qextxkq hwloc-1.11.6-gcc-6.3.0-akcisez m4-1.4.18-gcc-4.8-ev2znoc openblas-0.2.19-gcc-6.3.0-dhkmed6 py-setuptools-34.2.0-gcc-6.3.0-fadur4s
   automake-1.15-gcc-4.8-maqvukj isl-0.18-gcc-4.8-afi6taq m4-1.4.18-gcc-6.3.0-uppywnz openmpi-2.1.0-gcc-6.3.0-go2s4z5 py-six-1.10.0-gcc-6.3.0-p4dhkaw
   binutils-2.28-gcc-4.8-5s7c6rs libiconv-1.15-gcc-4.8-at46wg3 mawk-1.3.4-gcc-4.8-acjez57 openssl-1.0.2k-gcc-4.8-dkls5tk python-2.7.13-gcc-6.3.0-tyehea7
   bison-3.0.4-gcc-4.8-ek4luo5 libpciaccess-0.13.4-gcc-6.3.0-gmufnvh mawk-1.3.4-gcc-6.3.0-ostdoms openssl-1.0.2k-gcc-6.3.0-gxgr5or readline-7.0-gcc-4.8-xhufqhn
   bzip2-1.0.6-gcc-4.8-iffrxzn libsigsegv-2.11-gcc-4.8-pp2cvte mpc-1.0.3-gcc-4.8-g5mztc5 pcre-8.40-gcc-4.8-r5pbrxb readline-7.0-gcc-6.3.0-zzcyicg
   bzip2-1.0.6-gcc-6.3.0-bequudr libsigsegv-2.11-gcc-6.3.0-7enifnh mpfr-3.1.5-gcc-4.8-o7xm7az perl-5.24.1-gcc-4.8-dg5j65u sqlite-3.8.5-gcc-6.3.0-6zoruzj
   cmake-3.7.2-gcc-6.3.0-fowuuby libtool-2.4.6-gcc-4.8-7a523za mpich-3.2-gcc-6.3.0-dmvd3aw perl-5.24.1-gcc-6.3.0-6uzkpt6 tar-1.29-gcc-4.8-wse2ass
   curl-7.53.1-gcc-4.8-3fz46n6 libtool-2.4.6-gcc-6.3.0-n7zmbzt ncurses-6.0-gcc-4.8-dcpe7ia pkg-config-0.29.2-gcc-4.8-ib33t75 tcl-8.6.6-gcc-4.8-tfxzqbr
   expat-2.2.0-gcc-4.8-mrv6bd4 libxml2-2.9.4-gcc-4.8-ryzxnsu ncurses-6.0-gcc-6.3.0-ucbhcdy pkg-config-0.29.2-gcc-6.3.0-jpgubk3 util-macros-1.19.1-gcc-6.3.0-xorz2x2
   flex-2.6.3-gcc-4.8-yf345oo libxml2-2.9.4-gcc-6.3.0-rltzsdh netlib-lapack-3.6.1-gcc-6.3.0-js33dog py-appdirs-1.4.0-gcc-6.3.0-jxawmw7 xz-5.2.3-gcc-4.8-mew4log
   gcc-6.3.0-gcc-4.8-24puqve lmod-7.4.1-gcc-4.8-je4srhr netlib-scalapack-2.0.2-gcc-6.3.0-5aidk4l py-numpy-1.12.0-gcc-6.3.0-oemmoeu xz-5.2.3-gcc-6.3.0-3vqeuvb
   gettext-0.19.8.1-gcc-4.8-yymghlh lua-5.3.4-gcc-4.8-im75yaz netlib-scalapack-2.0.2-gcc-6.3.0-hjsemcn py-packaging-16.8-gcc-6.3.0-i2n3dtl zip-3.0-gcc-4.8-rwar22d
   gmp-6.1.2-gcc-4.8-5ub2wu5 lua-luafilesystem-1_6_3-gcc-4.8-wkey3nl netlib-scalapack-2.0.2-gcc-6.3.0-jva724b py-pyparsing-2.1.10-gcc-6.3.0-tbo6gmw zlib-1.2.11-gcc-4.8-pgxsxv7
   help2man-1.47.4-gcc-4.8-kcnqmau lua-luaposix-33.4.0-gcc-4.8-mdod2ry netlib-scalapack-2.0.2-gcc-6.3.0-rgqfr6d py-scipy-0.19.0-gcc-6.3.0-kr7nat4 zlib-1.2.11-gcc-6.3.0-7cqp6cj

The names should look familiar, as they resemble the output from ``spack find``.
For example, you could type the following command to load the ``cmake`` module:

.. code-block:: console

   $ module load cmake/3.7.2-gcc-6.3.0-fowuuby
   $ module load cmake-3.7.2-gcc-6.3.0-fowuuby

Neither of these is particularly pretty, easy to remember, or easy to
type. Luckily, Spack offers many facilities for customizing the module
@@ -275,12 +275,10 @@ of the installed software. For instance, in the snippet below:
           set:
             BAR: 'bar'
       # This anonymous spec selects any package that
       # depends on mpi. The double colon at the
       # depends on openmpi. The double colon at the
       # end clears the set of rules that matched so far.
       ^mpi::
       ^openmpi::
         environment:
           prepend_path:
             PATH: '{^mpi.prefix}/bin'
           set:
             BAR: 'baz'
       # Selects any zlib package
@@ -295,9 +293,7 @@ of the installed software. For instance, in the snippet below:
             - FOOBAR

you are instructing Spack to set the environment variable ``BAR=bar`` for every module,
unless the associated spec satisfies the abstract dependency ``^mpi`` in which case
``BAR=baz``, and the directory containing the respective MPI executables is prepended
to the ``PATH`` variable.
unless the associated spec satisfies ``^openmpi`` in which case ``BAR=baz``.
In addition, in any spec that satisfies ``zlib`` the value ``foo`` will be
prepended to ``LD_LIBRARY_PATH``, and in any spec that satisfies ``zlib%gcc@4.8``
the variable ``FOOBAR`` will be unset.
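
To confirm that edits like these take effect, one can regenerate the module files
and inspect the result; a minimal sketch (the package name is illustrative):

.. code-block:: console

   $ spack module tcl refresh zlib
   $ module show zlib   # LD_LIBRARY_PATH should now include the prepended entry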
@@ -400,30 +396,28 @@ that are already in the Lmod hierarchy.


.. note::
   Tcl and Lua modules also allow for explicit conflicts between modulefiles.
   Tcl modules also allow for explicit conflicts between modulefiles.

   .. code-block:: yaml

      modules:
        default:
          enable:
            - tcl
          tcl:
            projections:
              all: '{name}/{version}-{compiler.name}-{compiler.version}'
            all:
              conflict:
                - '{name}'
                - 'intel/14.0.1'

   will create module files that will conflict with ``intel/14.0.1`` and with the
   base directory of the same module, effectively preventing the possibility to
   load two or more versions of the same software at the same time. The tokens
   that are available for use in this directive are the same understood by the
   :meth:`~spack.spec.Spec.format` method.

   For Lmod and Environment Modules versions prior to 4.2, it is important to
   express the conflict on both modulefiles conflicting with each other.

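For illustration, attempting to load a second version on top of an already loaded
one would then be refused; a sketch with hypothetical module names (the exact
error text depends on the module tool in use):

.. code-block:: console

   $ module load gcc/6.3.0-gcc-4.8-24puqve
   $ module load gcc/4.9.3-gcc-4.8-aaaaaaa
   ERROR: Module 'gcc/4.9.3-gcc-4.8-aaaaaaa' conflicts with the currently loaded module(s) 'gcc/6.3.0-gcc-4.8-24puqve'
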
.. note::
@@ -785,35 +779,35 @@ cut-and-pasted into a shell script. For example:

   $ spack module tcl loads --dependencies py-numpy git
   # bzip2@1.0.6%gcc@4.9.3=linux-x86_64
   module load bzip2/1.0.6-gcc-4.9.3-ktnrhkrmbbtlvnagfatrarzjojmkvzsx
   module load bzip2-1.0.6-gcc-4.9.3-ktnrhkrmbbtlvnagfatrarzjojmkvzsx
   # ncurses@6.0%gcc@4.9.3=linux-x86_64
   module load ncurses/6.0-gcc-4.9.3-kaazyneh3bjkfnalunchyqtygoe2mncv
   module load ncurses-6.0-gcc-4.9.3-kaazyneh3bjkfnalunchyqtygoe2mncv
   # zlib@1.2.8%gcc@4.9.3=linux-x86_64
   module load zlib/1.2.8-gcc-4.9.3-v3ufwaahjnviyvgjcelo36nywx2ufj7z
   module load zlib-1.2.8-gcc-4.9.3-v3ufwaahjnviyvgjcelo36nywx2ufj7z
   # sqlite@3.8.5%gcc@4.9.3=linux-x86_64
   module load sqlite/3.8.5-gcc-4.9.3-a3eediswgd5f3rmto7g3szoew5nhehbr
   module load sqlite-3.8.5-gcc-4.9.3-a3eediswgd5f3rmto7g3szoew5nhehbr
   # readline@6.3%gcc@4.9.3=linux-x86_64
   module load readline/6.3-gcc-4.9.3-se6r3lsycrwxyhreg4lqirp6xixxejh3
   module load readline-6.3-gcc-4.9.3-se6r3lsycrwxyhreg4lqirp6xixxejh3
   # python@3.5.1%gcc@4.9.3=linux-x86_64
   module load python/3.5.1-gcc-4.9.3-5q5rsrtjld4u6jiicuvtnx52m7tfhegi
   module load python-3.5.1-gcc-4.9.3-5q5rsrtjld4u6jiicuvtnx52m7tfhegi
   # py-setuptools@20.5%gcc@4.9.3=linux-x86_64
   module load py-setuptools/20.5-gcc-4.9.3-4qr2suj6p6glepnedmwhl4f62x64wxw2
   module load py-setuptools-20.5-gcc-4.9.3-4qr2suj6p6glepnedmwhl4f62x64wxw2
   # py-nose@1.3.7%gcc@4.9.3=linux-x86_64
   module load py-nose/1.3.7-gcc-4.9.3-pwhtjw2dvdvfzjwuuztkzr7b4l6zepli
   module load py-nose-1.3.7-gcc-4.9.3-pwhtjw2dvdvfzjwuuztkzr7b4l6zepli
   # openblas@0.2.17%gcc@4.9.3+shared=linux-x86_64
   module load openblas/0.2.17-gcc-4.9.3-pw6rmlom7apfsnjtzfttyayzc7nx5e7y
   module load openblas-0.2.17-gcc-4.9.3-pw6rmlom7apfsnjtzfttyayzc7nx5e7y
   # py-numpy@1.11.0%gcc@4.9.3+blas+lapack=linux-x86_64
   module load py-numpy/1.11.0-gcc-4.9.3-mulodttw5pcyjufva4htsktwty4qd52r
   module load py-numpy-1.11.0-gcc-4.9.3-mulodttw5pcyjufva4htsktwty4qd52r
   # curl@7.47.1%gcc@4.9.3=linux-x86_64
   module load curl/7.47.1-gcc-4.9.3-ohz3fwsepm3b462p5lnaquv7op7naqbi
   module load curl-7.47.1-gcc-4.9.3-ohz3fwsepm3b462p5lnaquv7op7naqbi
   # autoconf@2.69%gcc@4.9.3=linux-x86_64
   module load autoconf/2.69-gcc-4.9.3-bkibjqhgqm5e3o423ogfv2y3o6h2uoq4
   module load autoconf-2.69-gcc-4.9.3-bkibjqhgqm5e3o423ogfv2y3o6h2uoq4
   # cmake@3.5.0%gcc@4.9.3~doc+ncurses+openssl~qt=linux-x86_64
   module load cmake/3.5.0-gcc-4.9.3-x7xnsklmgwla3ubfgzppamtbqk5rwn7t
   module load cmake-3.5.0-gcc-4.9.3-x7xnsklmgwla3ubfgzppamtbqk5rwn7t
   # expat@2.1.0%gcc@4.9.3=linux-x86_64
   module load expat/2.1.0-gcc-4.9.3-6pkz2ucnk2e62imwakejjvbv6egncppd
   module load expat-2.1.0-gcc-4.9.3-6pkz2ucnk2e62imwakejjvbv6egncppd
   # git@2.8.0-rc2%gcc@4.9.3+curl+expat=linux-x86_64
   module load git/2.8.0-rc2-gcc-4.9.3-3bib4hqtnv5xjjoq5ugt3inblt4xrgkd
   module load git-2.8.0-rc2-gcc-4.9.3-3bib4hqtnv5xjjoq5ugt3inblt4xrgkd

The script may be further edited by removing unnecessary modules.

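The output can also be captured in a file and sourced later; a small sketch
(the file name is arbitrary):

.. code-block:: console

   $ spack module tcl loads --dependencies py-numpy git > loads.sh
   $ source loads.sh
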
@@ -832,12 +826,12 @@ For example, consider the following on one system:
.. code-block:: console

   $ module avail
   linux-SuSE11-x86_64/antlr/2.7.7-gcc-5.3.0-bdpl46y
   linux-SuSE11-x86_64/antlr-2.7.7-gcc-5.3.0-bdpl46y

   $ spack module tcl loads antlr  # WRONG!
   # antlr@2.7.7%gcc@5.3.0~csharp+cxx~java~python arch=linux-SuSE11-x86_64
   module load antlr/2.7.7-gcc-5.3.0-bdpl46y
   module load antlr-2.7.7-gcc-5.3.0-bdpl46y

   $ spack module tcl loads --prefix linux-SuSE11-x86_64/ antlr
   # antlr@2.7.7%gcc@5.3.0~csharp+cxx~java~python arch=linux-SuSE11-x86_64
   module load linux-SuSE11-x86_64/antlr/2.7.7-gcc-5.3.0-bdpl46y
   module load linux-SuSE11-x86_64/antlr-2.7.7-gcc-5.3.0-bdpl46y

17
lib/spack/docs/package_list.rst
Normal file
@@ -0,0 +1,17 @@
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
   Spack Project Developers. See the top-level COPYRIGHT file for details.

   SPDX-License-Identifier: (Apache-2.0 OR MIT)

.. _package-list:

============
Package List
============

This is a list of things you can install using Spack. It is
automatically generated based on the packages in this Spack
version.

.. raw:: html
   :file: package_list.html
@@ -4,7 +4,7 @@
   SPDX-License-Identifier: (Apache-2.0 OR MIT)

=====================================
Spack for Homebrew/Conda Users
Using Spack to Replace Homebrew/Conda
=====================================

Spack is an incredibly powerful package manager, designed for supercomputers
@@ -191,18 +191,18 @@ The ``--fresh`` flag tells Spack to use the latest version of every package
where possible instead of trying to optimize for reuse of existing installed
packages.

The ``--force`` flag in addition tells Spack to overwrite its previous
concretization decisions, allowing you to choose a new version of Python.
If any of the new packages like Bash are already installed, ``spack install``
won't re-install them, it will keep the symlinks in place.

-----------------------------------
Updating & Cleaning Up Old Packages
-----------------------------------

If you're looking to mimic the behavior of Homebrew, you may also want to
clean up out-of-date packages from your environment after an upgrade. To
upgrade your entire software stack within an environment and clean up old
package versions, simply run the following commands:

.. code-block:: console

@@ -212,9 +212,9 @@ package versions, simply run the following commands:
   $ spack concretize --fresh --force
   $ spack install
   $ spack gc

Running ``spack mark -i --all`` tells Spack to mark all of the existing
packages within an environment as "implicitly" installed. This tells
spack's garbage collection system that these packages should be cleaned up.

Don't worry, however: this will not remove your entire environment.
@@ -223,8 +223,8 @@ a fresh concretization and will re-mark any packages that should remain
installed as "explicitly" installed.

**Note:** if you use multiple spack environments you should re-run ``spack install``
in each of your environments prior to running ``spack gc`` to prevent spack
from uninstalling any shared packages that are no longer required by the
environment you just upgraded.

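For the multi-environment case, a small shell sketch (assuming ``spack env list``
prints one environment name per line):

.. code-block:: console

   $ for e in $(spack env list); do spack -e "$e" install; done
   $ spack gc
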
--------------
@@ -32,16 +32,11 @@ A package repository is a directory structured like this::
   ...

The top-level ``repo.yaml`` file contains configuration metadata for the
repository. The packages subdirectory, typically ``packages``, contains
subdirectories for each package in the repository. Each package directory
contains a ``package.py`` file and any patches or other files needed to build the
repository, and the ``packages`` directory contains subdirectories for
each package in the repository. Each package directory contains a
``package.py`` file and any patches or other files needed to build the
package.

The ``repo.yaml`` file may also contain a ``subdirectory`` key,
which can modify the name of the subdirectory used for packages. As seen above,
the default value is ``packages``. An empty string (``subdirectory: ''``) requires
a flattened repo structure in which the package names are top-level subdirectories.

Package repositories allow you to:

1. Maintain your own packages separately from Spack;
@@ -378,24 +373,6 @@ You can supply a custom namespace with a second argument, e.g.:

   repo:
     namespace: 'llnl.comp'

You can also create repositories with custom structure with the ``-d/--subdirectory``
argument, e.g.:

.. code-block:: console

   $ spack repo create -d applications myrepo apps
   ==> Created repo with namespace 'apps'.
   ==> To register it with Spack, run this command:
     spack repo add ~/myrepo

   $ ls myrepo
   applications/  repo.yaml

   $ cat myrepo/repo.yaml
   repo:
     namespace: apps
     subdirectory: applications

^^^^^^^^^^^^^^^^^^
``spack repo add``
^^^^^^^^^^^^^^^^^^

@@ -1,13 +1,12 @@
sphinx==7.2.6
sphinxcontrib-programoutput==0.17
sphinx_design==0.5.0
sphinx-rtd-theme==1.3.0
python-levenshtein==0.22.0
docutils==0.18.1
pygments==2.16.1
urllib3==2.0.6
pytest==7.4.2
isort==5.12.0
black==23.9.1
flake8==6.1.0
mypy==1.5.1
# These dependencies should be installed using pip in order
# to build the documentation.

sphinx>=3.4,!=4.1.2,!=5.1.0
sphinxcontrib-programoutput
sphinx-design
sphinx-rtd-theme
python-levenshtein
# Restrict to docutils <0.17 to workaround a list rendering issue in sphinx.
# https://stackoverflow.com/questions/67542699
docutils <0.17
pygments <2.13

@@ -1,478 +0,0 @@
.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
   Spack Project Developers. See the top-level COPYRIGHT file for details.

   SPDX-License-Identifier: (Apache-2.0 OR MIT)

.. _signing:

=====================
Spack Package Signing
=====================

The goal of package signing in Spack is to provide data integrity
assurances around official packages produced by the automated Spack CI
pipelines. These assurances directly address the security of Spack’s
software supply chain by explaining why a security-conscious user can
be reasonably justified in the belief that packages installed via Spack
have an uninterrupted auditable trail back to change management
decisions judged to be appropriate by the Spack maintainers. This is
achieved through cryptographic signing of packages built by Spack CI
pipelines based on code that has been transparently reviewed and
approved on GitHub. This document describes the signing process for
interested users.

.. _risks:

------------------------------
Risks, Impact and Threat Model
------------------------------

This document addresses the approach taken to safeguard Spack’s
reputation with regard to the integrity of the package data produced by
Spack’s CI pipelines. It does not address issues of data confidentiality
(Spack is intended to be largely open source) or availability (efforts
are described elsewhere). With that said, the main reputational risk can
be broadly categorized as a loss of faith in the data integrity due to a
breach of the private key used to sign packages. Remediation of a
private key breach would require republishing the public key with a
revocation certificate, generating a new signing key, an assessment and
potential rebuild/resigning of all packages since the key was breached,
and finally direct intervention by every spack user to update their copy
of Spack’s public keys used for local verification.

The primary threat model used in mitigating the risks of these stated
impacts is one of individual error, not malicious intent or insider
threat. The primary objective is to avoid the above impacts by making a
private key breach nearly impossible due to oversight or configuration
error. Obvious and straightforward measures are taken to mitigate issues
of malicious interference in data integrity and insider threats, but
these attack vectors are not systematically addressed. It should be hard
to exfiltrate the private key intentionally, and almost impossible to
leak the key by accident.

.. _overview:

-----------------
Pipeline Overview
-----------------

Spack pipelines build software through progressive stages where packages
in later stages nominally depend on packages built in earlier stages.
For both technical and design reasons these dependencies are not
implemented through the default GitLab artifacts mechanism; instead
built packages are uploaded to AWS S3 mirrors (buckets) where they are
retrieved by subsequent stages in the pipeline. Two broad categories of
pipelines exist: Pull Request (PR) pipelines and Develop/Release
pipelines.

- PR pipelines are launched in response to pull requests made by
  trusted and untrusted users. Packages built on these pipelines upload
  code to quarantined AWS S3 locations which cache the built packages
  for the purposes of review and iteration on the changes proposed in
  the pull request. Packages built on PR pipelines can come from
  untrusted users, so signing of these pipelines is not implemented.
  Jobs in these pipelines are executed via normal GitLab runners both
  within the AWS GitLab infrastructure and at affiliated institutions.
- Develop and Release pipelines **sign** the packages they produce and carry
  strong integrity assurances that trace back to auditable change management
  decisions. These pipelines only run after members from a trusted group of
  reviewers verify that the proposed changes in a pull request are appropriate.
  Once the PR is merged, or a release is cut, a pipeline is run on protected
  GitLab runners which provide access to the required signing keys within the
  job. Intermediary keys are used to sign packages in each stage of the
  pipeline as they are built, and a final job officially signs each package
  external to any specific packages’ build environment. An intermediate key
  exists in the AWS infrastructure and for each affiliated institution that
  maintains protected runners. The runners that execute these pipelines
  exclusively accept jobs from protected branches, meaning the intermediate keys
  are never exposed to unreviewed code and the official keys are never exposed
  to any specific build environment.

.. _key_architecture:

----------------
Key Architecture
----------------

Spack’s CI process uses public-key infrastructure (PKI) based on GNU Privacy
Guard (gpg) keypairs to sign public releases of spack package metadata, also
called specs. Two classes of GPG keys are involved in the process to reduce the
impact of an individual private key compromise; these key classes are the
*Intermediate CI Key* and the *Reputational Key*. Each of these keys has signing
sub-keys that are used exclusively for signing packages. This can be confusing,
so for the purpose of this explanation we’ll refer to Root and Signing keys.
Each key has a private and a public component as well as one or more identities
and zero or more signatures.

-------------------
Intermediate CI Key
-------------------

The Intermediate key class is used to sign and verify packages between stages
within a develop or release pipeline. An intermediate key exists for the AWS
infrastructure as well as each affiliated institution that maintains protected
runners. These intermediate keys are made available to the GitLab execution
environment building the package so that the package’s dependencies may be
verified by the Signing Intermediate CI Public Key and the final package may be
signed by the Signing Intermediate CI Private Key.


+---------------------------------------------------------------------------------------------------------+
| **Intermediate CI Key (GPG)** |
+==================================================+======================================================+
| Root Intermediate CI Private Key (RSA 4096)# | Root Intermediate CI Public Key (RSA 4096) |
+--------------------------------------------------+------------------------------------------------------+
| Signing Intermediate CI Private Key (RSA 4096) | Signing Intermediate CI Public Key (RSA 4096) |
+--------------------------------------------------+------------------------------------------------------+
| Identity: “Intermediate CI Key <maintainers@spack.io>” |
+---------------------------------------------------------------------------------------------------------+
| Signatures: None |
+---------------------------------------------------------------------------------------------------------+


The *Root Intermediate CI Private Key* is stripped out of the GPG key and
stored offline completely separate from Spack’s infrastructure. This allows the
core development team to append revocation certificates to the GPG key and
issue new sub-keys for use in the pipeline. It is our expectation that this
will happen on a semi-regular basis. A corollary of this is that *this key
should not be used to verify package integrity outside the internal CI process.*

----------------
Reputational Key
----------------

The Reputational Key is the public facing key used to sign complete groups of
development and release packages. Only one key pair exists in this class of
keys. In contrast to the Intermediate CI Key, the Reputational Key *should* be
used to verify package integrity. At the end of a develop or release pipeline, a
final pipeline job pulls down all signed package metadata built by the pipeline,
verifies it was signed with an Intermediate CI Key, then strips the
Intermediate CI Key signature from each package and re-signs it with the
Signing Reputational Private Key. The officially signed packages are then
uploaded back to the AWS S3 mirror. Please note that separating use of the
reputational key into this final job is done to prevent leakage of the key in a
spack package. Because the Signing Reputational Private Key is never exposed to
a build job, it cannot accidentally end up in any built package.


+---------------------------------------------------------------------------------------------------------+
| **Reputational Key (GPG)** |
+==================================================+======================================================+
| Root Reputational Private Key (RSA 4096)# | Root Reputational Public Key (RSA 4096) |
+--------------------------------------------------+------------------------------------------------------+
| Signing Reputational Private Key (RSA 4096) | Signing Reputational Public Key (RSA 4096) |
+--------------------------------------------------+------------------------------------------------------+
| Identity: “Spack Project <maintainers@spack.io>” |
+---------------------------------------------------------------------------------------------------------+
| Signatures: Signed by core development team [#f1]_ |
+---------------------------------------------------------------------------------------------------------+

The Root Reputational Private Key is stripped out of the GPG key and stored
offline completely separate from Spack’s infrastructure. This allows the core
development team to append revocation certificates to the GPG key in the
unlikely event that the Signing Reputational Private Key is compromised. In
general it is the expectation that rotating this key will happen infrequently, if
at all. This should allow relatively transparent verification for the end-user
community without needing deep familiarity with GnuPG or Public Key
Infrastructure.


.. _build_cache_format:

------------------
Build Cache Format
------------------

A binary package consists of a metadata file unambiguously defining the
built package (and including other details such as how to relocate it)
and the installation directory of the package stored as a compressed
archive file. The metadata files can either be unsigned, in which case
the contents are simply the json-serialized concrete spec plus metadata,
or they can be signed, in which case the json-serialized concrete spec
plus metadata is wrapped in a gpg cleartext signature. Built package
metadata files are named to indicate the operating system and
architecture for which the package was built as well as the compiler
used to build it and the package's name and version. For example::

   linux-ubuntu18.04-haswell-gcc-7.5.0-zlib-1.2.12-llv2ysfdxnppzjrt5ldybb5c52qbmoow.spec.json.sig

would contain the concrete spec and binary metadata for a binary package
of ``zlib@1.2.12``, built for the ``ubuntu`` operating system and ``haswell``
architecture. The id of the built package exists in the name of the file
as well (after the package name and version) and in this case begins
with ``llv2ys``. The id distinguishes a particular built package from all
other built packages with the same os/arch, compiler, name, and version.
Below is an example of a signed binary package metadata file. Such a
file would live in the ``build_cache`` directory of a binary mirror::

   -----BEGIN PGP SIGNED MESSAGE-----
   Hash: SHA512

   {
     "spec": {
       <concrete-spec-contents-omitted>
     },

     "buildcache_layout_version": 1,
     "binary_cache_checksum": {
       "hash_algorithm": "sha256",
       "hash": "4f1e46452c35a5e61bcacca205bae1bfcd60a83a399af201a29c95b7cc3e1423"
     }
   }

   -----BEGIN PGP SIGNATURE-----
   iQGzBAEBCgAdFiEETZn0sLle8jIrdAPLx/P+voVcifMFAmKAGvwACgkQx/P+voVc
   ifNoVgv/VrhA+wurVs5GB9PhmMA1m5U/AfXZb4BElDRwpT8ZcTPIv5X8xtv60eyn
   4EOneGVbZoMThVxgev/NKARorGmhFXRqhWf+jknJZ1dicpqn/qpv34rELKUpgXU+
   QDQ4d1P64AIdTczXe2GI9ZvhOo6+bPvK7LIsTkBbtWmopkomVxF0LcMuxAVIbA6b
   887yBvVO0VGlqRnkDW7nXx49r3AG2+wDcoU1f8ep8QtjOcMNaPTPJ0UnjD0VQGW6
   4ZFaGZWzdo45MY6tF3o5mqM7zJkVobpoW3iUz6J5tjz7H/nMlGgMkUwY9Kxp2PVH
   qoj6Zip3LWplnl2OZyAY+vflPFdFh12Xpk4FG7Sxm/ux0r+l8tCAPvtw+G38a5P7
   QEk2JBr8qMGKASmnRlJUkm1vwz0a95IF3S9YDfTAA2vz6HH3PtsNLFhtorfx8eBi
   Wn5aPJAGEPOawEOvXGGbsH4cDEKPeN0n6cy1k92uPEmBLDVsdnur8q42jk5c2Qyx
   j3DXty57
   =3gvm
   -----END PGP SIGNATURE-----

If a user has trusted the public key associated with the private key
used to sign the above spec file, the signature can be verified with
gpg, as follows::

   $ gpg --verify linux-ubuntu18.04-haswell-gcc-7.5.0-zlib-1.2.12-llv2ysfdxnppzjrt5ldybb5c52qbmoow.spec.json.sig

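Before verification, the relevant public keys must be present in the user's gpg
keyring. Spack can fetch and trust the public keys published on its configured
mirrors; a minimal sketch:

.. code-block:: console

   $ spack buildcache keys --install --trust
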
The metadata (regardless of whether it is signed or unsigned) contains the checksum
of the ``.spack`` file containing the actual installation. The checksum should
be compared to a checksum computed locally on the ``.spack`` file to ensure the
contents have not changed since the binary spec plus metadata were signed. The
``.spack`` files are actually tarballs containing the compressed archive of the
install tree. These files, along with the metadata files, live within the
``build_cache`` directory of the mirror, and together are organized as follows::

   build_cache/
      # unsigned metadata (for indexing, contains sha256 of .spack file)
      <arch>-<compiler>-<name>-<ver>-24zvipcqgg2wyjpvdq2ajy5jnm564hen.spec.json
      # clearsigned metadata (same as above, but signed)
      <arch>-<compiler>-<name>-<ver>-24zvipcqgg2wyjpvdq2ajy5jnm564hen.spec.json.sig
      <arch>/
         <compiler>/
            <name>-<ver>/
               # tar.gz-compressed prefix (may support more compression formats later)
               <arch>-<compiler>-<name>-<ver>-24zvipcqgg2wyjpvdq2ajy5jnm564hen.spack

Uncompressing and extracting the ``.spack`` file results in the install tree.
This is in contrast to previous versions of spack, where the ``.spack`` file
contained a (duplicated) metadata file, a signature file and a nested tarball
containing the install tree.

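A minimal local check, reusing the example id from above (the ``sha256sum``
output must match the ``hash`` field in ``binary_cache_checksum``):

.. code-block:: console

   $ sha256sum linux-ubuntu18.04-haswell-gcc-7.5.0-zlib-1.2.12-llv2ysfdxnppzjrt5ldybb5c52qbmoow.spack
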
.. _internal_implementation:

-----------------------
Internal Implementation
-----------------------

The technical implementation of the pipeline signing process includes components
defined in Amazon Web Services, the Kubernetes cluster, at affiliated
institutions, and the GitLab/GitLab Runner deployment. We present the technical
implementation in two interdependent sections. The first addresses how secrets
are managed through the lifecycle of a develop or release pipeline. The second
section describes how GitLab Runner and pipelines are configured and managed to
support secure automated signing.

Secrets Management
^^^^^^^^^^^^^^^^^^

As stated above, the Root Private Keys (intermediate and reputational)
are stripped from the GPG keys and stored outside Spack’s
infrastructure.

.. warning::
   **TODO**

   - Explanation here about where and how access is handled for these keys.
   - Both Root private keys are protected with strong passwords
   - Who has access to these and how?

**Intermediate CI Key**
-----------------------

Multiple intermediate CI signing keys exist: one Intermediate CI Key for jobs
run in AWS, and one key for each affiliated institution (e.g. University of
Oregon). Here we describe how the Intermediate CI Key is managed in AWS:

The Intermediate CI Key (including the Signing Intermediate CI Private Key) is
exported as an ASCII armored file and stored in a Kubernetes secret called
``spack-intermediate-ci-signing-key``. For convenience's sake, this same secret
contains an ASCII-armored export of just the *public* components of the
Reputational Key. This secret also contains the *public* components of each of
the affiliated institutions' Intermediate CI Key. These are potentially needed
to verify dependent packages which may have been found in the public mirror or
built by a protected job running on an affiliated institution's infrastructure
in an earlier stage of the pipeline.

Procedurally the ``spack-intermediate-ci-signing-key`` secret is used in
the following way:

1. A ``large-arm-prot`` or ``large-x86-prot`` protected runner picks up
   a job tagged ``protected`` from a protected GitLab branch. (See
   `Protected Runners and Reserved Tags <#_8bawjmgykv0b>`__.)
2. Based on its configuration, the runner creates a job Pod in the
   pipeline namespace and mounts the spack-intermediate-ci-signing-key
   Kubernetes secret into the build container.
3. The Intermediate CI Key, affiliated institutions' public keys and the
   Reputational Public Key are imported into a keyring by the ``spack gpg …``
   sub-command. This is initiated by the job’s build script which is created by
   the generate job at the beginning of the pipeline.
4. Assuming the package has dependencies, those specs are verified using
   the keyring.
5. The package is built and the spec.json is generated.
6. The spec.json is signed by the keyring and uploaded to the mirror’s
   build cache.

**Reputational Key**
--------------------

Because of the increased impact to end users in the case of a private
key breach, the Reputational Key is managed separately from the
Intermediate CI Keys and has additional controls. First, the Reputational
Key was generated outside of Spack’s infrastructure and has been signed
by the core development team. The Reputational Key (along with the
Signing Reputational Private Key) was then ASCII armor exported to a
file. Unlike the Intermediate CI Key, this exported file is not stored as
a base64 encoded secret in Kubernetes. Instead, *the key file
itself* is encrypted and stored in Kubernetes as the
``spack-signing-key-encrypted`` secret in the pipeline namespace.

The encryption of the exported Reputational Key (including the Signing
Reputational Private Key) is handled by `AWS Key Management Store (KMS) data
keys
<https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#data-keys>`__.
The private key material is decrypted and imported at the time of signing into a
memory mounted temporary directory holding the keychain. The signing job uses
the `AWS Encryption SDK
<https://docs.aws.amazon.com/encryption-sdk/latest/developer-guide/crypto-cli.html>`__
(i.e. ``aws-encryption-cli``) to decrypt the Reputational Key. Permission to
decrypt the key is granted to the job Pod through a Kubernetes service account
specifically used for this, and only this, function. Finally, for convenience's
sake, this same secret contains an ASCII-armored export of the *public*
components of the Intermediate CI Keys and the Reputational Key. This allows the
signing script to verify that packages were built by the pipeline (both on AWS
or at affiliated institutions), or signed previously as a part of a different
pipeline. This is done *before* decrypting and importing the
Signing Reputational Private Key material and officially signing the packages.

Procedurally the ``spack-signing-key-encrypted`` secret is used in the
following way:

1. The ``spack-package-signing-gitlab-runner`` protected runner picks
   up a job tagged ``notary`` from a protected GitLab branch (See
   `Protected Runners and Reserved Tags <#_8bawjmgykv0b>`__).
2. Based on its configuration, the runner creates a job pod in the
   pipeline namespace. The job runs in a stripped-down, purpose-built
   Docker image (``ghcr.io/spack/notary:latest``). The runner is
   configured to only allow running jobs with this image.
3. The runner also mounts the ``spack-signing-key-encrypted`` secret to
   a path on disk. Note that this becomes several files on disk: the
   public components of the Intermediate CI Keys, the public components
   of the Reputational Key, and an AWS KMS encrypted file containing the
   Signing Reputational Private Key.
4. In addition to the secret, the runner creates a tmpfs memory mounted
   directory where the GnuPG keyring will be created to verify, and
   then resign, the package specs.
5. The job script syncs all spec.json.sig files from the build cache to
   a working directory in the job’s execution environment.
6. The job script then runs the ``sign.sh`` script built into the
   notary Docker image.
7. The ``sign.sh`` script imports the public components of the
   Reputational and Intermediate CI Keys and uses them to verify good
   signatures on the spec.json.sig files. If any signed spec does not
   verify, the job immediately fails.
8. Assuming all specs are verified, the ``sign.sh`` script then unpacks
   the spec json data from the signed file in preparation for being
   re-signed with the Reputational Key.
9. The private components of the Reputational Key are decrypted to
   standard out using ``aws-encryption-cli`` directly into a ``gpg
   --import …`` statement which imports the key into the
   keyring mounted in-memory (a sketch follows this list).
10. The private key is then used to sign each of the json specs and the
    keyring is removed from disk.
11. The re-signed json specs are resynced to the AWS S3 Mirror and the
    public signing of the packages for the develop or release pipeline
    that created them is complete.
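
Step 9 corresponds, in spirit, to a pipeline of the following shape. This is an
illustrative sketch only: the actual logic lives in the notary image's
``sign.sh``, and the file name and exact ``aws-encryption-cli`` options shown
here are hypothetical:

.. code-block:: console

   $ aws-encryption-cli --decrypt --input reputational-key.asc.encrypted \
         --output - --suppress-metadata | gpg --import
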

Non-service-account access to the private components of the Reputational
Key is managed through access to the symmetric secret in KMS used
to encrypt the data key (which in turn is used to encrypt the GnuPG key
- see the `Encryption SDK
Documentation <https://docs.aws.amazon.com/encryption-sdk/latest/developer-guide/crypto-cli-examples.html#cli-example-encrypt-file>`__).
A small trusted subset of the core development team are the only
individuals with access to this symmetric key.

.. _protected_runners:

Protected Runners and Reserved Tags
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

Spack has a large number of GitLab Runners operating in its build farm.
These include runners deployed in the AWS Kubernetes cluster as well as
runners deployed at affiliated institutions. The majority of runners are
shared runners that operate across projects in gitlab.spack.io. These
runners pick up jobs primarily from the spack/spack project and execute
them in PR pipelines.

A small number of runners operating on AWS and at affiliated institutions are
registered as specific *protected* runners on the spack/spack project. In
addition to protected runners there are protected branches on the spack/spack
project. These are the ``develop`` branch, any release branch (i.e. managed with
the ``releases/v*`` wildcard) and any tag branch (managed with the ``v*``
wildcard). Finally, Spack’s pipeline generation code reserves certain tags to make
sure jobs are routed to the correct runners; these tags are ``public``,
``protected``, and ``notary``. Understanding how all this works together to
protect secrets and provide integrity assurances can be a little confusing, so
let's break these down:

- **Protected Branches** - Protected branches in Spack prevent anyone
  other than Maintainers in GitLab from pushing code. In the case of
  Spack the only Maintainer level entity pushing code to protected
  branches is Spack bot. Protecting branches also marks them in such a
  way that Protected Runners will only run jobs from those branches.
- **Protected Runners** - Protected Runners only run jobs from protected
  branches. Because protected runners have access to secrets, it's critical
  that they not run jobs from untrusted code (i.e. PR branches). If they did, it
  would be possible for a PR branch to tag a job in such a way that a protected
  runner executed that job and mounted secrets into a code execution
  environment that had not been reviewed by Spack maintainers. Note however
  that in the absence of tagging used to route jobs, public runners *could* run
  jobs from protected branches. No secrets would be at risk of being breached
  because non-protected runners do not have access to those secrets; lack of
  secrets would, however, cause the jobs to fail.
- **Reserved Tags** - To mitigate the issue of public runners picking up
  protected jobs, Spack uses a small set of “reserved” job tags (note that these
  are *job* tags, not git tags). These tags are ``public``, ``protected``, and
  ``notary``. The majority of jobs executed in Spack’s GitLab instance are
  executed via a ``generate`` job. The generate job code systematically ensures
  that no user-defined configuration sets these tags. Instead, the ``generate``
  job sets these tags based on rules related to the branch where this pipeline
  originated. If the job is a part of a pipeline on a PR branch it sets the
  ``public`` tag. If the job is part of a pipeline on a protected branch it
  sets the ``protected`` tag. Finally, if the job is the package signing job and
  it is running on a pipeline that is part of a protected branch then it sets
  the ``notary`` tag.

Protected Runners are configured to only run jobs from protected branches. Only
jobs running in pipelines on protected branches are tagged with ``protected`` or
``notary`` tags. This tightly couples jobs on protected branches to protected
runners that provide access to the secrets required to sign the built packages.
The secrets can **only** be accessed via:

1. Runners under direct control of the core development team.
2. Runners under direct control of trusted maintainers at affiliated institutions.
3. By code running the automated pipeline that has been reviewed by the
   Spack maintainers and judged to be appropriate.

Other attempts (either through malicious intent or incompetence) can at
worst grab jobs intended for protected runners, which will cause those
jobs to fail, alerting both Spack maintainers and the core development
team.

.. [#f1]
   The Reputational Key has also cross signed core development team
   keys.

@@ -1,8 +1,9 @@
Name, Supported Versions, Notes, Requirement Reason
Python, 3.6--3.12, , Interpreter for Spack
Python, 3.6--3.11, , Interpreter for Spack
C/C++ Compilers, , , Building software
make, , , Build software
patch, , , Build software
bash, , , Compiler wrappers
tar, , , Extract/create archives
gzip, , , Compress/Decompress archives
unzip, , , Compress/Decompress archives

441
lib/spack/env/cc
vendored
@@ -416,36 +416,50 @@ input_command="$*"
# The lists are all bell-separated to be as flexible as possible, as their
# contents may come from the command line, from ' '-separated lists,
# ':'-separated lists, etc.
include_dirs_list=""
lib_dirs_list=""
rpath_dirs_list=""
system_include_dirs_list=""
system_lib_dirs_list=""
system_rpath_dirs_list=""
isystem_system_include_dirs_list=""
isystem_include_dirs_list=""
libs_list=""
other_args_list=""

# Global state for keeping track of -Wl,-rpath -Wl,/path
wl_expect_rpath=no

# Same, but for -Xlinker -rpath -Xlinker /path
xlinker_expect_rpath=no

parse_Wl() {
    # drop -Wl
    shift
    while [ $# -ne 0 ]; do
        if [ "$wl_expect_rpath" = yes ]; then
            if system_dir "$1"; then
                append return_system_rpath_dirs_list "$1"
                append system_rpath_dirs_list "$1"
            else
                append return_rpath_dirs_list "$1"
                append rpath_dirs_list "$1"
            fi
            wl_expect_rpath=no
        else
            case "$1" in
                -rpath=*)
                    arg="${1#-rpath=}"
                    if [ -z "$arg" ]; then
                        shift; continue
                    elif system_dir "$arg"; then
                        append return_system_rpath_dirs_list "$arg"
                    if system_dir "$arg"; then
                        append system_rpath_dirs_list "$arg"
                    else
                        append return_rpath_dirs_list "$arg"
                        append rpath_dirs_list "$arg"
                    fi
                    ;;
                --rpath=*)
                    arg="${1#--rpath=}"
                    if [ -z "$arg" ]; then
                        shift; continue
                    elif system_dir "$arg"; then
                        append return_system_rpath_dirs_list "$arg"
                    if system_dir "$arg"; then
                        append system_rpath_dirs_list "$arg"
                    else
                        append return_rpath_dirs_list "$arg"
                        append rpath_dirs_list "$arg"
                    fi
                    ;;
                -rpath|--rpath)
@@ -453,13 +467,8 @@ parse_Wl() {
                    ;;
                "$dtags_to_strip")
                    ;;
                -Wl)
                    # Nested -Wl,-Wl means we're in NAG compiler territory, we don't support
                    # it.
                    return 1
                    ;;
                *)
                    append return_other_args_list "-Wl,$1"
                    append other_args_list "-Wl,$1"
                    ;;
            esac
        fi
@@ -467,210 +476,175 @@ parse_Wl() {
    done
}

categorize_arguments() {

    unset IFS
    while [ $# -ne 0 ]; do

    return_other_args_list=""
    return_isystem_was_used=""
    return_isystem_system_include_dirs_list=""
    return_isystem_include_dirs_list=""
    return_system_include_dirs_list=""
    return_include_dirs_list=""
    return_system_lib_dirs_list=""
    return_lib_dirs_list=""
    return_system_rpath_dirs_list=""
    return_rpath_dirs_list=""
        # an RPATH to be added after the case statement.
        rp=""

    # Global state for keeping track of -Wl,-rpath -Wl,/path
    wl_expect_rpath=no
        # Multiple consecutive spaces in the command line can
        # result in blank arguments
        if [ -z "$1" ]; then
            shift
            continue
        fi

    # Same, but for -Xlinker -rpath -Xlinker /path
    xlinker_expect_rpath=no

    while [ $# -ne 0 ]; do

        # an RPATH to be added after the case statement.
        rp=""

        # Multiple consecutive spaces in the command line can
        # result in blank arguments
        if [ -z "$1" ]; then
            shift
            continue
        fi

        if [ -n "${SPACK_COMPILER_FLAGS_KEEP}" ] ; then
            # NOTE: the eval is required to allow `|` alternatives inside the variable
            eval "\
            case \"\$1\" in
                $SPACK_COMPILER_FLAGS_KEEP)
                    append return_other_args_list \"\$1\"
                    shift
                    continue
                    ;;
            esac
            "
        fi
        # the replace list is a space-separated list of pipe-separated pairs,
        # the first in each pair is the original prefix to be matched, the
        # second is the replacement prefix
        if [ -n "${SPACK_COMPILER_FLAGS_REPLACE}" ] ; then
            for rep in ${SPACK_COMPILER_FLAGS_REPLACE} ; do
                before=${rep%|*}
                after=${rep#*|}
                eval "\
                stripped=\"\${1##$before}\"
                "
                if [ "$stripped" = "$1" ] ; then
                    continue
                fi

                replaced="$after$stripped"

                # it matched, remove it
        if [ -n "${SPACK_COMPILER_FLAGS_KEEP}" ] ; then
            # NOTE: the eval is required to allow `|` alternatives inside the variable
            eval "\
            case \"\$1\" in
                $SPACK_COMPILER_FLAGS_KEEP)
                    append other_args_list \"\$1\"
                    shift

                if [ -z "$replaced" ] ; then
                    # completely removed, continue OUTER loop
                    continue 2
                fi

                # re-build argument list with replacement
                set -- "$replaced" "$@"
            done
        fi

        case "$1" in
            -isystem*)
                arg="${1#-isystem}"
                return_isystem_was_used=true
                if [ -z "$arg" ]; then shift; arg="$1"; fi
                if system_dir "$arg"; then
                    append return_isystem_system_include_dirs_list "$arg"
                else
                    append return_isystem_include_dirs_list "$arg"
                fi
                ;;
            -I*)
                arg="${1#-I}"
                if [ -z "$arg" ]; then shift; arg="$1"; fi
                if system_dir "$arg"; then
                    append return_system_include_dirs_list "$arg"
                else
                    append return_include_dirs_list "$arg"
                fi
                ;;
            -L*)
                arg="${1#-L}"
                if [ -z "$arg" ]; then shift; arg="$1"; fi
                if system_dir "$arg"; then
                    append return_system_lib_dirs_list "$arg"
                else
                    append return_lib_dirs_list "$arg"
                fi
                ;;
            -l*)
                # -loopopt=0 is generated erroneously in autoconf <= 2.69,
                # and passed by ifx to the linker, which confuses it with a
                # library. Filter it out.
                # TODO: generalize filtering of args with an env var, so that
                # TODO: we do not have to special case this here.
                if { [ "$mode" = "ccld" ] || [ $mode = "ld" ]; } \
                    && [ "$1" != "${1#-loopopt}" ]; then
                    shift
                    continue
                fi
                arg="${1#-l}"
                if [ -z "$arg" ]; then shift; arg="$1"; fi
                append return_other_args_list "-l$arg"
                ;;
            -Wl,*)
                IFS=,
                if ! parse_Wl ${1#-Wl,}; then
                    append return_other_args_list "$1"
                fi
                unset IFS
                ;;
            -Xlinker)
                shift
                if [ $# -eq 0 ]; then
                    # -Xlinker without value: let the compiler error about it.
                    append return_other_args_list -Xlinker
                    xlinker_expect_rpath=no
                    break
                elif [ "$xlinker_expect_rpath" = yes ]; then
                    # Register the path of -Xlinker -rpath <other args> -Xlinker <path>
                    if system_dir "$1"; then
                        append return_system_rpath_dirs_list "$1"
                    else
                        append return_rpath_dirs_list "$1"
                    fi
                    xlinker_expect_rpath=no
                else
                    case "$1" in
                        -rpath=*)
                            arg="${1#-rpath=}"
                            if system_dir "$arg"; then
                                append return_system_rpath_dirs_list "$arg"
                            else
                                append return_rpath_dirs_list "$arg"
                            fi
                            ;;
                        --rpath=*)
                            arg="${1#--rpath=}"
                            if system_dir "$arg"; then
                                append return_system_rpath_dirs_list "$arg"
                            else
                                append return_rpath_dirs_list "$arg"
                            fi
                            ;;
                        -rpath|--rpath)
                            xlinker_expect_rpath=yes
                            ;;
                        "$dtags_to_strip")
                            ;;
                        *)
                            append return_other_args_list -Xlinker
                            append return_other_args_list "$1"
                            ;;
                    esac
                fi
                ;;
            "$dtags_to_strip")
                ;;
            *)
                append return_other_args_list "$1"
                continue
                ;;
        esac
        shift
    done
    "
        fi
        # the replace list is a space-separated list of pipe-separated pairs,
        # the first in each pair is the original prefix to be matched, the
        # second is the replacement prefix
        if [ -n "${SPACK_COMPILER_FLAGS_REPLACE}" ] ; then
            for rep in ${SPACK_COMPILER_FLAGS_REPLACE} ; do
                before=${rep%|*}
                after=${rep#*|}
                eval "\
                stripped=\"\${1##$before}\"
                "
                if [ "$stripped" = "$1" ] ; then
                    continue
                fi

    # We found `-Xlinker -rpath` but no matching value `-Xlinker /path`. Just append
    # `-Xlinker -rpath` again and let the compiler or linker handle the error during arg
    # parsing.
    if [ "$xlinker_expect_rpath" = yes ]; then
        append return_other_args_list -Xlinker
        append return_other_args_list -rpath
                replaced="$after$stripped"

                # it matched, remove it
                shift

                if [ -z "$replaced" ] ; then
                    # completely removed, continue OUTER loop
                    continue 2
                fi

                # re-build argument list with replacement
                set -- "$replaced" "$@"
            done
        fi

    # Same, but for -Wl flags.
    if [ "$wl_expect_rpath" = yes ]; then
        append return_other_args_list -Wl,-rpath
    fi
}
        case "$1" in
            -isystem*)
                arg="${1#-isystem}"
                isystem_was_used=true
                if [ -z "$arg" ]; then shift; arg="$1"; fi
                if system_dir "$arg"; then
                    append isystem_system_include_dirs_list "$arg"
                else
                    append isystem_include_dirs_list "$arg"
                fi
                ;;
            -I*)
                arg="${1#-I}"
                if [ -z "$arg" ]; then shift; arg="$1"; fi
                if system_dir "$arg"; then
                    append system_include_dirs_list "$arg"
                else
                    append include_dirs_list "$arg"
                fi
                ;;
            -L*)
                arg="${1#-L}"
                if [ -z "$arg" ]; then shift; arg="$1"; fi
                if system_dir "$arg"; then
                    append system_lib_dirs_list "$arg"
                else
                    append lib_dirs_list "$arg"
                fi
                ;;
            -l*)
                # -loopopt=0 is generated erroneously in autoconf <= 2.69,
                # and passed by ifx to the linker, which confuses it with a
                # library. Filter it out.
                # TODO: generalize filtering of args with an env var, so that
                # TODO: we do not have to special case this here.
                if { [ "$mode" = "ccld" ] || [ $mode = "ld" ]; } \
                    && [ "$1" != "${1#-loopopt}" ]; then
                    shift
                    continue
                fi
                arg="${1#-l}"
                if [ -z "$arg" ]; then shift; arg="$1"; fi
                append other_args_list "-l$arg"
                ;;
            -Wl,*)
                IFS=,
                parse_Wl $1
                unset IFS
                ;;
            -Xlinker)
                shift
                if [ $# -eq 0 ]; then
                    # -Xlinker without value: let the compiler error about it.
                    append other_args_list -Xlinker
                    xlinker_expect_rpath=no
                    break
                elif [ "$xlinker_expect_rpath" = yes ]; then
                    # Register the path of -Xlinker -rpath <other args> -Xlinker <path>
                    if system_dir "$1"; then
                        append system_rpath_dirs_list "$1"
                    else
                        append rpath_dirs_list "$1"
                    fi
                    xlinker_expect_rpath=no
                else
                    case "$1" in
                        -rpath=*)
                            arg="${1#-rpath=}"
                            if system_dir "$arg"; then
                                append system_rpath_dirs_list "$arg"
                            else
                                append rpath_dirs_list "$arg"
                            fi
                            ;;
                        --rpath=*)
                            arg="${1#--rpath=}"
                            if system_dir "$arg"; then
                                append system_rpath_dirs_list "$arg"
                            else
                                append rpath_dirs_list "$arg"
                            fi
                            ;;
                        -rpath|--rpath)
                            xlinker_expect_rpath=yes
                            ;;
                        "$dtags_to_strip")
                            ;;
                        *)
                            append other_args_list -Xlinker
                            append other_args_list "$1"
                            ;;
                    esac
                fi
                ;;
            "$dtags_to_strip")
                ;;
            *)
                append other_args_list "$1"
                ;;
        esac
        shift
    done

categorize_arguments "$@"
include_dirs_list="$return_include_dirs_list"
lib_dirs_list="$return_lib_dirs_list"
rpath_dirs_list="$return_rpath_dirs_list"
system_include_dirs_list="$return_system_include_dirs_list"
system_lib_dirs_list="$return_system_lib_dirs_list"
system_rpath_dirs_list="$return_system_rpath_dirs_list"
isystem_was_used="$return_isystem_was_used"
isystem_system_include_dirs_list="$return_isystem_system_include_dirs_list"
isystem_include_dirs_list="$return_isystem_include_dirs_list"
other_args_list="$return_other_args_list"
# We found `-Xlinker -rpath` but no matching value `-Xlinker /path`. Just append
# `-Xlinker -rpath` again and let the compiler or linker handle the error during arg
# parsing.
if [ "$xlinker_expect_rpath" = yes ]; then
    append other_args_list -Xlinker
    append other_args_list -rpath
fi

# Same, but for -Wl flags.
if [ "$wl_expect_rpath" = yes ]; then
    append other_args_list -Wl,-rpath
fi

#
# Add flags from Spack's cppflags, cflags, cxxflags, fcflags, fflags, and
@@ -690,14 +664,12 @@ elif [ "$SPACK_ADD_DEBUG_FLAGS" = "custom" ]; then
|
||||
extend flags_list SPACK_DEBUG_FLAGS
|
||||
fi
|
||||
|
||||
spack_flags_list=""
|
||||
|
||||
# Fortran flags come before CPPFLAGS
|
||||
case "$mode" in
|
||||
cc|ccld)
|
||||
case $lang_flags in
|
||||
F)
|
||||
extend spack_flags_list SPACK_FFLAGS
|
||||
extend flags_list SPACK_FFLAGS
|
||||
;;
|
||||
esac
|
||||
;;
|
||||
@@ -706,7 +678,7 @@ esac
|
||||
# C preprocessor flags come before any C/CXX flags
|
||||
case "$mode" in
|
||||
cpp|as|cc|ccld)
|
||||
extend spack_flags_list SPACK_CPPFLAGS
|
||||
extend flags_list SPACK_CPPFLAGS
|
||||
;;
|
||||
esac
|
||||
|
||||
@@ -716,10 +688,10 @@ case "$mode" in
|
||||
cc|ccld)
|
||||
case $lang_flags in
|
||||
C)
|
||||
extend spack_flags_list SPACK_CFLAGS
|
||||
extend flags_list SPACK_CFLAGS
|
||||
;;
|
||||
CXX)
|
||||
extend spack_flags_list SPACK_CXXFLAGS
|
||||
extend flags_list SPACK_CXXFLAGS
|
||||
;;
|
||||
esac
|
||||
|
||||
@@ -731,25 +703,10 @@ esac
|
||||
# Linker flags
|
||||
case "$mode" in
|
||||
ld|ccld)
|
||||
extend spack_flags_list SPACK_LDFLAGS
|
||||
extend flags_list SPACK_LDFLAGS
|
||||
;;
|
||||
esac
|
||||
|
||||
IFS="$lsep"
|
||||
categorize_arguments $spack_flags_list
|
||||
unset IFS
|
||||
spack_flags_include_dirs_list="$return_include_dirs_list"
|
||||
spack_flags_lib_dirs_list="$return_lib_dirs_list"
|
||||
spack_flags_rpath_dirs_list="$return_rpath_dirs_list"
|
||||
spack_flags_system_include_dirs_list="$return_system_include_dirs_list"
|
||||
spack_flags_system_lib_dirs_list="$return_system_lib_dirs_list"
|
||||
spack_flags_system_rpath_dirs_list="$return_system_rpath_dirs_list"
|
||||
spack_flags_isystem_was_used="$return_isystem_was_used"
|
||||
spack_flags_isystem_system_include_dirs_list="$return_isystem_system_include_dirs_list"
|
||||
spack_flags_isystem_include_dirs_list="$return_isystem_include_dirs_list"
|
||||
spack_flags_other_args_list="$return_other_args_list"
|
||||
|
||||
|
||||
# On macOS insert headerpad_max_install_names linker flag
|
||||
if [ "$mode" = ld ] || [ "$mode" = ccld ]; then
|
||||
if [ "${SPACK_SHORT_SPEC#*darwin}" != "${SPACK_SHORT_SPEC}" ]; then
|
||||
@@ -775,8 +732,6 @@ if [ "$mode" = ccld ] || [ "$mode" = ld ]; then
|
||||
extend lib_dirs_list SPACK_LINK_DIRS
|
||||
fi
|
||||
|
||||
libs_list=""
|
||||
|
||||
# add RPATHs if we're in in any linking mode
|
||||
case "$mode" in
|
||||
ld|ccld)
|
||||
@@ -805,16 +760,12 @@ args_list="$flags_list"
|
||||
|
||||
# Insert include directories just prior to any system include directories
|
||||
# NOTE: adding ${lsep} to the prefix here turns every added element into two
|
||||
extend args_list spack_flags_include_dirs_list "-I"
|
||||
extend args_list include_dirs_list "-I"
|
||||
extend args_list spack_flags_isystem_include_dirs_list "-isystem${lsep}"
|
||||
extend args_list isystem_include_dirs_list "-isystem${lsep}"
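# With the alarm-bell list separator in ${lsep}, an element added as
# "-isystem${lsep}/opt/include" (directory illustrative) later splits on
# IFS="$lsep" into the two words "-isystem" and "/opt/include".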

case "$mode" in
    cpp|cc|as|ccld)
        if [ "$spack_flags_isystem_was_used" = "true" ]; then
            extend args_list SPACK_INCLUDE_DIRS "-isystem${lsep}"
        elif [ "$isystem_was_used" = "true" ]; then
        if [ "$isystem_was_used" = "true" ]; then
            extend args_list SPACK_INCLUDE_DIRS "-isystem${lsep}"
        else
            extend args_list SPACK_INCLUDE_DIRS "-I"
@@ -822,15 +773,11 @@ case "$mode" in
        ;;
esac

extend args_list spack_flags_system_include_dirs_list -I
extend args_list system_include_dirs_list -I
extend args_list spack_flags_isystem_system_include_dirs_list "-isystem${lsep}"
extend args_list isystem_system_include_dirs_list "-isystem${lsep}"

# Library search paths
extend args_list spack_flags_lib_dirs_list "-L"
extend args_list lib_dirs_list "-L"
extend args_list spack_flags_system_lib_dirs_list "-L"
extend args_list system_lib_dirs_list "-L"

# RPATHs arguments
@@ -839,25 +786,20 @@ case "$mode" in
        if [ -n "$dtags_to_add" ] ; then
            append args_list "$linker_arg$dtags_to_add"
        fi
        extend args_list spack_flags_rpath_dirs_list "$rpath"
        extend args_list rpath_dirs_list "$rpath"
        extend args_list spack_flags_system_rpath_dirs_list "$rpath"
        extend args_list system_rpath_dirs_list "$rpath"
        ;;
    ld)
        if [ -n "$dtags_to_add" ] ; then
            append args_list "$dtags_to_add"
        fi
        extend args_list spack_flags_rpath_dirs_list "-rpath${lsep}"
        extend args_list rpath_dirs_list "-rpath${lsep}"
        extend args_list spack_flags_system_rpath_dirs_list "-rpath${lsep}"
        extend args_list system_rpath_dirs_list "-rpath${lsep}"
        ;;
esac

# Other arguments from the input command
extend args_list other_args_list
extend args_list spack_flags_other_args_list

# Inject SPACK_LDLIBS, if supplied
extend args_list libs_list "-l"
@@ -913,4 +855,3 @@ fi
# Execute the full command, preserving spaces with IFS set
# to the alarm bell separator.
IFS="$lsep"; exec $full_command_list
7
lib/spack/external/__init__.py
vendored
@@ -18,7 +18,7 @@

* Homepage: https://pypi.python.org/pypi/archspec
* Usage: Labeling, comparison and detection of microarchitectures
* Version: 0.2.1 (commit df43a1834460bf94516136951c4729a3100603ec)
* Version: 0.2.0-dev (commit f3667f95030c6573842fb5f6df0d647285597509)

astunparse
----------------
@@ -101,7 +101,10 @@

* Usage: Used for config files. Ruamel is based on PyYAML but is more
  actively maintained and has more features, including round-tripping
  comments read from config files.
* Version: 0.17.21
* Version: 0.11.15 (last version supporting Python 2.6)
* Note: This package has been slightly modified to improve Python 2.6
  compatibility -- some ``{}`` format strings were replaced, and the
  import for ``OrderedDict`` was tweaked.

six
---
1
lib/spack/external/_vendoring/ruamel.pyi
vendored
@@ -1 +0,0 @@
from ruamel import *
@@ -1,57 +0,0 @@
# coding: utf-8

if False:  # MYPY
    from typing import Dict, Any  # NOQA

_package_data = dict(
    full_package_name='ruamel.yaml',
    version_info=(0, 17, 21),
    __version__='0.17.21',
    version_timestamp='2022-02-12 09:49:22',
    author='Anthon van der Neut',
    author_email='a.van.der.neut@ruamel.eu',
    description='ruamel.yaml is a YAML parser/emitter that supports roundtrip preservation of comments, seq/map flow style, and map key order',  # NOQA
    entry_points=None,
    since=2014,
    extras_require={
        ':platform_python_implementation=="CPython" and python_version<"3.11"': ['ruamel.yaml.clib>=0.2.6'],  # NOQA
        'jinja2': ['ruamel.yaml.jinja2>=0.2'],
        'docs': ['ryd'],
    },
    classifiers=[
        'Programming Language :: Python :: 3 :: Only',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: 3.7',
        'Programming Language :: Python :: 3.8',
        'Programming Language :: Python :: 3.9',
        'Programming Language :: Python :: 3.10',
        'Programming Language :: Python :: Implementation :: CPython',
        'Topic :: Software Development :: Libraries :: Python Modules',
        'Topic :: Text Processing :: Markup',
        'Typing :: Typed',
    ],
    keywords='yaml 1.2 parser round-trip preserve quotes order config',
    read_the_docs='yaml',
    supported=[(3, 5)],  # minimum
    tox=dict(
        env='*f',  # f for 3.5
        fl8excl='_test/lib',
    ),
    # universal=True,
    python_requires='>=3',
    rtfd='yaml',
)  # type: Dict[Any, Any]


version_info = _package_data['version_info']
__version__ = _package_data['__version__']

try:
    from .cyaml import *  # NOQA

    __with_libyaml__ = True
except (ImportError, ValueError):  # for Jython
    __with_libyaml__ = False

from ruamel.yaml.main import *  # NOQA
@@ -1,20 +0,0 @@
# coding: utf-8

if False:  # MYPY
    from typing import Any, Dict, Optional, List, Union, Optional, Iterator  # NOQA

anchor_attrib = '_yaml_anchor'


class Anchor:
    __slots__ = 'value', 'always_dump'
    attrib = anchor_attrib

    def __init__(self):
        # type: () -> None
        self.value = None
        self.always_dump = False

    def __repr__(self):
        # type: () -> Any
        ad = ', (always dump)' if self.always_dump else ""
        return 'Anchor({!r}{})'.format(self.value, ad)
1267
lib/spack/external/_vendoring/ruamel/yaml/comments.py
vendored
File diff suppressed because it is too large
268
lib/spack/external/_vendoring/ruamel/yaml/compat.py
vendored
@@ -1,268 +0,0 @@
# coding: utf-8

# partially from package six by Benjamin Peterson

import sys
import os
import io
import traceback
from abc import abstractmethod
import collections.abc


# fmt: off
if False:  # MYPY
    from typing import Any, Dict, Optional, List, Union, BinaryIO, IO, Text, Tuple  # NOQA
    from typing import Optional  # NOQA
# fmt: on

_DEFAULT_YAML_VERSION = (1, 2)

try:
    from collections import OrderedDict
except ImportError:
    from ordereddict import OrderedDict  # type: ignore

    # to get the right name import ... as ordereddict doesn't do that


class ordereddict(OrderedDict):  # type: ignore
    if not hasattr(OrderedDict, 'insert'):

        def insert(self, pos, key, value):
            # type: (int, Any, Any) -> None
            if pos >= len(self):
                self[key] = value
                return
            od = ordereddict()
            od.update(self)
            for k in od:
                del self[k]
            for index, old_key in enumerate(od):
                if pos == index:
                    self[key] = value
                self[old_key] = od[old_key]


PY2 = sys.version_info[0] == 2
PY3 = sys.version_info[0] == 3


# replace with f-strings when 3.5 support is dropped
# ft = '42'
# assert _F('abc {ft!r}', ft=ft) == 'abc %r' % ft
# 'abc %r' % ft -> _F('abc {ft!r}' -> f'abc {ft!r}'
def _F(s, *superfluous, **kw):
    # type: (Any, Any, Any) -> Any
    if superfluous:
        raise TypeError
    return s.format(**kw)


StringIO = io.StringIO
BytesIO = io.BytesIO

if False:  # MYPY
    # StreamType = Union[BinaryIO, IO[str], IO[unicode], StringIO]
    # StreamType = Union[BinaryIO, IO[str], StringIO]  # type: ignore
    StreamType = Any

    StreamTextType = StreamType  # Union[Text, StreamType]
    VersionType = Union[List[int], str, Tuple[int, int]]

builtins_module = 'builtins'


def with_metaclass(meta, *bases):
    # type: (Any, Any) -> Any
    """Create a base class with a metaclass."""
    return meta('NewBase', bases, {})


DBG_TOKEN = 1
DBG_EVENT = 2
DBG_NODE = 4


_debug = None  # type: Optional[int]
if 'RUAMELDEBUG' in os.environ:
    _debugx = os.environ.get('RUAMELDEBUG')
    if _debugx is None:
        _debug = 0
    else:
        _debug = int(_debugx)


if bool(_debug):

    class ObjectCounter:
        def __init__(self):
            # type: () -> None
            self.map = {}  # type: Dict[Any, Any]

        def __call__(self, k):
            # type: (Any) -> None
            self.map[k] = self.map.get(k, 0) + 1

        def dump(self):
            # type: () -> None
            for k in sorted(self.map):
                sys.stdout.write('{} -> {}'.format(k, self.map[k]))

    object_counter = ObjectCounter()


# used from yaml util when testing
def dbg(val=None):
    # type: (Any) -> Any
    global _debug
    if _debug is None:
        # set to true or false
        _debugx = os.environ.get('YAMLDEBUG')
        if _debugx is None:
            _debug = 0
        else:
            _debug = int(_debugx)
    if val is None:
        return _debug
    return _debug & val


class Nprint:
    def __init__(self, file_name=None):
        # type: (Any) -> None
        self._max_print = None  # type: Any
        self._count = None  # type: Any
        self._file_name = file_name

    def __call__(self, *args, **kw):
        # type: (Any, Any) -> None
        if not bool(_debug):
            return
        out = sys.stdout if self._file_name is None else open(self._file_name, 'a')
        dbgprint = print  # to fool checking for print statements by dv utility
        kw1 = kw.copy()
        kw1['file'] = out
        dbgprint(*args, **kw1)
        out.flush()
        if self._max_print is not None:
            if self._count is None:
                self._count = self._max_print
            self._count -= 1
            if self._count == 0:
                dbgprint('forced exit\n')
                traceback.print_stack()
                out.flush()
                sys.exit(0)
        if self._file_name:
            out.close()

    def set_max_print(self, i):
        # type: (int) -> None
        self._max_print = i
        self._count = None

    def fp(self, mode='a'):
        # type: (str) -> Any
        out = sys.stdout if self._file_name is None else open(self._file_name, mode)
        return out


nprint = Nprint()
nprintf = Nprint('/var/tmp/ruamel.yaml.log')

# char checkers following production rules


def check_namespace_char(ch):
    # type: (Any) -> bool
    if '\x21' <= ch <= '\x7E':  # ! to ~
        return True
    if '\xA0' <= ch <= '\uD7FF':
        return True
    if ('\uE000' <= ch <= '\uFFFD') and ch != '\uFEFF':  # excl. byte order mark
        return True
    if '\U00010000' <= ch <= '\U0010FFFF':
        return True
    return False


def check_anchorname_char(ch):
    # type: (Any) -> bool
    if ch in ',[]{}':
        return False
    return check_namespace_char(ch)


def version_tnf(t1, t2=None):
    # type: (Any, Any) -> Any
    """
    return True if ruamel.yaml version_info < t1, None if t2 is specified and bigger else False
    """
    from ruamel.yaml import version_info  # NOQA

    if version_info < t1:
        return True
    if t2 is not None and version_info < t2:
        return None
    return False
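
# Illustrative sketch (version tuples hypothetical): under ruamel.yaml 0.16.x,
#   version_tnf((0, 17))          -> True   (version_info < t1)
#   version_tnf((0, 15), (0, 17)) -> None   (t1 <= version_info < t2)
#   version_tnf((0, 15))          -> False  (version_info >= t1)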


class MutableSliceableSequence(collections.abc.MutableSequence):  # type: ignore
    __slots__ = ()

    def __getitem__(self, index):
        # type: (Any) -> Any
        if not isinstance(index, slice):
            return self.__getsingleitem__(index)
        return type(self)([self[i] for i in range(*index.indices(len(self)))])  # type: ignore

    def __setitem__(self, index, value):
        # type: (Any, Any) -> None
        if not isinstance(index, slice):
            return self.__setsingleitem__(index, value)
        assert iter(value)
        # nprint(index.start, index.stop, index.step, index.indices(len(self)))
        if index.step is None:
            del self[index.start : index.stop]
            for elem in reversed(value):
                self.insert(0 if index.start is None else index.start, elem)
        else:
            range_parms = index.indices(len(self))
            nr_assigned_items = (range_parms[1] - range_parms[0] - 1) // range_parms[2] + 1
            # need to test before changing, in case TypeError is caught
            if nr_assigned_items < len(value):
                raise TypeError(
                    'too many elements in value {} < {}'.format(nr_assigned_items, len(value))
                )
            elif nr_assigned_items > len(value):
                raise TypeError(
                    'not enough elements in value {} > {}'.format(
                        nr_assigned_items, len(value)
                    )
                )
            for idx, i in enumerate(range(*range_parms)):
                self[i] = value[idx]

    def __delitem__(self, index):
        # type: (Any) -> None
        if not isinstance(index, slice):
            return self.__delsingleitem__(index)
        # nprint(index.start, index.stop, index.step, index.indices(len(self)))
        for i in reversed(range(*index.indices(len(self)))):
            del self[i]

    @abstractmethod
    def __getsingleitem__(self, index):
        # type: (Any) -> Any
        raise IndexError

    @abstractmethod
    def __setsingleitem__(self, index, value):
        # type: (Any, Any) -> None
        raise IndexError

    @abstractmethod
    def __delsingleitem__(self, index):
        # type: (Any) -> None
        raise IndexError
@@ -1,243 +0,0 @@
# coding: utf-8

import warnings

from ruamel.yaml.error import MarkedYAMLError, ReusedAnchorWarning
from ruamel.yaml.compat import _F, nprint, nprintf  # NOQA

from ruamel.yaml.events import (
    StreamStartEvent,
    StreamEndEvent,
    MappingStartEvent,
    MappingEndEvent,
    SequenceStartEvent,
    SequenceEndEvent,
    AliasEvent,
    ScalarEvent,
)
from ruamel.yaml.nodes import MappingNode, ScalarNode, SequenceNode

if False:  # MYPY
    from typing import Any, Dict, Optional, List  # NOQA

__all__ = ['Composer', 'ComposerError']


class ComposerError(MarkedYAMLError):
    pass


class Composer:
    def __init__(self, loader=None):
        # type: (Any) -> None
        self.loader = loader
        if self.loader is not None and getattr(self.loader, '_composer', None) is None:
            self.loader._composer = self
        self.anchors = {}  # type: Dict[Any, Any]

    @property
    def parser(self):
        # type: () -> Any
        if hasattr(self.loader, 'typ'):
            self.loader.parser
        return self.loader._parser

    @property
    def resolver(self):
        # type: () -> Any
        # assert self.loader._resolver is not None
        if hasattr(self.loader, 'typ'):
            self.loader.resolver
        return self.loader._resolver

    def check_node(self):
        # type: () -> Any
        # Drop the STREAM-START event.
        if self.parser.check_event(StreamStartEvent):
            self.parser.get_event()

        # If there are more documents available?
        return not self.parser.check_event(StreamEndEvent)

    def get_node(self):
        # type: () -> Any
        # Get the root node of the next document.
        if not self.parser.check_event(StreamEndEvent):
            return self.compose_document()

    def get_single_node(self):
        # type: () -> Any
        # Drop the STREAM-START event.
        self.parser.get_event()

        # Compose a document if the stream is not empty.
        document = None  # type: Any
        if not self.parser.check_event(StreamEndEvent):
            document = self.compose_document()

        # Ensure that the stream contains no more documents.
        if not self.parser.check_event(StreamEndEvent):
            event = self.parser.get_event()
            raise ComposerError(
                'expected a single document in the stream',
                document.start_mark,
                'but found another document',
                event.start_mark,
            )

        # Drop the STREAM-END event.
        self.parser.get_event()

        return document

    def compose_document(self):
        # type: (Any) -> Any
        # Drop the DOCUMENT-START event.
        self.parser.get_event()

        # Compose the root node.
        node = self.compose_node(None, None)

        # Drop the DOCUMENT-END event.
        self.parser.get_event()

        self.anchors = {}
        return node

    def return_alias(self, a):
        # type: (Any) -> Any
        return a

    def compose_node(self, parent, index):
        # type: (Any, Any) -> Any
        if self.parser.check_event(AliasEvent):
            event = self.parser.get_event()
            alias = event.anchor
            if alias not in self.anchors:
                raise ComposerError(
                    None,
                    None,
                    _F('found undefined alias {alias!r}', alias=alias),
                    event.start_mark,
                )
            return self.return_alias(self.anchors[alias])
        event = self.parser.peek_event()
        anchor = event.anchor
        if anchor is not None:  # have an anchor
            if anchor in self.anchors:
                # raise ComposerError(
                #     "found duplicate anchor %r; first occurrence"
                #     % (anchor), self.anchors[anchor].start_mark,
                #     "second occurrence", event.start_mark)
                ws = (
                    '\nfound duplicate anchor {!r}\nfirst occurrence {}\nsecond occurrence '
                    '{}'.format((anchor), self.anchors[anchor].start_mark, event.start_mark)
                )
                warnings.warn(ws, ReusedAnchorWarning)
        self.resolver.descend_resolver(parent, index)
        if self.parser.check_event(ScalarEvent):
            node = self.compose_scalar_node(anchor)
        elif self.parser.check_event(SequenceStartEvent):
            node = self.compose_sequence_node(anchor)
        elif self.parser.check_event(MappingStartEvent):
            node = self.compose_mapping_node(anchor)
        self.resolver.ascend_resolver()
        return node

    def compose_scalar_node(self, anchor):
        # type: (Any) -> Any
        event = self.parser.get_event()
        tag = event.tag
        if tag is None or tag == '!':
            tag = self.resolver.resolve(ScalarNode, event.value, event.implicit)
        node = ScalarNode(
            tag,
            event.value,
            event.start_mark,
            event.end_mark,
            style=event.style,
            comment=event.comment,
            anchor=anchor,
        )
        if anchor is not None:
            self.anchors[anchor] = node
        return node

    def compose_sequence_node(self, anchor):
        # type: (Any) -> Any
        start_event = self.parser.get_event()
        tag = start_event.tag
        if tag is None or tag == '!':
            tag = self.resolver.resolve(SequenceNode, None, start_event.implicit)
        node = SequenceNode(
            tag,
            [],
            start_event.start_mark,
            None,
            flow_style=start_event.flow_style,
            comment=start_event.comment,
            anchor=anchor,
        )
        if anchor is not None:
            self.anchors[anchor] = node
        index = 0
        while not self.parser.check_event(SequenceEndEvent):
            node.value.append(self.compose_node(node, index))
            index += 1
        end_event = self.parser.get_event()
        if node.flow_style is True and end_event.comment is not None:
            if node.comment is not None:
                nprint(
                    'Warning: unexpected end_event commment in sequence '
                    'node {}'.format(node.flow_style)
                )
            node.comment = end_event.comment
        node.end_mark = end_event.end_mark
        self.check_end_doc_comment(end_event, node)
        return node

    def compose_mapping_node(self, anchor):
        # type: (Any) -> Any
        start_event = self.parser.get_event()
        tag = start_event.tag
        if tag is None or tag == '!':
            tag = self.resolver.resolve(MappingNode, None, start_event.implicit)
        node = MappingNode(
            tag,
            [],
            start_event.start_mark,
            None,
            flow_style=start_event.flow_style,
            comment=start_event.comment,
            anchor=anchor,
        )
        if anchor is not None:
            self.anchors[anchor] = node
        while not self.parser.check_event(MappingEndEvent):
            # key_event = self.parser.peek_event()
            item_key = self.compose_node(node, None)
            # if item_key in node.value:
            #     raise ComposerError("while composing a mapping",
            #             start_event.start_mark,
            #             "found duplicate key", key_event.start_mark)
            item_value = self.compose_node(node, item_key)
            # node.value[item_key] = item_value
            node.value.append((item_key, item_value))
        end_event = self.parser.get_event()
        if node.flow_style is True and end_event.comment is not None:
            node.comment = end_event.comment
        node.end_mark = end_event.end_mark
        self.check_end_doc_comment(end_event, node)
        return node

    def check_end_doc_comment(self, end_event, node):
        # type: (Any, Any) -> None
        if end_event.comment and end_event.comment[1]:
            # pre comments on an end_event, no following to move to
            if node.comment is None:
                node.comment = [None, None]
            assert not isinstance(node, ScalarEvent)
            # this is a post comment on a mapping node, add as third element
            # in the list
            node.comment.append(end_event.comment[1])
            end_event.comment[1] = None
@@ -1,14 +0,0 @@
# coding: utf-8

import warnings

from ruamel.yaml.util import configobj_walker as new_configobj_walker

if False:  # MYPY
    from typing import Any  # NOQA


def configobj_walker(cfg):
    # type: (Any) -> Any
    warnings.warn('configobj_walker has moved to ruamel.yaml.util, please update your code')
    return new_configobj_walker(cfg)
1845
lib/spack/external/_vendoring/ruamel/yaml/constructor.py
vendored
File diff suppressed because it is too large
183
lib/spack/external/_vendoring/ruamel/yaml/cyaml.py
vendored
@@ -1,183 +0,0 @@
# coding: utf-8

from _ruamel_yaml import CParser, CEmitter  # type: ignore

from ruamel.yaml.constructor import Constructor, BaseConstructor, SafeConstructor
from ruamel.yaml.representer import Representer, SafeRepresenter, BaseRepresenter
from ruamel.yaml.resolver import Resolver, BaseResolver

if False:  # MYPY
    from typing import Any, Union, Optional  # NOQA
    from ruamel.yaml.compat import StreamTextType, StreamType, VersionType  # NOQA

__all__ = ['CBaseLoader', 'CSafeLoader', 'CLoader', 'CBaseDumper', 'CSafeDumper', 'CDumper']


# this includes some hacks to solve the usage of resolver by lower level
# parts of the parser


class CBaseLoader(CParser, BaseConstructor, BaseResolver):  # type: ignore
    def __init__(self, stream, version=None, preserve_quotes=None):
        # type: (StreamTextType, Optional[VersionType], Optional[bool]) -> None
        CParser.__init__(self, stream)
        self._parser = self._composer = self
        BaseConstructor.__init__(self, loader=self)
        BaseResolver.__init__(self, loadumper=self)
        # self.descend_resolver = self._resolver.descend_resolver
        # self.ascend_resolver = self._resolver.ascend_resolver
        # self.resolve = self._resolver.resolve


class CSafeLoader(CParser, SafeConstructor, Resolver):  # type: ignore
    def __init__(self, stream, version=None, preserve_quotes=None):
        # type: (StreamTextType, Optional[VersionType], Optional[bool]) -> None
        CParser.__init__(self, stream)
        self._parser = self._composer = self
        SafeConstructor.__init__(self, loader=self)
        Resolver.__init__(self, loadumper=self)
        # self.descend_resolver = self._resolver.descend_resolver
        # self.ascend_resolver = self._resolver.ascend_resolver
        # self.resolve = self._resolver.resolve


class CLoader(CParser, Constructor, Resolver):  # type: ignore
    def __init__(self, stream, version=None, preserve_quotes=None):
        # type: (StreamTextType, Optional[VersionType], Optional[bool]) -> None
        CParser.__init__(self, stream)
        self._parser = self._composer = self
        Constructor.__init__(self, loader=self)
        Resolver.__init__(self, loadumper=self)
        # self.descend_resolver = self._resolver.descend_resolver
        # self.ascend_resolver = self._resolver.ascend_resolver
        # self.resolve = self._resolver.resolve


class CBaseDumper(CEmitter, BaseRepresenter, BaseResolver):  # type: ignore
    def __init__(
        self,
        stream,
        default_style=None,
        default_flow_style=None,
        canonical=None,
        indent=None,
        width=None,
        allow_unicode=None,
        line_break=None,
        encoding=None,
        explicit_start=None,
        explicit_end=None,
        version=None,
        tags=None,
        block_seq_indent=None,
        top_level_colon_align=None,
        prefix_colon=None,
    ):
        # type: (StreamType, Any, Any, Any, Optional[bool], Optional[int], Optional[int], Optional[bool], Any, Any, Optional[bool], Optional[bool], Any, Any, Any, Any, Any) -> None  # NOQA
        CEmitter.__init__(
            self,
            stream,
            canonical=canonical,
            indent=indent,
            width=width,
            encoding=encoding,
            allow_unicode=allow_unicode,
            line_break=line_break,
            explicit_start=explicit_start,
            explicit_end=explicit_end,
            version=version,
            tags=tags,
        )
        self._emitter = self._serializer = self._representer = self
        BaseRepresenter.__init__(
            self,
            default_style=default_style,
            default_flow_style=default_flow_style,
            dumper=self,
        )
        BaseResolver.__init__(self, loadumper=self)


class CSafeDumper(CEmitter, SafeRepresenter, Resolver):  # type: ignore
    def __init__(
        self,
        stream,
        default_style=None,
        default_flow_style=None,
        canonical=None,
        indent=None,
        width=None,
        allow_unicode=None,
        line_break=None,
        encoding=None,
        explicit_start=None,
        explicit_end=None,
        version=None,
        tags=None,
        block_seq_indent=None,
        top_level_colon_align=None,
        prefix_colon=None,
    ):
        # type: (StreamType, Any, Any, Any, Optional[bool], Optional[int], Optional[int], Optional[bool], Any, Any, Optional[bool], Optional[bool], Any, Any, Any, Any, Any) -> None  # NOQA
        self._emitter = self._serializer = self._representer = self
        CEmitter.__init__(
            self,
            stream,
            canonical=canonical,
            indent=indent,
            width=width,
            encoding=encoding,
            allow_unicode=allow_unicode,
            line_break=line_break,
            explicit_start=explicit_start,
            explicit_end=explicit_end,
            version=version,
            tags=tags,
        )
        self._emitter = self._serializer = self._representer = self
        SafeRepresenter.__init__(
            self, default_style=default_style, default_flow_style=default_flow_style
        )
        Resolver.__init__(self)


class CDumper(CEmitter, Representer, Resolver):  # type: ignore
    def __init__(
        self,
        stream,
        default_style=None,
        default_flow_style=None,
        canonical=None,
        indent=None,
        width=None,
        allow_unicode=None,
        line_break=None,
        encoding=None,
        explicit_start=None,
        explicit_end=None,
        version=None,
        tags=None,
        block_seq_indent=None,
        top_level_colon_align=None,
        prefix_colon=None,
    ):
        # type: (StreamType, Any, Any, Any, Optional[bool], Optional[int], Optional[int], Optional[bool], Any, Any, Optional[bool], Optional[bool], Any, Any, Any, Any, Any) -> None  # NOQA
        CEmitter.__init__(
            self,
            stream,
            canonical=canonical,
            indent=indent,
            width=width,
            encoding=encoding,
            allow_unicode=allow_unicode,
            line_break=line_break,
            explicit_start=explicit_start,
            explicit_end=explicit_end,
            version=version,
            tags=tags,
        )
        self._emitter = self._serializer = self._representer = self
        Representer.__init__(
            self, default_style=default_style, default_flow_style=default_flow_style
        )
        Resolver.__init__(self)
219
lib/spack/external/_vendoring/ruamel/yaml/dumper.py
vendored
@@ -1,219 +0,0 @@
# coding: utf-8

from ruamel.yaml.emitter import Emitter
from ruamel.yaml.serializer import Serializer
from ruamel.yaml.representer import (
    Representer,
    SafeRepresenter,
    BaseRepresenter,
    RoundTripRepresenter,
)
from ruamel.yaml.resolver import Resolver, BaseResolver, VersionedResolver

if False:  # MYPY
    from typing import Any, Dict, List, Union, Optional  # NOQA
    from ruamel.yaml.compat import StreamType, VersionType  # NOQA

__all__ = ['BaseDumper', 'SafeDumper', 'Dumper', 'RoundTripDumper']


class BaseDumper(Emitter, Serializer, BaseRepresenter, BaseResolver):
    def __init__(
        self,
        stream,
        default_style=None,
        default_flow_style=None,
        canonical=None,
        indent=None,
        width=None,
        allow_unicode=None,
        line_break=None,
        encoding=None,
        explicit_start=None,
        explicit_end=None,
        version=None,
        tags=None,
        block_seq_indent=None,
        top_level_colon_align=None,
        prefix_colon=None,
    ):
        # type: (Any, StreamType, Any, Any, Optional[bool], Optional[int], Optional[int], Optional[bool], Any, Any, Optional[bool], Optional[bool], Any, Any, Any, Any, Any) -> None  # NOQA
        Emitter.__init__(
            self,
            stream,
            canonical=canonical,
            indent=indent,
            width=width,
            allow_unicode=allow_unicode,
            line_break=line_break,
            block_seq_indent=block_seq_indent,
            dumper=self,
        )
        Serializer.__init__(
            self,
            encoding=encoding,
            explicit_start=explicit_start,
            explicit_end=explicit_end,
            version=version,
            tags=tags,
            dumper=self,
        )
        BaseRepresenter.__init__(
            self,
            default_style=default_style,
            default_flow_style=default_flow_style,
            dumper=self,
        )
        BaseResolver.__init__(self, loadumper=self)


class SafeDumper(Emitter, Serializer, SafeRepresenter, Resolver):
    def __init__(
        self,
        stream,
        default_style=None,
        default_flow_style=None,
        canonical=None,
        indent=None,
        width=None,
        allow_unicode=None,
        line_break=None,
        encoding=None,
        explicit_start=None,
        explicit_end=None,
        version=None,
        tags=None,
        block_seq_indent=None,
        top_level_colon_align=None,
        prefix_colon=None,
    ):
        # type: (StreamType, Any, Any, Optional[bool], Optional[int], Optional[int], Optional[bool], Any, Any, Optional[bool], Optional[bool], Any, Any, Any, Any, Any) -> None  # NOQA
        Emitter.__init__(
            self,
            stream,
            canonical=canonical,
            indent=indent,
            width=width,
            allow_unicode=allow_unicode,
            line_break=line_break,
            block_seq_indent=block_seq_indent,
            dumper=self,
        )
        Serializer.__init__(
            self,
            encoding=encoding,
            explicit_start=explicit_start,
            explicit_end=explicit_end,
            version=version,
            tags=tags,
            dumper=self,
        )
        SafeRepresenter.__init__(
            self,
            default_style=default_style,
            default_flow_style=default_flow_style,
            dumper=self,
        )
        Resolver.__init__(self, loadumper=self)


class Dumper(Emitter, Serializer, Representer, Resolver):
    def __init__(
        self,
        stream,
        default_style=None,
        default_flow_style=None,
        canonical=None,
        indent=None,
        width=None,
        allow_unicode=None,
        line_break=None,
        encoding=None,
        explicit_start=None,
        explicit_end=None,
        version=None,
        tags=None,
        block_seq_indent=None,
        top_level_colon_align=None,
        prefix_colon=None,
    ):
        # type: (StreamType, Any, Any, Optional[bool], Optional[int], Optional[int], Optional[bool], Any, Any, Optional[bool], Optional[bool], Any, Any, Any, Any, Any) -> None  # NOQA
        Emitter.__init__(
            self,
            stream,
            canonical=canonical,
            indent=indent,
            width=width,
            allow_unicode=allow_unicode,
            line_break=line_break,
            block_seq_indent=block_seq_indent,
            dumper=self,
        )
        Serializer.__init__(
            self,
            encoding=encoding,
            explicit_start=explicit_start,
            explicit_end=explicit_end,
            version=version,
            tags=tags,
            dumper=self,
        )
        Representer.__init__(
            self,
            default_style=default_style,
            default_flow_style=default_flow_style,
            dumper=self,
        )
        Resolver.__init__(self, loadumper=self)


class RoundTripDumper(Emitter, Serializer, RoundTripRepresenter, VersionedResolver):
    def __init__(
        self,
        stream,
        default_style=None,
        default_flow_style=None,
        canonical=None,
        indent=None,
        width=None,
        allow_unicode=None,
        line_break=None,
        encoding=None,
        explicit_start=None,
        explicit_end=None,
        version=None,
        tags=None,
        block_seq_indent=None,
        top_level_colon_align=None,
        prefix_colon=None,
    ):
        # type: (StreamType, Any, Optional[bool], Optional[int], Optional[int], Optional[int], Optional[bool], Any, Any, Optional[bool], Optional[bool], Any, Any, Any, Any, Any) -> None  # NOQA
        Emitter.__init__(
            self,
            stream,
            canonical=canonical,
            indent=indent,
            width=width,
            allow_unicode=allow_unicode,
            line_break=line_break,
            block_seq_indent=block_seq_indent,
            top_level_colon_align=top_level_colon_align,
            prefix_colon=prefix_colon,
            dumper=self,
        )
        Serializer.__init__(
            self,
            encoding=encoding,
            explicit_start=explicit_start,
            explicit_end=explicit_end,
            version=version,
            tags=tags,
            dumper=self,
        )
        RoundTripRepresenter.__init__(
            self,
            default_style=default_style,
            default_flow_style=default_flow_style,
            dumper=self,
        )
        VersionedResolver.__init__(self, loader=self)
1772
lib/spack/external/_vendoring/ruamel/yaml/emitter.py
vendored
File diff suppressed because it is too large
332
lib/spack/external/_vendoring/ruamel/yaml/error.py
vendored
@@ -1,332 +0,0 @@
# coding: utf-8

import warnings
import textwrap

from ruamel.yaml.compat import _F

if False:  # MYPY
    from typing import Any, Dict, Optional, List, Text  # NOQA


__all__ = [
    'FileMark',
    'StringMark',
    'CommentMark',
    'YAMLError',
    'MarkedYAMLError',
    'ReusedAnchorWarning',
    'UnsafeLoaderWarning',
    'MarkedYAMLWarning',
    'MarkedYAMLFutureWarning',
]


class StreamMark:
    __slots__ = 'name', 'index', 'line', 'column'

    def __init__(self, name, index, line, column):
        # type: (Any, int, int, int) -> None
        self.name = name
        self.index = index
        self.line = line
        self.column = column

    def __str__(self):
        # type: () -> Any
        where = _F(
            '  in "{sname!s}", line {sline1:d}, column {scolumn1:d}',
            sname=self.name,
            sline1=self.line + 1,
            scolumn1=self.column + 1,
        )
        return where

    def __eq__(self, other):
        # type: (Any) -> bool
        if self.line != other.line or self.column != other.column:
            return False
        if self.name != other.name or self.index != other.index:
            return False
        return True

    def __ne__(self, other):
        # type: (Any) -> bool
        return not self.__eq__(other)


class FileMark(StreamMark):
    __slots__ = ()


class StringMark(StreamMark):
    __slots__ = 'name', 'index', 'line', 'column', 'buffer', 'pointer'

    def __init__(self, name, index, line, column, buffer, pointer):
        # type: (Any, int, int, int, Any, Any) -> None
        StreamMark.__init__(self, name, index, line, column)
        self.buffer = buffer
        self.pointer = pointer

    def get_snippet(self, indent=4, max_length=75):
        # type: (int, int) -> Any
        if self.buffer is None:  # always False
            return None
        head = ""
        start = self.pointer
        while start > 0 and self.buffer[start - 1] not in '\0\r\n\x85\u2028\u2029':
            start -= 1
            if self.pointer - start > max_length / 2 - 1:
                head = ' ... '
                start += 5
                break
        tail = ""
        end = self.pointer
        while end < len(self.buffer) and self.buffer[end] not in '\0\r\n\x85\u2028\u2029':
            end += 1
            if end - self.pointer > max_length / 2 - 1:
                tail = ' ... '
                end -= 5
                break
        snippet = self.buffer[start:end]
        caret = '^'
        caret = '^ (line: {})'.format(self.line + 1)
        return (
            ' ' * indent
            + head
            + snippet
            + tail
            + '\n'
            + ' ' * (indent + self.pointer - start + len(head))
            + caret
        )

    def __str__(self):
        # type: () -> Any
        snippet = self.get_snippet()
        where = _F(
            '  in "{sname!s}", line {sline1:d}, column {scolumn1:d}',
            sname=self.name,
            sline1=self.line + 1,
            scolumn1=self.column + 1,
        )
        if snippet is not None:
            where += ':\n' + snippet
        return where

    def __repr__(self):
        # type: () -> Any
        snippet = self.get_snippet()
        where = _F(
            '  in "{sname!s}", line {sline1:d}, column {scolumn1:d}',
            sname=self.name,
            sline1=self.line + 1,
            scolumn1=self.column + 1,
        )
        if snippet is not None:
            where += ':\n' + snippet
        return where


class CommentMark:
    __slots__ = ('column',)

    def __init__(self, column):
        # type: (Any) -> None
        self.column = column


class YAMLError(Exception):
    pass


class MarkedYAMLError(YAMLError):
    def __init__(
        self,
        context=None,
        context_mark=None,
        problem=None,
        problem_mark=None,
        note=None,
        warn=None,
    ):
        # type: (Any, Any, Any, Any, Any, Any) -> None
        self.context = context
        self.context_mark = context_mark
        self.problem = problem
        self.problem_mark = problem_mark
        self.note = note
        # warn is ignored

    def __str__(self):
        # type: () -> Any
        lines = []  # type: List[str]
        if self.context is not None:
            lines.append(self.context)
        if self.context_mark is not None and (
            self.problem is None
            or self.problem_mark is None
            or self.context_mark.name != self.problem_mark.name
            or self.context_mark.line != self.problem_mark.line
            or self.context_mark.column != self.problem_mark.column
        ):
            lines.append(str(self.context_mark))
        if self.problem is not None:
            lines.append(self.problem)
        if self.problem_mark is not None:
            lines.append(str(self.problem_mark))
        if self.note is not None and self.note:
            note = textwrap.dedent(self.note)
            lines.append(note)
        return '\n'.join(lines)


class YAMLStreamError(Exception):
    pass


class YAMLWarning(Warning):
    pass


class MarkedYAMLWarning(YAMLWarning):
    def __init__(
        self,
        context=None,
        context_mark=None,
        problem=None,
        problem_mark=None,
        note=None,
        warn=None,
    ):
        # type: (Any, Any, Any, Any, Any, Any) -> None
        self.context = context
        self.context_mark = context_mark
        self.problem = problem
        self.problem_mark = problem_mark
        self.note = note
        self.warn = warn

    def __str__(self):
        # type: () -> Any
        lines = []  # type: List[str]
        if self.context is not None:
            lines.append(self.context)
        if self.context_mark is not None and (
            self.problem is None
            or self.problem_mark is None
            or self.context_mark.name != self.problem_mark.name
            or self.context_mark.line != self.problem_mark.line
            or self.context_mark.column != self.problem_mark.column
        ):
            lines.append(str(self.context_mark))
        if self.problem is not None:
            lines.append(self.problem)
        if self.problem_mark is not None:
            lines.append(str(self.problem_mark))
        if self.note is not None and self.note:
            note = textwrap.dedent(self.note)
            lines.append(note)
        if self.warn is not None and self.warn:
            warn = textwrap.dedent(self.warn)
            lines.append(warn)
        return '\n'.join(lines)


class ReusedAnchorWarning(YAMLWarning):
    pass


class UnsafeLoaderWarning(YAMLWarning):
    text = """
The default 'Loader' for 'load(stream)' without further arguments can be unsafe.
Use 'load(stream, Loader=ruamel.yaml.Loader)' explicitly if that is OK.
Alternatively include the following in your code:

  import warnings
  warnings.simplefilter('ignore', ruamel.yaml.error.UnsafeLoaderWarning)

In most other cases you should consider using 'safe_load(stream)'"""
    pass


warnings.simplefilter('once', UnsafeLoaderWarning)


class MantissaNoDotYAML1_1Warning(YAMLWarning):
    def __init__(self, node, flt_str):
        # type: (Any, Any) -> None
        self.node = node
        self.flt = flt_str

    def __str__(self):
        # type: () -> Any
        line = self.node.start_mark.line
        col = self.node.start_mark.column
        return """
In YAML 1.1 floating point values should have a dot ('.') in their mantissa.
See the Floating-Point Language-Independent Type for YAML™ Version 1.1 specification
( http://yaml.org/type/float.html ). This dot is not required for JSON nor for YAML 1.2

Correct your float: "{}" on line: {}, column: {}

or alternatively include the following in your code:

  import warnings
  warnings.simplefilter('ignore', ruamel.yaml.error.MantissaNoDotYAML1_1Warning)

""".format(
            self.flt, line, col
        )


warnings.simplefilter('once', MantissaNoDotYAML1_1Warning)


class YAMLFutureWarning(Warning):
    pass


class MarkedYAMLFutureWarning(YAMLFutureWarning):
    def __init__(
        self,
        context=None,
        context_mark=None,
        problem=None,
        problem_mark=None,
        note=None,
        warn=None,
    ):
        # type: (Any, Any, Any, Any, Any, Any) -> None
        self.context = context
        self.context_mark = context_mark
        self.problem = problem
        self.problem_mark = problem_mark
        self.note = note
        self.warn = warn

    def __str__(self):
        # type: () -> Any
        lines = []  # type: List[str]
        if self.context is not None:
            lines.append(self.context)

        if self.context_mark is not None and (
            self.problem is None
            or self.problem_mark is None
            or self.context_mark.name != self.problem_mark.name
            or self.context_mark.line != self.problem_mark.line
            or self.context_mark.column != self.problem_mark.column
        ):
            lines.append(str(self.context_mark))
        if self.problem is not None:
            lines.append(self.problem)
        if self.problem_mark is not None:
            lines.append(str(self.problem_mark))
        if self.note is not None and self.note:
            note = textwrap.dedent(self.note)
            lines.append(note)
        if self.warn is not None and self.warn:
            warn = textwrap.dedent(self.warn)
            lines.append(warn)
        return '\n'.join(lines)
196
lib/spack/external/_vendoring/ruamel/yaml/events.py
vendored
@@ -1,196 +0,0 @@
# coding: utf-8

from ruamel.yaml.compat import _F

# Abstract classes.

if False:  # MYPY
    from typing import Any, Dict, Optional, List  # NOQA

SHOW_LINES = False


def CommentCheck():
    # type: () -> None
    pass


class Event:
    __slots__ = 'start_mark', 'end_mark', 'comment'

    def __init__(self, start_mark=None, end_mark=None, comment=CommentCheck):
        # type: (Any, Any, Any) -> None
        self.start_mark = start_mark
        self.end_mark = end_mark
        # assert comment is not CommentCheck
        if comment is CommentCheck:
            comment = None
        self.comment = comment

    def __repr__(self):
        # type: () -> Any
        if True:
            arguments = []
            if hasattr(self, 'value'):
                # if you use repr(getattr(self, 'value')) then flake8 complains about
                # abuse of getattr with a constant. When you change to self.value
                # then mypy throws an error
                arguments.append(repr(self.value))  # type: ignore
            for key in ['anchor', 'tag', 'implicit', 'flow_style', 'style']:
                v = getattr(self, key, None)
                if v is not None:
                    arguments.append(_F('{key!s}={v!r}', key=key, v=v))
            if self.comment not in [None, CommentCheck]:
                arguments.append('comment={!r}'.format(self.comment))
            if SHOW_LINES:
                arguments.append(
                    '({}:{}/{}:{})'.format(
                        self.start_mark.line,
                        self.start_mark.column,
                        self.end_mark.line,
                        self.end_mark.column,
                    )
                )
            arguments = ', '.join(arguments)  # type: ignore
        else:
            attributes = [
                key
                for key in ['anchor', 'tag', 'implicit', 'value', 'flow_style', 'style']
                if hasattr(self, key)
            ]
            arguments = ', '.join(
                [_F('{k!s}={attr!r}', k=key, attr=getattr(self, key)) for key in attributes]
            )
            if self.comment not in [None, CommentCheck]:
                arguments += ', comment={!r}'.format(self.comment)
        return _F(
            '{self_class_name!s}({arguments!s})',
            self_class_name=self.__class__.__name__,
            arguments=arguments,
        )


class NodeEvent(Event):
    __slots__ = ('anchor',)

    def __init__(self, anchor, start_mark=None, end_mark=None, comment=None):
        # type: (Any, Any, Any, Any) -> None
        Event.__init__(self, start_mark, end_mark, comment)
        self.anchor = anchor


class CollectionStartEvent(NodeEvent):
    __slots__ = 'tag', 'implicit', 'flow_style', 'nr_items'

    def __init__(
        self,
        anchor,
        tag,
        implicit,
        start_mark=None,
        end_mark=None,
        flow_style=None,
        comment=None,
        nr_items=None,
    ):
        # type: (Any, Any, Any, Any, Any, Any, Any, Optional[int]) -> None
        NodeEvent.__init__(self, anchor, start_mark, end_mark, comment)
        self.tag = tag
        self.implicit = implicit
        self.flow_style = flow_style
        self.nr_items = nr_items


class CollectionEndEvent(Event):
    __slots__ = ()


# Implementations.


class StreamStartEvent(Event):
    __slots__ = ('encoding',)

    def __init__(self, start_mark=None, end_mark=None, encoding=None, comment=None):
        # type: (Any, Any, Any, Any) -> None
        Event.__init__(self, start_mark, end_mark, comment)
        self.encoding = encoding


class StreamEndEvent(Event):
    __slots__ = ()


class DocumentStartEvent(Event):
    __slots__ = 'explicit', 'version', 'tags'

    def __init__(
        self,
        start_mark=None,
        end_mark=None,
        explicit=None,
        version=None,
        tags=None,
        comment=None,
    ):
        # type: (Any, Any, Any, Any, Any, Any) -> None
        Event.__init__(self, start_mark, end_mark, comment)
        self.explicit = explicit
        self.version = version
        self.tags = tags


class DocumentEndEvent(Event):
    __slots__ = ('explicit',)

    def __init__(self, start_mark=None, end_mark=None, explicit=None, comment=None):
        # type: (Any, Any, Any, Any) -> None
        Event.__init__(self, start_mark, end_mark, comment)
        self.explicit = explicit


class AliasEvent(NodeEvent):
    __slots__ = 'style'

    def __init__(self, anchor, start_mark=None, end_mark=None, style=None, comment=None):
        # type: (Any, Any, Any, Any, Any) -> None
        NodeEvent.__init__(self, anchor, start_mark, end_mark, comment)
        self.style = style


class ScalarEvent(NodeEvent):
    __slots__ = 'tag', 'implicit', 'value', 'style'

    def __init__(
        self,
        anchor,
        tag,
        implicit,
        value,
        start_mark=None,
        end_mark=None,
        style=None,
        comment=None,
    ):
        # type: (Any, Any, Any, Any, Any, Any, Any, Any) -> None
        NodeEvent.__init__(self, anchor, start_mark, end_mark, comment)
        self.tag = tag
        self.implicit = implicit
        self.value = value
        self.style = style


class SequenceStartEvent(CollectionStartEvent):
    __slots__ = ()


class SequenceEndEvent(CollectionEndEvent):
    __slots__ = ()


class MappingStartEvent(CollectionStartEvent):
    __slots__ = ()


class MappingEndEvent(CollectionEndEvent):
    __slots__ = ()
@@ -1,75 +0,0 @@
# coding: utf-8

from ruamel.yaml.reader import Reader
from ruamel.yaml.scanner import Scanner, RoundTripScanner
from ruamel.yaml.parser import Parser, RoundTripParser
from ruamel.yaml.composer import Composer
from ruamel.yaml.constructor import (
    BaseConstructor,
    SafeConstructor,
    Constructor,
    RoundTripConstructor,
)
from ruamel.yaml.resolver import VersionedResolver

if False:  # MYPY
    from typing import Any, Dict, List, Union, Optional  # NOQA
    from ruamel.yaml.compat import StreamTextType, VersionType  # NOQA

__all__ = ['BaseLoader', 'SafeLoader', 'Loader', 'RoundTripLoader']


class BaseLoader(Reader, Scanner, Parser, Composer, BaseConstructor, VersionedResolver):
    def __init__(self, stream, version=None, preserve_quotes=None):
        # type: (StreamTextType, Optional[VersionType], Optional[bool]) -> None
        self.comment_handling = None
        Reader.__init__(self, stream, loader=self)
        Scanner.__init__(self, loader=self)
        Parser.__init__(self, loader=self)
        Composer.__init__(self, loader=self)
        BaseConstructor.__init__(self, loader=self)
        VersionedResolver.__init__(self, version, loader=self)


class SafeLoader(Reader, Scanner, Parser, Composer, SafeConstructor, VersionedResolver):
    def __init__(self, stream, version=None, preserve_quotes=None):
        # type: (StreamTextType, Optional[VersionType], Optional[bool]) -> None
        self.comment_handling = None
        Reader.__init__(self, stream, loader=self)
        Scanner.__init__(self, loader=self)
        Parser.__init__(self, loader=self)
        Composer.__init__(self, loader=self)
        SafeConstructor.__init__(self, loader=self)
        VersionedResolver.__init__(self, version, loader=self)


class Loader(Reader, Scanner, Parser, Composer, Constructor, VersionedResolver):
    def __init__(self, stream, version=None, preserve_quotes=None):
        # type: (StreamTextType, Optional[VersionType], Optional[bool]) -> None
        self.comment_handling = None
        Reader.__init__(self, stream, loader=self)
        Scanner.__init__(self, loader=self)
        Parser.__init__(self, loader=self)
        Composer.__init__(self, loader=self)
        Constructor.__init__(self, loader=self)
        VersionedResolver.__init__(self, version, loader=self)


class RoundTripLoader(
    Reader,
    RoundTripScanner,
    RoundTripParser,
    Composer,
    RoundTripConstructor,
    VersionedResolver,
):
    def __init__(self, stream, version=None, preserve_quotes=None):
        # type: (StreamTextType, Optional[VersionType], Optional[bool]) -> None
        # self.reader = Reader.__init__(self, stream)
        self.comment_handling = None  # issue 385
        Reader.__init__(self, stream, loader=self)
        RoundTripScanner.__init__(self, loader=self)
        RoundTripParser.__init__(self, loader=self)
        Composer.__init__(self, loader=self)
        RoundTripConstructor.__init__(self, preserve_quotes=preserve_quotes, loader=self)
        VersionedResolver.__init__(self, version, loader=self)
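Each loader is just a mixin stack wired together through `loader=self` back-references. A minimal sketch of driving one directly (assuming `get_single_data()` from the constructor mixin, which is how the top-level load helpers use these classes):

    from ruamel.yaml.loader import RoundTripLoader

    loader = RoundTripLoader("a: 1  # a comment\n")
    data = loader.get_single_data()   # CommentedMap that keeps the comment
    print(data['a'])                  # 1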
1667 lib/spack/external/_vendoring/ruamel/yaml/main.py vendored (file diff suppressed because it is too large)
135 lib/spack/external/_vendoring/ruamel/yaml/nodes.py vendored
@@ -1,135 +0,0 @@
# coding: utf-8

import sys

from ruamel.yaml.compat import _F

if False:  # MYPY
    from typing import Dict, Any, Text  # NOQA


class Node:
    __slots__ = 'tag', 'value', 'start_mark', 'end_mark', 'comment', 'anchor'

    def __init__(self, tag, value, start_mark, end_mark, comment=None, anchor=None):
        # type: (Any, Any, Any, Any, Any, Any) -> None
        self.tag = tag
        self.value = value
        self.start_mark = start_mark
        self.end_mark = end_mark
        self.comment = comment
        self.anchor = anchor

    def __repr__(self):
        # type: () -> Any
        value = self.value
        # if isinstance(value, list):
        #     if len(value) == 0:
        #         value = '<empty>'
        #     elif len(value) == 1:
        #         value = '<1 item>'
        #     else:
        #         value = f'<{len(value)} items>'
        # else:
        #     if len(value) > 75:
        #         value = repr(value[:70]+' ... ')
        #     else:
        #         value = repr(value)
        value = repr(value)
        return _F(
            '{class_name!s}(tag={self_tag!r}, value={value!s})',
            class_name=self.__class__.__name__,
            self_tag=self.tag,
            value=value,
        )

    def dump(self, indent=0):
        # type: (int) -> None
        if isinstance(self.value, str):
            sys.stdout.write(
                '{}{}(tag={!r}, value={!r})\n'.format(
                    ' ' * indent, self.__class__.__name__, self.tag, self.value
                )
            )
            if self.comment:
                sys.stdout.write(' {}comment: {})\n'.format(' ' * indent, self.comment))
            return
        sys.stdout.write(
            '{}{}(tag={!r})\n'.format(' ' * indent, self.__class__.__name__, self.tag)
        )
        if self.comment:
            sys.stdout.write(' {}comment: {})\n'.format(' ' * indent, self.comment))
        for v in self.value:
            if isinstance(v, tuple):
                for v1 in v:
                    v1.dump(indent + 1)
            elif isinstance(v, Node):
                v.dump(indent + 1)
            else:
                sys.stdout.write('Node value type? {}\n'.format(type(v)))


class ScalarNode(Node):
    """
    styles:
      ? -> set() ? key, no value
      " -> double quoted
      ' -> single quoted
      | -> literal style
      > -> folding style
    """

    __slots__ = ('style',)
    id = 'scalar'

    def __init__(
        self, tag, value, start_mark=None, end_mark=None, style=None, comment=None, anchor=None
    ):
        # type: (Any, Any, Any, Any, Any, Any, Any) -> None
        Node.__init__(self, tag, value, start_mark, end_mark, comment=comment, anchor=anchor)
        self.style = style


class CollectionNode(Node):
    __slots__ = ('flow_style',)

    def __init__(
        self,
        tag,
        value,
        start_mark=None,
        end_mark=None,
        flow_style=None,
        comment=None,
        anchor=None,
    ):
        # type: (Any, Any, Any, Any, Any, Any, Any) -> None
        Node.__init__(self, tag, value, start_mark, end_mark, comment=comment)
        self.flow_style = flow_style
        self.anchor = anchor


class SequenceNode(CollectionNode):
    __slots__ = ()
    id = 'sequence'


class MappingNode(CollectionNode):
    __slots__ = ('merge',)
    id = 'mapping'

    def __init__(
        self,
        tag,
        value,
        start_mark=None,
        end_mark=None,
        flow_style=None,
        comment=None,
        anchor=None,
    ):
        # type: (Any, Any, Any, Any, Any, Any, Any) -> None
        CollectionNode.__init__(
            self, tag, value, start_mark, end_mark, flow_style, comment, anchor
        )
        self.merge = None
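The `dump()` helper makes the node tree easy to inspect. A small sketch via the public API (assuming `YAML.compose`, which returns the root node of the composed tree):

    import ruamel.yaml

    yaml = ruamel.yaml.YAML()
    node = yaml.compose("a: [1, 2]\n")   # MappingNode at the root
    node.dump()
    # MappingNode(tag='tag:yaml.org,2002:map')
    #  ScalarNode(tag='tag:yaml.org,2002:str', value='a')
    #  SequenceNode(tag='tag:yaml.org,2002:seq')
    #   ScalarNode(tag='tag:yaml.org,2002:int', value='1')
    #   ScalarNode(tag='tag:yaml.org,2002:int', value='2')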
884 lib/spack/external/_vendoring/ruamel/yaml/parser.py vendored
@@ -1,884 +0,0 @@
# coding: utf-8

# The following YAML grammar is LL(1) and is parsed by a recursive descent
# parser.
#
# stream            ::= STREAM-START implicit_document? explicit_document*
#                       STREAM-END
# implicit_document ::= block_node DOCUMENT-END*
# explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END*
# block_node_or_indentless_sequence ::=
#                       ALIAS
#                       | properties (block_content |
#                       indentless_block_sequence)?
#                       | block_content
#                       | indentless_block_sequence
# block_node        ::= ALIAS
#                       | properties block_content?
#                       | block_content
# flow_node         ::= ALIAS
#                       | properties flow_content?
#                       | flow_content
# properties        ::= TAG ANCHOR? | ANCHOR TAG?
# block_content     ::= block_collection | flow_collection | SCALAR
# flow_content      ::= flow_collection | SCALAR
# block_collection  ::= block_sequence | block_mapping
# flow_collection   ::= flow_sequence | flow_mapping
# block_sequence    ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)*
#                       BLOCK-END
# indentless_sequence ::= (BLOCK-ENTRY block_node?)+
# block_mapping     ::= BLOCK-MAPPING_START
#                       ((KEY block_node_or_indentless_sequence?)?
#                       (VALUE block_node_or_indentless_sequence?)?)*
#                       BLOCK-END
# flow_sequence     ::= FLOW-SEQUENCE-START
#                       (flow_sequence_entry FLOW-ENTRY)*
#                       flow_sequence_entry?
#                       FLOW-SEQUENCE-END
# flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
# flow_mapping      ::= FLOW-MAPPING-START
#                       (flow_mapping_entry FLOW-ENTRY)*
#                       flow_mapping_entry?
#                       FLOW-MAPPING-END
# flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
#
# FIRST sets:
#
# stream: { STREAM-START <}
# explicit_document: { DIRECTIVE DOCUMENT-START }
# implicit_document: FIRST(block_node)
# block_node: { ALIAS TAG ANCHOR SCALAR BLOCK-SEQUENCE-START
#               BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START }
# flow_node: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START }
# block_content: { BLOCK-SEQUENCE-START BLOCK-MAPPING-START
#                  FLOW-SEQUENCE-START FLOW-MAPPING-START SCALAR }
# flow_content: { FLOW-SEQUENCE-START FLOW-MAPPING-START SCALAR }
# block_collection: { BLOCK-SEQUENCE-START BLOCK-MAPPING-START }
# flow_collection: { FLOW-SEQUENCE-START FLOW-MAPPING-START }
# block_sequence: { BLOCK-SEQUENCE-START }
# block_mapping: { BLOCK-MAPPING-START }
# block_node_or_indentless_sequence: { ALIAS ANCHOR TAG SCALAR
#               BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START
#               FLOW-MAPPING-START BLOCK-ENTRY }
# indentless_sequence: { ENTRY }
# flow_collection: { FLOW-SEQUENCE-START FLOW-MAPPING-START }
# flow_sequence: { FLOW-SEQUENCE-START }
# flow_mapping: { FLOW-MAPPING-START }
# flow_sequence_entry: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START
#                        FLOW-MAPPING-START KEY }
# flow_mapping_entry: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START
#                       FLOW-MAPPING-START KEY }
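To make the grammar concrete, here is the production chain a small document exercises (an illustrative hand trace, not generated by the code):

    # a: [b, c]
    #
    # stream -> implicit_document -> block_node -> block_content
    #        -> block_collection -> block_mapping
    #   KEY 'a'   (a scalar block_node_or_indentless_sequence)
    #   VALUE -> flow_node -> flow_content -> flow_collection -> flow_sequence
    #     flow_sequence_entry 'b', flow_sequence_entry 'c'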
# need to use the full path in the import, as pkg_resources tries to load
# parser.py in __init__.py only to do nothing with the package afterwards
# (the same holds for Jython)


from ruamel.yaml.error import MarkedYAMLError
from ruamel.yaml.tokens import *  # NOQA
from ruamel.yaml.events import *  # NOQA
from ruamel.yaml.scanner import Scanner, RoundTripScanner, ScannerError  # NOQA
from ruamel.yaml.scanner import BlankLineComment
from ruamel.yaml.comments import C_PRE, C_POST, C_SPLIT_ON_FIRST_BLANK
from ruamel.yaml.compat import _F, nprint, nprintf  # NOQA

if False:  # MYPY
    from typing import Any, Dict, Optional, List  # NOQA

__all__ = ['Parser', 'RoundTripParser', 'ParserError']


def xprintf(*args, **kw):
    # type: (Any, Any) -> Any
    return nprintf(*args, **kw)
class ParserError(MarkedYAMLError):
    pass


class Parser:
    # Since writing a recursive descent parser is a straightforward task, we
    # do not give many comments here.

    DEFAULT_TAGS = {'!': '!', '!!': 'tag:yaml.org,2002:'}
    def __init__(self, loader):
        # type: (Any) -> None
        self.loader = loader
        if self.loader is not None and getattr(self.loader, '_parser', None) is None:
            self.loader._parser = self
        self.reset_parser()

    def reset_parser(self):
        # type: () -> None
        # Reset the state attributes (to clear self-references)
        self.current_event = self.last_event = None
        self.tag_handles = {}  # type: Dict[Any, Any]
        self.states = []  # type: List[Any]
        self.marks = []  # type: List[Any]
        self.state = self.parse_stream_start  # type: Any

    def dispose(self):
        # type: () -> None
        self.reset_parser()

    @property
    def scanner(self):
        # type: () -> Any
        if hasattr(self.loader, 'typ'):
            return self.loader.scanner
        return self.loader._scanner

    @property
    def resolver(self):
        # type: () -> Any
        if hasattr(self.loader, 'typ'):
            return self.loader.resolver
        return self.loader._resolver

    def check_event(self, *choices):
        # type: (Any) -> bool
        # Check the type of the next event.
        if self.current_event is None:
            if self.state:
                self.current_event = self.state()
        if self.current_event is not None:
            if not choices:
                return True
            for choice in choices:
                if isinstance(self.current_event, choice):
                    return True
        return False

    def peek_event(self):
        # type: () -> Any
        # Get the next event.
        if self.current_event is None:
            if self.state:
                self.current_event = self.state()
        return self.current_event

    def get_event(self):
        # type: () -> Any
        # Get the next event and proceed further.
        if self.current_event is None:
            if self.state:
                self.current_event = self.state()
        # assert self.current_event is not None
        # if self.current_event.end_mark.line != self.peek_event().start_mark.line:
        xprintf('get_event', repr(self.current_event), self.peek_event().start_mark.line)
        self.last_event = value = self.current_event
        self.current_event = None
        return value
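check_event/peek_event/get_event form the pull interface on top of the state machine kept in `self.state`. A minimal sketch, assuming the vendored `Loader` mixin stack from loader.py above:

    from ruamel.yaml.loader import Loader
    from ruamel.yaml.events import StreamEndEvent

    loader = Loader("- 1\n- 2\n")
    while not loader.check_event(StreamEndEvent):
        print(loader.get_event())
    loader.get_event()  # consume the final StreamEndEvent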
    # stream    ::= STREAM-START implicit_document? explicit_document*
    #               STREAM-END
    # implicit_document ::= block_node DOCUMENT-END*
    # explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END*

    def parse_stream_start(self):
        # type: () -> Any
        # Parse the stream start.
        token = self.scanner.get_token()
        self.move_token_comment(token)
        event = StreamStartEvent(token.start_mark, token.end_mark, encoding=token.encoding)

        # Prepare the next state.
        self.state = self.parse_implicit_document_start

        return event

    def parse_implicit_document_start(self):
        # type: () -> Any
        # Parse an implicit document.
        if not self.scanner.check_token(DirectiveToken, DocumentStartToken, StreamEndToken):
            self.tag_handles = self.DEFAULT_TAGS
            token = self.scanner.peek_token()
            start_mark = end_mark = token.start_mark
            event = DocumentStartEvent(start_mark, end_mark, explicit=False)

            # Prepare the next state.
            self.states.append(self.parse_document_end)
            self.state = self.parse_block_node

            return event

        else:
            return self.parse_document_start()

    def parse_document_start(self):
        # type: () -> Any
        # Parse any extra document end indicators.
        while self.scanner.check_token(DocumentEndToken):
            self.scanner.get_token()
        # Parse an explicit document.
        if not self.scanner.check_token(StreamEndToken):
            version, tags = self.process_directives()
            if not self.scanner.check_token(DocumentStartToken):
                raise ParserError(
                    None,
                    None,
                    _F(
                        "expected '<document start>', but found {pt!r}",
                        pt=self.scanner.peek_token().id,
                    ),
                    self.scanner.peek_token().start_mark,
                )
            token = self.scanner.get_token()
            start_mark = token.start_mark
            end_mark = token.end_mark
            # if self.loader is not None and \
            #    end_mark.line != self.scanner.peek_token().start_mark.line:
            #     self.loader.scalar_after_indicator = False
            event = DocumentStartEvent(
                start_mark, end_mark, explicit=True, version=version, tags=tags,
                comment=token.comment
            )  # type: Any
            self.states.append(self.parse_document_end)
            self.state = self.parse_document_content
        else:
            # Parse the end of the stream.
            token = self.scanner.get_token()
            event = StreamEndEvent(token.start_mark, token.end_mark, comment=token.comment)
            assert not self.states
            assert not self.marks
            self.state = None
        return event

    def parse_document_end(self):
        # type: () -> Any
        # Parse the document end.
        token = self.scanner.peek_token()
        start_mark = end_mark = token.start_mark
        explicit = False
        if self.scanner.check_token(DocumentEndToken):
            token = self.scanner.get_token()
            end_mark = token.end_mark
            explicit = True
        event = DocumentEndEvent(start_mark, end_mark, explicit=explicit)

        # Prepare the next state.
        if self.resolver.processing_version == (1, 1):
            self.state = self.parse_document_start
        else:
            self.state = self.parse_implicit_document_start

        return event
    def parse_document_content(self):
        # type: () -> Any
        if self.scanner.check_token(
            DirectiveToken, DocumentStartToken, DocumentEndToken, StreamEndToken
        ):
            event = self.process_empty_scalar(self.scanner.peek_token().start_mark)
            self.state = self.states.pop()
            return event
        else:
            return self.parse_block_node()

    def process_directives(self):
        # type: () -> Any
        yaml_version = None
        self.tag_handles = {}
        while self.scanner.check_token(DirectiveToken):
            token = self.scanner.get_token()
            if token.name == 'YAML':
                if yaml_version is not None:
                    raise ParserError(
                        None, None, 'found duplicate YAML directive', token.start_mark
                    )
                major, minor = token.value
                if major != 1:
                    raise ParserError(
                        None,
                        None,
                        'found incompatible YAML document (version 1.* is required)',
                        token.start_mark,
                    )
                yaml_version = token.value
            elif token.name == 'TAG':
                handle, prefix = token.value
                if handle in self.tag_handles:
                    raise ParserError(
                        None,
                        None,
                        _F('duplicate tag handle {handle!r}', handle=handle),
                        token.start_mark,
                    )
                self.tag_handles[handle] = prefix
        if bool(self.tag_handles):
            value = yaml_version, self.tag_handles.copy()  # type: Any
        else:
            value = yaml_version, None
        if self.loader is not None and hasattr(self.loader, 'tags'):
            self.loader.version = yaml_version
            if self.loader.tags is None:
                self.loader.tags = {}
            for k in self.tag_handles:
                self.loader.tags[k] = self.tag_handles[k]
        for key in self.DEFAULT_TAGS:
            if key not in self.tag_handles:
                self.tag_handles[key] = self.DEFAULT_TAGS[key]
        return value
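process_directives records `%YAML` and `%TAG` directives before the document starts; the handles collected here feed `transform_tag` below. A short sketch via the public API (assuming `YAML.parse` yields events whose `.tag` is already expanded):

    import ruamel.yaml

    doc = "%TAG !e! tag:example.com,2019:\n--- !e!point\nx: 1\n"
    yaml = ruamel.yaml.YAML()
    for ev in yaml.parse(doc):
        if getattr(ev, 'tag', None):
            print(ev.tag)   # tag:example.com,2019:point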
    # block_node_or_indentless_sequence ::= ALIAS
    #               | properties (block_content | indentless_block_sequence)?
    #               | block_content
    #               | indentless_block_sequence
    # block_node    ::= ALIAS
    #                   | properties block_content?
    #                   | block_content
    # flow_node     ::= ALIAS
    #                   | properties flow_content?
    #                   | flow_content
    # properties    ::= TAG ANCHOR? | ANCHOR TAG?
    # block_content     ::= block_collection | flow_collection | SCALAR
    # flow_content      ::= flow_collection | SCALAR
    # block_collection  ::= block_sequence | block_mapping
    # flow_collection   ::= flow_sequence | flow_mapping

    def parse_block_node(self):
        # type: () -> Any
        return self.parse_node(block=True)

    def parse_flow_node(self):
        # type: () -> Any
        return self.parse_node()

    def parse_block_node_or_indentless_sequence(self):
        # type: () -> Any
        return self.parse_node(block=True, indentless_sequence=True)

    def transform_tag(self, handle, suffix):
        # type: (Any, Any) -> Any
        return self.tag_handles[handle] + suffix

    def parse_node(self, block=False, indentless_sequence=False):
        # type: (bool, bool) -> Any
        if self.scanner.check_token(AliasToken):
            token = self.scanner.get_token()
            event = AliasEvent(token.value, token.start_mark, token.end_mark)  # type: Any
            self.state = self.states.pop()
            return event

        anchor = None
        tag = None
        start_mark = end_mark = tag_mark = None
        if self.scanner.check_token(AnchorToken):
            token = self.scanner.get_token()
            self.move_token_comment(token)
            start_mark = token.start_mark
            end_mark = token.end_mark
            anchor = token.value
            if self.scanner.check_token(TagToken):
                token = self.scanner.get_token()
                tag_mark = token.start_mark
                end_mark = token.end_mark
                tag = token.value
        elif self.scanner.check_token(TagToken):
            token = self.scanner.get_token()
            start_mark = tag_mark = token.start_mark
            end_mark = token.end_mark
            tag = token.value
            if self.scanner.check_token(AnchorToken):
                token = self.scanner.get_token()
                start_mark = tag_mark = token.start_mark
                end_mark = token.end_mark
                anchor = token.value
        if tag is not None:
            handle, suffix = tag
            if handle is not None:
                if handle not in self.tag_handles:
                    raise ParserError(
                        'while parsing a node',
                        start_mark,
                        _F('found undefined tag handle {handle!r}', handle=handle),
                        tag_mark,
                    )
                tag = self.transform_tag(handle, suffix)
            else:
                tag = suffix
        # if tag == '!':
        #     raise ParserError("while parsing a node", start_mark,
        #             "found non-specific tag '!'", tag_mark,
        #             "Please check 'http://pyyaml.org/wiki/YAMLNonSpecificTag'
        #             and share your opinion.")
        if start_mark is None:
            start_mark = end_mark = self.scanner.peek_token().start_mark
        event = None
        implicit = tag is None or tag == '!'
        if indentless_sequence and self.scanner.check_token(BlockEntryToken):
            comment = None
            pt = self.scanner.peek_token()
            if self.loader and self.loader.comment_handling is None:
                if pt.comment and pt.comment[0]:
                    comment = [pt.comment[0], []]
                    pt.comment[0] = None
            elif self.loader:
                if pt.comment:
                    comment = pt.comment
            end_mark = self.scanner.peek_token().end_mark
            event = SequenceStartEvent(
                anchor, tag, implicit, start_mark, end_mark, flow_style=False, comment=comment
            )
            self.state = self.parse_indentless_sequence_entry
            return event

        if self.scanner.check_token(ScalarToken):
            token = self.scanner.get_token()
            # self.scanner.peek_token_same_line_comment(token)
            end_mark = token.end_mark
            if (token.plain and tag is None) or tag == '!':
                implicit = (True, False)
            elif tag is None:
                implicit = (False, True)
            else:
                implicit = (False, False)
            # nprint('se', token.value, token.comment)
            event = ScalarEvent(
                anchor,
                tag,
                implicit,
                token.value,
                start_mark,
                end_mark,
                style=token.style,
                comment=token.comment,
            )
            self.state = self.states.pop()
        elif self.scanner.check_token(FlowSequenceStartToken):
            pt = self.scanner.peek_token()
            end_mark = pt.end_mark
            event = SequenceStartEvent(
                anchor,
                tag,
                implicit,
                start_mark,
                end_mark,
                flow_style=True,
                comment=pt.comment,
            )
            self.state = self.parse_flow_sequence_first_entry
        elif self.scanner.check_token(FlowMappingStartToken):
            pt = self.scanner.peek_token()
            end_mark = pt.end_mark
            event = MappingStartEvent(
                anchor,
                tag,
                implicit,
                start_mark,
                end_mark,
                flow_style=True,
                comment=pt.comment,
            )
            self.state = self.parse_flow_mapping_first_key
        elif block and self.scanner.check_token(BlockSequenceStartToken):
            end_mark = self.scanner.peek_token().start_mark
            # should inserting the comment be dependent on the
            # indentation?
            pt = self.scanner.peek_token()
            comment = pt.comment
            # nprint('pt0', type(pt))
            if comment is None or comment[1] is None:
                comment = pt.split_old_comment()
            # nprint('pt1', comment)
            event = SequenceStartEvent(
                anchor, tag, implicit, start_mark, end_mark, flow_style=False, comment=comment
            )
            self.state = self.parse_block_sequence_first_entry
        elif block and self.scanner.check_token(BlockMappingStartToken):
            end_mark = self.scanner.peek_token().start_mark
            comment = self.scanner.peek_token().comment
            event = MappingStartEvent(
                anchor, tag, implicit, start_mark, end_mark, flow_style=False, comment=comment
            )
            self.state = self.parse_block_mapping_first_key
        elif anchor is not None or tag is not None:
            # Empty scalars are allowed even if a tag or an anchor is
            # specified.
            event = ScalarEvent(anchor, tag, (implicit, False), "", start_mark, end_mark)
            self.state = self.states.pop()
        else:
            if block:
                node = 'block'
            else:
                node = 'flow'
            token = self.scanner.peek_token()
            raise ParserError(
                _F('while parsing a {node!s} node', node=node),
                start_mark,
                _F('expected the node content, but found {token_id!r}', token_id=token.id),
                token.start_mark,
            )
        return event
    # block_sequence ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)*
    #                    BLOCK-END

    def parse_block_sequence_first_entry(self):
        # type: () -> Any
        token = self.scanner.get_token()
        # move any comment from start token
        # self.move_token_comment(token)
        self.marks.append(token.start_mark)
        return self.parse_block_sequence_entry()

    def parse_block_sequence_entry(self):
        # type: () -> Any
        if self.scanner.check_token(BlockEntryToken):
            token = self.scanner.get_token()
            self.move_token_comment(token)
            if not self.scanner.check_token(BlockEntryToken, BlockEndToken):
                self.states.append(self.parse_block_sequence_entry)
                return self.parse_block_node()
            else:
                self.state = self.parse_block_sequence_entry
                return self.process_empty_scalar(token.end_mark)
        if not self.scanner.check_token(BlockEndToken):
            token = self.scanner.peek_token()
            raise ParserError(
                'while parsing a block collection',
                self.marks[-1],
                _F('expected <block end>, but found {token_id!r}', token_id=token.id),
                token.start_mark,
            )
        token = self.scanner.get_token()  # BlockEndToken
        event = SequenceEndEvent(token.start_mark, token.end_mark, comment=token.comment)
        self.state = self.states.pop()
        self.marks.pop()
        return event

    # indentless_sequence ::= (BLOCK-ENTRY block_node?)+

    # indentless_sequence?
    # sequence:
    # - entry
    #  - nested

    def parse_indentless_sequence_entry(self):
        # type: () -> Any
        if self.scanner.check_token(BlockEntryToken):
            token = self.scanner.get_token()
            self.move_token_comment(token)
            if not self.scanner.check_token(
                BlockEntryToken, KeyToken, ValueToken, BlockEndToken
            ):
                self.states.append(self.parse_indentless_sequence_entry)
                return self.parse_block_node()
            else:
                self.state = self.parse_indentless_sequence_entry
                return self.process_empty_scalar(token.end_mark)
        token = self.scanner.peek_token()
        c = None
        if self.loader and self.loader.comment_handling is None:
            c = token.comment
            start_mark = token.start_mark
        else:
            start_mark = self.last_event.end_mark  # type: ignore
            c = self.distribute_comment(token.comment, start_mark.line)  # type: ignore
        event = SequenceEndEvent(start_mark, start_mark, comment=c)
        self.state = self.states.pop()
        return event
    # block_mapping ::= BLOCK-MAPPING_START
    #                   ((KEY block_node_or_indentless_sequence?)?
    #                   (VALUE block_node_or_indentless_sequence?)?)*
    #                   BLOCK-END

    def parse_block_mapping_first_key(self):
        # type: () -> Any
        token = self.scanner.get_token()
        self.marks.append(token.start_mark)
        return self.parse_block_mapping_key()

    def parse_block_mapping_key(self):
        # type: () -> Any
        if self.scanner.check_token(KeyToken):
            token = self.scanner.get_token()
            self.move_token_comment(token)
            if not self.scanner.check_token(KeyToken, ValueToken, BlockEndToken):
                self.states.append(self.parse_block_mapping_value)
                return self.parse_block_node_or_indentless_sequence()
            else:
                self.state = self.parse_block_mapping_value
                return self.process_empty_scalar(token.end_mark)
        if self.resolver.processing_version > (1, 1) and self.scanner.check_token(ValueToken):
            self.state = self.parse_block_mapping_value
            return self.process_empty_scalar(self.scanner.peek_token().start_mark)
        if not self.scanner.check_token(BlockEndToken):
            token = self.scanner.peek_token()
            raise ParserError(
                'while parsing a block mapping',
                self.marks[-1],
                _F('expected <block end>, but found {token_id!r}', token_id=token.id),
                token.start_mark,
            )
        token = self.scanner.get_token()
        self.move_token_comment(token)
        event = MappingEndEvent(token.start_mark, token.end_mark, comment=token.comment)
        self.state = self.states.pop()
        self.marks.pop()
        return event

    def parse_block_mapping_value(self):
        # type: () -> Any
        if self.scanner.check_token(ValueToken):
            token = self.scanner.get_token()
            # the value token might have a post comment; move it to e.g. the block
            if self.scanner.check_token(ValueToken):
                self.move_token_comment(token)
            else:
                if not self.scanner.check_token(KeyToken):
                    self.move_token_comment(token, empty=True)
                # else: empty value for this key, cannot move token.comment
            if not self.scanner.check_token(KeyToken, ValueToken, BlockEndToken):
                self.states.append(self.parse_block_mapping_key)
                return self.parse_block_node_or_indentless_sequence()
            else:
                self.state = self.parse_block_mapping_key
                comment = token.comment
                if comment is None:
                    token = self.scanner.peek_token()
                    comment = token.comment
                    if comment:
                        token._comment = [None, comment[1]]
                        comment = [comment[0], None]
                return self.process_empty_scalar(token.end_mark, comment=comment)
        else:
            self.state = self.parse_block_mapping_key
            token = self.scanner.peek_token()
            return self.process_empty_scalar(token.start_mark)
    # flow_sequence ::= FLOW-SEQUENCE-START
    #                   (flow_sequence_entry FLOW-ENTRY)*
    #                   flow_sequence_entry?
    #                   FLOW-SEQUENCE-END
    # flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
    #
    # Note that while production rules for both flow_sequence_entry and
    # flow_mapping_entry are equal, their interpretations are different.
    # For `flow_sequence_entry`, the part `KEY flow_node? (VALUE flow_node?)?`
    # generates an inline mapping (set syntax).

    def parse_flow_sequence_first_entry(self):
        # type: () -> Any
        token = self.scanner.get_token()
        self.marks.append(token.start_mark)
        return self.parse_flow_sequence_entry(first=True)

    def parse_flow_sequence_entry(self, first=False):
        # type: (bool) -> Any
        if not self.scanner.check_token(FlowSequenceEndToken):
            if not first:
                if self.scanner.check_token(FlowEntryToken):
                    self.scanner.get_token()
                else:
                    token = self.scanner.peek_token()
                    raise ParserError(
                        'while parsing a flow sequence',
                        self.marks[-1],
                        _F("expected ',' or ']', but got {token_id!r}", token_id=token.id),
                        token.start_mark,
                    )

            if self.scanner.check_token(KeyToken):
                token = self.scanner.peek_token()
                event = MappingStartEvent(
                    None, None, True, token.start_mark, token.end_mark, flow_style=True
                )  # type: Any
                self.state = self.parse_flow_sequence_entry_mapping_key
                return event
            elif not self.scanner.check_token(FlowSequenceEndToken):
                self.states.append(self.parse_flow_sequence_entry)
                return self.parse_flow_node()
        token = self.scanner.get_token()
        event = SequenceEndEvent(token.start_mark, token.end_mark, comment=token.comment)
        self.state = self.states.pop()
        self.marks.pop()
        return event

    def parse_flow_sequence_entry_mapping_key(self):
        # type: () -> Any
        token = self.scanner.get_token()
        if not self.scanner.check_token(ValueToken, FlowEntryToken, FlowSequenceEndToken):
            self.states.append(self.parse_flow_sequence_entry_mapping_value)
            return self.parse_flow_node()
        else:
            self.state = self.parse_flow_sequence_entry_mapping_value
            return self.process_empty_scalar(token.end_mark)

    def parse_flow_sequence_entry_mapping_value(self):
        # type: () -> Any
        if self.scanner.check_token(ValueToken):
            token = self.scanner.get_token()
            if not self.scanner.check_token(FlowEntryToken, FlowSequenceEndToken):
                self.states.append(self.parse_flow_sequence_entry_mapping_end)
                return self.parse_flow_node()
            else:
                self.state = self.parse_flow_sequence_entry_mapping_end
                return self.process_empty_scalar(token.end_mark)
        else:
            self.state = self.parse_flow_sequence_entry_mapping_end
            token = self.scanner.peek_token()
            return self.process_empty_scalar(token.start_mark)

    def parse_flow_sequence_entry_mapping_end(self):
        # type: () -> Any
        self.state = self.parse_flow_sequence_entry
        token = self.scanner.peek_token()
        return MappingEndEvent(token.start_mark, token.start_mark)
    # flow_mapping ::= FLOW-MAPPING-START
    #                  (flow_mapping_entry FLOW-ENTRY)*
    #                  flow_mapping_entry?
    #                  FLOW-MAPPING-END
    # flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?

    def parse_flow_mapping_first_key(self):
        # type: () -> Any
        token = self.scanner.get_token()
        self.marks.append(token.start_mark)
        return self.parse_flow_mapping_key(first=True)

    def parse_flow_mapping_key(self, first=False):
        # type: (Any) -> Any
        if not self.scanner.check_token(FlowMappingEndToken):
            if not first:
                if self.scanner.check_token(FlowEntryToken):
                    self.scanner.get_token()
                else:
                    token = self.scanner.peek_token()
                    raise ParserError(
                        'while parsing a flow mapping',
                        self.marks[-1],
                        _F("expected ',' or '}}', but got {token_id!r}", token_id=token.id),
                        token.start_mark,
                    )
            if self.scanner.check_token(KeyToken):
                token = self.scanner.get_token()
                if not self.scanner.check_token(
                    ValueToken, FlowEntryToken, FlowMappingEndToken
                ):
                    self.states.append(self.parse_flow_mapping_value)
                    return self.parse_flow_node()
                else:
                    self.state = self.parse_flow_mapping_value
                    return self.process_empty_scalar(token.end_mark)
            elif self.resolver.processing_version > (1, 1) and self.scanner.check_token(
                ValueToken
            ):
                self.state = self.parse_flow_mapping_value
                return self.process_empty_scalar(self.scanner.peek_token().end_mark)
            elif not self.scanner.check_token(FlowMappingEndToken):
                self.states.append(self.parse_flow_mapping_empty_value)
                return self.parse_flow_node()
        token = self.scanner.get_token()
        event = MappingEndEvent(token.start_mark, token.end_mark, comment=token.comment)
        self.state = self.states.pop()
        self.marks.pop()
        return event

    def parse_flow_mapping_value(self):
        # type: () -> Any
        if self.scanner.check_token(ValueToken):
            token = self.scanner.get_token()
            if not self.scanner.check_token(FlowEntryToken, FlowMappingEndToken):
                self.states.append(self.parse_flow_mapping_key)
                return self.parse_flow_node()
            else:
                self.state = self.parse_flow_mapping_key
                return self.process_empty_scalar(token.end_mark)
        else:
            self.state = self.parse_flow_mapping_key
            token = self.scanner.peek_token()
            return self.process_empty_scalar(token.start_mark)

    def parse_flow_mapping_empty_value(self):
        # type: () -> Any
        self.state = self.parse_flow_mapping_key
        return self.process_empty_scalar(self.scanner.peek_token().start_mark)
    def process_empty_scalar(self, mark, comment=None):
        # type: (Any, Any) -> Any
        return ScalarEvent(None, None, (True, False), "", mark, mark, comment=comment)

    def move_token_comment(self, token, nt=None, empty=False):
        # type: (Any, Optional[Any], Optional[bool]) -> Any
        pass


class RoundTripParser(Parser):
    """roundtrip is a safe loader that wants to see the unmangled tag"""

    def transform_tag(self, handle, suffix):
        # type: (Any, Any) -> Any
        # return self.tag_handles[handle]+suffix
        if handle == '!!' and suffix in (
            'null',
            'bool',
            'int',
            'float',
            'binary',
            'timestamp',
            'omap',
            'pairs',
            'set',
            'str',
            'seq',
            'map',
        ):
            return Parser.transform_tag(self, handle, suffix)
        return handle + suffix

    def move_token_comment(self, token, nt=None, empty=False):
        # type: (Any, Optional[Any], Optional[bool]) -> Any
        token.move_old_comment(self.scanner.peek_token() if nt is None else nt, empty=empty)
class RoundTripParserSC(RoundTripParser):
    """roundtrip is a safe loader that wants to see the unmangled tag"""

    # some of the differences are based on the superclass testing
    # if self.loader.comment_handling is not None

    def move_token_comment(self, token, nt=None, empty=False):
        # type: (Any, Optional[Any], Optional[bool]) -> None
        token.move_new_comment(self.scanner.peek_token() if nt is None else nt, empty=empty)

    def distribute_comment(self, comment, line):
        # type: (Any, Any) -> Any
        # ToDo, look at indentation of the comment to determine attachment
        if comment is None:
            return None
        if not comment[0]:
            return None
        if comment[0][0] != line + 1:
            nprintf('>>>dcxxx', comment, line)
        assert comment[0][0] == line + 1
        # if comment[0] - line > 1:
        #     return
        typ = self.loader.comment_handling & 0b11
        # nprintf('>>>dca', comment, line, typ)
        if typ == C_POST:
            return None
        if typ == C_PRE:
            c = [None, None, comment[0]]
            comment[0] = None
            return c
        # nprintf('>>>dcb', comment[0])
        for _idx, cmntidx in enumerate(comment[0]):
            # nprintf('>>>dcb', cmntidx)
            if isinstance(self.scanner.comments[cmntidx], BlankLineComment):
                break
        else:
            return None  # no space found
        if _idx == 0:
            return None  # first line was blank
        # nprintf('>>>dcc', idx)
        if typ == C_SPLIT_ON_FIRST_BLANK:
            c = [None, None, comment[0][:_idx]]
            comment[0] = comment[0][_idx:]
            return c
        raise NotImplementedError  # reserved
302 lib/spack/external/_vendoring/ruamel/yaml/reader.py vendored
@@ -1,302 +0,0 @@
# coding: utf-8

# This module contains abstractions for the input stream. You don't have to
# look further; there is no pretty code here.
#
# We define two classes here.
#
# Mark(source, line, column)
# It's just a record and its only use is producing nice error messages.
# Parser does not use it for any other purposes.
#
# Reader(source, data)
# Reader determines the encoding of `data` and converts it to unicode.
# Reader provides the following methods and attributes:
#   reader.peek(length=1) - return the next `length` characters
#   reader.forward(length=1) - move the current position `length`
#      characters forward.
#   reader.index - the number of the current character.
#   reader.line, reader.column - the line and the column of the current
#      character.

import codecs

from ruamel.yaml.error import YAMLError, FileMark, StringMark, YAMLStreamError
from ruamel.yaml.compat import _F  # NOQA
from ruamel.yaml.util import RegExp

if False:  # MYPY
    from typing import Any, Dict, Optional, List, Union, Text, Tuple  # NOQA
    # from ruamel.yaml.compat import StreamTextType  # NOQA

__all__ = ['Reader', 'ReaderError']
class ReaderError(YAMLError):
|
||||
def __init__(self, name, position, character, encoding, reason):
|
||||
# type: (Any, Any, Any, Any, Any) -> None
|
||||
self.name = name
|
||||
self.character = character
|
||||
self.position = position
|
||||
self.encoding = encoding
|
||||
self.reason = reason
|
||||
|
||||
def __str__(self):
|
||||
# type: () -> Any
|
||||
if isinstance(self.character, bytes):
|
||||
return _F(
|
||||
"'{self_encoding!s}' codec can't decode byte #x{ord_self_character:02x}: "
|
||||
'{self_reason!s}\n'
|
||||
' in "{self_name!s}", position {self_position:d}',
|
||||
self_encoding=self.encoding,
|
||||
ord_self_character=ord(self.character),
|
||||
self_reason=self.reason,
|
||||
self_name=self.name,
|
||||
self_position=self.position,
|
||||
)
|
||||
else:
|
||||
return _F(
|
||||
'unacceptable character #x{self_character:04x}: {self_reason!s}\n'
|
||||
' in "{self_name!s}", position {self_position:d}',
|
||||
self_character=self.character,
|
||||
self_reason=self.reason,
|
||||
self_name=self.name,
|
||||
self_position=self.position,
|
||||
)
|
||||
|
||||
|
||||
class Reader:
    # Reader:
    # - determines the data encoding and converts it to a unicode string,
    # - checks if characters are in the allowed range,
    # - adds '\0' to the end.

    # Reader accepts
    #  - a `bytes` object,
    #  - a `str` object,
    #  - a file-like object with its `read` method returning `str`,
    #  - a file-like object with its `read` method returning `unicode`.

    # Yeah, it's ugly and slow.

    def __init__(self, stream, loader=None):
        # type: (Any, Any) -> None
        self.loader = loader
        if self.loader is not None and getattr(self.loader, '_reader', None) is None:
            self.loader._reader = self
        self.reset_reader()
        self.stream = stream  # type: Any  # as .read is called

    def reset_reader(self):
        # type: () -> None
        self.name = None  # type: Any
        self.stream_pointer = 0
        self.eof = True
        self.buffer = ""
        self.pointer = 0
        self.raw_buffer = None  # type: Any
        self.raw_decode = None
        self.encoding = None  # type: Optional[Text]
        self.index = 0
        self.line = 0
        self.column = 0

    @property
    def stream(self):
        # type: () -> Any
        try:
            return self._stream
        except AttributeError:
            raise YAMLStreamError('input stream needs to be specified')

    @stream.setter
    def stream(self, val):
        # type: (Any) -> None
        if val is None:
            return
        self._stream = None
        if isinstance(val, str):
            self.name = '<unicode string>'
            self.check_printable(val)
            self.buffer = val + '\0'
        elif isinstance(val, bytes):
            self.name = '<byte string>'
            self.raw_buffer = val
            self.determine_encoding()
        else:
            if not hasattr(val, 'read'):
                raise YAMLStreamError('stream argument needs to have a read() method')
            self._stream = val
            self.name = getattr(self.stream, 'name', '<file>')
            self.eof = False
            self.raw_buffer = None
            self.determine_encoding()
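A minimal sketch of the cursor the Reader maintains (index, line and column advance as you `forward()` past characters):

    from ruamel.yaml.reader import Reader

    r = Reader("ab\ncd")
    print(r.peek(), r.prefix(2))        # a ab
    r.forward(3)                        # past 'a', 'b' and the newline
    print(r.line, r.column, r.peek())   # 1 0 c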
    def peek(self, index=0):
        # type: (int) -> Text
        try:
            return self.buffer[self.pointer + index]
        except IndexError:
            self.update(index + 1)
            return self.buffer[self.pointer + index]

    def prefix(self, length=1):
        # type: (int) -> Any
        if self.pointer + length >= len(self.buffer):
            self.update(length)
        return self.buffer[self.pointer : self.pointer + length]

    def forward_1_1(self, length=1):
        # type: (int) -> None
        if self.pointer + length + 1 >= len(self.buffer):
            self.update(length + 1)
        while length != 0:
            ch = self.buffer[self.pointer]
            self.pointer += 1
            self.index += 1
            if ch in '\n\x85\u2028\u2029' or (
                ch == '\r' and self.buffer[self.pointer] != '\n'
            ):
                self.line += 1
                self.column = 0
            elif ch != '\uFEFF':
                self.column += 1
            length -= 1

    def forward(self, length=1):
        # type: (int) -> None
        if self.pointer + length + 1 >= len(self.buffer):
            self.update(length + 1)
        while length != 0:
            ch = self.buffer[self.pointer]
            self.pointer += 1
            self.index += 1
            if ch == '\n' or (ch == '\r' and self.buffer[self.pointer] != '\n'):
                self.line += 1
                self.column = 0
            elif ch != '\uFEFF':
                self.column += 1
            length -= 1

    def get_mark(self):
        # type: () -> Any
        if self.stream is None:
            return StringMark(
                self.name, self.index, self.line, self.column, self.buffer, self.pointer
            )
        else:
            return FileMark(self.name, self.index, self.line, self.column)
    def determine_encoding(self):
        # type: () -> None
        while not self.eof and (self.raw_buffer is None or len(self.raw_buffer) < 2):
            self.update_raw()
        if isinstance(self.raw_buffer, bytes):
            if self.raw_buffer.startswith(codecs.BOM_UTF16_LE):
                self.raw_decode = codecs.utf_16_le_decode  # type: ignore
                self.encoding = 'utf-16-le'
            elif self.raw_buffer.startswith(codecs.BOM_UTF16_BE):
                self.raw_decode = codecs.utf_16_be_decode  # type: ignore
                self.encoding = 'utf-16-be'
            else:
                self.raw_decode = codecs.utf_8_decode  # type: ignore
                self.encoding = 'utf-8'
        self.update(1)

    NON_PRINTABLE = RegExp(
        '[^\x09\x0A\x0D\x20-\x7E\x85' '\xA0-\uD7FF' '\uE000-\uFFFD' '\U00010000-\U0010FFFF' ']'
    )

    _printable_ascii = ('\x09\x0A\x0D' + "".join(map(chr, range(0x20, 0x7F)))).encode('ascii')

    @classmethod
    def _get_non_printable_ascii(cls, data):  # type: ignore
        # type: (Text, bytes) -> Optional[Tuple[int, Text]]
        ascii_bytes = data.encode('ascii')  # type: ignore
        non_printables = ascii_bytes.translate(None, cls._printable_ascii)  # type: ignore
        if not non_printables:
            return None
        non_printable = non_printables[:1]
        return ascii_bytes.index(non_printable), non_printable.decode('ascii')

    @classmethod
    def _get_non_printable_regex(cls, data):
        # type: (Text) -> Optional[Tuple[int, Text]]
        match = cls.NON_PRINTABLE.search(data)
        if not bool(match):
            return None
        return match.start(), match.group()

    @classmethod
    def _get_non_printable(cls, data):
        # type: (Text) -> Optional[Tuple[int, Text]]
        try:
            return cls._get_non_printable_ascii(data)  # type: ignore
        except UnicodeEncodeError:
            return cls._get_non_printable_regex(data)

    def check_printable(self, data):
        # type: (Any) -> None
        non_printable_match = self._get_non_printable(data)
        if non_printable_match is not None:
            start, character = non_printable_match
            position = self.index + (len(self.buffer) - self.pointer) + start
            raise ReaderError(
                self.name,
                position,
                ord(character),
                'unicode',
                'special characters are not allowed',
            )
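check_printable rejects control characters early, before the scanner sees them. A quick illustration:

    from ruamel.yaml.reader import Reader, ReaderError

    try:
        Reader("ok\x01oops")   # the str setter calls check_printable()
    except ReaderError as exc:
        print(exc)   # unacceptable character #x0001: special characters are not allowed ...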
    def update(self, length):
        # type: (int) -> None
        if self.raw_buffer is None:
            return
        self.buffer = self.buffer[self.pointer :]
        self.pointer = 0
        while len(self.buffer) < length:
            if not self.eof:
                self.update_raw()
            if self.raw_decode is not None:
                try:
                    data, converted = self.raw_decode(self.raw_buffer, 'strict', self.eof)
                except UnicodeDecodeError as exc:
                    character = self.raw_buffer[exc.start]
                    if self.stream is not None:
                        position = self.stream_pointer - len(self.raw_buffer) + exc.start
                    else:
                        position = exc.start
                    raise ReaderError(self.name, position, character, exc.encoding, exc.reason)
            else:
                data = self.raw_buffer
                converted = len(data)
            self.check_printable(data)
            self.buffer += data
            self.raw_buffer = self.raw_buffer[converted:]
            if self.eof:
                self.buffer += '\0'
                self.raw_buffer = None
                break

    def update_raw(self, size=None):
        # type: (Optional[int]) -> None
        if size is None:
            size = 4096
        data = self.stream.read(size)
        if self.raw_buffer is None:
            self.raw_buffer = data
        else:
            self.raw_buffer += data
        self.stream_pointer += len(data)
        if not data:
            self.eof = True


# try:
#     import psyco
#     psyco.bind(Reader)
# except ImportError:
#     pass
1156 lib/spack/external/_vendoring/ruamel/yaml/representer.py vendored (file diff suppressed because it is too large)
@@ -1,47 +0,0 @@
# coding: utf-8

"""
You cannot subclass bool, yet subclassing is necessary for round-tripping
anchored bool values (and also if you want to preserve the original way of
writing).

bool.__bases__ is type 'int', so that is what is used as the basis for
ScalarBoolean as well.

You can use these in an if statement, but not when testing for identity
(they are not the singletons True/False).
"""

from ruamel.yaml.anchor import Anchor

if False:  # MYPY
    from typing import Text, Any, Dict, List  # NOQA

__all__ = ['ScalarBoolean']


class ScalarBoolean(int):
    def __new__(cls, *args, **kw):
        # type: (Any, Any, Any) -> Any
        anchor = kw.pop('anchor', None)
        b = int.__new__(cls, *args, **kw)
        if anchor is not None:
            b.yaml_set_anchor(anchor, always_dump=True)
        return b

    @property
    def anchor(self):
        # type: () -> Any
        if not hasattr(self, Anchor.attrib):
            setattr(self, Anchor.attrib, Anchor())
        return getattr(self, Anchor.attrib)

    def yaml_anchor(self, any=False):
        # type: (bool) -> Any
        if not hasattr(self, Anchor.attrib):
            return None
        if any or self.anchor.always_dump:
            return self.anchor
        return None

    def yaml_set_anchor(self, value, always_dump=False):
        # type: (Any, bool) -> None
        self.anchor.value = value
        self.anchor.always_dump = always_dump
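A short sketch of the behavior (assuming the module name `ruamel.yaml.scalarbool`, matching this vendored file):

    from ruamel.yaml.scalarbool import ScalarBoolean

    b = ScalarBoolean(True, anchor='flag')
    print(bool(b), b == True, b is True)   # True True False
    print(b.yaml_anchor().value)           # flag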
@@ -1,124 +0,0 @@
# coding: utf-8

import sys
from ruamel.yaml.anchor import Anchor

if False:  # MYPY
    from typing import Text, Any, Dict, List  # NOQA

__all__ = ['ScalarFloat', 'ExponentialFloat', 'ExponentialCapsFloat']


class ScalarFloat(float):
    def __new__(cls, *args, **kw):
        # type: (Any, Any, Any) -> Any
        width = kw.pop('width', None)
        prec = kw.pop('prec', None)
        m_sign = kw.pop('m_sign', None)
        m_lead0 = kw.pop('m_lead0', 0)
        exp = kw.pop('exp', None)
        e_width = kw.pop('e_width', None)
        e_sign = kw.pop('e_sign', None)
        underscore = kw.pop('underscore', None)
        anchor = kw.pop('anchor', None)
        v = float.__new__(cls, *args, **kw)
        v._width = width
        v._prec = prec
        v._m_sign = m_sign
        v._m_lead0 = m_lead0
        v._exp = exp
        v._e_width = e_width
        v._e_sign = e_sign
        v._underscore = underscore
        if anchor is not None:
            v.yaml_set_anchor(anchor, always_dump=True)
        return v

    def __iadd__(self, a):  # type: ignore
        # type: (Any) -> Any
        return float(self) + a
        x = type(self)(self + a)
        x._width = self._width
        x._underscore = self._underscore[:] if self._underscore is not None else None  # NOQA
        return x

    def __ifloordiv__(self, a):  # type: ignore
        # type: (Any) -> Any
        return float(self) // a
        x = type(self)(self // a)
        x._width = self._width
        x._underscore = self._underscore[:] if self._underscore is not None else None  # NOQA
        return x

    def __imul__(self, a):  # type: ignore
        # type: (Any) -> Any
        return float(self) * a
        x = type(self)(self * a)
        x._width = self._width
        x._underscore = self._underscore[:] if self._underscore is not None else None  # NOQA
        x._prec = self._prec  # check for others
        return x

    def __ipow__(self, a):  # type: ignore
        # type: (Any) -> Any
        return float(self) ** a
        x = type(self)(self ** a)
        x._width = self._width
        x._underscore = self._underscore[:] if self._underscore is not None else None  # NOQA
        return x

    def __isub__(self, a):  # type: ignore
        # type: (Any) -> Any
        return float(self) - a
        x = type(self)(self - a)
        x._width = self._width
        x._underscore = self._underscore[:] if self._underscore is not None else None  # NOQA
        return x

    @property
    def anchor(self):
        # type: () -> Any
        if not hasattr(self, Anchor.attrib):
            setattr(self, Anchor.attrib, Anchor())
        return getattr(self, Anchor.attrib)

    def yaml_anchor(self, any=False):
        # type: (bool) -> Any
        if not hasattr(self, Anchor.attrib):
            return None
        if any or self.anchor.always_dump:
            return self.anchor
        return None

    def yaml_set_anchor(self, value, always_dump=False):
        # type: (Any, bool) -> None
        self.anchor.value = value
        self.anchor.always_dump = always_dump

    def dump(self, out=sys.stdout):
        # type: (Any) -> Any
        out.write(
            'ScalarFloat({}| w:{}, p:{}, s:{}, lz:{}, _:{}|{}, w:{}, s:{})\n'.format(
                self,
                self._width,  # type: ignore
                self._prec,  # type: ignore
                self._m_sign,  # type: ignore
                self._m_lead0,  # type: ignore
                self._underscore,  # type: ignore
                self._exp,  # type: ignore
                self._e_width,  # type: ignore
                self._e_sign,  # type: ignore
            )
        )


class ExponentialFloat(ScalarFloat):
    def __new__(cls, value, width=None, underscore=None):
        # type: (Any, Any, Any) -> Any
        return ScalarFloat.__new__(cls, value, width=width, underscore=underscore)


class ExponentialCapsFloat(ScalarFloat):
    def __new__(cls, value, width=None, underscore=None):
        # type: (Any, Any, Any) -> Any
        return ScalarFloat.__new__(cls, value, width=width, underscore=underscore)
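These subclasses carry layout metadata (width, precision, underscores, exponent style) alongside the numeric value so the representer can re-emit the scalar the way it was written. A minimal sketch (assuming the module name `ruamel.yaml.scalarfloat`):

    from ruamel.yaml.scalarfloat import ScalarFloat

    v = ScalarFloat(1.5, width=4, prec=1)
    print(v + 0.5)   # plain float arithmetic: 2.0
    v.dump()         # ScalarFloat(1.5| w:4, p:1, s:None, lz:0, _:None|None, w:None, s:None)

Note that the in-place operators above currently return plain floats because of the early `return`; the attribute-preserving code after it is kept in the source but is unreachable.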
@@ -1,127 +0,0 @@
# coding: utf-8

from ruamel.yaml.anchor import Anchor

if False:  # MYPY
    from typing import Text, Any, Dict, List  # NOQA

__all__ = ['ScalarInt', 'BinaryInt', 'OctalInt', 'HexInt', 'HexCapsInt', 'DecimalInt']


class ScalarInt(int):
    def __new__(cls, *args, **kw):
        # type: (Any, Any, Any) -> Any
        width = kw.pop('width', None)
        underscore = kw.pop('underscore', None)
        anchor = kw.pop('anchor', None)
        v = int.__new__(cls, *args, **kw)
        v._width = width
        v._underscore = underscore
        if anchor is not None:
            v.yaml_set_anchor(anchor, always_dump=True)
        return v

    def __iadd__(self, a):  # type: ignore
        # type: (Any) -> Any
        x = type(self)(self + a)
        x._width = self._width  # type: ignore
        x._underscore = (  # type: ignore
            self._underscore[:] if self._underscore is not None else None  # type: ignore
        )  # NOQA
        return x

    def __ifloordiv__(self, a):  # type: ignore
        # type: (Any) -> Any
        x = type(self)(self // a)
        x._width = self._width  # type: ignore
        x._underscore = (  # type: ignore
            self._underscore[:] if self._underscore is not None else None  # type: ignore
        )  # NOQA
        return x

    def __imul__(self, a):  # type: ignore
        # type: (Any) -> Any
        x = type(self)(self * a)
        x._width = self._width  # type: ignore
        x._underscore = (  # type: ignore
            self._underscore[:] if self._underscore is not None else None  # type: ignore
        )  # NOQA
        return x

    def __ipow__(self, a):  # type: ignore
        # type: (Any) -> Any
        x = type(self)(self ** a)
        x._width = self._width  # type: ignore
        x._underscore = (  # type: ignore
            self._underscore[:] if self._underscore is not None else None  # type: ignore
        )  # NOQA
        return x

    def __isub__(self, a):  # type: ignore
        # type: (Any) -> Any
        x = type(self)(self - a)
        x._width = self._width  # type: ignore
        x._underscore = (  # type: ignore
            self._underscore[:] if self._underscore is not None else None  # type: ignore
        )  # NOQA
        return x

    @property
    def anchor(self):
        # type: () -> Any
        if not hasattr(self, Anchor.attrib):
            setattr(self, Anchor.attrib, Anchor())
        return getattr(self, Anchor.attrib)

    def yaml_anchor(self, any=False):
        # type: (bool) -> Any
        if not hasattr(self, Anchor.attrib):
            return None
        if any or self.anchor.always_dump:
            return self.anchor
        return None

    def yaml_set_anchor(self, value, always_dump=False):
        # type: (Any, bool) -> None
        self.anchor.value = value
        self.anchor.always_dump = always_dump
|
||||
|
||||
class BinaryInt(ScalarInt):
|
||||
def __new__(cls, value, width=None, underscore=None, anchor=None):
|
||||
# type: (Any, Any, Any, Any) -> Any
|
||||
return ScalarInt.__new__(cls, value, width=width, underscore=underscore, anchor=anchor)
|
||||
|
||||
|
||||
class OctalInt(ScalarInt):
|
||||
def __new__(cls, value, width=None, underscore=None, anchor=None):
|
||||
# type: (Any, Any, Any, Any) -> Any
|
||||
return ScalarInt.__new__(cls, value, width=width, underscore=underscore, anchor=anchor)
|
||||
|
||||
|
||||
# mixed casing of A-F is not supported, when loading the first non digit
|
||||
# determines the case
|
||||
|
||||
|
||||
class HexInt(ScalarInt):
|
||||
"""uses lower case (a-f)"""
|
||||
|
||||
def __new__(cls, value, width=None, underscore=None, anchor=None):
|
||||
# type: (Any, Any, Any, Any) -> Any
|
||||
return ScalarInt.__new__(cls, value, width=width, underscore=underscore, anchor=anchor)
|
||||
|
||||
|
||||
class HexCapsInt(ScalarInt):
|
||||
"""uses upper case (A-F)"""
|
||||
|
||||
def __new__(cls, value, width=None, underscore=None, anchor=None):
|
||||
# type: (Any, Any, Any, Any) -> Any
|
||||
return ScalarInt.__new__(cls, value, width=width, underscore=underscore, anchor=anchor)
|
||||
|
||||
|
||||
class DecimalInt(ScalarInt):
|
||||
"""needed if anchor"""
|
||||
|
||||
def __new__(cls, value, width=None, underscore=None, anchor=None):
|
||||
# type: (Any, Any, Any, Any) -> Any
|
||||
return ScalarInt.__new__(cls, value, width=width, underscore=underscore, anchor=anchor)
|
||||
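ScalarInt and its Binary/Octal/Hex/Decimal subclasses play the same role for integers. A small sketch (editorial, not part of the diff; assumes the vendored package):

import sys
import ruamel.yaml

yaml = ruamel.yaml.YAML()
data = yaml.load('mask: 0x00FF\nmode: 0o0644\n')
print(type(data['mask']).__name__)  # HexCapsInt: first non-digit sets the case
yaml.dump(data, sys.stdout)         # 0x00FF and 0o0644 come back unchanged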
@@ -1,152 +0,0 @@
# coding: utf-8

from ruamel.yaml.anchor import Anchor

if False:  # MYPY
    from typing import Text, Any, Dict, List  # NOQA

__all__ = [
    'ScalarString',
    'LiteralScalarString',
    'FoldedScalarString',
    'SingleQuotedScalarString',
    'DoubleQuotedScalarString',
    'PlainScalarString',
    # PreservedScalarString is the old name, as it was the first to be preserved on rt,
    # use LiteralScalarString instead
    'PreservedScalarString',
]


class ScalarString(str):
    __slots__ = Anchor.attrib

    def __new__(cls, *args, **kw):
        # type: (Any, Any) -> Any
        anchor = kw.pop('anchor', None)
        ret_val = str.__new__(cls, *args, **kw)
        if anchor is not None:
            ret_val.yaml_set_anchor(anchor, always_dump=True)
        return ret_val

    def replace(self, old, new, maxreplace=-1):
        # type: (Any, Any, int) -> Any
        return type(self)((str.replace(self, old, new, maxreplace)))

    @property
    def anchor(self):
        # type: () -> Any
        if not hasattr(self, Anchor.attrib):
            setattr(self, Anchor.attrib, Anchor())
        return getattr(self, Anchor.attrib)

    def yaml_anchor(self, any=False):
        # type: (bool) -> Any
        if not hasattr(self, Anchor.attrib):
            return None
        if any or self.anchor.always_dump:
            return self.anchor
        return None

    def yaml_set_anchor(self, value, always_dump=False):
        # type: (Any, bool) -> None
        self.anchor.value = value
        self.anchor.always_dump = always_dump


class LiteralScalarString(ScalarString):
    __slots__ = 'comment'  # the comment after the | on the first line

    style = '|'

    def __new__(cls, value, anchor=None):
        # type: (Text, Any) -> Any
        return ScalarString.__new__(cls, value, anchor=anchor)


PreservedScalarString = LiteralScalarString


class FoldedScalarString(ScalarString):
    __slots__ = ('fold_pos', 'comment')  # the comment after the > on the first line

    style = '>'

    def __new__(cls, value, anchor=None):
        # type: (Text, Any) -> Any
        return ScalarString.__new__(cls, value, anchor=anchor)


class SingleQuotedScalarString(ScalarString):
    __slots__ = ()

    style = "'"

    def __new__(cls, value, anchor=None):
        # type: (Text, Any) -> Any
        return ScalarString.__new__(cls, value, anchor=anchor)


class DoubleQuotedScalarString(ScalarString):
    __slots__ = ()

    style = '"'

    def __new__(cls, value, anchor=None):
        # type: (Text, Any) -> Any
        return ScalarString.__new__(cls, value, anchor=anchor)


class PlainScalarString(ScalarString):
    __slots__ = ()

    style = ''

    def __new__(cls, value, anchor=None):
        # type: (Text, Any) -> Any
        return ScalarString.__new__(cls, value, anchor=anchor)


def preserve_literal(s):
    # type: (Text) -> Text
    return LiteralScalarString(s.replace('\r\n', '\n').replace('\r', '\n'))


def walk_tree(base, map=None):
    # type: (Any, Any) -> None
    """
    the routine here walks over a simple yaml tree (recursing in
    dict values and list items) and converts strings that
    have multiple lines to literal scalars

    You can also provide an explicit (ordered) mapping for multiple transforms
    (first of which is executed):
        map = ruamel.yaml.compat.ordereddict
        map['\n'] = preserve_literal
        map[':'] = SingleQuotedScalarString
        walk_tree(data, map=map)
    """
    from collections.abc import MutableMapping, MutableSequence

    if map is None:
        map = {'\n': preserve_literal}

    if isinstance(base, MutableMapping):
        for k in base:
            v = base[k]  # type: Text
            if isinstance(v, str):
                for ch in map:
                    if ch in v:
                        base[k] = map[ch](v)
                        break
            else:
                walk_tree(v, map=map)
    elif isinstance(base, MutableSequence):
        for idx, elem in enumerate(base):
            if isinstance(elem, str):
                for ch in map:
                    if ch in elem:
                        base[idx] = map[ch](elem)
                        break
            else:
                walk_tree(elem, map=map)
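The walk_tree docstring above describes the transform-map API; here is a brief usage sketch (editorial addition; the import path assumes the vendored package layout):

import sys
import ruamel.yaml
from ruamel.yaml.scalarstring import walk_tree

data = {'msg': 'line one\nline two'}
walk_tree(data)  # default map is {'\n': preserve_literal}
ruamel.yaml.YAML().dump(data, sys.stdout)
# msg: |-
#   line one
#   line two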
2444 lib/spack/external/_vendoring/ruamel/yaml/scanner.py vendored
File diff suppressed because it is too large
@@ -1,241 +0,0 @@
# coding: utf-8

from ruamel.yaml.error import YAMLError
from ruamel.yaml.compat import nprint, DBG_NODE, dbg, nprintf  # NOQA
from ruamel.yaml.util import RegExp

from ruamel.yaml.events import (
    StreamStartEvent,
    StreamEndEvent,
    MappingStartEvent,
    MappingEndEvent,
    SequenceStartEvent,
    SequenceEndEvent,
    AliasEvent,
    ScalarEvent,
    DocumentStartEvent,
    DocumentEndEvent,
)
from ruamel.yaml.nodes import MappingNode, ScalarNode, SequenceNode

if False:  # MYPY
    from typing import Any, Dict, Union, Text, Optional  # NOQA
    from ruamel.yaml.compat import VersionType  # NOQA

__all__ = ['Serializer', 'SerializerError']


class SerializerError(YAMLError):
    pass


class Serializer:

    # 'id' and 3+ numbers, but not 000
    ANCHOR_TEMPLATE = 'id%03d'
    ANCHOR_RE = RegExp('id(?!000$)\\d{3,}')

    def __init__(
        self,
        encoding=None,
        explicit_start=None,
        explicit_end=None,
        version=None,
        tags=None,
        dumper=None,
    ):
        # type: (Any, Optional[bool], Optional[bool], Optional[VersionType], Any, Any) -> None  # NOQA
        self.dumper = dumper
        if self.dumper is not None:
            self.dumper._serializer = self
        self.use_encoding = encoding
        self.use_explicit_start = explicit_start
        self.use_explicit_end = explicit_end
        if isinstance(version, str):
            self.use_version = tuple(map(int, version.split('.')))
        else:
            self.use_version = version  # type: ignore
        self.use_tags = tags
        self.serialized_nodes = {}  # type: Dict[Any, Any]
        self.anchors = {}  # type: Dict[Any, Any]
        self.last_anchor_id = 0
        self.closed = None  # type: Optional[bool]
        self._templated_id = None

    @property
    def emitter(self):
        # type: () -> Any
        if hasattr(self.dumper, 'typ'):
            return self.dumper.emitter
        return self.dumper._emitter

    @property
    def resolver(self):
        # type: () -> Any
        if hasattr(self.dumper, 'typ'):
            self.dumper.resolver
        return self.dumper._resolver

    def open(self):
        # type: () -> None
        if self.closed is None:
            self.emitter.emit(StreamStartEvent(encoding=self.use_encoding))
            self.closed = False
        elif self.closed:
            raise SerializerError('serializer is closed')
        else:
            raise SerializerError('serializer is already opened')

    def close(self):
        # type: () -> None
        if self.closed is None:
            raise SerializerError('serializer is not opened')
        elif not self.closed:
            self.emitter.emit(StreamEndEvent())
            self.closed = True

    # def __del__(self):
    #     self.close()

    def serialize(self, node):
        # type: (Any) -> None
        if dbg(DBG_NODE):
            nprint('Serializing nodes')
            node.dump()
        if self.closed is None:
            raise SerializerError('serializer is not opened')
        elif self.closed:
            raise SerializerError('serializer is closed')
        self.emitter.emit(
            DocumentStartEvent(
                explicit=self.use_explicit_start, version=self.use_version, tags=self.use_tags
            )
        )
        self.anchor_node(node)
        self.serialize_node(node, None, None)
        self.emitter.emit(DocumentEndEvent(explicit=self.use_explicit_end))
        self.serialized_nodes = {}
        self.anchors = {}
        self.last_anchor_id = 0

    def anchor_node(self, node):
        # type: (Any) -> None
        if node in self.anchors:
            if self.anchors[node] is None:
                self.anchors[node] = self.generate_anchor(node)
        else:
            anchor = None
            try:
                if node.anchor.always_dump:
                    anchor = node.anchor.value
            except:  # NOQA
                pass
            self.anchors[node] = anchor
            if isinstance(node, SequenceNode):
                for item in node.value:
                    self.anchor_node(item)
            elif isinstance(node, MappingNode):
                for key, value in node.value:
                    self.anchor_node(key)
                    self.anchor_node(value)

    def generate_anchor(self, node):
        # type: (Any) -> Any
        try:
            anchor = node.anchor.value
        except:  # NOQA
            anchor = None
        if anchor is None:
            self.last_anchor_id += 1
            return self.ANCHOR_TEMPLATE % self.last_anchor_id
        return anchor

    def serialize_node(self, node, parent, index):
        # type: (Any, Any, Any) -> None
        alias = self.anchors[node]
        if node in self.serialized_nodes:
            node_style = getattr(node, 'style', None)
            if node_style != '?':
                node_style = None
            self.emitter.emit(AliasEvent(alias, style=node_style))
        else:
            self.serialized_nodes[node] = True
            self.resolver.descend_resolver(parent, index)
            if isinstance(node, ScalarNode):
                # here check if the node.tag equals the one that would result from parsing
                # if not equal quoting is necessary for strings
                detected_tag = self.resolver.resolve(ScalarNode, node.value, (True, False))
                default_tag = self.resolver.resolve(ScalarNode, node.value, (False, True))
                implicit = (
                    (node.tag == detected_tag),
                    (node.tag == default_tag),
                    node.tag.startswith('tag:yaml.org,2002:'),
                )
                self.emitter.emit(
                    ScalarEvent(
                        alias,
                        node.tag,
                        implicit,
                        node.value,
                        style=node.style,
                        comment=node.comment,
                    )
                )
            elif isinstance(node, SequenceNode):
                implicit = node.tag == self.resolver.resolve(SequenceNode, node.value, True)
                comment = node.comment
                end_comment = None
                seq_comment = None
                if node.flow_style is True:
                    if comment:  # eol comment on flow style sequence
                        seq_comment = comment[0]
                        # comment[0] = None
                if comment and len(comment) > 2:
                    end_comment = comment[2]
                else:
                    end_comment = None
                self.emitter.emit(
                    SequenceStartEvent(
                        alias,
                        node.tag,
                        implicit,
                        flow_style=node.flow_style,
                        comment=node.comment,
                    )
                )
                index = 0
                for item in node.value:
                    self.serialize_node(item, node, index)
                    index += 1
                self.emitter.emit(SequenceEndEvent(comment=[seq_comment, end_comment]))
            elif isinstance(node, MappingNode):
                implicit = node.tag == self.resolver.resolve(MappingNode, node.value, True)
                comment = node.comment
                end_comment = None
                map_comment = None
                if node.flow_style is True:
                    if comment:  # eol comment on flow style sequence
                        map_comment = comment[0]
                        # comment[0] = None
                if comment and len(comment) > 2:
                    end_comment = comment[2]
                self.emitter.emit(
                    MappingStartEvent(
                        alias,
                        node.tag,
                        implicit,
                        flow_style=node.flow_style,
                        comment=node.comment,
                        nr_items=len(node.value),
                    )
                )
                for key, value in node.value:
                    self.serialize_node(key, node, None)
                    self.serialize_node(value, node, key)
                self.emitter.emit(MappingEndEvent(comment=[map_comment, end_comment]))
            self.resolver.ascend_resolver()


def templated_id(s):
    # type: (Text) -> Any
    return Serializer.ANCHOR_RE.match(s)
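ANCHOR_TEMPLATE and ANCHOR_RE above define the auto-numbered anchor scheme, and templated_id() is how callers tell generated names from user-set ones. A quick sketch (editorial; the import path assumes the vendored layout):

from ruamel.yaml.serializer import Serializer, templated_id

print(Serializer.ANCHOR_TEMPLATE % 1)   # id001
print(bool(templated_id('id001')))      # True: matches id(?!000$)\d{3,}
print(bool(templated_id('id000')))      # False: id000 is deliberately excluded
print(bool(templated_id('my_anchor')))  # False: user anchors are never renumbered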
@@ -1,61 +0,0 @@
# coding: utf-8

import datetime
import copy

# ToDo: at least on PY3 you could probably attach the tzinfo correctly to the object
# a more complete datetime might be used by safe loading as well

if False:  # MYPY
    from typing import Any, Dict, Optional, List  # NOQA


class TimeStamp(datetime.datetime):
    def __init__(self, *args, **kw):
        # type: (Any, Any) -> None
        self._yaml = dict(t=False, tz=None, delta=0)  # type: Dict[Any, Any]

    def __new__(cls, *args, **kw):  # datetime is immutable
        # type: (Any, Any) -> Any
        return datetime.datetime.__new__(cls, *args, **kw)

    def __deepcopy__(self, memo):
        # type: (Any) -> Any
        ts = TimeStamp(self.year, self.month, self.day, self.hour, self.minute, self.second)
        ts._yaml = copy.deepcopy(self._yaml)
        return ts

    def replace(
        self,
        year=None,
        month=None,
        day=None,
        hour=None,
        minute=None,
        second=None,
        microsecond=None,
        tzinfo=True,
        fold=None,
    ):
        # type: (Any, Any, Any, Any, Any, Any, Any, Any, Any) -> Any
        if year is None:
            year = self.year
        if month is None:
            month = self.month
        if day is None:
            day = self.day
        if hour is None:
            hour = self.hour
        if minute is None:
            minute = self.minute
        if second is None:
            second = self.second
        if microsecond is None:
            microsecond = self.microsecond
        if tzinfo is True:
            tzinfo = self.tzinfo
        if fold is None:
            fold = self.fold
        ts = type(self)(year, month, day, hour, minute, second, microsecond, tzinfo, fold=fold)
        ts._yaml = copy.deepcopy(self._yaml)
        return ts
404 lib/spack/external/_vendoring/ruamel/yaml/tokens.py vendored
@@ -1,404 +0,0 @@
# coding: utf-8

from ruamel.yaml.compat import _F, nprintf  # NOQA

if False:  # MYPY
    from typing import Text, Any, Dict, Optional, List  # NOQA
    from .error import StreamMark  # NOQA

SHOW_LINES = True


class Token:
    __slots__ = 'start_mark', 'end_mark', '_comment'

    def __init__(self, start_mark, end_mark):
        # type: (StreamMark, StreamMark) -> None
        self.start_mark = start_mark
        self.end_mark = end_mark

    def __repr__(self):
        # type: () -> Any
        # attributes = [key for key in self.__slots__ if not key.endswith('_mark') and
        #               hasattr('self', key)]
        attributes = [key for key in self.__slots__ if not key.endswith('_mark')]
        attributes.sort()
        # arguments = ', '.join(
        #     [_F('{key!s}={gattr!r})', key=key, gattr=getattr(self, key)) for key in attributes]
        # )
        arguments = [
            _F('{key!s}={gattr!r}', key=key, gattr=getattr(self, key)) for key in attributes
        ]
        if SHOW_LINES:
            try:
                arguments.append('line: ' + str(self.start_mark.line))
            except:  # NOQA
                pass
        try:
            arguments.append('comment: ' + str(self._comment))
        except:  # NOQA
            pass
        return '{}({})'.format(self.__class__.__name__, ', '.join(arguments))

    @property
    def column(self):
        # type: () -> int
        return self.start_mark.column

    @column.setter
    def column(self, pos):
        # type: (Any) -> None
        self.start_mark.column = pos

    # old style ( <= 0.17) is a TWO element list with first being the EOL
    # comment concatenated with following FLC/BLNK; and second being a list of FLC/BLNK
    # preceding the token
    # new style ( >= 0.17 ) is a THREE element list with the first being a list of
    # preceding FLC/BLNK, the second EOL and the third following FLC/BLNK
    # note that new style has differing order, and does not consist of CommentToken(s)
    # but of CommentInfo instances
    # any non-assigned values in new style are None, but first and last can be empty list
    # new style routines add one comment at a time

    # going to be deprecated in favour of add_comment_eol/post
    def add_post_comment(self, comment):
        # type: (Any) -> None
        if not hasattr(self, '_comment'):
            self._comment = [None, None]
        else:
            assert len(self._comment) in [2, 5]  # make sure it is version 0
        # if isinstance(comment, CommentToken):
        #     if comment.value.startswith('# C09'):
        #         raise
        self._comment[0] = comment

    # going to be deprecated in favour of add_comment_pre
    def add_pre_comments(self, comments):
        # type: (Any) -> None
        if not hasattr(self, '_comment'):
            self._comment = [None, None]
        else:
            assert len(self._comment) == 2  # make sure it is version 0
        assert self._comment[1] is None
        self._comment[1] = comments
        return

    # new style
    def add_comment_pre(self, comment):
        # type: (Any) -> None
        if not hasattr(self, '_comment'):
            self._comment = [[], None, None]  # type: ignore
        else:
            assert len(self._comment) == 3
            if self._comment[0] is None:
                self._comment[0] = []  # type: ignore
        self._comment[0].append(comment)  # type: ignore

    def add_comment_eol(self, comment, comment_type):
        # type: (Any, Any) -> None
        if not hasattr(self, '_comment'):
            self._comment = [None, None, None]
        else:
            assert len(self._comment) == 3
            assert self._comment[1] is None
        if self.comment[1] is None:
            self._comment[1] = []  # type: ignore
        self._comment[1].extend([None] * (comment_type + 1 - len(self.comment[1])))  # type: ignore # NOQA
        # nprintf('commy', self.comment, comment_type)
        self._comment[1][comment_type] = comment  # type: ignore

    def add_comment_post(self, comment):
        # type: (Any) -> None
        if not hasattr(self, '_comment'):
            self._comment = [None, None, []]  # type: ignore
        else:
            assert len(self._comment) == 3
            if self._comment[2] is None:
                self._comment[2] = []  # type: ignore
        self._comment[2].append(comment)  # type: ignore

    # def get_comment(self):
    #     # type: () -> Any
    #     return getattr(self, '_comment', None)

    @property
    def comment(self):
        # type: () -> Any
        return getattr(self, '_comment', None)

    def move_old_comment(self, target, empty=False):
        # type: (Any, bool) -> Any
        """move a comment from this token to target (normally next token)

        used to combine e.g. comments before a BlockEntryToken to the
        ScalarToken that follows it
        empty is a special for empty values -> comment after key
        """
        c = self.comment
        if c is None:
            return
        # don't push beyond last element
        if isinstance(target, (StreamEndToken, DocumentStartToken)):
            return
        delattr(self, '_comment')
        tc = target.comment
        if not tc:  # target comment, just insert
            # special for empty value in key: value issue 25
            if empty:
                c = [c[0], c[1], None, None, c[0]]
            target._comment = c
            # nprint('mco2:', self, target, target.comment, empty)
            return self
        if c[0] and tc[0] or c[1] and tc[1]:
            raise NotImplementedError(_F('overlap in comment {c!r} {tc!r}', c=c, tc=tc))
        if c[0]:
            tc[0] = c[0]
        if c[1]:
            tc[1] = c[1]
        return self

    def split_old_comment(self):
        # type: () -> Any
        """ split the post part of a comment, and return it

        as comment to be added. Delete second part if [None, None]
         abc:  # this goes to sequence
           # this goes to first element
           - first element
        """
        comment = self.comment
        if comment is None or comment[0] is None:
            return None  # nothing to do
        ret_val = [comment[0], None]
        if comment[1] is None:
            delattr(self, '_comment')
        return ret_val

    def move_new_comment(self, target, empty=False):
        # type: (Any, bool) -> Any
        """move a comment from this token to target (normally next token)

        used to combine e.g. comments before a BlockEntryToken to the
        ScalarToken that follows it
        empty is a special for empty values -> comment after key
        """
        c = self.comment
        if c is None:
            return
        # don't push beyond last element
        if isinstance(target, (StreamEndToken, DocumentStartToken)):
            return
        delattr(self, '_comment')
        tc = target.comment
        if not tc:  # target comment, just insert
            # special for empty value in key: value issue 25
            if empty:
                c = [c[0], c[1], c[2]]
            target._comment = c
            # nprint('mco2:', self, target, target.comment, empty)
            return self
        # if self and target have both pre, eol or post comments, something seems wrong
        for idx in range(3):
            if c[idx] is not None and tc[idx] is not None:
                raise NotImplementedError(_F('overlap in comment {c!r} {tc!r}', c=c, tc=tc))
        # move the comment parts
        for idx in range(3):
            if c[idx]:
                tc[idx] = c[idx]
        return self


# class BOMToken(Token):
#     id = '<byte order mark>'


class DirectiveToken(Token):
    __slots__ = 'name', 'value'
    id = '<directive>'

    def __init__(self, name, value, start_mark, end_mark):
        # type: (Any, Any, Any, Any) -> None
        Token.__init__(self, start_mark, end_mark)
        self.name = name
        self.value = value


class DocumentStartToken(Token):
    __slots__ = ()
    id = '<document start>'


class DocumentEndToken(Token):
    __slots__ = ()
    id = '<document end>'


class StreamStartToken(Token):
    __slots__ = ('encoding',)
    id = '<stream start>'

    def __init__(self, start_mark=None, end_mark=None, encoding=None):
        # type: (Any, Any, Any) -> None
        Token.__init__(self, start_mark, end_mark)
        self.encoding = encoding


class StreamEndToken(Token):
    __slots__ = ()
    id = '<stream end>'


class BlockSequenceStartToken(Token):
    __slots__ = ()
    id = '<block sequence start>'


class BlockMappingStartToken(Token):
    __slots__ = ()
    id = '<block mapping start>'


class BlockEndToken(Token):
    __slots__ = ()
    id = '<block end>'


class FlowSequenceStartToken(Token):
    __slots__ = ()
    id = '['


class FlowMappingStartToken(Token):
    __slots__ = ()
    id = '{'


class FlowSequenceEndToken(Token):
    __slots__ = ()
    id = ']'


class FlowMappingEndToken(Token):
    __slots__ = ()
    id = '}'


class KeyToken(Token):
    __slots__ = ()
    id = '?'

    # def x__repr__(self):
    #     return 'KeyToken({})'.format(
    #         self.start_mark.buffer[self.start_mark.index:].split(None, 1)[0])


class ValueToken(Token):
    __slots__ = ()
    id = ':'


class BlockEntryToken(Token):
    __slots__ = ()
    id = '-'


class FlowEntryToken(Token):
    __slots__ = ()
    id = ','


class AliasToken(Token):
    __slots__ = ('value',)
    id = '<alias>'

    def __init__(self, value, start_mark, end_mark):
        # type: (Any, Any, Any) -> None
        Token.__init__(self, start_mark, end_mark)
        self.value = value


class AnchorToken(Token):
    __slots__ = ('value',)
    id = '<anchor>'

    def __init__(self, value, start_mark, end_mark):
        # type: (Any, Any, Any) -> None
        Token.__init__(self, start_mark, end_mark)
        self.value = value


class TagToken(Token):
    __slots__ = ('value',)
    id = '<tag>'

    def __init__(self, value, start_mark, end_mark):
        # type: (Any, Any, Any) -> None
        Token.__init__(self, start_mark, end_mark)
        self.value = value


class ScalarToken(Token):
    __slots__ = 'value', 'plain', 'style'
    id = '<scalar>'

    def __init__(self, value, plain, start_mark, end_mark, style=None):
        # type: (Any, Any, Any, Any, Any) -> None
        Token.__init__(self, start_mark, end_mark)
        self.value = value
        self.plain = plain
        self.style = style


class CommentToken(Token):
    __slots__ = '_value', 'pre_done'
    id = '<comment>'

    def __init__(self, value, start_mark=None, end_mark=None, column=None):
        # type: (Any, Any, Any, Any) -> None
        if start_mark is None:
            assert column is not None
            self._column = column
        Token.__init__(self, start_mark, None)  # type: ignore
        self._value = value

    @property
    def value(self):
        # type: () -> str
        if isinstance(self._value, str):
            return self._value
        return "".join(self._value)

    @value.setter
    def value(self, val):
        # type: (Any) -> None
        self._value = val

    def reset(self):
        # type: () -> None
        if hasattr(self, 'pre_done'):
            delattr(self, 'pre_done')

    def __repr__(self):
        # type: () -> Any
        v = '{!r}'.format(self.value)
        if SHOW_LINES:
            try:
                v += ', line: ' + str(self.start_mark.line)
            except:  # NOQA
                pass
            try:
                v += ', col: ' + str(self.start_mark.column)
            except:  # NOQA
                pass
        return 'CommentToken({})'.format(v)

    def __eq__(self, other):
        # type: (Any) -> bool
        if self.start_mark != other.start_mark:
            return False
        if self.end_mark != other.end_mark:
            return False
        if self.value != other.value:
            return False
        return True

    def __ne__(self, other):
        # type: (Any) -> bool
        return not self.__eq__(other)
256 lib/spack/external/_vendoring/ruamel/yaml/util.py vendored
@@ -1,256 +0,0 @@
# coding: utf-8

"""
some helper functions that might be generally useful
"""

import datetime
from functools import partial
import re


if False:  # MYPY
    from typing import Any, Dict, Optional, List, Text  # NOQA
    from .compat import StreamTextType  # NOQA


class LazyEval:
    """
    Lightweight wrapper around lazily evaluated func(*args, **kwargs).

    func is only evaluated when any attribute of its return value is accessed.
    Every attribute access is passed through to the wrapped value.
    (This only excludes special cases like method-wrappers, e.g., __hash__.)
    The sole additional attribute is the lazy_self function which holds the
    return value (or, prior to evaluation, func and arguments), in its closure.
    """

    def __init__(self, func, *args, **kwargs):
        # type: (Any, Any, Any) -> None
        def lazy_self():
            # type: () -> Any
            return_value = func(*args, **kwargs)
            object.__setattr__(self, 'lazy_self', lambda: return_value)
            return return_value

        object.__setattr__(self, 'lazy_self', lazy_self)

    def __getattribute__(self, name):
        # type: (Any) -> Any
        lazy_self = object.__getattribute__(self, 'lazy_self')
        if name == 'lazy_self':
            return lazy_self
        return getattr(lazy_self(), name)

    def __setattr__(self, name, value):
        # type: (Any, Any) -> None
        setattr(self.lazy_self(), name, value)


RegExp = partial(LazyEval, re.compile)

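# Editorial sketch (not in the original file): because RegExp wraps
# re.compile in LazyEval, module-level patterns cost nothing until used:
#     example = RegExp('[0-9]+')      # no compilation happens here
#     example.match('42abc').group()  # first attribute access compiles,
#                                     # caches the result, returns '42'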
timestamp_regexp = RegExp(
    """^(?P<year>[0-9][0-9][0-9][0-9])
       -(?P<month>[0-9][0-9]?)
       -(?P<day>[0-9][0-9]?)
       (?:((?P<t>[Tt])|[ \\t]+)   # explictly not retaining extra spaces
       (?P<hour>[0-9][0-9]?)
       :(?P<minute>[0-9][0-9])
       :(?P<second>[0-9][0-9])
       (?:\\.(?P<fraction>[0-9]*))?
       (?:[ \\t]*(?P<tz>Z|(?P<tz_sign>[-+])(?P<tz_hour>[0-9][0-9]?)
       (?::(?P<tz_minute>[0-9][0-9]))?))?)?$""",
    re.X,
)


def create_timestamp(
    year, month, day, t, hour, minute, second, fraction, tz, tz_sign, tz_hour, tz_minute
):
    # type: (Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any) -> Any
    # create a timestamp from match against timestamp_regexp
    MAX_FRAC = 999999
    year = int(year)
    month = int(month)
    day = int(day)
    if not hour:
        return datetime.date(year, month, day)
    hour = int(hour)
    minute = int(minute)
    second = int(second)
    frac = 0
    if fraction:
        frac_s = fraction[:6]
        while len(frac_s) < 6:
            frac_s += '0'
        frac = int(frac_s)
        if len(fraction) > 6 and int(fraction[6]) > 4:
            frac += 1
        if frac > MAX_FRAC:
            fraction = 0
        else:
            fraction = frac
    else:
        fraction = 0
    delta = None
    if tz_sign:
        tz_hour = int(tz_hour)
        tz_minute = int(tz_minute) if tz_minute else 0
        delta = datetime.timedelta(
            hours=tz_hour, minutes=tz_minute, seconds=1 if frac > MAX_FRAC else 0
        )
        if tz_sign == '-':
            delta = -delta
    elif frac > MAX_FRAC:
        delta = -datetime.timedelta(seconds=1)
    # should do something else instead (or hook this up to the preceding if statement
    # in reverse
    #  if delta is None:
    #      return datetime.datetime(year, month, day, hour, minute, second, fraction)
    #  return datetime.datetime(year, month, day, hour, minute, second, fraction,
    #                           datetime.timezone.utc)
    # the above is not good enough though, should provide tzinfo. In Python3 that is easily
    # doable drop that kind of support for Python2 as it has not native tzinfo
    data = datetime.datetime(year, month, day, hour, minute, second, fraction)
    if delta:
        data -= delta
    return data

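# Editorial sketch (not in the original file): create_timestamp() consumes
# the groups of timestamp_regexp; for '2001-12-14 21:59:43.10 -5' it gets
#     create_timestamp('2001', '12', '14', None, '21', '59', '43', '10',
#                      None, '-', '5', None)
# and returns datetime.datetime(2001, 12, 15, 2, 59, 43, 100000), i.e. the
# fraction padded to microseconds and the -5h offset folded into UTC.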
# originally as comment
# https://github.com/pre-commit/pre-commit/pull/211#issuecomment-186466605
# if you use this in your code, I suggest adding a test in your test suite
# that check this routines output against a known piece of your YAML
# before upgrades to this code break your round-tripped YAML
def load_yaml_guess_indent(stream, **kw):
    # type: (StreamTextType, Any) -> Any
    """guess the indent and block sequence indent of yaml stream/string

    returns round_trip_loaded stream, indent level, block sequence indent
    - block sequence indent is the number of spaces before a dash relative to previous indent
    - if there are no block sequences, indent is taken from nested mappings, block sequence
      indent is unset (None) in that case
    """
    from .main import YAML

    # load a YAML document, guess the indentation, if you use TABs you are on your own
    def leading_spaces(line):
        # type: (Any) -> int
        idx = 0
        while idx < len(line) and line[idx] == ' ':
            idx += 1
        return idx

    if isinstance(stream, str):
        yaml_str = stream  # type: Any
    elif isinstance(stream, bytes):
        # most likely, but the Reader checks BOM for this
        yaml_str = stream.decode('utf-8')
    else:
        yaml_str = stream.read()
    map_indent = None
    indent = None  # default if not found for some reason
    block_seq_indent = None
    prev_line_key_only = None
    key_indent = 0
    for line in yaml_str.splitlines():
        rline = line.rstrip()
        lline = rline.lstrip()
        if lline.startswith('- '):
            l_s = leading_spaces(line)
            block_seq_indent = l_s - key_indent
            idx = l_s + 1
            while line[idx] == ' ':  # this will end as we rstripped
                idx += 1
            if line[idx] == '#':  # comment after -
                continue
            indent = idx - key_indent
            break
        if map_indent is None and prev_line_key_only is not None and rline:
            idx = 0
            while line[idx] in ' -':
                idx += 1
            if idx > prev_line_key_only:
                map_indent = idx - prev_line_key_only
        if rline.endswith(':'):
            key_indent = leading_spaces(line)
            idx = 0
            while line[idx] == ' ':  # this will end on ':'
                idx += 1
            prev_line_key_only = idx
            continue
        prev_line_key_only = None
    if indent is None and map_indent is not None:
        indent = map_indent
    yaml = YAML()
    return yaml.load(yaml_str, **kw), indent, block_seq_indent  # type: ignore

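# Editorial sketch (not in the original file): typical round-trip use is to
# re-dump with whatever indentation the input already had:
#     config, ind, bsi = load_yaml_guess_indent(open('spack.yaml').read())
#     yaml = YAML()
#     yaml.indent(mapping=ind, sequence=ind, offset=bsi)
#     yaml.dump(config, sys.stdout)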
def configobj_walker(cfg):
    # type: (Any) -> Any
    """
    walks over a ConfigObj (INI file with comments) generating
    corresponding YAML output (including comments
    """
    from configobj import ConfigObj  # type: ignore

    assert isinstance(cfg, ConfigObj)
    for c in cfg.initial_comment:
        if c.strip():
            yield c
    for s in _walk_section(cfg):
        if s.strip():
            yield s
    for c in cfg.final_comment:
        if c.strip():
            yield c


def _walk_section(s, level=0):
    # type: (Any, int) -> Any
    from configobj import Section

    assert isinstance(s, Section)
    indent = '  ' * level
    for name in s.scalars:
        for c in s.comments[name]:
            yield indent + c.strip()
        x = s[name]
        if '\n' in x:
            i = indent + '  '
            x = '|\n' + i + x.strip().replace('\n', '\n' + i)
        elif ':' in x:
            x = "'" + x.replace("'", "''") + "'"
        line = '{0}{1}: {2}'.format(indent, name, x)
        c = s.inline_comments[name]
        if c:
            line += ' ' + c
        yield line
    for name in s.sections:
        for c in s.comments[name]:
            yield indent + c.strip()
        line = '{0}{1}:'.format(indent, name)
        c = s.inline_comments[name]
        if c:
            line += ' ' + c
        yield line
        for val in _walk_section(s[name], level=level + 1):
            yield val


# def config_obj_2_rt_yaml(cfg):
#     from .comments import CommentedMap, CommentedSeq
#     from configobj import ConfigObj
#     assert isinstance(cfg, ConfigObj)
#     #for c in cfg.initial_comment:
#     #   if c.strip():
#     #     pass
#     cm = CommentedMap()
#     for name in s.sections:
#         cm[name] = d = CommentedMap()
#
#
#     #for c in cfg.final_comment:
#     #    if c.strip():
#     #        yield c
#     return cm
2 lib/spack/external/archspec/__init__.py vendored
@@ -1,2 +1,2 @@
"""Init file to avoid namespace packages"""
__version__ = "0.2.1"
__version__ = "0.2.0"
8 lib/spack/external/archspec/__main__.py vendored
@@ -1,8 +0,0 @@
"""
Run the `archspec` CLI as a module.
"""

import sys
from .cli import main

sys.exit(main())
@@ -79,18 +79,14 @@ def __init__(self, name, parents, vendor, features, compilers, generation=0):
        self.features = features
        self.compilers = compilers
        self.generation = generation
        # Cache the ancestor computation
        self._ancestors = None

    @property
    def ancestors(self):
        """All the ancestors of this microarchitecture."""
        if self._ancestors is None:
            value = self.parents[:]
            for parent in self.parents:
                value.extend(a for a in parent.ancestors if a not in value)
            self._ancestors = value
        return self._ancestors
        value = self.parents[:]
        for parent in self.parents:
            value.extend(a for a in parent.ancestors if a not in value)
        return value

    def _to_set(self):
        """Returns a set of the nodes in this microarchitecture DAG."""
@@ -272,14 +268,15 @@ def tuplify(ver):
        return flags

    msg = (
        "cannot produce optimized binary for micro-architecture '{0}' with {1}@{2}"
        "cannot produce optimized binary for micro-architecture '{0}'"
        " with {1}@{2} [supported compiler versions are {3}]"
    )
    msg = msg.format(
        self.name,
        compiler,
        version,
        ", ".join([x["versions"] for x in compiler_info]),
    )
    if compiler_info:
        versions = [x["versions"] for x in compiler_info]
        msg += f' [supported compiler versions are {", ".join(versions)}]'
    else:
        msg += " [no supported compiler versions]"
    msg = msg.format(self.name, compiler, version)
    raise UnsupportedMicroarchitecture(msg)

@@ -102,8 +102,7 @@
          "name": "x86-64",
          "flags": "-march={name} -mtune=generic"
        }
      ],
      "nvhpc": []
      ]
    }
  },
  "x86_64_v2": {
@@ -145,13 +144,6 @@
          "flags": "-march={name} -mtune=generic -mcx16 -msahf -mpopcnt -msse3 -msse4.1 -msse4.2 -mssse3"
        }
      ],
      "intel": [
        {
          "versions": "16.0:",
          "name": "corei7",
          "flags": "-march={name} -mtune=generic -mpopcnt"
        }
      ],
      "oneapi": [
        {
          "versions": "2021.2.0:",
@@ -165,8 +157,7 @@
          "name": "x86-64-v2",
          "flags": "-march={name} -mtune=generic"
        }
      ],
      "nvhpc": []
      ]
    }
  },
  "x86_64_v3": {
@@ -224,13 +215,6 @@
          "flags": "-march={name} -mtune=generic -mcx16 -msahf -mpopcnt -msse3 -msse4.1 -msse4.2 -mssse3 -mavx -mavx2 -mbmi -mbmi2 -mf16c -mfma -mlzcnt -mmovbe -mxsave"
        }
      ],
      "intel": [
        {
          "versions": "16.0:",
          "name": "core-avx2",
          "flags": "-march={name} -mtune={name} -fma -mf16c"
        }
      ],
      "oneapi": [
        {
          "versions": "2021.2.0:",
@@ -244,13 +228,6 @@
          "name": "x86-64-v3",
          "flags": "-march={name} -mtune=generic"
        }
      ],
      "nvhpc" : [
        {
          "versions": ":",
          "name": "px",
          "flags": "-tp {name} -mpopcnt -msse3 -msse4.1 -msse4.2 -mssse3 -mavx -mavx2 -mbmi -mbmi2 -mf16c -mfma -mlzcnt -mxsave"
        }
      ]
    }
  },
@@ -314,13 +291,6 @@
          "flags": "-march={name} -mtune=generic -mcx16 -msahf -mpopcnt -msse3 -msse4.1 -msse4.2 -mssse3 -mavx -mavx2 -mbmi -mbmi2 -mf16c -mfma -mlzcnt -mmovbe -mxsave -mavx512f -mavx512bw -mavx512cd -mavx512dq -mavx512vl"
        }
      ],
      "intel": [
        {
          "versions": "16.0:",
          "name": "skylake-avx512",
          "flags": "-march={name} -mtune={name}"
        }
      ],
      "oneapi": [
        {
          "versions": "2021.2.0:",
@@ -334,13 +304,6 @@
          "name": "x86-64-v4",
          "flags": "-march={name} -mtune=generic"
        }
      ],
      "nvhpc": [
        {
          "versions": ":",
          "name": "px",
          "flags": "-tp {name} -mpopcnt -msse3 -msse4.1 -msse4.2 -mssse3 -mavx -mavx2 -mbmi -mbmi2 -mf16c -mfma -mlzcnt -mxsave -mavx512f -mavx512bw -mavx512cd -mavx512dq -mavx512vl"
        }
      ]
    }
  },
@@ -395,8 +358,7 @@
          "versions": ":",
          "flags": "-march={name} -mtune={name}"
        }
      ],
      "nvhpc": []
      ]
    }
  },
  "core2": {
@@ -450,8 +412,7 @@
          "versions": ":",
          "flags": "-march={name} -mtune={name}"
        }
      ],
      "nvhpc": []
      ]
    }
  },
  "nehalem": {
@@ -516,8 +477,7 @@
          "name": "corei7",
          "flags": "-march={name} -mtune={name}"
        }
      ],
      "nvhpc": []
      ]
    }
  },
  "westmere": {
@@ -579,8 +539,7 @@
          "name": "corei7",
          "flags": "-march={name} -mtune={name}"
        }
      ],
      "nvhpc": []
      ]
    }
  },
  "sandybridge": {
@@ -650,12 +609,6 @@
          "versions": ":",
          "flags": "-march={name} -mtune={name}"
        }
      ],
      "nvhpc": [
        {
          "versions": ":",
          "flags": "-tp {name}"
        }
      ]
    }
  },
@@ -728,12 +681,6 @@
          "versions": ":",
          "flags": "-march={name} -mtune={name}"
        }
      ],
      "nvhpc": [
        {
          "versions": ":",
          "flags": "-tp {name}"
        }
      ]
    }
  },
@@ -811,12 +758,6 @@
          "versions": ":",
          "flags": "-march={name} -mtune={name}"
        }
      ],
      "nvhpc": [
        {
          "versions": ":",
          "flags": "-tp {name}"
        }
      ]
    }
  },
@@ -886,13 +827,6 @@
          "versions": ":",
          "flags": "-march={name} -mtune={name}"
        }
      ],
      "nvhpc": [
        {
          "versions": ":",
          "name": "haswell",
          "flags": "-tp {name}"
        }
      ]
    }
  },
@@ -965,13 +899,6 @@
          "versions": ":",
          "flags": "-march={name} -mtune={name}"
        }
      ],
      "nvhpc": [
        {
          "versions": ":",
          "name": "haswell",
          "flags": "-tp {name}"
        }
      ]
    }
  },
@@ -1136,13 +1063,6 @@
          "name": "skylake-avx512",
          "flags": "-march={name} -mtune={name}"
        }
      ],
      "nvhpc": [
        {
          "versions": ":",
          "name": "skylake",
          "flags": "-tp {name}"
        }
      ]
    }
  },
@@ -1223,13 +1143,6 @@
          "versions": ":",
          "flags": "-march={name} -mtune={name}"
        }
      ],
      "nvhpc": [
        {
          "versions": ":",
          "name": "skylake",
          "flags": "-tp {name}"
        }
      ]
    }
  },
@@ -1309,13 +1222,6 @@
          "versions": ":",
          "flags": "-march={name} -mtune={name}"
        }
      ],
      "nvhpc": [
        {
          "versions": ":",
          "name": "skylake",
          "flags": "-tp {name}"
        }
      ]
    }
  },
@@ -1423,99 +1329,6 @@
          "name": "icelake-client",
          "flags": "-march={name} -mtune={name}"
        }
      ],
      "nvhpc": [
        {
          "versions": ":",
          "name": "skylake",
          "flags": "-tp {name}"
        }
      ]
    }
  },
  "sapphirerapids": {
    "from": [
      "icelake"
    ],
    "vendor": "GenuineIntel",
    "features": [
      "mmx",
      "sse",
      "sse2",
      "ssse3",
      "sse4_1",
      "sse4_2",
      "popcnt",
      "aes",
      "pclmulqdq",
      "avx",
      "rdrand",
      "f16c",
      "movbe",
      "fma",
      "avx2",
      "bmi1",
      "bmi2",
      "rdseed",
      "adx",
      "clflushopt",
      "xsavec",
      "xsaveopt",
      "avx512f",
      "avx512vl",
      "avx512bw",
      "avx512dq",
      "avx512cd",
      "avx512vbmi",
      "avx512ifma",
      "sha_ni",
      "clwb",
      "rdpid",
      "gfni",
      "avx512_vbmi2",
      "avx512_vpopcntdq",
      "avx512_bitalg",
      "avx512_vnni",
      "vpclmulqdq",
      "vaes",
      "avx512_bf16",
      "cldemote",
      "movdir64b",
      "movdiri",
      "pdcm",
      "serialize",
      "waitpkg"
    ],
    "compilers": {
      "gcc": [
        {
          "versions": "11.0:",
          "flags": "-march={name} -mtune={name}"
        }
      ],
      "clang": [
        {
          "versions": "12.0:",
          "flags": "-march={name} -mtune={name}"
        }
      ],
      "intel": [
        {
          "versions": "2021.2:",
          "flags": "-march={name} -mtune={name}"
        }
      ],
      "oneapi": [
        {
          "versions": "2021.2:",
          "flags": "-march={name} -mtune={name}"
        }
      ],
      "dpcpp": [
        {
          "versions": "2021.2:",
          "flags": "-march={name} -mtune={name}"
        }
      ]
    }
  },
@@ -1574,8 +1387,7 @@
          "warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
          "flags": "-msse2"
        }
      ],
      "nvhpc": []
      ]
    }
  },
  "bulldozer": {
@@ -1639,12 +1451,6 @@
          "warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
          "flags": "-msse3"
        }
      ],
      "nvhpc": [
        {
          "versions": ":",
          "flags": "-tp {name}"
        }
      ]
    }
  },
@@ -1713,12 +1519,6 @@
          "warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
          "flags": "-msse3"
        }
      ],
      "nvhpc": [
        {
          "versions": ":",
          "flags": "-tp {name}"
        }
      ]
    }
  },
@@ -1788,13 +1588,6 @@
          "warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
          "flags": "-msse4.2"
        }
      ],
      "nvhpc": [
        {
          "versions": ":",
          "name": "piledriver",
          "flags": "-tp {name}"
        }
      ]
    }
  },
@@ -1870,13 +1663,6 @@
          "name": "core-avx2",
          "flags": "-march={name} -mtune={name}"
        }
      ],
      "nvhpc": [
        {
          "versions": ":",
          "name": "piledriver",
          "flags": "-tp {name}"
        }
      ]
    }
  },
@@ -1955,12 +1741,6 @@
          "name": "core-avx2",
          "flags": "-march={name} -mtune={name}"
        }
      ],
      "nvhpc": [
        {
          "versions": ":",
          "flags": "-tp {name}"
        }
      ]
    }
  },
@@ -2040,12 +1820,6 @@
          "name": "core-avx2",
          "flags": "-march={name} -mtune={name}"
        }
      ],
      "nvhpc": [
        {
          "versions": "20.5:",
          "flags": "-tp {name}"
        }
      ]
    }
  },
@@ -2128,12 +1902,6 @@
          "name": "core-avx2",
          "flags": "-march={name} -mtune={name}"
        }
      ],
      "nvhpc": [
        {
          "versions": "21.11:",
          "flags": "-tp {name}"
        }
      ]
    }
  },
@@ -2172,6 +1940,8 @@
      "pku",
      "gfni",
      "flush_l1d",
      "erms",
      "avic",
      "avx512f",
      "avx512dq",
      "avx512ifma",
@@ -2188,28 +1958,18 @@
    "compilers": {
      "gcc": [
        {
          "versions": "10.3:12.2",
          "versions": "10.3:",
          "name": "znver3",
          "flags": "-march={name} -mtune={name} -mavx512f -mavx512dq -mavx512ifma -mavx512cd -mavx512bw -mavx512vl -mavx512vbmi -mavx512vbmi2 -mavx512vnni -mavx512bitalg"
        },
        {
          "versions": "12.3:",
          "name": "znver4",
          "flags": "-march={name} -mtune={name}"
        }
      ],
      "clang": [
        {
          "versions": "12.0:15.9",
          "versions": "12.0:",
          "name": "znver3",
          "flags": "-march={name} -mtune={name} -mavx512f -mavx512dq -mavx512ifma -mavx512cd -mavx512bw -mavx512vl -mavx512vbmi -mavx512vbmi2 -mavx512vnni -mavx512bitalg"
        },
        {
          "versions": "16.0:",
          "name": "znver4",
          "flags": "-march={name} -mtune={name}"
        }
      ],
      ],
      "aocc": [
        {
          "versions": "3.0:3.9",
@@ -2222,15 +1982,7 @@
          "name": "znver4",
          "flags": "-march={name} -mtune={name}"
        }
      ],
      "nvhpc": [
        {
          "versions": "21.11:",
          "name": "zen3",
          "flags": "-tp {name}",
          "warnings": "zen4 is not fully supported by nvhpc yet, falling back to zen3"
        }
      ]
      ]
    }
  },
  "ppc64": {
@@ -2335,8 +2087,7 @@
          "versions": ":",
          "flags": "-mcpu={name} -mtune={name}"
        }
      ],
      "nvhpc": []
      ]
    }
  },
  "power8le": {
@@ -2365,13 +2116,6 @@
          "name": "power8",
          "flags": "-mcpu={name} -mtune={name}"
        }
      ],
      "nvhpc": [
        {
          "versions": ":",
          "name": "pwr8",
          "flags": "-tp {name}"
        }
      ]
    }
  },
@@ -2395,13 +2139,6 @@
          "name": "power9",
          "flags": "-mcpu={name} -mtune={name}"
        }
      ],
      "nvhpc": [
        {
          "versions": ":",
          "name": "pwr9",
          "flags": "-tp {name}"
        }
      ]
    }
  },
@@ -2433,8 +2170,7 @@
          "versions": ":",
          "flags": "-march=armv8-a -mtune=generic"
        }
      ],
      "nvhpc": []
      ]
    }
  },
  "armv8.1a": {
@@ -2816,13 +2552,6 @@
          "versions": "20:",
          "flags" : "-march=armv8.2-a+fp16+rcpc+dotprod+crypto"
        }
      ],
      "nvhpc" : [
        {
          "versions": "22.5:",
          "name": "neoverse-n1",
          "flags": "-tp {name}"
        }
      ]
    }
  },
@@ -2888,31 +2617,15 @@
          "flags" : "-march=armv8.2-a+crypto+fp16 -mtune=cortex-a72"
        },
        {
          "versions": "8.0:8.4",
          "versions": "8.0:8.9",
          "flags" : "-march=armv8.2-a+fp16+dotprod+crypto -mtune=cortex-a72"
        },
        {
          "versions": "8.5:8.9",
          "versions": "9.0:9.9",
          "flags" : "-mcpu=neoverse-v1"
        },
        {
          "versions": "9.0:9.3",
          "flags" : "-march=armv8.2-a+fp16+dotprod+crypto -mtune=cortex-a72"
        },
        {
          "versions": "9.4:9.9",
          "flags" : "-mcpu=neoverse-v1"
        },
        {
          "versions": "10.0:10.1",
          "flags" : "-march=armv8.2-a+fp16+dotprod+crypto -mtune=cortex-a72"
        },
        {
          "versions": "10.2:10.2.99",
          "flags" : "-mcpu=zeus"
        },
        {
          "versions": "10.3:",
        {
          "versions": "10.0:",
          "flags" : "-mcpu=neoverse-v1"
        }

@@ -2944,13 +2657,6 @@
          "versions": "22:",
          "flags" : "-march=armv8.4-a+sve+ssbs+fp16+bf16+crypto+i8mm+rng"
        }
      ],
      "nvhpc" : [
        {
          "versions": "22.5:",
          "name": "neoverse-n1",
          "flags": "-tp {name}"
        }
      ]
    }
  },
7 lib/spack/external/ctest_log_parser.py vendored
@@ -65,6 +65,9 @@
up to date with CTest, just make sure the ``*_matches`` and
``*_exceptions`` lists are kept up to date with CTest's build handler.
"""
from __future__ import print_function
from __future__ import division

import re
import math
import multiprocessing
@@ -208,7 +211,7 @@
]


class LogEvent:
class LogEvent(object):
    """Class representing interesting events (e.g., errors) in a build log."""
    def __init__(self, text, line_no,
                 source_file=None, source_line_no=None,
@@ -345,7 +348,7 @@ def _parse_unpack(args):
    return _parse(*args)


class CTestLogParser:
class CTestLogParser(object):
    """Log file parser that extracts errors and warnings."""
    def __init__(self, profile=False):
        # whether to record timing information

2 lib/spack/external/ruamel/yaml/.ruamel/__init__.py vendored Normal file
@@ -0,0 +1,2 @@
import pkg_resources
pkg_resources.declare_namespace(__name__)
@@ -1,21 +1,21 @@

The MIT License (MIT)

Copyright (c) 2014-2022 Anthon van der Neut, Ruamel bvba

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.

38 lib/spack/external/ruamel/yaml/README.rst vendored Normal file
@@ -0,0 +1,38 @@
ruamel.yaml
===========

``ruamel.yaml`` is a YAML 1.2 loader/dumper package for Python.

* `Overview <http://yaml.readthedocs.org/en/latest/overview.html>`_
* `Installing <http://yaml.readthedocs.org/en/latest/install.html>`_
* `Details <http://yaml.readthedocs.org/en/latest/detail.html>`_
* `Examples <http://yaml.readthedocs.org/en/latest/example.html>`_
* `Differences with PyYAML <http://yaml.readthedocs.org/en/latest/pyyaml.html>`_

.. image:: https://readthedocs.org/projects/yaml/badge/?version=stable
   :target: https://yaml.readthedocs.org/en/stable

ChangeLog
=========

::

  0.11.15 (2016-XX-XX):
   - Change to prevent FutureWarning in NumPy, as reported by tgehring
     ("comparison to None will result in an elementwise object comparison in the future")

  0.11.14 (2016-07-06):
   - fix preserve_quotes missing on original Loaders (as reported
     by Leynos, bitbucket issue 38)

  0.11.13 (2016-07-06):
   - documentation only, automated linux wheels

  0.11.12 (2016-07-06):
   - added support for roundtrip of single/double quoted scalars using:
     ruamel.yaml.round_trip_load(stream, preserve_quotes=True)

  0.11.0 (2016-02-18):
   - RoundTripLoader loads 1.2 by default (no sexagesimals, 012 octals nor
     yes/no/on/off booleans
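For the 0.11.12 changelog entry above, a short usage sketch, assuming the vendored package is importable as ruamel.yaml:

    # Round-trip of quoted scalars; output spacing may differ slightly.
    import ruamel.yaml

    src = "a: '1'\nb: \"2\"\n"
    data = ruamel.yaml.round_trip_load(src, preserve_quotes=True)
    print(ruamel.yaml.round_trip_dump(data))  # the original quotes survive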
85 lib/spack/external/ruamel/yaml/__init__.py vendored Normal file
@@ -0,0 +1,85 @@
# coding: utf-8

from __future__ import print_function
from __future__ import absolute_import

# install_requires of ruamel.base is not really required but the old
# ruamel.base installed __init__.py, and thus a new version should
# be installed at some point

_package_data = dict(
    full_package_name="ruamel.yaml",
    version_info=(0, 11, 15),
    author="Anthon van der Neut",
    author_email="a.van.der.neut@ruamel.eu",
    description="ruamel.yaml is a YAML parser/emitter that supports roundtrip preservation of comments, seq/map flow style, and map key order",  # NOQA
    entry_points=None,
    install_requires=dict(
        any=[],
        py26=["ruamel.ordereddict"],
        py27=["ruamel.ordereddict"]
    ),
    ext_modules=[dict(
        name="_ruamel_yaml",
        src=["ext/_ruamel_yaml.c", "ext/api.c", "ext/writer.c", "ext/dumper.c",
             "ext/loader.c", "ext/reader.c", "ext/scanner.c", "ext/parser.c",
             "ext/emitter.c"],
        lib=[],
        # test='#include "ext/yaml.h"\n\nint main(int argc, char* argv[])\n{\nyaml_parser_t parser;\nparser = parser; /* prevent warning */\nreturn 0;\n}\n'  # NOQA
        )
    ],
    classifiers=[
        "Programming Language :: Python :: 2.6",
        "Programming Language :: Python :: 2.7",
        "Programming Language :: Python :: 3.3",
        "Programming Language :: Python :: 3.4",
        "Programming Language :: Python :: 3.5",
        "Programming Language :: Python :: Implementation :: CPython",
        "Programming Language :: Python :: Implementation :: PyPy",
        "Programming Language :: Python :: Implementation :: Jython",
        "Topic :: Software Development :: Libraries :: Python Modules",
        "Topic :: Text Processing :: Markup"
    ],
    windows_wheels=True,
    read_the_docs='yaml',
    many_linux='libyaml-devel',
)


# < from ruamel.util.new import _convert_version
def _convert_version(tup):
    """create a PEP 386 pseudo-format conformant string from tuple tup"""
    ret_val = str(tup[0])  # first is always digit
    next_sep = "."  # separator for next extension, can be "" or "."
    for x in tup[1:]:
        if isinstance(x, int):
            ret_val += next_sep + str(x)
            next_sep = '.'
            continue
        first_letter = x[0].lower()
        next_sep = ''
        if first_letter in 'abcr':
            ret_val += 'rc' if first_letter == 'r' else first_letter
        elif first_letter in 'pd':
            ret_val += '.post' if first_letter == 'p' else '.dev'
    return ret_val


# <
version_info = _package_data['version_info']
__version__ = _convert_version(version_info)

del _convert_version

try:
    from .cyaml import *  # NOQA
    __with_libyaml__ = True
except (ImportError, ValueError):  # for Jython
    __with_libyaml__ = False


# body extracted to main.py
try:
    from .main import *  # NOQA
except ImportError:
    from ruamel.yaml.main import *  # NOQA
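Worked traces of the _convert_version helper above (the function is deleted right after computing __version__, so these are traces rather than live calls):

    # (0, 11, 15)          -> "0.11.15"
    # (0, 12, 0, "b", 1)   -> "0.12.0b1"     'b' in 'abcr': appended, no dot
    # (0, 12, 0, "rc", 1)  -> "0.12.0rc1"    'r' is normalized to 'rc'
    # (0, 12, 0, "post")   -> "0.12.0.post"  'p' becomes '.post'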
486 lib/spack/external/ruamel/yaml/comments.py vendored Normal file
@@ -0,0 +1,486 @@
# coding: utf-8

from __future__ import absolute_import
from __future__ import print_function

"""
stuff to deal with comments and formatting on dict/list/ordereddict/set
these are not really related, formatting could be factored out as
a separate base
"""

import sys

if sys.version_info >= (3, 3):
    from collections.abc import MutableSet
else:
    from collections import MutableSet

__all__ = ["CommentedSeq", "CommentedMap", "CommentedOrderedMap",
           "CommentedSet", 'comment_attrib', 'merge_attrib']


try:
    from .compat import ordereddict
except ImportError:
    from ruamel.yaml.compat import ordereddict

comment_attrib = '_yaml_comment'
format_attrib = '_yaml_format'
line_col_attrib = '_yaml_line_col'
anchor_attrib = '_yaml_anchor'
merge_attrib = '_yaml_merge'
tag_attrib = '_yaml_tag'


class Comment(object):
    # sys.getsize tested the Comment objects, __slots__ make them bigger
    # and adding self.end did not matter
    attrib = comment_attrib

    def __init__(self):
        self.comment = None  # [post, [pre]]
        # map key (mapping/omap/dict) or index (sequence/list) to a list of
        # dict: post_key, pre_key, post_value, pre_value
        # list: pre item, post item
        self._items = {}
        # self._start = []  # should not put these on first item
        self._end = []  # end of document comments

    def __str__(self):
        if self._end:
            end = ',\n  end=' + str(self._end)
        else:
            end = ''
        return "Comment(comment={0},\n  items={1}{2})".format(
            self.comment, self._items, end)

    @property
    def items(self):
        return self._items

    @property
    def end(self):
        return self._end

    @end.setter
    def end(self, value):
        self._end = value

    @property
    def start(self):
        return self._start

    @start.setter
    def start(self, value):
        self._start = value


# to distinguish key from None
def NoComment():
    pass


class Format(object):
    attrib = format_attrib

    def __init__(self):
        self._flow_style = None

    def set_flow_style(self):
        self._flow_style = True

    def set_block_style(self):
        self._flow_style = False

    def flow_style(self, default=None):
        """if default (the flow_style) is None, the flow style tacked on to
        the object explicitly will be taken. If that is None as well the
        default flow style rules the format down the line, or the type
        of the constituent values (simple -> flow, map/list -> block)"""
        if self._flow_style is None:
            return default
        return self._flow_style


class LineCol(object):
    attrib = line_col_attrib

    def __init__(self):
        self.line = None
        self.col = None
        self.data = None

    def add_kv_line_col(self, key, data):
        if self.data is None:
            self.data = {}
        self.data[key] = data

    def key(self, k):
        return self._kv(k, 0, 1)

    def value(self, k):
        return self._kv(k, 2, 3)

    def _kv(self, k, x0, x1):
        if self.data is None:
            return None
        data = self.data[k]
        return data[x0], data[x1]

    def item(self, idx):
        if self.data is None:
            return None
        return self.data[idx][0], self.data[idx][1]

    def add_idx_line_col(self, key, data):
        if self.data is None:
            self.data = {}
        self.data[key] = data


class Anchor(object):
    attrib = anchor_attrib

    def __init__(self):
        self.value = None
        self.always_dump = False


class Tag(object):
    """store tag information for roundtripping"""
    attrib = tag_attrib

    def __init__(self):
        self.value = None


class CommentedBase(object):
    @property
    def ca(self):
        if not hasattr(self, Comment.attrib):
            setattr(self, Comment.attrib, Comment())
        return getattr(self, Comment.attrib)

    def yaml_end_comment_extend(self, comment, clear=False):
        if clear:
            self.ca.end = []
        self.ca.end.extend(comment)

    def yaml_key_comment_extend(self, key, comment, clear=False):
        l = self.ca._items.setdefault(key, [None, None, None, None])
        if clear or l[1] is None:
            if comment[1] is not None:
                assert isinstance(comment[1], list)
            l[1] = comment[1]
        else:
            l[1].extend(comment[0])
        l[0] = comment[0]

    def yaml_value_comment_extend(self, key, comment, clear=False):
        l = self.ca._items.setdefault(key, [None, None, None, None])
        if clear or l[3] is None:
            if comment[1] is not None:
                assert isinstance(comment[1], list)
            l[3] = comment[1]
        else:
            l[3].extend(comment[0])
        l[2] = comment[0]

    def yaml_set_start_comment(self, comment, indent=0):
        """overwrites any preceding comment lines on an object
        expects comment to be without `#` and possibly have multiple lines
        """
        from .error import Mark
        from .tokens import CommentToken
        pre_comments = self._yaml_get_pre_comment()
        if comment[-1] == '\n':
            comment = comment[:-1]  # strip final newline if there
        start_mark = Mark(None, None, None, indent, None, None)
        for com in comment.split('\n'):
            pre_comments.append(CommentToken('# ' + com + '\n', start_mark, None))

    @property
    def fa(self):
        """format attribute

        set_flow_style()/set_block_style()"""
        if not hasattr(self, Format.attrib):
            setattr(self, Format.attrib, Format())
        return getattr(self, Format.attrib)

    def yaml_add_eol_comment(self, comment, key=NoComment, column=None):
        """
        there is a problem as eol comments should start with ' #'
        (but at the beginning of the line the space doesn't have to be before
        the #. The column index is for the # mark
        """
        from .tokens import CommentToken
        from .error import Mark
        if column is None:
            column = self._yaml_get_column(key)
        if comment[0] != '#':
            comment = '# ' + comment
        if column is None:
            if comment[0] == '#':
                comment = ' ' + comment
                column = 0
        start_mark = Mark(None, None, None, column, None, None)
        ct = [CommentToken(comment, start_mark, None), None]
        self._yaml_add_eol_comment(ct, key=key)

    @property
    def lc(self):
        if not hasattr(self, LineCol.attrib):
            setattr(self, LineCol.attrib, LineCol())
        return getattr(self, LineCol.attrib)

    def _yaml_set_line_col(self, line, col):
        self.lc.line = line
        self.lc.col = col

    def _yaml_set_kv_line_col(self, key, data):
        self.lc.add_kv_line_col(key, data)

    def _yaml_set_idx_line_col(self, key, data):
        self.lc.add_idx_line_col(key, data)

    @property
    def anchor(self):
        if not hasattr(self, Anchor.attrib):
            setattr(self, Anchor.attrib, Anchor())
        return getattr(self, Anchor.attrib)

    def yaml_anchor(self):
        if not hasattr(self, Anchor.attrib):
            return None
        return self.anchor

    def yaml_set_anchor(self, value, always_dump=False):
        self.anchor.value = value
        self.anchor.always_dump = always_dump

    @property
    def tag(self):
        if not hasattr(self, Tag.attrib):
            setattr(self, Tag.attrib, Tag())
        return getattr(self, Tag.attrib)

    def yaml_set_tag(self, value):
        self.tag.value = value


class CommentedSeq(list, CommentedBase):
    __slots__ = [Comment.attrib, ]

    def _yaml_add_comment(self, comment, key=NoComment):
        if key is not NoComment:
            self.yaml_key_comment_extend(key, comment)
        else:
            self.ca.comment = comment

    def _yaml_add_eol_comment(self, comment, key):
        self._yaml_add_comment(comment, key=key)

    def _yaml_get_columnX(self, key):
        return self.ca.items[key][0].start_mark.column

    def insert(self, idx, val):
        """the comments after the insertion have to move forward"""
        list.insert(self, idx, val)
        for list_index in sorted(self.ca.items, reverse=True):
            if list_index < idx:
                break
            self.ca.items[list_index+1] = self.ca.items.pop(list_index)

    def pop(self, idx):
        res = list.pop(self, idx)
        self.ca.items.pop(idx, None)  # might not be there -> default value
        for list_index in sorted(self.ca.items):
            if list_index < idx:
                continue
            self.ca.items[list_index-1] = self.ca.items.pop(list_index)
        return res

    def _yaml_get_column(self, key):
        column = None
        sel_idx = None
        pre, post = key-1, key+1
        if pre in self.ca.items:
            sel_idx = pre
        elif post in self.ca.items:
            sel_idx = post
        else:
            # self.ca.items is not ordered
            for row_idx, k1 in enumerate(self):
                if row_idx >= key:
                    break
                if row_idx not in self.ca.items:
                    continue
                sel_idx = row_idx
        if sel_idx is not None:
            column = self._yaml_get_columnX(sel_idx)
        return column

    def _yaml_get_pre_comment(self):
        if self.ca.comment is None:
            pre_comments = []
            self.ca.comment = [None, pre_comments]
        else:
            pre_comments = self.ca.comment[1] = []
        return pre_comments


class CommentedMap(ordereddict, CommentedBase):
    __slots__ = [Comment.attrib, ]

    def _yaml_add_comment(self, comment, key=NoComment, value=NoComment):
        """values is set to key to indicate a value attachment of comment"""
        if key is not NoComment:
            self.yaml_key_comment_extend(key, comment)
            return
        if value is not NoComment:
            self.yaml_value_comment_extend(value, comment)
        else:
            self.ca.comment = comment

    def _yaml_add_eol_comment(self, comment, key):
        """add on the value line, with value specified by the key"""
        self._yaml_add_comment(comment, value=key)

    def _yaml_get_columnX(self, key):
        return self.ca.items[key][2].start_mark.column

    def _yaml_get_column(self, key):
        column = None
        sel_idx = None
        pre, post, last = None, None, None
        for x in self:
            if pre is not None and x != key:
                post = x
                break
            if x == key:
                pre = last
            last = x
        if pre in self.ca.items:
            sel_idx = pre
        elif post in self.ca.items:
            sel_idx = post
        else:
            # self.ca.items is not ordered
            for row_idx, k1 in enumerate(self):
                if k1 >= key:
                    break
                if k1 not in self.ca.items:
                    continue
                sel_idx = k1
        if sel_idx is not None:
            column = self._yaml_get_columnX(sel_idx)
        return column

    def _yaml_get_pre_comment(self):
        if self.ca.comment is None:
            pre_comments = []
            self.ca.comment = [None, pre_comments]
        else:
            pre_comments = self.ca.comment[1] = []
        return pre_comments

    def update(self, *vals, **kwds):
        try:
            ordereddict.update(self, *vals, **kwds)
        except TypeError:
            # probably a dict that is used
            for x in vals:
                self[x] = vals[x]

    def insert(self, pos, key, value, comment=None):
        """insert key value into given position
        attach comment if provided
        """
        ordereddict.insert(self, pos, key, value)
        if comment is not None:
            self.yaml_add_eol_comment(comment, key=key)

    def mlget(self, key, default=None, list_ok=False):
        """multi-level get that expects dicts within dicts"""
        if not isinstance(key, list):
            return self.get(key, default)
        # assume that the key is a list of recursively accessible dicts

        def get_one_level(key_list, level, d):
            if not list_ok:
                assert isinstance(d, dict)
            if level >= len(key_list):
                if level > len(key_list):
                    raise IndexError
                return d[key_list[level-1]]
            return get_one_level(key_list, level+1, d[key_list[level-1]])

        try:
            return get_one_level(key, 1, self)
        except KeyError:
            return default
        except (TypeError, IndexError):
            if not list_ok:
                raise
            return default

    def __getitem__(self, key):
        try:
            return ordereddict.__getitem__(self, key)
        except KeyError:
            for merged in getattr(self, merge_attrib, []):
                if key in merged[1]:
                    return merged[1][key]
            raise

    def get(self, key, default=None):
        try:
            return self.__getitem__(key)
        except:
            return default

    @property
    def merge(self):
        if not hasattr(self, merge_attrib):
            setattr(self, merge_attrib, [])
        return getattr(self, merge_attrib)

    def add_yaml_merge(self, value):
        self.merge.extend(value)


class CommentedOrderedMap(CommentedMap):
    __slots__ = [Comment.attrib, ]


class CommentedSet(MutableSet, CommentedMap):
    __slots__ = [Comment.attrib, 'odict']

    def __init__(self, values=None):
        self.odict = ordereddict()
        MutableSet.__init__(self)
        if values is not None:
            self |= values

    def add(self, value):
        """Add an element."""
        self.odict[value] = None

    def discard(self, value):
        """Remove an element. Do not raise an exception if absent."""
        del self.odict[value]

    def __contains__(self, x):
        return x in self.odict

    def __iter__(self):
        for x in self.odict:
            yield x

    def __len__(self):
        return len(self.odict)

    def __repr__(self):
        return 'set({0!r})'.format(self.odict.keys())
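A brief usage sketch of the containers above as they come back from the round-trip loader; assumes the vendored ruamel.yaml package is importable:

    import ruamel.yaml

    data = ruamel.yaml.round_trip_load("a: 1\nb: 2\n")  # a CommentedMap
    data.yaml_add_eol_comment("keep me", key="a")       # end-of-line comment on a's line
    data.insert(1, "c", 3, comment="inserted")          # positional insert, with comment
    print(ruamel.yaml.round_trip_dump(data))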
123 lib/spack/external/ruamel/yaml/compat.py vendored Normal file
@@ -0,0 +1,123 @@
# coding: utf-8

from __future__ import print_function

# partially from package six by Benjamin Peterson

import sys
import os
import types

try:
    from ruamel.ordereddict import ordereddict
except:
    try:
        from collections.abc import OrderedDict
    except ImportError:
        try:
            from collections import OrderedDict
        except ImportError:
            from ordereddict import OrderedDict
    # to get the right name import ... as ordereddict doesn't do that

    class ordereddict(OrderedDict):
        if not hasattr(OrderedDict, 'insert'):
            def insert(self, pos, key, value):
                if pos >= len(self):
                    self[key] = value
                    return
                od = ordereddict()
                od.update(self)
                for k in od:
                    del self[k]
                for index, old_key in enumerate(od):
                    if pos == index:
                        self[key] = value
                    self[old_key] = od[old_key]


PY2 = sys.version_info[0] == 2
PY3 = sys.version_info[0] == 3

if PY3:
    def utf8(s):
        return s

    def to_str(s):
        return s

    def to_unicode(s):
        return s

else:
    def utf8(s):
        return s.encode('utf-8')

    def to_str(s):
        return str(s)

    def to_unicode(s):
        return unicode(s)

if PY3:
    string_types = str,
    integer_types = int,
    class_types = type,
    text_type = str
    binary_type = bytes

    MAXSIZE = sys.maxsize
    unichr = chr
    import io
    StringIO = io.StringIO
    BytesIO = io.BytesIO

else:
    string_types = basestring,
    integer_types = (int, long)
    class_types = (type, types.ClassType)
    text_type = unicode
    binary_type = str

    unichr = unichr  # to allow importing
    import StringIO
    StringIO = StringIO.StringIO
    import cStringIO
    BytesIO = cStringIO.StringIO

if PY3:
    builtins_module = 'builtins'
else:
    builtins_module = '__builtin__'


def with_metaclass(meta, *bases):
    """Create a base class with a metaclass."""
    return meta("NewBase", bases, {})

DBG_TOKEN = 1
DBG_EVENT = 2
DBG_NODE = 4


_debug = None


# used from yaml util when testing
def dbg(val=None):
    global _debug
    if _debug is None:
        # set to true or false
        _debug = os.environ.get('YAMLDEBUG')
        if _debug is None:
            _debug = 0
        else:
            _debug = int(_debug)
    if val is None:
        return _debug
    return _debug & val


def nprint(*args, **kw):
    if dbg:
        print(*args, **kw)
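Typical consumption of these shims is version-independent type checks and metaclass declarations; a small sketch (not taken from the diff):

    from ruamel.yaml.compat import string_types, text_type, with_metaclass

    class Meta(type):
        pass

    # with_metaclass builds a throwaway base whose metaclass is Meta,
    # which works under both Python 2 and 3 syntax rules
    class Base(with_metaclass(Meta)):
        pass

    assert isinstance(text_type("x"), string_types)
    assert type(Base) is Meta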
182 lib/spack/external/ruamel/yaml/composer.py vendored Normal file
@@ -0,0 +1,182 @@
# coding: utf-8

from __future__ import absolute_import
from __future__ import print_function


try:
    from .error import MarkedYAMLError
    from .compat import utf8
except (ImportError, ValueError):  # for Jython
    from ruamel.yaml.error import MarkedYAMLError
    from ruamel.yaml.compat import utf8

from ruamel.yaml.events import (
    StreamStartEvent, StreamEndEvent, MappingStartEvent, MappingEndEvent,
    SequenceStartEvent, SequenceEndEvent, AliasEvent, ScalarEvent,
)
from ruamel.yaml.nodes import (
    MappingNode, ScalarNode, SequenceNode,
)

__all__ = ['Composer', 'ComposerError']


class ComposerError(MarkedYAMLError):
    pass


class Composer(object):
    def __init__(self):
        self.anchors = {}

    def check_node(self):
        # Drop the STREAM-START event.
        if self.check_event(StreamStartEvent):
            self.get_event()

        # Are there more documents available?
        return not self.check_event(StreamEndEvent)

    def get_node(self):
        # Get the root node of the next document.
        if not self.check_event(StreamEndEvent):
            return self.compose_document()

    def get_single_node(self):
        # Drop the STREAM-START event.
        self.get_event()

        # Compose a document if the stream is not empty.
        document = None
        if not self.check_event(StreamEndEvent):
            document = self.compose_document()

        # Ensure that the stream contains no more documents.
        if not self.check_event(StreamEndEvent):
            event = self.get_event()
            raise ComposerError(
                "expected a single document in the stream",
                document.start_mark, "but found another document",
                event.start_mark)

        # Drop the STREAM-END event.
        self.get_event()

        return document

    def compose_document(self):
        # Drop the DOCUMENT-START event.
        self.get_event()

        # Compose the root node.
        node = self.compose_node(None, None)

        # Drop the DOCUMENT-END event.
        self.get_event()

        self.anchors = {}
        return node

    def compose_node(self, parent, index):
        if self.check_event(AliasEvent):
            event = self.get_event()
            alias = event.anchor
            if alias not in self.anchors:
                raise ComposerError(
                    None, None, "found undefined alias %r"
                    % utf8(alias), event.start_mark)
            return self.anchors[alias]
        event = self.peek_event()
        anchor = event.anchor
        if anchor is not None:  # have an anchor
            if anchor in self.anchors:
                raise ComposerError(
                    "found duplicate anchor %r; first occurrence"
                    % utf8(anchor), self.anchors[anchor].start_mark,
                    "second occurrence", event.start_mark)
        self.descend_resolver(parent, index)
        if self.check_event(ScalarEvent):
            node = self.compose_scalar_node(anchor)
        elif self.check_event(SequenceStartEvent):
            node = self.compose_sequence_node(anchor)
        elif self.check_event(MappingStartEvent):
            node = self.compose_mapping_node(anchor)
        self.ascend_resolver()
        return node

    def compose_scalar_node(self, anchor):
        event = self.get_event()
        tag = event.tag
        if tag is None or tag == u'!':
            tag = self.resolve(ScalarNode, event.value, event.implicit)
        node = ScalarNode(tag, event.value,
                          event.start_mark, event.end_mark, style=event.style,
                          comment=event.comment)
        if anchor is not None:
            self.anchors[anchor] = node
        return node

    def compose_sequence_node(self, anchor):
        start_event = self.get_event()
        tag = start_event.tag
        if tag is None or tag == u'!':
            tag = self.resolve(SequenceNode, None, start_event.implicit)
        node = SequenceNode(tag, [],
                            start_event.start_mark, None,
                            flow_style=start_event.flow_style,
                            comment=start_event.comment, anchor=anchor)
        if anchor is not None:
            self.anchors[anchor] = node
        index = 0
        while not self.check_event(SequenceEndEvent):
            node.value.append(self.compose_node(node, index))
            index += 1
        end_event = self.get_event()
        if node.flow_style is True and end_event.comment is not None:
            if node.comment is not None:
                print('Warning: unexpected end_event comment in sequence '
                      'node {0}'.format(node.flow_style))
            node.comment = end_event.comment
        node.end_mark = end_event.end_mark
        self.check_end_doc_comment(end_event, node)
        return node

    def compose_mapping_node(self, anchor):
        start_event = self.get_event()
        tag = start_event.tag
        if tag is None or tag == u'!':
            tag = self.resolve(MappingNode, None, start_event.implicit)
        node = MappingNode(tag, [],
                           start_event.start_mark, None,
                           flow_style=start_event.flow_style,
                           comment=start_event.comment, anchor=anchor)
        if anchor is not None:
            self.anchors[anchor] = node
        while not self.check_event(MappingEndEvent):
            # key_event = self.peek_event()
            item_key = self.compose_node(node, None)
            # if item_key in node.value:
            #     raise ComposerError("while composing a mapping",
            #                         start_event.start_mark,
            #                         "found duplicate key", key_event.start_mark)
            item_value = self.compose_node(node, item_key)
            # node.value[item_key] = item_value
            node.value.append((item_key, item_value))
        end_event = self.get_event()
        if node.flow_style is True and end_event.comment is not None:
            node.comment = end_event.comment
        node.end_mark = end_event.end_mark
        self.check_end_doc_comment(end_event, node)
        return node

    def check_end_doc_comment(self, end_event, node):
        if end_event.comment and end_event.comment[1]:
            # pre comments on an end_event, no following to move to
            if node.comment is None:
                node.comment = [None, None]
            assert not isinstance(node, ScalarEvent)
            # this is a post comment on a mapping node, add as third element
            # in the list
            node.comment.append(end_event.comment[1])
            end_event.comment[1] = None
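The composer is the stage that resolves anchors and aliases into shared nodes; a sketch of the observable behaviour through the public loader, assuming the vendored package is importable:

    import ruamel.yaml
    from ruamel.yaml.composer import ComposerError

    doc = "defaults: &d {x: 1}\nprod: *d\n"
    data = ruamel.yaml.round_trip_load(doc)
    assert data["prod"]["x"] == 1  # the alias re-uses the anchored node

    # An alias with no matching anchor raises ComposerError, per compose_node
    try:
        ruamel.yaml.round_trip_load("a: *missing\n")
    except ComposerError as err:
        print(err)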
9 lib/spack/external/ruamel/yaml/configobjwalker.py vendored Normal file
@@ -0,0 +1,9 @@
# coding: utf-8

import warnings
from ruamel.yaml.util import configobj_walker as new_configobj_walker


def configobj_walker(cfg):
    warnings.warn("configobj_walker has moved to ruamel.yaml.util, please update your code")
    return new_configobj_walker(cfg)
1172 lib/spack/external/ruamel/yaml/constructor.py vendored Normal file (diff suppressed because it is too large)
102 lib/spack/external/ruamel/yaml/dumper.py vendored Normal file
@@ -0,0 +1,102 @@
# coding: utf-8

from __future__ import absolute_import

__all__ = ['BaseDumper', 'SafeDumper', 'Dumper', 'RoundTripDumper']

try:
    from .emitter import *  # NOQA
    from .serializer import *  # NOQA
    from .representer import *  # NOQA
    from .resolver import *  # NOQA
except (ImportError, ValueError):  # for Jython
    from ruamel.yaml.emitter import *  # NOQA
    from ruamel.yaml.serializer import *  # NOQA
    from ruamel.yaml.representer import *  # NOQA
    from ruamel.yaml.resolver import *  # NOQA


class BaseDumper(Emitter, Serializer, BaseRepresenter, BaseResolver):
    def __init__(self, stream,
                 default_style=None, default_flow_style=None,
                 canonical=None, indent=None, width=None,
                 allow_unicode=None, line_break=None,
                 encoding=None, explicit_start=None, explicit_end=None,
                 version=None, tags=None, block_seq_indent=None,
                 top_level_colon_align=None, prefix_colon=None):
        Emitter.__init__(self, stream, canonical=canonical,
                         indent=indent, width=width,
                         allow_unicode=allow_unicode, line_break=line_break,
                         block_seq_indent=block_seq_indent)
        Serializer.__init__(self, encoding=encoding,
                            explicit_start=explicit_start,
                            explicit_end=explicit_end,
                            version=version, tags=tags)
        Representer.__init__(self, default_style=default_style,
                             default_flow_style=default_flow_style)
        Resolver.__init__(self)


class SafeDumper(Emitter, Serializer, SafeRepresenter, Resolver):
    def __init__(self, stream,
                 default_style=None, default_flow_style=None,
                 canonical=None, indent=None, width=None,
                 allow_unicode=None, line_break=None,
                 encoding=None, explicit_start=None, explicit_end=None,
                 version=None, tags=None, block_seq_indent=None,
                 top_level_colon_align=None, prefix_colon=None):
        Emitter.__init__(self, stream, canonical=canonical,
                         indent=indent, width=width,
                         allow_unicode=allow_unicode, line_break=line_break,
                         block_seq_indent=block_seq_indent)
        Serializer.__init__(self, encoding=encoding,
                            explicit_start=explicit_start,
                            explicit_end=explicit_end,
                            version=version, tags=tags)
        SafeRepresenter.__init__(self, default_style=default_style,
                                 default_flow_style=default_flow_style)
        Resolver.__init__(self)


class Dumper(Emitter, Serializer, Representer, Resolver):
    def __init__(self, stream,
                 default_style=None, default_flow_style=None,
                 canonical=None, indent=None, width=None,
                 allow_unicode=None, line_break=None,
                 encoding=None, explicit_start=None, explicit_end=None,
                 version=None, tags=None, block_seq_indent=None,
                 top_level_colon_align=None, prefix_colon=None):
        Emitter.__init__(self, stream, canonical=canonical,
                         indent=indent, width=width,
                         allow_unicode=allow_unicode, line_break=line_break,
                         block_seq_indent=block_seq_indent)
        Serializer.__init__(self, encoding=encoding,
                            explicit_start=explicit_start,
                            explicit_end=explicit_end,
                            version=version, tags=tags)
        Representer.__init__(self, default_style=default_style,
                             default_flow_style=default_flow_style)
        Resolver.__init__(self)


class RoundTripDumper(Emitter, Serializer, RoundTripRepresenter, VersionedResolver):
    def __init__(self, stream,
                 default_style=None, default_flow_style=None,
                 canonical=None, indent=None, width=None,
                 allow_unicode=None, line_break=None,
                 encoding=None, explicit_start=None, explicit_end=None,
                 version=None, tags=None, block_seq_indent=None,
                 top_level_colon_align=None, prefix_colon=None):
        Emitter.__init__(self, stream, canonical=canonical,
                         indent=indent, width=width,
                         allow_unicode=allow_unicode, line_break=line_break,
                         block_seq_indent=block_seq_indent,
                         top_level_colon_align=top_level_colon_align,
                         prefix_colon=prefix_colon)
        Serializer.__init__(self, encoding=encoding,
                            explicit_start=explicit_start,
                            explicit_end=explicit_end,
                            version=version, tags=tags)
        RoundTripRepresenter.__init__(self, default_style=default_style,
                                      default_flow_style=default_flow_style)
        VersionedResolver.__init__(self)
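RoundTripDumper is the variant that carries the extra comment and alignment options through to the emitter; a usage sketch against this 0.11-era API, assuming the vendored package is importable:

    import ruamel.yaml

    data = ruamel.yaml.round_trip_load("b: 2  # a comment\na: 1\n")
    print(ruamel.yaml.dump(data, Dumper=ruamel.yaml.RoundTripDumper))
    # key order and the end-of-line comment survive; the plain Dumper
    # would re-sort the keys and drop the comment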
1282 lib/spack/external/ruamel/yaml/emitter.py vendored Normal file (diff suppressed because it is too large)