Compare commits


1 Commit

Author:  Gregory Becker
SHA1:    03084d2ff8
Message: avoid quadratic expansion of targets in the solver
Date:    2023-01-12 12:25:15 -08:00
8327 changed files with 44952 additions and 78342 deletions


@@ -1,5 +1,3 @@
 # .git-blame-ignore-revs
-# Formatted entire codebase with black 23
-603569e321013a1a63a637813c94c2834d0a0023
-# Formatted entire codebase with black 22
+# Formatted entire codebase with black
 f52f6e99dbf1131886a80112b8c79dfc414afb7c

.gitattributes

@@ -1,4 +1,3 @@
 *.py diff=python
 *.lp linguist-language=Prolog
 lib/spack/external/* linguist-vendored
-*.bat text eol=crlf


@@ -29,9 +29,7 @@ body:
     description: |
       Please post the error message from spack inside the `<details>` tag below:
     value: |
-      <details><summary>Error message</summary>
-
-      <pre>
+      <details><summary>Error message</summary><pre>
       ...
       </pre></details>
 validations:


@@ -29,6 +29,8 @@ body:
   attributes:
     label: General information
     options:
+    - label: I have run `spack --version` and reported the version of Spack
+      required: true
     - label: I have searched the issues of this repo and believe this is not a duplicate
       required: true
 - type: markdown


@@ -21,9 +21,7 @@ body:
     description: |
       Please post the error message from spack inside the `<details>` tag below:
     value: |
-      <details><summary>Error message</summary>
-
-      <pre>
+      <details><summary>Error message</summary><pre>
       ...
       </pre></details>
 validations:


@@ -19,13 +19,13 @@ jobs:
   package-audits:
     runs-on: ubuntu-latest
     steps:
-    - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
-    - uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b # @v2
+    - uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b # @v2
+    - uses: actions/setup-python@5ccb29d8773c3f3f653e1705f474dfaa8a06a912 # @v2
       with:
         python-version: ${{inputs.python_version}}
     - name: Install Python packages
       run: |
-        pip install --upgrade pip setuptools pytest coverage[toml]
+        pip install --upgrade pip six setuptools pytest codecov coverage[toml]
     - name: Package audits (with coverage)
       if: ${{ inputs.with_coverage == 'true' }}
       run: |
@@ -38,7 +38,7 @@ jobs:
       run: |
         . share/spack/setup-env.sh
         $(which spack) audit packages
-    - uses: codecov/codecov-action@894ff025c7b54547a9a2a1e9f228beae737ad3c2 # @v2.1.0
+    - uses: codecov/codecov-action@d9f34f8cd5cb3b3eb79b3e4b5dae3a16df499a70 # @v2.1.0
       if: ${{ inputs.with_coverage == 'true' }}
       with:
         flags: unittests,linux,audits
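
For context, the audit step above can also be run outside CI; a minimal sketch, assuming a local Spack checkout, using only the commands that appear in the workflow:

$ . share/spack/setup-env.sh
$ spack audit packages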


@@ -24,7 +24,7 @@ jobs:
           make patch unzip which xz python3 python3-devel tree \
           cmake bison bison-devel libstdc++-static
     - name: Checkout
-      uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
+      uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b
       with:
         fetch-depth: 0
     - name: Setup non-root user
@@ -62,7 +62,7 @@ jobs:
           make patch unzip xz-utils python3 python3-dev tree \
           cmake bison
     - name: Checkout
-      uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
+      uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b
       with:
         fetch-depth: 0
     - name: Setup non-root user
@@ -99,7 +99,7 @@ jobs:
           bzip2 curl file g++ gcc gfortran git gnupg2 gzip \
           make patch unzip xz-utils python3 python3-dev tree
     - name: Checkout
-      uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
+      uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b
       with:
         fetch-depth: 0
     - name: Setup non-root user
@@ -133,7 +133,7 @@ jobs:
           make patch unzip which xz python3 python3-devel tree \
           cmake bison
     - name: Checkout
-      uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
+      uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b
       with:
         fetch-depth: 0
     - name: Setup repo
@@ -158,7 +158,7 @@ jobs:
       run: |
         brew install cmake bison@2.7 tree
     - name: Checkout
-      uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
+      uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b
     - name: Bootstrap clingo
       run: |
         source share/spack/setup-env.sh
@@ -179,7 +179,7 @@ jobs:
       run: |
         brew install tree
     - name: Checkout
-      uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
+      uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b
     - name: Bootstrap clingo
       run: |
         set -ex
@@ -204,7 +204,7 @@ jobs:
     runs-on: ubuntu-20.04
     steps:
     - name: Checkout
-      uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
+      uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b
       with:
         fetch-depth: 0
     - name: Setup repo
@@ -247,7 +247,7 @@ jobs:
           bzip2 curl file g++ gcc patchelf gfortran git gzip \
           make patch unzip xz-utils python3 python3-dev tree
     - name: Checkout
-      uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
+      uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b
       with:
         fetch-depth: 0
     - name: Setup non-root user
@@ -283,7 +283,7 @@ jobs:
           make patch unzip xz-utils python3 python3-dev tree \
           gawk
     - name: Checkout
-      uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
+      uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b
       with:
         fetch-depth: 0
     - name: Setup non-root user
@@ -316,7 +316,7 @@ jobs:
         # Remove GnuPG since we want to bootstrap it
         sudo rm -rf /usr/local/bin/gpg
     - name: Checkout
-      uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
+      uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b
     - name: Bootstrap GnuPG
       run: |
         source share/spack/setup-env.sh
@@ -333,7 +333,7 @@ jobs:
         # Remove GnuPG since we want to bootstrap it
         sudo rm -rf /usr/local/bin/gpg
     - name: Checkout
-      uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
+      uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b
     - name: Bootstrap GnuPG
       run: |
         source share/spack/setup-env.sh


@@ -45,18 +45,12 @@ jobs:
           [leap15, 'linux/amd64,linux/arm64,linux/ppc64le', 'opensuse/leap:15'],
           [ubuntu-bionic, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:18.04'],
           [ubuntu-focal, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:20.04'],
-          [ubuntu-jammy, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:22.04'],
-          [almalinux8, 'linux/amd64,linux/arm64,linux/ppc64le', 'almalinux:8'],
-          [almalinux9, 'linux/amd64,linux/arm64,linux/ppc64le', 'almalinux:9'],
-          [rockylinux8, 'linux/amd64,linux/arm64', 'rockylinux:8'],
-          [rockylinux9, 'linux/amd64,linux/arm64,linux/ppc64le', 'rockylinux:9'],
-          [fedora37, 'linux/amd64,linux/arm64,linux/ppc64le', 'fedora:37'],
-          [fedora38, 'linux/amd64,linux/arm64,linux/ppc64le', 'fedora:38']]
+          [ubuntu-jammy, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:22.04']]
     name: Build ${{ matrix.dockerfile[0] }}
     if: github.repository == 'spack/spack'
     steps:
     - name: Checkout
-      uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
+      uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b # @v2
     - name: Set Container Tag Normal (Nightly)
       run: |
@@ -95,7 +89,7 @@ jobs:
       uses: docker/setup-qemu-action@e81a89b1732b9c48d79cd809d8d81d79c4647a18 # @v1
     - name: Set up Docker Buildx
-      uses: docker/setup-buildx-action@4b4e9c3e2d4531116a6f8ba8e71fc6e2cb6e6c8c # @v1
+      uses: docker/setup-buildx-action@8c0edbc76e98fa90f69d9a2c020dcb50019dc325 # @v1
     - name: Log in to GitHub Container Registry
       uses: docker/login-action@f4ef78c080cd8ba55a85445d5b36e214a81df20a # @v1
@@ -112,7 +106,7 @@ jobs:
         password: ${{ secrets.DOCKERHUB_TOKEN }}
     - name: Build & Deploy ${{ matrix.dockerfile[0] }}
-      uses: docker/build-push-action@3b5e8027fcad23fda98b2e3ac259d8d67585f671 # @v2
+      uses: docker/build-push-action@c56af957549030174b10d6867f20e78cfd7debc5 # @v2
       with:
         context: dockerfiles/${{ matrix.dockerfile[0] }}
         platforms: ${{ matrix.dockerfile[1] }}


@@ -35,7 +35,7 @@ jobs:
       core: ${{ steps.filter.outputs.core }}
       packages: ${{ steps.filter.outputs.packages }}
     steps:
-    - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
+    - uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b # @v2
       if: ${{ github.event_name == 'push' }}
       with:
         fetch-depth: 0


@@ -47,10 +47,10 @@ jobs:
         on_develop: false
     steps:
-    - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
+    - uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b # @v2
      with:
        fetch-depth: 0
-    - uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b # @v2
+    - uses: actions/setup-python@5ccb29d8773c3f3f653e1705f474dfaa8a06a912 # @v2
      with:
        python-version: ${{ matrix.python-version }}
    - name: Install System packages
@@ -62,7 +62,7 @@ jobs:
          cmake bison libbison-dev kcov
    - name: Install Python packages
      run: |
-       pip install --upgrade pip setuptools pytest pytest-xdist pytest-cov
+       pip install --upgrade pip six setuptools pytest codecov[toml] pytest-xdist pytest-cov
        pip install --upgrade flake8 "isort>=4.3.5" "mypy>=0.900" "click" "black"
    - name: Setup git configuration
      run: |
@@ -87,17 +87,17 @@ jobs:
        UNIT_TEST_COVERAGE: ${{ matrix.python-version == '3.11' }}
      run: |
        share/spack/qa/run-unit-tests
-    - uses: codecov/codecov-action@894ff025c7b54547a9a2a1e9f228beae737ad3c2
+    - uses: codecov/codecov-action@d9f34f8cd5cb3b3eb79b3e4b5dae3a16df499a70
      with:
        flags: unittests,linux,${{ matrix.concretizer }}
  # Test shell integration
  shell:
    runs-on: ubuntu-latest
    steps:
-    - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
+    - uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b # @v2
      with:
        fetch-depth: 0
-    - uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b # @v2
+    - uses: actions/setup-python@5ccb29d8773c3f3f653e1705f474dfaa8a06a912 # @v2
      with:
        python-version: '3.11'
    - name: Install System packages
@@ -107,7 +107,7 @@ jobs:
        sudo apt-get install -y coreutils kcov csh zsh tcsh fish dash bash
    - name: Install Python packages
      run: |
-       pip install --upgrade pip setuptools pytest coverage[toml] pytest-xdist
+       pip install --upgrade pip six setuptools pytest codecov coverage[toml] pytest-xdist
    - name: Setup git configuration
      run: |
        # Need this for the git tests to succeed.
@@ -118,7 +118,7 @@ jobs:
        COVERAGE: true
      run: |
        share/spack/qa/run-shell-tests
-    - uses: codecov/codecov-action@894ff025c7b54547a9a2a1e9f228beae737ad3c2
+    - uses: codecov/codecov-action@d9f34f8cd5cb3b3eb79b3e4b5dae3a16df499a70
      with:
        flags: shelltests,linux
@@ -133,11 +133,10 @@ jobs:
        dnf install -y \
            bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
            make patch tcl unzip which xz
-    - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
+    - uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b # @v2
    - name: Setup repo and non-root user
      run: |
        git --version
-       git config --global --add safe.directory /__w/spack/spack
        git fetch --unshallow
        . .github/workflows/setup_git.sh
        useradd spack-test
@@ -152,10 +151,10 @@ jobs:
  clingo-cffi:
    runs-on: ubuntu-latest
    steps:
-    - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
+    - uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b # @v2
      with:
        fetch-depth: 0
-    - uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b # @v2
+    - uses: actions/setup-python@5ccb29d8773c3f3f653e1705f474dfaa8a06a912 # @v2
      with:
        python-version: '3.11'
    - name: Install System packages
@@ -164,7 +163,7 @@ jobs:
        sudo apt-get -y install coreutils cvs gfortran graphviz gnupg2 mercurial ninja-build kcov
    - name: Install Python packages
      run: |
-       pip install --upgrade pip setuptools pytest coverage[toml] pytest-cov clingo pytest-xdist
+       pip install --upgrade pip six setuptools pytest codecov coverage[toml] pytest-cov clingo pytest-xdist
    - name: Setup git configuration
      run: |
        # Need this for the git tests to succeed.
@@ -176,7 +175,7 @@ jobs:
        SPACK_TEST_SOLVER: clingo
      run: |
        share/spack/qa/run-unit-tests
-    - uses: codecov/codecov-action@894ff025c7b54547a9a2a1e9f228beae737ad3c2 # @v2.1.0
+    - uses: codecov/codecov-action@d9f34f8cd5cb3b3eb79b3e4b5dae3a16df499a70 # @v2.1.0
      with:
        flags: unittests,linux,clingo
  # Run unit tests on MacOS
@@ -186,16 +185,16 @@ jobs:
      matrix:
        python-version: ["3.10"]
    steps:
-    - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
+    - uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b # @v2
      with:
        fetch-depth: 0
-    - uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b # @v2
+    - uses: actions/setup-python@5ccb29d8773c3f3f653e1705f474dfaa8a06a912 # @v2
      with:
        python-version: ${{ matrix.python-version }}
    - name: Install Python packages
      run: |
-       pip install --upgrade pip setuptools
-       pip install --upgrade pytest coverage[toml] pytest-xdist pytest-cov
+       pip install --upgrade pip six setuptools
+       pip install --upgrade pytest codecov coverage[toml] pytest-xdist pytest-cov
    - name: Setup Homebrew packages
      run: |
        brew install dash fish gcc gnupg2 kcov
@@ -211,6 +210,6 @@ jobs:
        $(which spack) solve zlib
        common_args=(--dist loadfile --tx '4*popen//python=./bin/spack-tmpconfig python -u ./bin/spack python' -x)
        $(which spack) unit-test --cov --cov-config=pyproject.toml --cov-report=xml:coverage.xml "${common_args[@]}"
-    - uses: codecov/codecov-action@894ff025c7b54547a9a2a1e9f228beae737ad3c2
+    - uses: codecov/codecov-action@d9f34f8cd5cb3b3eb79b3e4b5dae3a16df499a70
      with:
        flags: unittests,macos


@@ -18,8 +18,8 @@ jobs:
   validate:
     runs-on: ubuntu-latest
     steps:
-    - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
-    - uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b # @v2
+    - uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b # @v2
+    - uses: actions/setup-python@5ccb29d8773c3f3f653e1705f474dfaa8a06a912 # @v2
       with:
         python-version: '3.11'
         cache: 'pip'
@@ -35,16 +35,16 @@ jobs:
   style:
     runs-on: ubuntu-latest
     steps:
-    - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
+    - uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b # @v2
       with:
         fetch-depth: 0
-    - uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b # @v2
+    - uses: actions/setup-python@5ccb29d8773c3f3f653e1705f474dfaa8a06a912 # @v2
       with:
         python-version: '3.11'
         cache: 'pip'
     - name: Install Python packages
       run: |
-        python3 -m pip install --upgrade pip setuptools types-six black==23.1.0 mypy isort clingo flake8
+        python3 -m pip install --upgrade pip six setuptools types-six black mypy isort clingo flake8
     - name: Setup git configuration
       run: |
         # Need this for the git tests to succeed.
@@ -58,29 +58,3 @@ jobs:
       with:
         with_coverage: ${{ inputs.with_coverage }}
         python_version: '3.11'
-  # Check that spack can bootstrap the development environment on Python 3.6 - RHEL8
-  bootstrap-dev-rhel8:
-    runs-on: ubuntu-latest
-    container: registry.access.redhat.com/ubi8/ubi
-    steps:
-    - name: Install dependencies
-      run: |
-        dnf install -y \
-            bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
-            make patch tcl unzip which xz
-    - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab # @v2
-    - name: Setup repo and non-root user
-      run: |
-        git --version
-        git config --global --add safe.directory /__w/spack/spack
-        git fetch --unshallow
-        . .github/workflows/setup_git.sh
-        useradd spack-test
-        chown -R spack-test .
-    - name: Bootstrap Spack development environment
-      shell: runuser -u spack-test -- bash {0}
-      run: |
-        source share/spack/setup-env.sh
-        spack -d bootstrap now --dev
-        spack style -t black
-        spack unit-test -V
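
The job removed above can still be exercised by hand; a minimal sketch, reusing the exact commands from the deleted steps and assuming a Spack checkout on a RHEL8-like system:

$ source share/spack/setup-env.sh
$ spack -d bootstrap now --dev
$ spack style -t black
$ spack unit-test -V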


@@ -15,15 +15,15 @@ jobs:
   unit-tests:
     runs-on: windows-latest
     steps:
-    - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
+    - uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b
       with:
         fetch-depth: 0
-    - uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b
+    - uses: actions/setup-python@5ccb29d8773c3f3f653e1705f474dfaa8a06a912
       with:
         python-version: 3.9
     - name: Install Python packages
       run: |
-        python -m pip install --upgrade pip pywin32 setuptools pytest-cov clingo
+        python -m pip install --upgrade pip six pywin32 setuptools codecov pytest-cov clingo
     - name: Create local develop
       run: |
         ./.github/workflows/setup_git.ps1
@@ -33,21 +33,21 @@ jobs:
         ./share/spack/qa/validate_last_exit.ps1
         coverage combine -a
         coverage xml
-    - uses: codecov/codecov-action@894ff025c7b54547a9a2a1e9f228beae737ad3c2
+    - uses: codecov/codecov-action@d9f34f8cd5cb3b3eb79b3e4b5dae3a16df499a70
       with:
         flags: unittests,windows
   unit-tests-cmd:
     runs-on: windows-latest
     steps:
-    - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
+    - uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b
       with:
         fetch-depth: 0
-    - uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b
+    - uses: actions/setup-python@5ccb29d8773c3f3f653e1705f474dfaa8a06a912
       with:
         python-version: 3.9
     - name: Install Python packages
       run: |
-        python -m pip install --upgrade pip pywin32 setuptools coverage pytest-cov clingo
+        python -m pip install --upgrade pip six pywin32 setuptools codecov coverage pytest-cov clingo
     - name: Create local develop
       run: |
         ./.github/workflows/setup_git.ps1
@@ -57,24 +57,99 @@ jobs:
         ./share/spack/qa/validate_last_exit.ps1
         coverage combine -a
         coverage xml
-    - uses: codecov/codecov-action@894ff025c7b54547a9a2a1e9f228beae737ad3c2
+    - uses: codecov/codecov-action@d9f34f8cd5cb3b3eb79b3e4b5dae3a16df499a70
       with:
         flags: unittests,windows
   build-abseil:
     runs-on: windows-latest
     steps:
-    - uses: actions/checkout@8e5e7e5ab8b370d6c329ec480221332ada57f0ab
+    - uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b
       with:
         fetch-depth: 0
-    - uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b
+    - uses: actions/setup-python@5ccb29d8773c3f3f653e1705f474dfaa8a06a912
       with:
         python-version: 3.9
     - name: Install Python packages
       run: |
-        python -m pip install --upgrade pip pywin32 setuptools coverage
+        python -m pip install --upgrade pip six pywin32 setuptools codecov coverage
     - name: Build Test
       run: |
         spack compiler find
         spack external find cmake
         spack external find ninja
         spack -d install abseil-cpp
+  # TODO: johnwparent - reduce the size of the installer operations
+  # make-installer:
+  #   runs-on: windows-latest
+  #   steps:
+  #   - name: Disable Windows Symlinks
+  #     run: |
+  #       git config --global core.symlinks false
+  #     shell:
+  #       powershell
+  #   - uses: actions/checkout@755da8c3cf115ac066823e79a1e1788f8940201b
+  #     with:
+  #       fetch-depth: 0
+  #   - uses: actions/setup-python@5ccb29d8773c3f3f653e1705f474dfaa8a06a912
+  #     with:
+  #       python-version: 3.9
+  #   - name: Install Python packages
+  #     run: |
+  #       python -m pip install --upgrade pip six pywin32 setuptools
+  #   - name: Add Light and Candle to Path
+  #     run: |
+  #       $env:WIX >> $GITHUB_PATH
+  #   - name: Run Installer
+  #     run: |
+  #       ./share/spack/qa/setup_spack_installer.ps1
+  #       spack make-installer -s . -g SILENT pkg
+  #       echo "installer_root=$((pwd).Path)" | Out-File -FilePath $Env:GITHUB_ENV -Encoding utf8 -Append
+  #     env:
+  #       ProgressPreference: SilentlyContinue
+  #   - uses: actions/upload-artifact@83fd05a356d7e2593de66fc9913b3002723633cb
+  #     with:
+  #       name: Windows Spack Installer Bundle
+  #       path: ${{ env.installer_root }}\pkg\Spack.exe
+  #   - uses: actions/upload-artifact@83fd05a356d7e2593de66fc9913b3002723633cb
+  #     with:
+  #       name: Windows Spack Installer
+  #       path: ${{ env.installer_root}}\pkg\Spack.msi
+  # execute-installer:
+  #   needs: make-installer
+  #   runs-on: windows-latest
+  #   defaults:
+  #     run:
+  #       shell: pwsh
+  #   steps:
+  #   - uses: actions/setup-python@5ccb29d8773c3f3f653e1705f474dfaa8a06a912
+  #     with:
+  #       python-version: 3.9
+  #   - name: Install Python packages
+  #     run: |
+  #       python -m pip install --upgrade pip six pywin32 setuptools
+  #   - name: Setup installer directory
+  #     run: |
+  #       mkdir -p spack_installer
+  #       echo "spack_installer=$((pwd).Path)\spack_installer" | Out-File -FilePath $Env:GITHUB_ENV -Encoding utf8 -Append
+  #   - uses: actions/download-artifact@v3
+  #     with:
+  #       name: Windows Spack Installer Bundle
+  #       path: ${{ env.spack_installer }}
+  #   - name: Execute Bundled Installer
+  #     run: |
+  #       $proc = Start-Process ${{ env.spack_installer }}\spack.exe "/install /quiet" -Passthru
+  #       $handle = $proc.Handle # cache proc.Handle
+  #       $proc.WaitForExit();
+  #       $LASTEXITCODE
+  #     env:
+  #       ProgressPreference: SilentlyContinue
+  #   - uses: actions/download-artifact@v3
+  #     with:
+  #       name: Windows Spack Installer
+  #       path: ${{ env.spack_installer }}
+  #   - name: Execute MSI
+  #     run: |
+  #       $proc = Start-Process ${{ env.spack_installer }}\spack.msi "/quiet" -Passthru
+  #       $handle = $proc.Handle # cache proc.Handle
+  #       $proc.WaitForExit();
+  #       $LASTEXITCODE


@@ -1,28 +1,3 @@
-# v0.19.1 (2023-02-07)
-
-### Spack Bugfixes
-
-* `buildcache create`: make "file exists" less verbose (#35019)
-* `spack mirror create`: don't change paths to urls (#34992)
-* Improve error message for requirements (#33988)
-* uninstall: fix accidental cubic complexity (#34005)
-* scons: fix signature for `install_args` (#34481)
-* Fix `combine_phase_logs` text encoding issues (#34657)
-* Use a module-like object to propagate changes in the MRO, when setting build env (#34059)
-* PackageBase should not define builder legacy attributes (#33942)
-* Forward lookup of the "run_tests" attribute (#34531)
-* Bugfix for timers (#33917, #33900)
-* Fix path handling in prefix inspections (#35318)
-* Fix libtool filter for Fujitsu compilers (#34916)
-* Bug fix for duplicate rpath errors on macOS when creating build caches (#34375)
-* FileCache: delete the new cache file on exception (#34623)
-* Propagate exceptions from Spack python console (#34547)
-* Tests: Fix a bug/typo in a `config_values.py` fixture (#33886)
-* Various CI fixes (#33953, #34560, #34560, #34828)
-* Docs: remove monitors and analyzers, typos (#34358, #33926)
-* bump release version for tutorial command (#33859)
-
 # v0.19.0 (2022-11-11)

 `v0.19.0` is a major feature release.


@@ -1,6 +1,6 @@
 MIT License

-Copyright (c) 2013-2023 LLNS, LLC and other Spack Project Developers.
+Copyright (c) 2013-2022 LLNS, LLC and other Spack Project Developers.

 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to deal


@@ -1,4 +1,4 @@
-# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)


@@ -1,6 +1,6 @@
 #!/bin/sh
 #
-# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
 # sbang project developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)


@@ -1,7 +1,7 @@
 #!/bin/sh
 # -*- python -*-
 #
-# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
+# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)


@@ -1,6 +1,6 @@
 #!/bin/sh
 #
-# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
+# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)


@@ -72,7 +72,6 @@ config:
   root: $TMP_DIR/install
   misc_cache: $$user_cache_path/cache
   source_cache: $$user_cache_path/source
-  environments_root: $TMP_DIR/envs
 EOF
 cat >"$SPACK_USER_CONFIG_PATH/bootstrap.yaml" <<EOF
 bootstrap:


@@ -1,4 +1,4 @@
-:: Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
+:: Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
 :: Spack Project Developers. See the top-level COPYRIGHT file for details.
 ::
 :: SPDX-License-Identifier: (Apache-2.0 OR MIT)
@@ -50,69 +50,24 @@ setlocal enabledelayedexpansion
 :: flags will always start with '-', e.g. --help or -V
 :: subcommands will never start with '-'
 :: everything after the subcommand is an arg
-:: we cannot allow batch "for" loop to directly process CL args
-:: a number of batch reserved characters are commonly passed to
-:: spack and allowing batch's "for" method to process the raw inputs
-:: results in a large number of formatting issues
-:: instead, treat the entire CLI as one string
-:: and split by space manually
-:: capture cl args in variable named cl_args
-set cl_args=%*
-:process_cl_args
-rem tokens=1* returns the first processed token produced
-rem by tokenizing the input string cl_args on spaces into
-rem the named variable %%g
-rem While this make look like a for loop, it only
-rem executes a single time for each of the cl args
-rem the actual iterative loop is performed by the
-rem goto process_cl_args stanza
-rem we are simply leveraging the "for" method's string
-rem tokenization
-for /f "tokens=1*" %%g in ("%cl_args%") do (
-    set t=%%~g
-    rem remainder of string is composed into %%h
-    rem these are the cl args yet to be processed
-    rem assign cl_args var to only the args to be processed
-    rem effectively discarding the current arg %%g
-    rem this will be nul when we have no further tokens to process
-    set cl_args=%%h
-    rem process the first space delineated cl arg
-    rem of this iteration
+for %%x in (%*) do (
+    set t="%%~x"
     if "!t:~0,1!" == "-" (
         if defined _sp_subcommand (
-            rem We already have a subcommand, processing args now
-            if not defined _sp_args (
-                set "_sp_args=!t!"
-            ) else (
+            :: We already have a subcommand, processing args now
             set "_sp_args=!_sp_args! !t!"
-            )
-        ) else (
-            if not defined _sp_flags (
-                set "_sp_flags=!t!"
-                shift
         ) else (
             set "_sp_flags=!_sp_flags! !t!"
             shift
         )
-        )
     ) else if not defined _sp_subcommand (
         set "_sp_subcommand=!t!"
         shift
-    ) else (
-        if not defined _sp_args (
-            set "_sp_args=!t!"
-            shift
     ) else (
         set "_sp_args=!_sp_args! !t!"
         shift
     )
-    )
 )
-rem if this is not nil, we have more tokens to process
-rem start above process again with remaining unprocessed cl args
-if defined cl_args goto :process_cl_args

 :: --help, -h and -V flags don't require further output parsing.
 :: If we encounter, execute and exit
@@ -128,24 +83,24 @@ if defined _sp_flags (
         exit /B 0
     )
 )
-if not defined _sp_subcommand (
-    if not defined _sp_args (
-        if not defined _sp_flags (
-            python "%spack%" --help
-            exit /B 0
-        )
-    )
-)

 :: pass parsed variables outside of local scope. Need to do
 :: this because delayedexpansion can only be set by setlocal
-endlocal & (
-    set "_sp_flags=%_sp_flags%"
-    set "_sp_args=%_sp_args%"
-    set "_sp_subcommand=%_sp_subcommand%"
-)
+echo %_sp_flags%>flags
+echo %_sp_args%>args
+echo %_sp_subcommand%>subcmd
+endlocal
+set /p _sp_subcommand=<subcmd
+set /p _sp_flags=<flags
+set /p _sp_args=<args
+set str_subcommand=%_sp_subcommand:"='%
+set str_flags=%_sp_flags:"='%
+set str_args=%_sp_args:"='%
+if "%str_subcommand%"=="ECHO is off." (set "_sp_subcommand=")
+if "%str_flags%"=="ECHO is off." (set "_sp_flags=")
+if "%str_args%"=="ECHO is off." (set "_sp_args=")
+del subcmd
+del flags
+del args

 :: Filter out some commands. For any others, just run the command.
 if "%_sp_subcommand%" == "cd" (
@@ -188,9 +143,7 @@ goto :end_switch
 :: If no args or args contain --bat or -h/--help: just execute.
 if NOT defined _sp_args (
     goto :default_case
-)
-if NOT "%_sp_args%"=="%_sp_args:--help=%" (
+)else if NOT "%_sp_args%"=="%_sp_args:--help=%" (
     goto :default_case
 ) else if NOT "%_sp_args%"=="%_sp_args: -h=%" (
     goto :default_case
@@ -198,11 +151,11 @@ if NOT "%_sp_args%"=="%_sp_args:--help=%" (
     goto :default_case
 ) else if NOT "%_sp_args%"=="%_sp_args:deactivate=%" (
     for /f "tokens=* USEBACKQ" %%I in (
-        `call python %spack% %_sp_flags% env deactivate --bat %_sp_args:deactivate=%`
+        `call python "%spack%" %_sp_flags% env deactivate --bat %_sp_args:deactivate=%`
     ) do %%I
 ) else if NOT "%_sp_args%"=="%_sp_args:activate=%" (
     for /f "tokens=* USEBACKQ" %%I in (
-        `python %spack% %_sp_flags% env activate --bat %_sp_args:activate=%`
+        `call python "%spack%" %_sp_flags% env activate --bat %_sp_args:activate=%`
     ) do %%I
 ) else (
     goto :default_case
@@ -223,7 +176,7 @@ if defined _sp_args (
     for /f "tokens=* USEBACKQ" %%I in (
         `python "%spack%" %_sp_flags% %_sp_subcommand% --bat %_sp_args%`) do %%I
-)

 goto :end_switch
 :case_unload
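
To illustrate the parsing above, a hypothetical invocation and the split the script produces (values shown for illustration only):

$ spack -d install --verbose ninja
# _sp_flags      -> -d               (leading '-' before any subcommand)
# _sp_subcommand -> install          (first token without a leading '-')
# _sp_args       -> --verbose ninja  (everything after the subcommand)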


@@ -1,4 +1,4 @@
-# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
+# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)


@@ -13,18 +13,16 @@ concretizer:
   # Whether to consider installed packages or packages from buildcaches when
   # concretizing specs. If `true`, we'll try to use as many installs/binaries
   # as possible, rather than building. If `false`, we'll always give you a fresh
-  # concretization. If `dependencies`, we'll only reuse dependencies but
-  # give you a fresh concretization for your root specs.
-  reuse: dependencies
+  # concretization.
+  reuse: true
   # Options that tune which targets are considered for concretization. The
   # concretization process is very sensitive to the number targets, and the time
   # needed to reach a solution increases noticeably with the number of targets
   # considered.
   targets:
-    # Determine whether we want to target specific or generic
-    # microarchitectures. Valid values are: "microarchitectures" or "generic".
-    # An example of "microarchitectures" would be "skylake" or "bulldozer",
-    # while an example of "generic" would be "aarch64" or "x86_64_v4".
+    # Determine whether we want to target specific or generic microarchitectures.
+    # An example of the first kind might be for instance "skylake" or "bulldozer",
+    # while generic microarchitectures are for instance "aarch64" or "x86_64_v4".
     granularity: microarchitectures
     # If "false" allow targets that are incompatible with the current host (for
     # instance concretize with target "icelake" while running on "haswell").
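
As a usage note, the reuse behavior can also be overridden per invocation; a sketch, assuming the standard --reuse/--fresh flags of spack install:

$ spack install --reuse zlib   # prefer already-installed packages as dependencies
$ spack install --fresh zlib   # ignore installed packages and concretize from scratch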


@@ -54,11 +54,6 @@ config:
   # are that it precludes its use as a system package and its ability to be
   # pip installable.
   #
-  # In Spack environment files, chaining onto existing system Spack
-  # installations, the $env variable can be used to download, cache and build
-  # into user-writable paths that are relative to the currently active
-  # environment.
-  #
   # In any case, if the username is not already in the path, Spack will append
   # the value of `$user` in an attempt to avoid potential conflicts between
   # users in shared temporary spaces.
@@ -81,10 +76,6 @@ config:
   source_cache: $spack/var/spack/cache

-  ## Directory where spack managed environments are created and stored
-  # environments_root: $spack/var/spack/environments
-
   # Cache directory for miscellaneous files, like the package index.
   # This can be purged with `spack clean --misc-cache`
   misc_cache: $user_cache_path/cache
@@ -185,7 +176,7 @@ config:
   # when Spack needs to manage its own package metadata and all operations are
   # expected to complete within the default time limit. The timeout should
   # therefore generally be left untouched.
-  db_lock_timeout: 60
+  db_lock_timeout: 3

 # How long to wait when attempting to modify a package (e.g. to install it).


@@ -23,20 +23,8 @@ packages:
   providers:
     elf: [libelf]
     fuse: [macfuse]
-    gl: [apple-gl]
-    glu: [apple-glu]
     unwind: [apple-libunwind]
     uuid: [apple-libuuid]
-  apple-gl:
-    buildable: false
-    externals:
-    - spec: apple-gl@4.1.0
-      prefix: /Library/Developer/CommandLineTools/SDKs/MacOSX.sdk
-  apple-glu:
-    buildable: false
-    externals:
-    - spec: apple-glu@1.3.0
-      prefix: /Library/Developer/CommandLineTools/SDKs/MacOSX.sdk
   apple-libunwind:
     buildable: false
     externals:


@@ -40,12 +40,13 @@ modules:
     roots:
       tcl: $spack/share/spack/modules
       lmod: $spack/share/spack/lmod
-    # What type of modules to use ("tcl" and/or "lmod")
-    enable: []
+    # What type of modules to use
+    enable:
+      - tcl

     tcl:
       all:
-        autoload: direct
+        autoload: none

   # Default configurations if lmod is enabled
   lmod:
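
After editing enable: or autoload: in modules.yaml, the generated module files need to be rebuilt; a minimal sketch, assuming the standard module refresh command:

$ spack module tcl refresh --delete-tree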


@@ -20,7 +20,7 @@ packages:
     awk: [gawk]
     blas: [openblas, amdblis]
     D: [ldc]
-    daal: [intel-oneapi-daal]
+    daal: [intel-daal]
     elf: [elfutils]
     fftw-api: [fftw, amdfftw]
     flame: [libflame, amdlibflame]
@@ -28,9 +28,9 @@ packages:
     gl: [glx, osmesa]
     glu: [mesa-glu, openglu]
     golang: [go, gcc]
-    go-or-gccgo-bootstrap: [go-bootstrap, gcc]
+    go-external-or-gccgo-bootstrap: [go-bootstrap, gcc]
     iconv: [libiconv]
-    ipp: [intel-oneapi-ipp]
+    ipp: [intel-ipp]
     java: [openjdk, jdk, ibm-java]
     jpeg: [libjpeg-turbo, libjpeg]
     lapack: [openblas, amdlibflame]
@@ -40,7 +40,7 @@ packages:
     lua-lang: [lua, lua-luajit-openresty, lua-luajit]
     luajit: [lua-luajit-openresty, lua-luajit]
     mariadb-client: [mariadb-c-client, mariadb]
-    mkl: [intel-oneapi-mkl]
+    mkl: [intel-mkl]
     mpe: [mpe2]
     mpi: [openmpi, mpich]
     mysql-client: [mysql, mariadb-c-client]


@@ -3,4 +3,3 @@ config:
   concretizer: clingo
   build_stage::
     - '$spack/.staging'
-  stage_name: '{name}-{version}-{hash:7}'


@@ -19,4 +19,3 @@ packages:
   - msvc
   providers:
     mpi: [msmpi]
-    gl: [wgl]


@@ -5,4 +5,3 @@ llnl*.rst
 _build
 .spack-env
 spack.lock
-_spack_root


@@ -1,4 +1,4 @@
-.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
+.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
    Spack Project Developers. See the top-level COPYRIGHT file for details.

    SPDX-License-Identifier: (Apache-2.0 OR MIT)
@@ -942,7 +942,7 @@ first ``libelf`` above, you would run:

    $ spack load /qmm4kso

-To see which packages that you have loaded to your environment you would
+To see which packages that you have loaded to your enviornment you would
 use ``spack find --loaded``.

 .. code-block:: console
@@ -1103,31 +1103,16 @@ Below are more details about the specifiers that you can add to specs.
 Version specifier
 ^^^^^^^^^^^^^^^^^

-A version specifier ``pkg@<specifier>`` comes after a package name
-and starts with ``@``. It can be something abstract that matches
-multiple known versions, or a specific version. During concretization,
-Spack will pick the optimal version within the spec's constraints
-according to policies set for the particular Spack installation.
-
-The version specifier can be *a specific version*, such as ``@=1.0.0`` or
-``@=1.2a7``. Or, it can be *a range of versions*, such as ``@1.0:1.5``.
-Version ranges are inclusive, so this example includes both ``1.0``
-and any ``1.5.x`` version. Version ranges can be unbounded, e.g. ``@:3``
-means any version up to and including ``3``. This would include ``3.4``
-and ``3.4.2``. Similarly, ``@4.2:`` means any version above and including
-``4.2``. As a short-hand, ``@3`` is equivalent to the range ``@3:3`` and
-includes any version with major version ``3``.
-
-Notice that you can distinguish between the specific version ``@=3.2`` and
-the range ``@3.2``. This is useful for packages that follow a versioning
-scheme that omits the zero patch version number: ``3.2``, ``3.2.1``,
-``3.2.2``, etc. In general it is preferable to use the range syntax
-``@3.2``, since ranges also match versions with one-off suffixes, such as
-``3.2-custom``.
-
-A version specifier can also be a list of ranges and specific versions,
-separated by commas. For example, ``@1.0:1.5,=1.7.1`` matches any version
-in the range ``1.0:1.5`` and the specific version ``1.7.1``.
+A version specifier comes somewhere after a package name and starts
+with ``@``. It can be a single version, e.g. ``@1.0``, ``@3``, or
+``@1.2a7``. Or, it can be a range of versions, such as ``@1.0:1.5``
+(all versions between ``1.0`` and ``1.5``, inclusive). Version ranges
+can be open, e.g. ``:3`` means any version up to and including ``3``.
+This would include ``3.4`` and ``3.4.2``. ``4.2:`` means any version
+above and including ``4.2``. Finally, a version specifier can be a
+set of arbitrary versions, such as ``@1.0,1.5,1.7`` (``1.0``, ``1.5``,
+or ``1.7``). When you supply such a specifier to ``spack install``,
+it constrains the set of versions that Spack will install.

 For packages with a ``git`` attribute, ``git`` references
 may be specified instead of a numerical version i.e. branches, tags
@@ -1136,35 +1121,36 @@ reference provided. Acceptable syntaxes for this are:

 .. code-block:: sh

-   # commit hashes
-   foo@abcdef1234abcdef1234abcdef1234abcdef1234  # 40 character hashes are automatically treated as git commits
-   foo@git.abcdef1234abcdef1234abcdef1234abcdef1234
-
    # branches and tags
    foo@git.develop  # use the develop branch
    foo@git.0.19     # use the 0.19 tag

-Spack always needs to associate a Spack version with the git reference,
-which is used for version comparison. This Spack version is heuristically
-taken from the closest valid git tag among ancestors of the git ref.
-Once a Spack version is associated with a git ref, it always printed with
-the git ref. For example, if the commit ``@git.abcdefg`` is tagged
-``0.19``, then the spec will be shown as ``@git.abcdefg=0.19``.
-
-If the git ref is not exactly a tag, then the distance to the nearest tag
-is also part of the resolved version. ``@git.abcdefg=0.19.git.8`` means
-that the commit is 8 commits away from the ``0.19`` tag.
-
-In cases where Spack cannot resolve a sensible version from a git ref,
-users can specify the Spack version to use for the git ref. This is done
-by appending ``=`` and the Spack version to the git ref. For example:
+   # commit hashes
+   foo@abcdef1234abcdef1234abcdef1234abcdef1234  # 40 character hashes are automatically treated as git commits
+   foo@git.abcdef1234abcdef1234abcdef1234abcdef1234
+
+Spack versions from git reference either have an associated version supplied by the user,
+or infer a relationship to known versions from the structure of the git repository. If an
+associated version is supplied by the user, Spack treats the git version as equivalent to that
+version for all version comparisons in the package logic (e.g. ``depends_on('foo', when='@1.5')``).
+
+The associated version can be assigned with ``[git ref]=[version]`` syntax, with the caveat that the specified version is known to Spack from either the package definition, or in the configuration preferences (i.e. ``packages.yaml``).

 .. code-block:: sh

    foo@git.my_ref=3.2  # use the my_ref tag or branch, but treat it as version 3.2 for version comparisons
    foo@git.abcdef1234abcdef1234abcdef1234abcdef1234=develop  # use the given commit, but treat it as develop for version comparisons

-If the version spec is not provided, then Spack will choose one
-according to policies set for the particular spack installation. If
-the spec is ambiguous, i.e. it could match multiple versions, Spack
-will choose a version within the spec's constraints according to
-policies set for the particular Spack installation.
+If an associated version is not supplied then the tags in the git repo are used to determine
+the most recent previous version known to Spack. Details about how versions are compared
+and how Spack determines if one version is less than another are discussed in the developer guide.

 Details about how versions are compared and how Spack determines if
 one version is less than another are discussed in the developer guide.
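
To make the specifier syntax above concrete, a few hypothetical installs of a package foo (package name illustrative only):

$ spack install foo@1.0:1.5         # any version in the range 1.0 to 1.5, inclusive
$ spack install foo@:3              # any version up to and including 3, e.g. 3.4.2
$ spack install foo@git.develop     # a git branch
$ spack install foo@git.my_ref=3.2  # a git ref, treated as 3.2 for comparisons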


@@ -1,4 +1,4 @@
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other .. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
Spack Project Developers. See the top-level COPYRIGHT file for details. Spack Project Developers. See the top-level COPYRIGHT file for details.
SPDX-License-Identifier: (Apache-2.0 OR MIT) SPDX-License-Identifier: (Apache-2.0 OR MIT)
@@ -13,51 +13,49 @@ Some sites may encourage users to set up their own test environments
before carrying out central installations, or some users may prefer to set before carrying out central installations, or some users may prefer to set
up these environments on their own motivation. To reduce the load of up these environments on their own motivation. To reduce the load of
recompiling otherwise identical package specs in different installations, recompiling otherwise identical package specs in different installations,
installed packages can be put into build cache tarballs, pushed to installed packages can be put into build cache tarballs, uploaded to
your Spack mirror and then downloaded and installed by others. your Spack mirror and then downloaded and installed by others.
Whenever a mirror provides prebuilt packages, Spack will take these packages
into account during concretization and installation, making ``spack install``
significantly faster.
--------------------------
Creating build cache files
--------------------------
.. note:: A compressed tarball of an installed package is created. Tarballs are created
for all of its link and run dependency packages as well. Compressed tarballs are
We use the terms "build cache" and "mirror" often interchangeably. Mirrors signed with gpg and signature and tarball and put in a ``.spack`` file. Optionally,
are used during installation both for sources and prebuilt packages. Build the rpaths (and ids and deps on macOS) can be changed to paths relative to
caches refer to mirrors that provide prebuilt packages. the Spack install tree before the tarball is created.
----------------------
Creating a build cache
----------------------
Build caches are created via: Build caches are created via:
.. code-block:: console .. code-block:: console
$ spack buildcache push <path/url/mirror name> <spec> $ spack buildcache create <spec>
This command takes the locally installed spec and its dependencies, and
creates tarballs of their install prefixes. It also generates metadata files,
signed with GPG. These tarballs and metadata files are then pushed to the
provided binary cache, which can be a local directory or a remote URL.
Here is an example where a build cache is created in a local directory named If you wanted to create a build cache in a local directory, you would provide
"spack-cache", to which we push the "ninja" spec: the ``-d`` argument to target that directory, again also specifying the spec.
Here is an example creating a local directory, "spack-cache" and creating
build cache files for the "ninja" spec:
.. code-block:: console .. code-block:: console
$ spack buildcache push --allow-root ./spack-cache ninja $ mkdir -p ./spack-cache
==> Pushing binary packages to file:///home/spackuser/spack/spack-cache/build_cache $ spack buildcache create -d ./spack-cache ninja
==> Buildcache files will be output to file:///home/spackuser/spack/spack-cache/build_cache
gpgconf: socketdir is '/run/user/1000/gnupg'
gpg: using "E6DF6A8BD43208E4D6F392F23777740B7DBD643D" as default secret key for signing
Not that ``ninja`` must be installed locally for this to work. Note that the targeted spec must already be installed. Once you have a build cache,
you can add it as a mirror, discussed next.
We're using the ``--allow-root`` flag to tell Spack that is OK when any of .. warning::
the binaries we're pushing contain references to the local Spack install
directory.
Once you have a build cache, you can add it as a mirror, discussed next. Spack improved the format used for binary caches in v0.18. The entire v0.18 series
will be able to verify and install binary caches both in the new and in the old format.
Support for using the old format is expected to end in v0.19, so we advise users to
recreate relevant buildcaches using Spack v0.18 or higher.
--------------------------------------- ---------------------------------------
Finding or installing build cache files Finding or installing build cache files
@@ -68,10 +66,10 @@ with:
.. code-block:: console .. code-block:: console
$ spack mirror add <name> <url or path> $ spack mirror add <name> <url>
Both web URLs and local paths on the filesystem can be specified. In the previous Note that the url can be a web url _or_ a local filesystem location. In the previous
example, you might add the directory "spack-cache" and call it ``mymirror``: example, you might add the directory "spack-cache" and call it ``mymirror``:
@@ -96,7 +94,7 @@ this new build cache as follows:
.. code-block:: console .. code-block:: console
$ spack buildcache update-index ./spack-cache $ spack buildcache update-index -d spack-cache/
Now you can use list: Now you can use list:
@@ -107,38 +105,46 @@ Now you can use list:
-- linux-ubuntu20.04-skylake / gcc@9.3.0 ------------------------ -- linux-ubuntu20.04-skylake / gcc@9.3.0 ------------------------
ninja@1.10.2 ninja@1.10.2
With ``mymirror`` configured and an index available, Spack will automatically
use it during concretization and installation. That means that you can expect
``spack install ninja`` to fetch prebuilt packages from the mirror. Let's
verify by re-installing ninja:

.. code-block:: console

   $ spack uninstall ninja
   $ spack install ninja
   ==> Installing ninja-1.11.1-yxferyhmrjkosgta5ei6b4lqf6bxbscz
   ==> Fetching file:///home/spackuser/spack/spack-cache/build_cache/linux-ubuntu20.04-skylake-gcc-9.3.0-ninja-1.10.2-yxferyhmrjkosgta5ei6b4lqf6bxbscz.spec.json.sig
   gpg: Signature made Do 12 Jan 2023 16:01:04 CET
   gpg:                using RSA key 61B82B2B2350E171BD17A1744E3A689061D57BF6
   gpg: Good signature from "example (GPG created for Spack) <example@example.com>" [ultimate]
   ==> Fetching file:///home/spackuser/spack/spack-cache/build_cache/linux-ubuntu20.04-skylake/gcc-9.3.0/ninja-1.10.2/linux-ubuntu20.04-skylake-gcc-9.3.0-ninja-1.10.2-yxferyhmrjkosgta5ei6b4lqf6bxbscz.spack
   ==> Extracting ninja-1.10.2-yxferyhmrjkosgta5ei6b4lqf6bxbscz from binary cache
   ==> ninja: Successfully installed ninja-1.11.1-yxferyhmrjkosgta5ei6b4lqf6bxbscz
     Search: 0.00s.  Fetch: 0.17s.  Install: 0.12s.  Total: 0.29s
   [+] /home/harmen/spack/opt/spack/linux-ubuntu20.04-skylake/gcc-9.3.0/ninja-1.11.1-yxferyhmrjkosgta5ei6b4lqf6bxbscz

It worked! You've just completed a full example of creating a build cache with
a spec of interest, adding it as a mirror, updating its index, listing the contents,
and finally, installing from it.

By default Spack falls back to building from sources when the mirror is not available
or when the package is simply not already available. To force Spack to only install
prebuilt packages, you can use

.. code-block:: console

   $ spack install --use-buildcache only <package>
For example, to combine all of the commands above to add the E4S build cache
and then install from it exclusively, you would do:

.. code-block:: console

   $ spack mirror add E4S https://cache.e4s.io
   $ spack buildcache keys --install --trust
   $ spack install --use-buildcache only <package>

We use ``--install`` and ``--trust`` to say that we are installing keys to our
keyring, and trusting all downloaded keys.
^^^^^^^^^^^^^^^^^^^^^^^^^^^
``spack buildcache push``
^^^^^^^^^^^^^^^^^^^^^^^^^^^

Create tarball of installed Spack package and all dependencies.

Selection of the target microarchitectures
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

The options under the ``targets`` attribute control which targets are considered during a solve.
Currently the options in this section are only configurable from the ``concretizer.yaml`` file
and there are no corresponding command line arguments to enable them for a single solve.

The ``granularity`` option can take two possible values: ``microarchitectures`` and ``generic``.
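As a sketch, selecting generic targets would look like this in ``concretizer.yaml``
(the ``host_compatible`` key shown here is an assumption based on the surrounding
options, not part of the excerpt above):

.. code-block:: yaml

   concretizer:
     targets:
       granularity: generic
       host_compatible: true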
Requirements are more flexible than constraints given on the command line,
because they can specify constraints on packages that appear as dependencies;
on the command line it
is not possible to specify constraints on dependencies while also keeping
those dependencies optional.
^^^^^^^^^^^^^^^^^^^
Requirements syntax
^^^^^^^^^^^^^^^^^^^

The package requirements configuration is specified in ``packages.yaml``,
keyed by package name and expressed using the Spec syntax. In the simplest
case you can specify attributes that you always want the package to have
by providing a single spec string to ``require``:

.. code-block:: yaml

   packages:
     libfabric:
       require: "@1.13.2"
In the above example, ``libfabric`` will always build with version 1.13.2. If you
need to compose multiple configuration scopes ``require`` accepts a list of
strings:

.. code-block:: yaml

   packages:
     libfabric:
       require:
       - "@1.13.2"
       - "%gcc"

In this case ``libfabric`` will always build with version 1.13.2 **and** using GCC
as a compiler.

For more complex use cases, ``require`` accepts also a list of objects. These objects
must have either an ``any_of`` or a ``one_of`` field, containing a list of spec strings,
and they can optionally have a ``when`` and a ``message`` attribute:

.. code-block:: yaml

   packages:
     openmpi:
       require:
       - any_of: ["@4.1.5", "%gcc"]
         message: "in this example only 4.1.5 can build with other compilers"
``any_of`` is a list of specs. One of those specs must be satisfied
and it is also allowed for the concretized spec to match more than one.
In the above example, that means you could build ``openmpi@4.1.5%gcc``,
``openmpi@4.1.5%clang`` or ``openmpi@3.9%gcc``, but
not ``openmpi@3.9%clang``.

If a custom message is provided, and the requirement is not satisfiable,
Spack will print the custom error message:

.. code-block:: console

   $ spack spec openmpi@3.9%clang
   ==> Error: in this example only 4.1.5 can build with other compilers

We could express a similar requirement using the ``when`` attribute:

.. code-block:: yaml

   packages:
     openmpi:
       require:
       - any_of: ["%gcc"]
         when: "@:4.1.4"
         message: "in this example only 4.1.5 can build with other compilers"

In the example above, if the version turns out to be 4.1.4 or less, we require the compiler to be GCC.
For readability, Spack also allows a ``spec`` key accepting a string when there is only a single
constraint:

.. code-block:: yaml

   packages:
     openmpi:
       require:
       - spec: "%gcc"
         when: "@:4.1.4"
         message: "in this example only 4.1.5 can build with other compilers"

This code snippet and the one before it are semantically equivalent.

Finally, instead of ``any_of`` you can use ``one_of`` which also takes a list of specs. The final
concretized spec must match one and only one of them:

.. code-block:: yaml

   packages:
     mpich:
       require:
       - one_of: ["+cuda", "+rocm"]
In the example above, that means you could build ``mpich+cuda`` or ``mpich+rocm`` but not ``mpich+cuda+rocm``.
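As an illustrative sketch (not taken from the documentation above), the different
attributes can be combined in a single requirement, e.g. to make the GPU backend
choice conditional on a hypothetical version range:

.. code-block:: yaml

   packages:
     mpich:
       require:
       - one_of: ["+cuda", "+rocm"]
         when: "@4:"
         message: "in this example MPICH 4 and later must pick exactly one GPU backend"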
.. note::

   The order of specs in ``any_of`` and ``one_of`` indicates a
   preference: items that appear earlier in the list are preferred
   (note that these preferences can be ignored in favor of others).

.. note::

   When using a conditional requirement, Spack is allowed to actively avoid the triggering
   condition (the ``when=...`` spec) if that leads to a concrete spec with better scores in
   the optimization criteria. To check the current optimization criteria and their
   priorities you can run ``spack solve zlib``.
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Setting default requirements
^^^^^^^^^^^^^^^^^^^^^^^^^^^^

This package provides the following variants:

* **cuda_arch**

   This variant supports the optional specification of one or multiple architectures.
   Valid values are maintained in the ``cuda_arch_values`` property and
   are the numeric character equivalent of the compute capability version
   (e.g., '10' for version 1.0). Each provided value affects associated
   ``CUDA`` dependencies and compiler conflicts.

   The variant builds both PTX code for the *virtual* architecture
   (e.g. ``compute_10``) and binary code for the *real* architecture (e.g. ``sm_10``).

   GPUs and their compute capability versions are listed at
   https://developer.nvidia.com/cuda-gpus .
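For instance, a package using this variant might be installed as follows, where
``mypackage`` is a placeholder and ``70,80`` selects compute capabilities 7.0 and 8.0:

.. code-block:: console

   $ spack install mypackage +cuda cuda_arch=70,80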

Using oneAPI Tools Installed by Spack
=====================================

Spack can be a convenient way to install and configure compilers and
libraries, even if you do not intend to build a Spack package. If you
want to build a Makefile project using Spack-installed oneAPI compilers,
then use spack to configure your environment::
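
   $ spack load intel-oneapi-compilers

The package to load depends on what you installed; ``intel-oneapi-compilers``
is shown here as a typical (assumed) choice.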

.. If your system administrator did not provide modules for pre-installed Intel
   tools, you could do well to ask for them, because installing multiple copies
   of the Intel tools, as is wont to happen once Spack is in the picture, is
   bound to stretch disk space and patience thin. If you *are* the system
   administrator and are still new to modules, then perhaps it's best to follow
   the `next section <Installing Intel tools within Spack_>`_ and install the tools

If the ``pyproject.toml`` lists ``mesonpy`` as the ``build-backend``,
it uses the meson build system. Meson uses the default
``pyproject.toml`` keys to list dependencies.

See https://meson-python.readthedocs.io/en/latest/tutorials/introduction.html
for more information.
""" """
@@ -582,7 +582,7 @@ libraries. Make sure not to add modules/packages containing the word
"test", as these likely won't end up in the installation directory, "test", as these likely won't end up in the installation directory,
or may require test dependencies like pytest to be installed. or may require test dependencies like pytest to be installed.
Instead of defining the ``import_modules`` explicitly, only the subset Instead of defining the ``import_modules`` explicity, only the subset
of module names to be skipped can be defined by using ``skip_modules``. of module names to be skipped can be defined by using ``skip_modules``.
If a defined module has submodules, they are skipped as well, e.g., If a defined module has submodules, they are skipped as well, e.g.,
in case the ``plotting`` modules should be excluded from the in case the ``plotting`` modules should be excluded from the

Testing
^^^^^^^

``WafPackage`` also provides ``test`` and ``installtest`` methods,
which are run after the ``build`` and ``install`` phases, respectively.
By default, these phases do nothing, but you can override them to
run package-specific unit tests.

.. code-block:: python
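
   # A minimal sketch, not the original elided example: run the package's
   # tests from a "test" subdirectory with pytest, adapting as needed.
   def installtest(self):
       with working_dir("test"):
           pytest = which("py.test")
           pytest()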


# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

# Enable todo items
todo_include_todos = True

#
# Disable duplicate cross-reference warnings.
#

# General information about the project.
project = "Spack"
copyright = "2013-2023, Lawrence Livermore National Laboratory."

# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the

    ("py:class", "spack.repo._PrependFileLoader"),
    ("py:class", "spack.build_systems._checks.BaseBuilder"),
    # Spack classes that intersphinx is unable to resolve
    ("py:class", "spack.version.StandardVersion"),
    ("py:class", "spack.spec.DependencySpec"),
    ("py:class", "spack.install_test.Pb"),
]

# The reST default role (used for this markup: `text`) to use for all documents.

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [("index", "Spack.tex", "Spack Documentation", "Todd Gamblin", "manual")]

# The name of an image file (relative to this directory) to place at the top of
# the title page.

        "Spack",
        "One line description of project.",
        "Miscellaneous",
    )
]

# Documents to append as an appendix to all manuals.

# -- Extension configuration -------------------------------------------------

# sphinx.ext.intersphinx
intersphinx_mapping = {"python": ("https://docs.python.org/3", None)}

See the *Configuration settings* section of ``man ccache`` to learn more
about the default settings and how to change them. Please note that we
currently disable ccache's ``hash_dir`` feature to avoid an issue with the
stage directory (see
https://github.com/spack/spack/pull/3761#issuecomment-294352232).
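For reference, ccache support is turned on in ``config.yaml`` with:

.. code-block:: yaml

   config:
     ccache: true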
-----------------------
``shared_linking:type``
-----------------------

In case you want to skip directly to specific docs:

* :ref:`packages.yaml <build-settings>`
* :ref:`repos.yaml <repositories>`

You can also add any of these as inline configuration in the YAML
manifest file (``spack.yaml``) describing an :ref:`environment
<environment-configuration>`.

-----------
YAML Format
-----------
You can get the name to use for ``<platform>`` by running ``spack arch
--platform``. The system config scope has a ``<platform>`` section for
sites at which ``/etc`` is mounted on multiple heterogeneous machines.

.. _config-scope-precedence:

----------------
Scope Precedence
----------------
lower-precedence settings. Completely ignoring higher-level configuration
options is supported with the ``::`` notation for keys (see
:ref:`config-overrides` below).

There are also special notations for string concatenation and precedence
overrides: the ``+:`` notation forces *prepending* strings or lists (for
lists, this is identical to the default behavior), and ``-:`` works
similarly, but for *appending* values (see :ref:`config-prepend-append`).

^^^^^^^^^^^
Simple keys
^^^^^^^^^^^
.. _config-prepend-append:

^^^^^^^^^^^^^^^^^^^^
String Concatenation
^^^^^^^^^^^^^^^^^^^^

Above, the user ``config.yaml`` *completely* overrides specific settings in the
default ``config.yaml``. Sometimes, it is useful to add a suffix/prefix
to a path or name. To do this, you can use the ``-:`` notation for *append*
string concatenation at the end of a key in a configuration file. For example:

.. code-block:: yaml
   :emphasize-lines: 1
   :caption: ~/.spack/config.yaml

   config:
     install_tree-: /my/custom/suffix/

Spack will then append to the lower-precedence configuration under the
``install_tree-:`` section:

.. code-block:: console

   $ spack config get config
   config:
     install_tree: /some/other/directory/my/custom/suffix
     build_stage:
     - $tempdir/$user/spack-stage
     - ~/.spack/stage

Similarly, ``+:`` can be used to *prepend* to a path or name:

.. code-block:: yaml
   :emphasize-lines: 1
   :caption: ~/.spack/config.yaml

   config:
     install_tree+: /my/custom/suffix/
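Mirroring the append example above, the prepended value should then show up at
the front of the lower-precedence setting (a sketch of the expected result):

.. code-block:: console

   $ spack config get config
   config:
     install_tree: /my/custom/suffix/some/other/directory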
.. _config-overrides:

The OS that are currently supported are summarized in the table below:

   * - Amazon Linux 2
     - ``amazonlinux:2``
     - ``spack/amazon-linux``
   * - AlmaLinux 8
     - ``almalinux:8``
     - ``spack/almalinux8``
   * - AlmaLinux 9
     - ``almalinux:9``
     - ``spack/almalinux9``
   * - Rocky Linux 8
     - ``rockylinux:8``
     - ``spack/rockylinux8``
   * - Rocky Linux 9
     - ``rockylinux:9``
     - ``spack/rockylinux9``
   * - Fedora Linux 37
     - ``fedora:37``
     - ``spack/fedora37``
   * - Fedora Linux 38
     - ``fedora:38``
     - ``spack/fedora38``

All the images are tagged with the corresponding release of Spack:
The minimum version of Singularity required to build a SIF (Singularity Image Format)
image from the recipes generated by Spack is ``3.5.3``.

------------------------------
Extending the Jinja2 Templates
------------------------------

The Dockerfile and the Singularity definition file that Spack can generate are based on
a few Jinja2 templates that are rendered according to the environment being containerized.
Even though Spack allows a great deal of customization by just setting appropriate values for
the configuration options, sometimes that is not enough.

In those cases, a user can directly extend the template that Spack uses to render the image
to e.g. set additional environment variables or perform specific operations either before or
after a given stage of the build. Let's consider as an example the following structure:

.. code-block:: console

   $ tree /opt/environment
   /opt/environment
   ├── data
   │   └── data.csv
   ├── spack.yaml
   └── templates
       └── container
           └── CustomDockerfile

containing both the custom template extension and the environment manifest file. To use a custom
template, the environment must register the directory containing it, and declare its use under the
``container`` configuration:

.. code-block:: yaml
   :emphasize-lines: 7-8,12

   spack:
     specs:
     - hdf5~mpi
     concretizer:
       unify: true
     config:
       template_dirs:
       - /opt/environment/templates
     container:
       format: docker
       depfile: true
       template: container/CustomDockerfile

The template extension can override two blocks, named ``build_stage`` and ``final_stage``, similarly to
the example below:

.. code-block::
   :emphasize-lines: 3,8

   {% extends "container/Dockerfile" %}
   {% block build_stage %}
   RUN echo "Start building"
   {{ super() }}
   {% endblock %}
   {% block final_stage %}
   {{ super() }}
   COPY data /share/myapp/data
   {% endblock %}

The recipe that gets generated contains the two extra instructions that we added in our template extension:

.. code-block:: Dockerfile
   :emphasize-lines: 4,43

   # Build stage with Spack pre-installed and ready to be used
   FROM spack/ubuntu-jammy:latest as builder
   RUN echo "Start building"

   # What we want to install and how we want to install it
   # is specified in a manifest file (spack.yaml)
   RUN mkdir /opt/spack-environment \
   &&  (echo "spack:" \
   &&   echo "  specs:" \
   &&   echo "  - hdf5~mpi" \
   &&   echo "  concretizer:" \
   &&   echo "    unify: true" \
   &&   echo "  config:" \
   &&   echo "    template_dirs:" \
   &&   echo "    - /tmp/environment/templates" \
   &&   echo "  install_tree: /opt/software" \
   &&   echo "  view: /opt/view") > /opt/spack-environment/spack.yaml

   # Install the software, remove unnecessary deps
   RUN cd /opt/spack-environment && spack env activate . && spack concretize && spack env depfile -o Makefile && make -j $(nproc) && spack gc -y

   # Strip all the binaries
   RUN find -L /opt/view/* -type f -exec readlink -f '{}' \; | \
       xargs file -i | \
       grep 'charset=binary' | \
       grep 'x-executable\|x-archive\|x-sharedlib' | \
       awk -F: '{print $1}' | xargs strip -s

   # Modifications to the environment that are necessary to run
   RUN cd /opt/spack-environment && \
       spack env activate --sh -d . >> /etc/profile.d/z10_spack_environment.sh

   # Bare OS image to run the installed executables
   FROM ubuntu:22.04

   COPY --from=builder /opt/spack-environment /opt/spack-environment
   COPY --from=builder /opt/software /opt/software
   COPY --from=builder /opt/._view /opt/._view
   COPY --from=builder /opt/view /opt/view
   COPY --from=builder /etc/profile.d/z10_spack_environment.sh /etc/profile.d/z10_spack_environment.sh

   COPY data /share/myapp/data

   ENTRYPOINT ["/bin/bash", "--rcfile", "/etc/profile", "-l", "-c", "$*", "--" ]
   CMD [ "/bin/bash" ]
.. _container_config_options:

-----------------------
Configuration Reference
-----------------------

The options below can be used
to customize the generation of container recipes:
     - The format of the recipe
     - ``docker`` or ``singularity``
     - Yes
   * - ``depfile``
     - Whether to use a depfile for installation, or not
     - True or False (default)
     - No
   * - ``images:os``
     - Operating system used as a base for the image
     - See :ref:`containers-supported-os`

     - No
   * - ``os_packages:command``
     - Tool used to manage system packages
     - ``apt``, ``yum``, ``zypper``, ``apk``, ``yum_amazon``
     - Only with custom base images
   * - ``os_packages:update``
     - Whether or not to update the list of available packages

     - System packages needed at run-time
     - Valid packages for the current OS
     - No
   * - ``labels``
     - Labels to tag the image
     - Pairs of key-value strings

make another change, test that change, etc. We use `pytest
<http://pytest.org/>`_ as our tests framework, and these types of
arguments are just passed to the ``pytest`` command underneath. See `the
pytest docs
<https://doc.pytest.org/en/latest/how-to/usage.html#specifying-which-tests-to-run>`_
for more details on test selection syntax.

``spack unit-test`` has a few special options that can help you
understand what tests are available.

You can also combine any of these options with a ``pytest`` keyword
search. See the `pytest usage docs
<https://doc.pytest.org/en/latest/how-to/usage.html#specifying-which-tests-to-run>`_
for more details on test selection syntax. For example, to see the names of all tests that have "spec"
or "concretize" somewhere in their names:

use my new hook as follows:

.. code-block:: python

   def post_log_write(message, level):
       """Do something custom with the message and level every time we write
       to the log
       """
       print('running post_log_write!')

Using Environments
------------------

Here we follow a typical use case of creating, concretizing,
installing and loading an environment.

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Creating a managed Environment
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

An environment is created by:
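.. code-block:: console

   $ spack env create myenv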
Spack then creates the directory ``var/spack/environments/myenv``.

.. note::

   All managed environments by default are stored in the ``var/spack/environments`` folder.
   This location can be changed by setting the ``environments_root`` variable in ``config.yaml``.
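For example, a minimal sketch of such an override (the path is illustrative):

.. code-block:: yaml

   config:
     environments_root: ~/spack-envs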
In the ``var/spack/environments/myenv`` directory, Spack creates the
file ``spack.yaml`` and the hidden directory ``.spack-env``.
an Environment, the ``.spack-env`` directory also contains:

* ``logs/``: A directory containing the build logs for the packages
  in this Environment.

Spack Environments can also be created from either a manifest file
(usually but not necessarily named ``spack.yaml``) or a lockfile.
To create an Environment from a manifest:

.. code-block:: console
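
   $ spack env create myenv spack.yaml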
Anonymous specs can be created in place using the command:

.. code-block:: console

   $ spack env create -d .

In this case Spack simply creates a ``spack.yaml`` file in the requested
directory.
user support groups providing a large software stack for their HPC center.

.. admonition:: Re-concretization of user specs

   The ``spack concretize`` command without additional arguments will *not* change any
   previously concretized specs. This may prevent it from finding a solution when using
   ``unify: true``, and it may prevent it from finding a minimal solution when using
   ``unify: when_possible``. You can force Spack to ignore the existing concrete environment
   with ``spack concretize -f``.
^^^^^^^^^^^^^
Spec Matrices
^^^^^^^^^^^^^

.. code-block:: Makefile

   spack.lock: spack.yaml
   	$(SPACK) -e . concretize -f

   env.mk: spack.lock
   	$(SPACK) -e . env depfile -o $@ --make-prefix spack

   env: spack/env
   	$(info Environment installed!)
.. note::

   When including generated ``Makefile``\s, it is important to use
   the ``--make-prefix`` flag and use the non-phony target
   ``<prefix>/env`` as prerequisite, instead of the phony target
   ``<prefix>/all``.
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Building a subset of the environment
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

The root spec can be installed with verbose
output (``spack install --verbose``) while its dependencies are installed silently:

.. code-block:: console

   # Install the root spec with verbose output.
   $ make -j16 install/python-3.11.0-<hash> SPACK_INSTALL_FLAGS=--verbose
^^^^^^^^^^^^^^^^^^^^^^^^^
Adding post-install hooks
^^^^^^^^^^^^^^^^^^^^^^^^^

Another advanced use-case of generated ``Makefile``\s is running a post-install
command for each package. These "hooks" could be anything from printing a
post-install message, running tests, or pushing just-built binaries to a buildcache.

This can be accomplished through the generated ``[<prefix>/]SPACK_PACKAGE_IDS``
variable. Assuming we have an active and concrete environment, we generate the
associated ``Makefile`` with a prefix ``example``:

.. code:: console

   $ spack env depfile -o env.mk --make-prefix example

And we now include it in a different ``Makefile``, in which we create a target
``example/push/%`` with ``%`` referring to a package identifier. This target
depends on the particular package installation. In this target we automatically
have the target-specific ``HASH`` and ``SPEC`` variables at our disposal. They
are respectively the spec hash (excluding leading ``/``), and a human-readable spec.
Finally, we have an entrypoint target ``push`` that will update the buildcache
index once every package is pushed. Note how this target uses the generated
``example/SPACK_PACKAGE_IDS`` variable to define its prerequisites.

.. code:: Makefile

   SPACK ?= spack
   BUILDCACHE_DIR = $(CURDIR)/tarballs

   .PHONY: all

   all: push

   include env.mk

   example/push/%: example/install/%
   	@mkdir -p $(dir $@)
   	$(info About to push $(SPEC) to a buildcache)
   	$(SPACK) -e . buildcache create --allow-root --only=package --directory $(BUILDCACHE_DIR) /$(HASH)
   	@touch $@

   push: $(addprefix example/push/,$(example/SPACK_PACKAGE_IDS))
   	$(info Updating the buildcache index)
   	$(SPACK) -e . buildcache update-index --directory $(BUILDCACHE_DIR)
   	$(info Done!)
   	@touch $@

creates a simple python file:

.. code-block:: python

   # FIXME: Add a list of GitHub accounts to
   # notify when the package is updated.
   # maintainers("github_user1", "github_user2")

   version("0.8.13", sha256="591a9b4ec81c1f2042a97aa60564e0cb79d041c52faa7416acb38bc95bd2c76d")

A build matrix showing which packages are working on which systems is shown below.

.. code-block:: console

   dnf install epel-release
   dnf group install "Development Tools"
   dnf install curl findutils gcc-gfortran gnupg2 hostname iproute redhat-lsb-core python3 python3-pip python3-setuptools unzip python3-boto3

.. tab-item:: macOS Brew
Manual compiler configuration
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

If auto-detection fails, you can manually configure a compiler by
editing your ``~/.spack/<platform>/compilers.yaml`` file. You can do this by running
``spack config edit compilers``, which will open the file in
:ref:`your favorite editor <controlling-the-editor>`.

Each compiler configuration in the file looks like this:
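A representative entry might look like the following sketch (paths, versions,
and OS/target values here are illustrative):

.. code-block:: yaml

   compilers:
   - compiler:
       spec: gcc@10.4.0
       paths:
         cc: /usr/bin/gcc-10
         cxx: /usr/bin/g++-10
         f77: /usr/bin/gfortran-10
         fc: /usr/bin/gfortran-10
       operating_system: ubuntu20.04
       target: x86_64
       modules: []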
Intel Fortran
"""""""""""""

For Fortran-based packages on Windows, we strongly recommend Intel's oneAPI Fortran compilers.
The suite is free to download from Intel's website, located at
https://software.intel.com/content/www/us/en/develop/tools/oneapi/components/fortran-compiler.html.
The executable of choice for Spack will be Intel's Beta Compiler, ifx, which supports the classic
compiler's (ifort's) frontend and runtime libraries by using LLVM.

.. note::

   If you chose to install Spack into a directory on Windows that is set up to require Administrative
   Privileges, Spack will require elevated privileges to run.
   Administrative Privileges can be denoted either by default, such as
   ``C:\Program Files``, or by administrator-applied administrative restrictions
   on a directory that Spack installs files to, such as ``C:\Users``.
Spack console via:

.. code-block:: console

   spack install cpuinfo

If in the previous step, you did not have CMake or Ninja installed, running the command above should bootstrap both packages.

"""""""""""""""""""""""""""
Windows Compatible Packages
"""""""""""""""""""""""""""

Not all spack packages currently have Windows support. Some are inherently incompatible with the
platform, and others simply have yet to be ported. To view the current set of packages with Windows
support, the list command should be used via ``spack list -t windows``. If a package you would like
to install on Windows is not in that list, feel free to reach out to request the port or contribute
the port yourself.

.. note::

   This is by no means a comprehensive list; some packages may have ports that were not tagged,
   while others may just work out of the box on Windows and have not been tagged as such.

^^^^^^^^^^^^^^
For developers
^^^^^^^^^^^^^^
Instructions for creating the installer are at
https://github.com/spack/spack/blob/develop/lib/spack/spack/cmd/installer/README.md

Alternatively, a pre-built copy of the Windows installer is available as an artifact of Spack's Windows CI
at each run of the CI on develop or any PR.

@@ -163,7 +163,7 @@ your site.
Mirror environment Mirror environment
^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^
To create a mirror of all packages required by a concrete environment, activate the environment and call ``spack mirror create -a``. To create a mirror of all packages required by a concerte environment, activate the environment and call ``spack mirror create -a``.
This is especially useful to create a mirror of an environment concretized on another machine. This is especially useful to create a mirror of an environment concretized on another machine.
.. code-block:: console .. code-block:: console

View File

@@ -1,4 +1,4 @@
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other .. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
Spack Project Developers. See the top-level COPYRIGHT file for details. Spack Project Developers. See the top-level COPYRIGHT file for details.
SPDX-License-Identifier: (Apache-2.0 OR MIT) SPDX-License-Identifier: (Apache-2.0 OR MIT)
@@ -13,7 +13,7 @@ The use of module systems to manage user environment in a controlled way
is a common practice at HPC centers that is often embraced also by is a common practice at HPC centers that is often embraced also by
individual programmers on their development machines. To support this individual programmers on their development machines. To support this
common practice Spack integrates with `Environment Modules common practice Spack integrates with `Environment Modules
<http://modules.sourceforge.net/>`_ and `Lmod <http://modules.sourceforge.net/>`_ and `LMod
<http://lmod.readthedocs.io/en/latest/>`_ by providing post-install hooks <http://lmod.readthedocs.io/en/latest/>`_ by providing post-install hooks
that generate module files and commands to manipulate them. that generate module files and commands to manipulate them.
@@ -26,7 +26,7 @@ Using module files via Spack
---------------------------- ----------------------------
If you have installed a supported module system you should be able to If you have installed a supported module system you should be able to
run ``module avail`` to see what module run either ``module avail`` or ``use -l spack`` to see what module
files have been installed. Here is sample output of those programs, files have been installed. Here is sample output of those programs,
showing lots of installed packages: showing lots of installed packages:
@@ -51,7 +51,12 @@ showing lots of installed packages:
help2man-1.47.4-gcc-4.8-kcnqmau lua-luaposix-33.4.0-gcc-4.8-mdod2ry netlib-scalapack-2.0.2-gcc-6.3.0-rgqfr6d py-scipy-0.19.0-gcc-6.3.0-kr7nat4 zlib-1.2.11-gcc-6.3.0-7cqp6cj help2man-1.47.4-gcc-4.8-kcnqmau lua-luaposix-33.4.0-gcc-4.8-mdod2ry netlib-scalapack-2.0.2-gcc-6.3.0-rgqfr6d py-scipy-0.19.0-gcc-6.3.0-kr7nat4 zlib-1.2.11-gcc-6.3.0-7cqp6cj
The names should look familiar, as they resemble the output from ``spack find``. The names should look familiar, as they resemble the output from ``spack find``.
For example, you could type the following command to load the ``cmake`` module: You *can* use the modules here directly. For example, you could type either of these commands
to load the ``cmake`` module:
.. code-block:: console
$ use cmake-3.7.2-gcc-6.3.0-fowuuby
.. code-block:: console .. code-block:: console
@@ -88,9 +93,9 @@ the different file formats that can be generated by Spack:
+-----------------------------+--------------------+-------------------------------+----------------------------------------------+----------------------+ +-----------------------------+--------------------+-------------------------------+----------------------------------------------+----------------------+
| | **Hook name** | **Default root directory** | **Default template file** | **Compatible tools** | | | **Hook name** | **Default root directory** | **Default template file** | **Compatible tools** |
+=============================+====================+===============================+==============================================+======================+ +=============================+====================+===============================+==============================================+======================+
| **Tcl - Non-Hierarchical** | ``tcl`` | share/spack/modules | share/spack/templates/modules/modulefile.tcl | Env. Modules/Lmod | | **TCL - Non-Hierarchical** | ``tcl`` | share/spack/modules | share/spack/templates/modules/modulefile.tcl | Env. Modules/LMod |
+-----------------------------+--------------------+-------------------------------+----------------------------------------------+----------------------+ +-----------------------------+--------------------+-------------------------------+----------------------------------------------+----------------------+
| **Lua - Hierarchical** | ``lmod`` | share/spack/lmod | share/spack/templates/modules/modulefile.lua | Lmod | | **Lua - Hierarchical** | ``lmod`` | share/spack/lmod | share/spack/templates/modules/modulefile.lua | LMod |
+-----------------------------+--------------------+-------------------------------+----------------------------------------------+----------------------+ +-----------------------------+--------------------+-------------------------------+----------------------------------------------+----------------------+
@@ -391,13 +396,13 @@ name and version for all packages that depend on mpi.
When specifying module names by projection for Lmod modules, we When specifying module names by projection for Lmod modules, we
recommend NOT including names of dependencies (e.g., MPI, compilers) recommend NOT including names of dependencies (e.g., MPI, compilers)
that are already in the Lmod hierarchy. that are already in the LMod hierarchy.
.. note:: .. note::
Tcl modules TCL modules
Tcl modules also allow for explicit conflicts between modulefiles. TCL modules also allow for explicit conflicts between modulefiles.
.. code-block:: yaml .. code-block:: yaml
@@ -421,9 +426,9 @@ that are already in the Lmod hierarchy.
.. note:: .. note::
Lmod hierarchical module files LMod hierarchical module files
When ``lmod`` is activated Spack will generate a set of hierarchical lua module When ``lmod`` is activated Spack will generate a set of hierarchical lua module
files that are understood by Lmod. The hierarchy will always contain the files that are understood by LMod. The hierarchy will always contain the
two layers ``Core`` / ``Compiler`` but can be further extended to two layers ``Core`` / ``Compiler`` but can be further extended to
any of the virtual dependencies present in Spack. A case that could be useful in any of the virtual dependencies present in Spack. A case that could be useful in
practice is for instance: practice is for instance:
@@ -445,7 +450,7 @@ that are already in the Lmod hierarchy.
that will generate a hierarchy in which the ``lapack`` and ``mpi`` layer can be switched that will generate a hierarchy in which the ``lapack`` and ``mpi`` layer can be switched
independently. This allows a site to build the same libraries or applications against different independently. This allows a site to build the same libraries or applications against different
implementations of ``mpi`` and ``lapack``, and let Lmod switch safely from one to the implementations of ``mpi`` and ``lapack``, and let LMod switch safely from one to the
other. other.
All packages built with a compiler in ``core_compilers`` and all All packages built with a compiler in ``core_compilers`` and all
@@ -455,12 +460,12 @@ that are already in the Lmod hierarchy.
.. warning:: .. warning::
Consistency of Core packages Consistency of Core packages
The user is responsible for maintaining consistency among core packages, as ``core_specs`` The user is responsible for maintaining consistency among core packages, as ``core_specs``
bypasses the hierarchy that allows Lmod to safely switch between coherent software stacks. bypasses the hierarchy that allows LMod to safely switch between coherent software stacks.
.. warning:: .. warning::
Deep hierarchies and ``lmod spider`` Deep hierarchies and ``lmod spider``
For hierarchies that are deeper than three layers ``lmod spider`` may have some issues. For hierarchies that are deeper than three layers ``lmod spider`` may have some issues.
See `this discussion on the Lmod project <https://github.com/TACC/Lmod/issues/114>`_. See `this discussion on the LMod project <https://github.com/TACC/Lmod/issues/114>`_.
"""""""""""""""""""""" """"""""""""""""""""""
Select default modules Select default modules
@@ -529,7 +534,7 @@ installed to ``/spack/prefix/foo``, if ``foo`` installs executables to
update ``MANPATH``. update ``MANPATH``.
The default list of environment variables in this config section The default list of environment variables in this config section
includes ``PATH``, ``MANPATH``, ``ACLOCAL_PATH``, ``PKG_CONFIG_PATH`` inludes ``PATH``, ``MANPATH``, ``ACLOCAL_PATH``, ``PKG_CONFIG_PATH``
and ``CMAKE_PREFIX_PATH``, as well as ``DYLD_FALLBACK_LIBRARY_PATH`` and ``CMAKE_PREFIX_PATH``, as well as ``DYLD_FALLBACK_LIBRARY_PATH``
on macOS. On Linux however, the corresponding ``LD_LIBRARY_PATH`` on macOS. On Linux however, the corresponding ``LD_LIBRARY_PATH``
variable is *not* set, because it affects the behavior of variable is *not* set, because it affects the behavior of
@@ -629,9 +634,8 @@ by its dependency; when the dependency is autoloaded, the executable will be in
PATH. Similarly for scripting languages such as Python, packages and their dependencies PATH. Similarly for scripting languages such as Python, packages and their dependencies
have to be loaded together. have to be loaded together.
Autoloading is enabled by default for Lmod and Environment Modules. The former Autoloading is enabled by default for LMod, as it has great builtin support for through
has builtin support for it through the ``depends_on`` function. The latter uses the ``depends_on`` function. For Environment Modules it is disabled by default.
the ``module load`` statement to load and track dependencies.
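As a minimal sketch (assuming the default ``tcl`` module set in ``modules.yaml``),
direct autoloading of dependencies can be enabled like this:

.. code-block:: yaml

   modules:
     default:
       tcl:
         all:
           autoload: direct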
Autoloading can also be enabled conditionally: Autoloading can also be enabled conditionally:
@@ -651,14 +655,12 @@ The allowed values for the ``autoload`` statement are either ``none``,
``direct`` or ``all``. ``direct`` or ``all``.
.. note:: .. note::
Tcl prerequisites TCL prerequisites
In the ``tcl`` section of the configuration file it is possible to use In the ``tcl`` section of the configuration file it is possible to use
the ``prerequisites`` directive that accepts the same values as the ``prerequisites`` directive that accepts the same values as
``autoload``. It will produce module files that have a ``prereq`` ``autoload``. It will produce module files that have a ``prereq``
statement, which autoloads dependencies on Environment Modules when its statement, which can be used to autoload dependencies in some versions
``auto_handling`` configuration option is enabled. If Environment Modules of Environment Modules.
is installed with Spack, ``auto_handling`` is enabled by default starting with
version 4.2. Otherwise, it is enabled by default since version 5.0.
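For instance, a sketch of the ``prerequisites`` directive for the same default
``tcl`` module set:

.. code-block:: yaml

   modules:
     default:
       tcl:
         all:
           prerequisites: direct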
------------------------ ------------------------
Maintaining Module Files Maintaining Module Files

View File

@@ -1,4 +1,4 @@
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other .. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
Spack Project Developers. See the top-level COPYRIGHT file for details. Spack Project Developers. See the top-level COPYRIGHT file for details.
SPDX-License-Identifier: (Apache-2.0 OR MIT) SPDX-License-Identifier: (Apache-2.0 OR MIT)

File diff suppressed because it is too large Load Diff

View File

@@ -1,4 +1,4 @@
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other .. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
Spack Project Developers. See the top-level COPYRIGHT file for details. Spack Project Developers. See the top-level COPYRIGHT file for details.
SPDX-License-Identifier: (Apache-2.0 OR MIT) SPDX-License-Identifier: (Apache-2.0 OR MIT)
@@ -9,32 +9,27 @@
CI Pipelines CI Pipelines
============ ============
Spack provides commands that support generating and running automated build pipelines in CI instances. At the highest Spack provides commands that support generating and running automated build
level it works like this: provide a spack environment describing the set of packages you care about, and include a pipelines designed for Gitlab CI. At the highest level it works like this:
description of how those packages should be mapped to Gitlab runners. Spack can then generate a ``.gitlab-ci.yml`` provide a spack environment describing the set of packages you care about,
file containing job descriptions for all your packages that can be run by a properly configured CI instance. When and include within that environment file a description of how those packages
run, the generated pipeline will build and deploy binaries, and it can optionally report to a CDash instance should be mapped to Gitlab runners. Spack can then generate a ``.gitlab-ci.yml``
file containing job descriptions for all your packages that can be run by a
properly configured Gitlab CI instance. When run, the generated pipeline will
build and deploy binaries, and it can optionally report to a CDash instance
regarding the health of the builds as they evolve over time. regarding the health of the builds as they evolve over time.
------------------------------ ------------------------------
Getting started with pipelines Getting started with pipelines
------------------------------ ------------------------------
To get started with automated build pipelines, a Gitlab instance with version ``>= 12.9`` It is fairly straightforward to get started with automated build pipelines. At
(more about Gitlab CI `here <https://about.gitlab.com/product/continuous-integration/>`_) a minimum, you'll need to set up a Gitlab instance (more about Gitlab CI
and at least one `runner <https://docs.gitlab.com/runner/>`_ configured is required. This `here <https://about.gitlab.com/product/continuous-integration/>`_) and configure
can be done quickly by setting up a local Gitlab instance. at least one `runner <https://docs.gitlab.com/runner/>`_. Then the basic steps
for setting up a build pipeline are as follows:
It is possible to set up pipelines on gitlab.com, but the builds there are limited to #. Create a repository on your gitlab instance
60 minutes and generic hardware. It is also possible to
`hook up <https://about.gitlab.com/blog/2018/04/24/getting-started-gitlab-ci-gcp>`_
Gitlab to Google Kubernetes Engine (`GKE <https://cloud.google.com/kubernetes-engine/>`_)
or Amazon Elastic Kubernetes Service (`EKS <https://aws.amazon.com/eks>`_), though those
topics are outside the scope of this document.
After setting up a Gitlab instance for running CI, the basic steps for setting up a build pipeline are as follows:
#. Create a repository in the Gitlab instance with CI and a runner enabled.
#. Add a ``spack.yaml`` at the root containing your pipeline environment #. Add a ``spack.yaml`` at the root containing your pipeline environment
#. Add a ``.gitlab-ci.yml`` at the root containing two jobs (one to generate #. Add a ``.gitlab-ci.yml`` at the root containing two jobs (one to generate
the pipeline dynamically, and one to run the generated jobs). the pipeline dynamically, and one to run the generated jobs).
@@ -45,6 +40,13 @@ See the :ref:`functional_example` section for a minimal working example. See al
the :ref:`custom_Workflow` section for a link to an example of a custom workflow the :ref:`custom_Workflow` section for a link to an example of a custom workflow
based on spack pipelines. based on spack pipelines.
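A minimal sketch of such a two-job ``.gitlab-ci.yml``, using the ``trigger`` syntax
described below (stage and job names, as well as paths, are illustrative, not prescribed):

.. code-block:: yaml

   stages: [generate, build]

   generate-pipeline:
     stage: generate
     script:
     - spack ci generate
       --artifacts-root "${CI_PROJECT_DIR}/jobs_scratch_dir"
       --output-file "${CI_PROJECT_DIR}/jobs_scratch_dir/pipeline.yml"
     artifacts:
       paths:
       - "${CI_PROJECT_DIR}/jobs_scratch_dir"

   build-jobs:
     stage: build
     trigger:
       include:
       - artifact: "jobs_scratch_dir/pipeline.yml"
         job: generate-pipeline
       strategy: depend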
While it is possible to set up pipelines on gitlab.com, as illustrated above, the
builds there are limited to 60 minutes and generic hardware. It is also possible to
`hook up <https://about.gitlab.com/blog/2018/04/24/getting-started-gitlab-ci-gcp>`_
Gitlab to Google Kubernetes Engine (`GKE <https://cloud.google.com/kubernetes-engine/>`_)
or Amazon Elastic Kubernetes Service (`EKS <https://aws.amazon.com/eks>`_), though those
topics are outside the scope of this document.
Spack's pipelines are now making use of the Spack's pipelines are now making use of the
`trigger <https://docs.gitlab.com/ee/ci/yaml/#trigger>`_ syntax to run `trigger <https://docs.gitlab.com/ee/ci/yaml/#trigger>`_ syntax to run
dynamically generated dynamically generated
@@ -130,35 +132,29 @@ And here's the spack environment built by the pipeline represented as a
mirrors: { "mirror": "s3://spack-public/mirror" } mirrors: { "mirror": "s3://spack-public/mirror" }
ci: gitlab-ci:
enable-artifacts-buildcache: True
rebuild-index: False
pipeline-gen:
- any-job:
before_script: before_script:
- git clone ${SPACK_REPO} - git clone ${SPACK_REPO}
- pushd spack && git checkout ${SPACK_CHECKOUT_VERSION} && popd - pushd spack && git checkout ${SPACK_CHECKOUT_VERSION} && popd
- . "./spack/share/spack/setup-env.sh" - . "./spack/share/spack/setup-env.sh"
- build-job: script:
tags: [docker] - pushd ${SPACK_CONCRETE_ENV_DIR} && spack env activate --without-view . && popd
- spack -d ci rebuild
mappings:
- match: ["os=ubuntu18.04"]
runner-attributes:
image: image:
name: ghcr.io/scottwittenburg/ecpe4s-ubuntu18.04-runner-x86_64:2020-09-01 name: ghcr.io/scottwittenburg/ecpe4s-ubuntu18.04-runner-x86_64:2020-09-01
entrypoint: [""] entrypoint: [""]
tags:
- docker
enable-artifacts-buildcache: True
rebuild-index: False
The elements of this file important to spack ci pipelines are described in more The elements of this file important to spack ci pipelines are described in more
detail below, but there are a couple of things to note about the above working detail below, but there are a couple of things to note about the above working
example: example:
.. note::
There is no ``script`` attribute specified here. The reason for this is that
Spack CI will automatically generate reasonable default scripts. More
detail on what is in these scripts can be found below.
Also notice the ``before_script`` section. It is required when using any of the
default scripts, because it sources the ``setup-env.sh`` script that informs
the default scripts where to find the ``spack`` executable.
Normally ``enable-artifacts-buildcache`` is not recommended in production as it Normally ``enable-artifacts-buildcache`` is not recommended in production as it
results in large binary artifacts getting transferred back and forth between results in large binary artifacts getting transferred back and forth between
gitlab and the runners. But in this example on gitlab.com where there is no gitlab and the runners. But in this example on gitlab.com where there is no
@@ -178,7 +174,7 @@ during subsequent pipeline runs.
With the addition of reproducible builds (#22887) a previously working With the addition of reproducible builds (#22887) a previously working
pipeline will require some changes: pipeline will require some changes:
* In the build-jobs, the environment location changed. * In the build jobs (``runner-attributes``), the environment location changed.
This will typically show as a ``KeyError`` in the failing job. Be sure to This will typically show as a ``KeyError`` in the failing job. Be sure to
point to ``${SPACK_CONCRETE_ENV_DIR}``. point to ``${SPACK_CONCRETE_ENV_DIR}``.
@@ -200,9 +196,9 @@ ci pipelines. These commands are covered in more detail in this section.
.. _cmd-spack-ci: .. _cmd-spack-ci:
^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^
``spack ci`` ``spack ci``
^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^
Super-command for functionality related to generating pipelines and executing Super-command for functionality related to generating pipelines and executing
pipeline jobs. pipeline jobs.
@@ -231,7 +227,7 @@ Using ``--prune-dag`` or ``--no-prune-dag`` configures whether or not jobs are
generated for specs that are already up to date on the mirror. If enabling generated for specs that are already up to date on the mirror. If enabling
DAG pruning using ``--prune-dag``, more information may be required in your DAG pruning using ``--prune-dag``, more information may be required in your
``spack.yaml`` file, see the :ref:`noop_jobs` section below regarding ``spack.yaml`` file, see the :ref:`noop_jobs` section below regarding
``noop-job``. ``service-job-attributes``.
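For example, a hedged sketch of a generate step that disables DAG pruning, so a job
is created for every spec (``--output-file`` names the generated YAML):

.. code-block:: console

   $ spack ci generate --no-prune-dag --output-file pipeline.yml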
The optional ``--check-index-only`` argument can be used to speed up pipeline The optional ``--check-index-only`` argument can be used to speed up pipeline
generation by telling spack to consider only remote buildcache indices when generation by telling spack to consider only remote buildcache indices when
@@ -267,11 +263,11 @@ generated by jobs in the pipeline.
.. _cmd-spack-ci-rebuild: .. _cmd-spack-ci-rebuild:
^^^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^^^^
``spack ci rebuild`` ``spack ci rebuild``
^^^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^^^^
The purpose of ``spack ci rebuild`` is to take an assigned The purpose of ``spack ci rebuild`` is straightforward: take its assigned
spec and ensure a binary of a successful build exists on the target mirror. spec and ensure a binary of a successful build exists on the target mirror.
If the binary does not already exist, it is built from source and pushed If the binary does not already exist, it is built from source and pushed
to the mirror. The associated stand-alone tests are optionally run against to the mirror. The associated stand-alone tests are optionally run against
@@ -284,7 +280,7 @@ directory. The script is run in a job to install the spec from source. The
resulting binary package is pushed to the mirror. If ``cdash`` is configured resulting binary package is pushed to the mirror. If ``cdash`` is configured
for the environment, then the build results will be uploaded to the site. for the environment, then the build results will be uploaded to the site.
Environment variables and values in the ``ci::pipeline-gen`` section of the Environment variables and values in the ``gitlab-ci`` section of the
``spack.yaml`` environment file provide inputs to this process. The ``spack.yaml`` environment file provide inputs to this process. The
two main sources of environment variables are variables written into two main sources of environment variables are variables written into
``.gitlab-ci.yml`` by ``spack ci generate`` and the GitLab CI runtime. ``.gitlab-ci.yml`` by ``spack ci generate`` and the GitLab CI runtime.
@@ -302,13 +298,11 @@ A snippet from an example ``spack.yaml`` file illustrating use of this
option *and* specification of a package with broken tests is given below. option *and* specification of a package with broken tests is given below.
The inclusion of a spec for building ``gptune`` is not shown here. Note The inclusion of a spec for building ``gptune`` is not shown here. Note
that ``--tests`` is passed to ``spack ci rebuild`` as part of the that ``--tests`` is passed to ``spack ci rebuild`` as part of the
``build-job`` script. ``gitlab-ci`` script.
.. code-block:: yaml .. code-block:: yaml
ci: gitlab-ci:
pipeline-gen:
- build-job
script: script:
- . "./share/spack/setup-env.sh" - . "./share/spack/setup-env.sh"
- spack --version - spack --version
@@ -360,31 +354,113 @@ arguments you can pass to ``spack ci reproduce-build`` in order to reproduce
a particular build locally. a particular build locally.
------------------------------------ ------------------------------------
Job Types A pipeline-enabled spack environment
------------------------------------ ------------------------------------
^^^^^^^^^^^^^^^ Here's an example of a spack environment file that has been enhanced with
Rebuild (build) sections describing a build pipeline:
^^^^^^^^^^^^^^^
Rebuild jobs, denoted as ``build-job`` entries in the ``pipeline-gen`` list, are jobs .. code-block:: yaml
associated with concrete specs that have been marked for rebuild. By default, a simple
rebuild script is generated, but it may be modified as needed.
The default script performs three main steps: change directories to the pipeline's concrete spack:
environment, activate the concrete environment, and run the ``spack ci rebuild`` command: definitions:
- pkgs:
- readline@7.0
- compilers:
- '%gcc@5.5.0'
- oses:
- os=ubuntu18.04
- os=centos7
specs:
- matrix:
- [$pkgs]
- [$compilers]
- [$oses]
mirrors:
cloud_gitlab: https://mirror.spack.io
gitlab-ci:
mappings:
- match:
- os=ubuntu18.04
runner-attributes:
tags:
- spack-kube
image: spack/ubuntu-bionic
- match:
- os=centos7
runner-attributes:
tags:
- spack-kube
image: spack/centos7
cdash:
build-group: Release Testing
url: https://cdash.spack.io
project: Spack
site: Spack AWS Gitlab Instance
.. code-block:: bash Hopefully, the ``definitions``, ``specs``, ``mirrors``, etc. sections are already
familiar, as they are part of spack :ref:`environments`. So let's take a more
in-depth look some of the pipeline-related sections in that environment file
that might not be as familiar.
cd ${concrete_environment_dir} The ``gitlab-ci`` section is used to configure how the pipeline workload should be
spack env activate --without-view . generated, mainly how the jobs for building specs should be assigned to the
spack ci rebuild configured runners on your instance. Each entry within the list of ``mappings``
corresponds to a known gitlab runner, where the ``match`` section is used
in assigning a release spec to one of the runners, and the ``runner-attributes``
section is used to configure the spec/job for that particular runner.
Both the top-level ``gitlab-ci`` section as well as each ``runner-attributes``
section can also contain the following keys: ``image``, ``tags``, ``variables``,
``before_script``, ``script``, and ``after_script``. If any of these keys are
provided at the ``gitlab-ci`` level, they will be used as the defaults for any
``runner-attributes``, unless they are overridden in those sections. Specifying
any of these keys at the ``runner-attributes`` level generally overrides the
keys specified at the higher level, with a couple exceptions. Any ``variables``
specified at both levels result in those dictionaries getting merged in the
resulting generated job, and any duplicate variable names get assigned the value
provided in the specific ``runner-attributes``. If ``tags`` are specified both
at the ``gitlab-ci`` level as well as the ``runner-attributes`` level, then the
lists of tags are combined, and any duplicates are removed.
See the section below on using a custom spack for an example of how these keys
could be used.
There are other pipeline options you can configure within the ``gitlab-ci`` section
as well.
The ``bootstrap`` section allows you to specify lists of specs from
your ``definitions`` that should be staged ahead of the environment's ``specs`` (this
section is described in more detail below). The ``enable-artifacts-buildcache`` key
takes a boolean and determines whether the pipeline uses artifacts to store and
pass along the buildcaches from one stage to the next (the default if you don't
provide this option is ``False``).
The optional ``broken-specs-url`` key tells Spack to check against a list of
specs that are known to be currently broken in ``develop``. If any such specs
are found, the ``spack ci generate`` command will fail with an error message
informing the user what broken specs were encountered. This allows the pipeline
to fail early and avoid wasting compute resources attempting to build packages
that will not succeed.
The optional ``cdash`` section provides information that will be used by the
``spack ci generate`` command (invoked by ``spack ci start``) for reporting
to CDash. All the jobs generated from this environment will belong to a
"build group" within CDash that can be tracked over time. As the release
progresses, this build group may have jobs added or removed. The url, project,
and site are used to specify the CDash instance to which build results should
be reported.
Take a look at the
`schema <https://github.com/spack/spack/blob/develop/lib/spack/spack/schema/gitlab_ci.py>`_
for the gitlab-ci section of the spack environment file, to see precisely what
syntax is allowed there.
.. _rebuild_index: .. _rebuild_index:
^^^^^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Update Index (reindex) Note about rebuilding buildcache index
^^^^^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
By default, while a pipeline job may rebuild a package, create a buildcache By default, while a pipeline job may rebuild a package, create a buildcache
entry, and push it to the mirror, it does not automatically re-generate the entry, and push it to the mirror, it does not automatically re-generate the
@@ -399,44 +475,21 @@ not correctly reflect the mirror's contents at the end of a pipeline.
To make sure the buildcache index is up to date at the end of your pipeline, To make sure the buildcache index is up to date at the end of your pipeline,
spack generates a job to update the buildcache index of the target mirror spack generates a job to update the buildcache index of the target mirror
at the end of each pipeline by default. You can disable this behavior by at the end of each pipeline by default. You can disable this behavior by
adding ``rebuild-index: False`` inside the ``ci`` section of your adding ``rebuild-index: False`` inside the ``gitlab-ci`` section of your
spack environment. spack environment. Spack will assign the job any runner attributes found
on the ``service-job-attributes``, if you have provided that in your
Reindex jobs do not allow modifying the ``script`` attribute since it is automatically ``spack.yaml``.
generated using the target mirror listed in the ``mirrors::mirror`` configuration.
^^^^^^^^^^^^^^^^^
Signing (signing)
^^^^^^^^^^^^^^^^^
This job is run after all of the rebuild jobs are completed and is intended to be used
to sign the package binaries built by a protected CI run. Signing jobs are generated
only if a signing job ``script`` is specified and the spack CI job type is protected.
Note: if an ``any-job`` section contains a script, this will not implicitly create a
``signing`` job; a signing job may only exist if it is explicitly specified in the
configuration with a ``script`` attribute. Specifying a signing job without a script
does not create a signing job, and the job configuration attributes will be ignored.
Signing jobs are always assigned the runner tags ``aws``, ``protected``, and ``notary``.
^^^^^^^^^^^^^^^^^
Cleanup (cleanup)
^^^^^^^^^^^^^^^^^
When using ``temporary-storage-url-prefix`` the cleanup job will destroy the mirror
created for the associated Gitlab pipeline. Cleanup jobs do not allow modifying the
script, but do expect that the spack command is in the path and require a
``before_script`` to be specified that sources the ``setup-env.sh`` script.
.. _noop_jobs: .. _noop_jobs:
^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^^^^^^
No Op (noop) Note about "no-op" jobs
^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^^^^^^
If no specs in an environment need to be rebuilt during a given pipeline run If no specs in an environment need to be rebuilt during a given pipeline run
(meaning all are already up to date on the mirror), a single successful job (meaning all are already up to date on the mirror), a single successful job
(a NO-OP) is still generated to avoid an empty pipeline (which GitLab (a NO-OP) is still generated to avoid an empty pipeline (which GitLab
considers to be an error). The ``noop-job*`` sections considers to be an error). An optional ``service-job-attributes`` section
can be added to your ``spack.yaml`` where you can provide ``tags`` and can be added to your ``spack.yaml`` where you can provide ``tags`` and
``image`` or ``variables`` for the generated NO-OP job. This section also ``image`` or ``variables`` for the generated NO-OP job. This section also
supports providing ``before_script``, ``script``, and ``after_script``, in supports providing ``before_script``, ``script``, and ``after_script``, in
@@ -447,99 +500,50 @@ Following is an example of this section added to a ``spack.yaml``:
.. code-block:: yaml .. code-block:: yaml
spack: spack:
ci: specs:
pipeline-gen: - openmpi
- noop-job: mirrors:
cloud_gitlab: https://mirror.spack.io
gitlab-ci:
mappings:
- match:
- os=centos8
runner-attributes:
tags:
- custom
- tag
image: spack/centos7
service-job-attributes:
tags: ['custom', 'tag'] tags: ['custom', 'tag']
image: image:
name: 'some.image.registry/custom-image:latest' name: 'some.image.registry/custom-image:latest'
entrypoint: ['/bin/bash'] entrypoint: ['/bin/bash']
script:: script:
- echo "Custom message in a custom script" - echo "Custom message in a custom script"
The example above illustrates how you can provide the attributes used to run The example above illustrates how you can provide the attributes used to run
the NO-OP job in the case of an empty pipeline. The only field for the NO-OP the NO-OP job in the case of an empty pipeline. The only field for the NO-OP
job that might be generated for you is ``script``, but that will only happen job that might be generated for you is ``script``, but that will only happen
if you do not provide one yourself. Notice in this example the ``script`` if you do not provide one yourself.
uses the ``::`` notation to prescribe override behavior. Without this, the
``echo`` command would have been prepended to the automatically generated script
rather than replacing it.
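For contrast, a sketch using the plain ``script`` key, in which case the custom command
is prepended to the automatically generated script instead of replacing it:

.. code-block:: yaml

   ci:
     pipeline-gen:
     - noop-job:
         script:
         - echo "Runs before the automatically generated script"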
------------------------------------ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
ci.yaml Assignment of specs to runners
------------------------------------ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Here's an example of a spack configuration file describing a build pipeline: The ``mappings`` section corresponds to a list of runners, and during assignment
of specs to runners, the list is traversed in order looking for matches, the
first runner that matches a release spec is assigned to build that spec. The
``match`` section within each runner mapping section is a list of specs, and
if any of those specs match the release spec (the ``spec.satisfies()`` method
is used), then that runner is considered a match.
.. code-block:: yaml ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Configuration of specs/jobs for a runner
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
ci: Once a runner has been chosen to build a release spec, the ``runner-attributes``
target: gitlab section provides information determining details of the job in the context of
the runner. The ``runner-attributes`` section must have a ``tags`` key, which
rebuild_index: True
broken-specs-url: https://broken.specs.url
broken-tests-packages:
- gptune
pipeline-gen:
- submapping:
- match:
- os=ubuntu18.04
build-job:
tags:
- spack-kube
image: spack/ubuntu-bionic
- match:
- os=centos7
build-job:
tags:
- spack-kube
image: spack/centos7
cdash:
build-group: Release Testing
url: https://cdash.spack.io
project: Spack
site: Spack AWS Gitlab Instance
The ``ci`` config section is used to configure how the pipeline workload should be
generated, mainly how the jobs for building specs should be assigned to the
configured runners on your instance. The main section for configuring pipelines
is ``pipeline-gen``, which is a list of job attribute sections that are merged,
using the same rules as Spack configs (:ref:`config-scope-precedence`), from the bottom up.
Sections are applied in an order consistent with how Spack orders scope precedence when merging lists.
There are two main section types, ``<type>-job`` sections and ``submapping``
sections.
^^^^^^^^^^^^^^^^^^^^^^
Job Attribute Sections
^^^^^^^^^^^^^^^^^^^^^^
Each type of job may have attributes added or removed via sections in the ``pipeline-gen``
list. Job type specific attributes may be specified using the keys ``<type>-job`` to
add attributes to all jobs of type ``<type>`` or ``<type>-job-remove`` to remove attributes
of type ``<type>``. Each section may only contain one type of job attribute specification, i.e.,
``build-job`` and ``noop-job`` may not coexist but ``build-job`` and ``build-job-remove`` may.
.. note::
The ``*-remove`` specifications are applied before the additive attribute specification.
For example, in the case where both ``build-job`` and ``build-job-remove`` are listed in
the same ``pipeline-gen`` section, any attribute specified in ``build-job`` will still
exist in the merged build-job after applying the section.
All of the attributes specified are forwarded to the generated CI jobs, however special
treatment is applied to the attributes ``tags``, ``image``, ``variables``, ``script``,
``before_script``, and ``after_script`` as they are components recognized explicitly by the
Spack CI generator. For the ``tags`` attribute, Spack will remove reserved tags
(:ref:`reserved_tags`) from all jobs specified in the config. In some cases, such as for
``signing`` jobs, reserved tags will be added back based on the type of CI that is being run.
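As an illustrative sketch (the attribute values are hypothetical, and it is assumed that
``*-remove`` sections mirror the shape of their additive counterparts), one section may
strip an attribute added by a lower-precedence scope while another supplies tags:

.. code-block:: yaml

   ci:
     pipeline-gen:
     - build-job-remove:
         image: custom-image   # hypothetical attribute to strip
     - build-job:
         tags: [spack-kube]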
Once a runner has been chosen to build a release spec, the ``build-job*``
sections provide information determining details of the job in the context of
the runner. At least one of the ``build-job*`` sections must contain a ``tags`` key, which the runner. The ``runner-attributes`` section must have a ``tags`` key, which
is a list containing at least one tag used to select the runner from among the is a list containing at least one tag used to select the runner from among the
runners known to the gitlab instance. For Docker executor type runners, the runners known to the gitlab instance. For Docker executor type runners, the
``image`` key is used to specify the Docker image used to build the release spec ``image`` key is used to specify the Docker image used to build the release spec
@@ -550,7 +554,7 @@ information on to the runner that it needs to do its work (e.g. scheduler
parameters, etc.). Any ``variables`` provided here will be added, verbatim, to parameters, etc.). Any ``variables`` provided here will be added, verbatim, to
each job. each job.
The ``build-job`` section also allows users to supply custom ``script``, The ``runner-attributes`` section also allows users to supply custom ``script``,
``before_script``, and ``after_script`` sections to be applied to every job ``before_script``, and ``after_script`` sections to be applied to every job
scheduled on that runner. This allows users to do any custom preparation or scheduled on that runner. This allows users to do any custom preparation or
cleanup tasks that fit their particular workflow, as well as completely cleanup tasks that fit their particular workflow, as well as completely
@@ -561,45 +565,46 @@ environment directory is located within your ``--artifacts_root`` (or if not
provided, within your ``$CI_PROJECT_DIR``), activates that environment for provided, within your ``$CI_PROJECT_DIR``), activates that environment for
you, and invokes ``spack ci rebuild``. you, and invokes ``spack ci rebuild``.
Sections that specify scripts (``script``, ``before_script``, ``after_script``) are all .. _staging_algorithm:
read as lists of commands or lists of lists of commands. It is recommended to write scripts
as lists of lists if scripts will be composed via merging. The default behavior of merging
lists will remove duplicate commands and potentially apply unwanted reordering, whereas
merging lists of lists will preserve the local ordering and never remove duplicate
commands. When writing commands to the CI target script, all lists are expanded and
flattened into a single list.
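For instance, a ``before_script`` written as a list of lists, so that merging with other
scopes preserves this block's internal ordering (the commands themselves are illustrative):

.. code-block:: yaml

   ci:
     pipeline-gen:
     - build-job:
         before_script:
         - - git clone ${SPACK_REPO}
           - . "./spack/share/spack/setup-env.sh"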
^^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Submapping Sections Summary of ``.gitlab-ci.yml`` generation algorithm
^^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
A special case of attribute specification is the ``submapping`` section, which may be used All specs yielded by the matrix (or all the specs in the environment) have their
to apply job attributes to build jobs based on the package spec associated with the rebuild dependencies computed, and the entire resulting set of specs are staged together
job. Submapping is specified as a list of spec ``match`` lists associated with before being run through the ``gitlab-ci/mappings`` entries, where each staged
``build-job``/``build-job-remove`` sections. There are two options for ``match_behavior``: spec is assigned a runner. "Staging" is the name given to the process of
either ``first`` or ``merge`` may be specified. In either case, the ``submapping`` list is figuring out in what order the specs should be built, taking into consideration
processed from the bottom up, and then each ``match`` list is searched for a string that Gitlab CI rules about jobs/stages. In the staging process the goal is to maximize
satisfies the check ``spec.satisfies({match_item})`` for each concrete spec. the number of jobs in any stage of the pipeline, while ensuring that the jobs in
any stage only depend on jobs in previous stages (since those jobs are guaranteed
to have completed already). As a runner is determined for a job, the information
in the ``runner-attributes`` is used to populate various parts of the job
description that will be used by Gitlab CI. Once all the jobs have been assigned
a runner, the ``.gitlab-ci.yml`` is written to disk.
In the case of ``match_behavior: first``, the first ``match`` section in the list of The short example provided above would result in the ``readline``, ``ncurses``,
``submappings`` that contains a string that satisfies the spec will apply its and ``pkgconf`` packages getting staged and built on the runner chosen by the
``build-job*`` attributes to the rebuild job associated with that spec. This is the ``spack-k8s`` tag. In this example, spack assumes the runner is a Docker executor
default behavior and is the method used if no ``match_behavior`` is specified. type runner, and thus certain jobs will be run in the ``centos7`` container,
and others in the ``ubuntu-18.04`` container. The resulting ``.gitlab-ci.yml``
will contain 6 jobs in three stages. Once the jobs have been generated, the
presence of a ``SPACK_CDASH_AUTH_TOKEN`` environment variable during the
``spack ci generate`` command would result in all of the jobs being put in a
build group on CDash called "Release Testing" (that group will be created if
it didn't already exist).
In the case of a ``merge`` match, all of the ``match`` sections in the list of ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
``submappings`` that contain a string that satisfies the spec will have the associated Optional compiler bootstrapping
``build-job*`` attributes applied to the rebuild job associated with that spec. Again, ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
the attributes will be merged starting from the bottom match going up to the top match.
In the case that no match is found in a submapping section, no additional attributes will be applied. Spack pipelines also have support for bootstrapping compilers on systems that
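A hedged sketch of a submapping using ``merge`` behavior, assuming ``match_behavior``
sits alongside the ``submapping`` list as the prose above suggests (tags, images, and
variables are illustrative):

.. code-block:: yaml

   ci:
     pipeline-gen:
     - match_behavior: merge
       submapping:
       - match: ["%gcc"]
         build-job:
           variables:
             EXTRA_VAR: "1"
       - match: ["os=ubuntu18.04"]
         build-job:
           tags: [spack-kube]
           image: spack/ubuntu-bionic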
may not already have the desired compilers installed. The idea here is that
^^^^^^^^^^^^^ you can specify a list of things to bootstrap in your ``definitions``, and
Bootstrapping spack will guarantee those will be installed in a phase of the pipeline before
^^^^^^^^^^^^^ your release specs, so that you can rely on those packages being available in
the binary mirror when you need them later on in the pipeline. At the moment
The ``bootstrap`` section allows you to specify lists of specs from
your ``definitions`` that should be staged ahead of the environment's ``specs``. At the moment
the only viable use-case for bootstrapping is to install compilers. the only viable use-case for bootstrapping is to install compilers.
Here's an example of what bootstrapping some compilers might look like: Here's an example of what bootstrapping some compilers might look like:
@@ -632,18 +637,18 @@ Here's an example of what bootstrapping some compilers might look like:
exclude: exclude:
- '%gcc@7.3.0 os=centos7' - '%gcc@7.3.0 os=centos7'
- '%gcc@5.5.0 os=ubuntu18.04' - '%gcc@5.5.0 os=ubuntu18.04'
ci: gitlab-ci:
bootstrap: bootstrap:
- name: compiler-pkgs - name: compiler-pkgs
compiler-agnostic: true compiler-agnostic: true
pipeline-gen: mappings:
# similar to the example higher up in this description # mappings similar to the example higher up in this description
... ...
The example above adds a list to the ``definitions`` called ``compiler-pkgs`` The example above adds a list to the ``definitions`` called ``compiler-pkgs``
(you can add any number of these), which lists compiler packages that should (you can add any number of these), which lists compiler packages that should
be staged ahead of the full matrix of release specs (in this example, only be staged ahead of the full matrix of release specs (in this example, only
readline). Then within the ``ci`` section, note the addition of a readline). Then within the ``gitlab-ci`` section, note the addition of a
``bootstrap`` section, which can contain a list of items, each referring to ``bootstrap`` section, which can contain a list of items, each referring to
a list in the ``definitions`` section. These items can either a list in the ``definitions`` section. These items can either
be a dictionary or a string. If you supply a dictionary, it must have a name be a dictionary or a string. If you supply a dictionary, it must have a name
@@ -675,86 +680,6 @@ environment/stack file, and in that case no bootstrapping will be done (only the
specs will be staged for building) and the runners will be expected to already specs will be staged for building) and the runners will be expected to already
have all needed compilers installed and configured for spack to use. have all needed compilers installed and configured for spack to use.
^^^^^^^^^^^^^^^^^^^
Pipeline Buildcache
^^^^^^^^^^^^^^^^^^^
The ``enable-artifacts-buildcache`` key
takes a boolean and determines whether the pipeline uses artifacts to store and
pass along the buildcaches from one stage to the next (the default if you don't
provide this option is ``False``).
^^^^^^^^^^^^^^^^
Broken Specs URL
^^^^^^^^^^^^^^^^
The optional ``broken-specs-url`` key tells Spack to check against a list of
specs that are known to be currently broken in ``develop``. If any such specs
are found, the ``spack ci generate`` command will fail with an error message
informing the user what broken specs were encountered. This allows the pipeline
to fail early and avoid wasting compute resources attempting to build packages
that will not succeed.
^^^^^
CDash
^^^^^
The optional ``cdash`` section provides information that will be used by the
``spack ci generate`` command (invoked by ``spack ci start``) for reporting
to CDash. All the jobs generated from this environment will belong to a
"build group" within CDash that can be tracked over time. As the release
progresses, this build group may have jobs added or removed. The url, project,
and site are used to specify the CDash instance to which build results should
be reported.
Take a look at the
`schema <https://github.com/spack/spack/blob/develop/lib/spack/spack/schema/ci.py>`_
for the ci section of the spack environment file, to see precisely what
syntax is allowed there.
.. _reserved_tags:
^^^^^^^^^^^^^
Reserved Tags
^^^^^^^^^^^^^
Spack has a subset of tags (``public``, ``protected``, and ``notary``) that it reserves
for classifying runners that may require special permissions or access. The tags
``public`` and ``protected`` are used to distinguish between runners that use public
permissions and runners with protected permissions. The ``notary`` tag is a special tag
that is used to indicate runners that have access to the highly protected information
used for signing binaries using the ``signing`` job.
.. _staging_algorithm:
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Summary of ``.gitlab-ci.yml`` generation algorithm
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
All specs yielded by the matrix (or all the specs in the environment) have their
dependencies computed, and the entire resulting set of specs are staged together
before being run through the ``ci/pipeline-gen`` entries, where each staged
spec is assigned a runner. "Staging" is the name given to the process of
figuring out in what order the specs should be built, taking into consideration
Gitlab CI rules about jobs/stages. In the staging process the goal is to maximize
the number of jobs in any stage of the pipeline, while ensuring that the jobs in
any stage only depend on jobs in previous stages (since those jobs are guaranteed
to have completed already). As a runner is determined for a job, the information
in the merged ``any-job*`` and ``build-job*`` sections is used to populate various parts of the job
description that will be used by the target CI pipelines. Once all the jobs have been assigned
a runner, the ``.gitlab-ci.yml`` is written to disk.
The short example provided above would result in the ``readline``, ``ncurses``,
and ``pkgconf`` packages getting staged and built on the runner chosen by the
``spack-k8s`` tag. In this example, spack assumes the runner is a Docker executor
type runner, and thus certain jobs will be run in the ``centos7`` container,
and others in the ``ubuntu-18.04`` container. The resulting ``.gitlab-ci.yml``
will contain 6 jobs in three stages. Once the jobs have been generated, the
presence of a ``SPACK_CDASH_AUTH_TOKEN`` environment variable during the
``spack ci generate`` command would result in all of the jobs being put in a
build group on CDash called "Release Testing" (that group will be created if
it didn't already exist).
------------------------------------- -------------------------------------
Using a custom spack in your pipeline Using a custom spack in your pipeline
------------------------------------- -------------------------------------
@@ -801,9 +726,11 @@ generated by ``spack ci generate``. You also want your generated rebuild jobs
spack: spack:
... ...
ci: gitlab-ci:
pipeline-gen: mappings:
- build-job: - match:
- os=ubuntu18.04
runner-attributes:
tags: tags:
- spack-kube - spack-kube
image: spack/ubuntu-bionic image: spack/ubuntu-bionic
@@ -904,4 +831,3 @@ verify binary packages (when installing or creating buildcaches). You could
also have already trusted a key spack know about, or if no key is present anywhere, also have already trusted a key spack know about, or if no key is present anywhere,
spack will install specs using ``--no-check-signature`` and create buildcaches spack will install specs using ``--no-check-signature`` and create buildcaches
using ``-u`` (for unsigned binaries). using ``-u`` (for unsigned binaries).

View File

@@ -1,4 +1,4 @@
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other .. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
Spack Project Developers. See the top-level COPYRIGHT file for details. Spack Project Developers. See the top-level COPYRIGHT file for details.
SPDX-License-Identifier: (Apache-2.0 OR MIT) SPDX-License-Identifier: (Apache-2.0 OR MIT)

View File

@@ -1,4 +1,4 @@
.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other .. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
Spack Project Developers. See the top-level COPYRIGHT file for details. Spack Project Developers. See the top-level COPYRIGHT file for details.
SPDX-License-Identifier: (Apache-2.0 OR MIT) SPDX-License-Identifier: (Apache-2.0 OR MIT)
@@ -32,16 +32,11 @@ A package repository a directory structured like this::
... ...
The top-level ``repo.yaml`` file contains configuration metadata for the The top-level ``repo.yaml`` file contains configuration metadata for the
repository. The packages subdirectory, typically ``packages``, contains repository, and the ``packages`` directory contains subdirectories for
subdirectories for each package in the repository. Each package directory each package in the repository. Each package directory contains a
contains a ``package.py`` file and any patches or other files needed to build the ``package.py`` file and any patches or other files needed to build the
package. package.
The ``repo.yaml`` file may also contain a ``subdirectory`` key,
which can modify the name of the subdirectory used for packages. As seen above,
the default value is ``packages``. An empty string (``subdirectory: ''``) requires
a flattened repo structure in which the package names are top-level subdirectories.
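For example, a hypothetical ``repo.yaml`` for a flattened repository (the namespace is
illustrative):

.. code-block:: yaml

   repo:
     namespace: myrepo
     subdirectory: ''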
Package repositories allow you to: Package repositories allow you to:
1. Maintain your own packages separately from Spack; 1. Maintain your own packages separately from Spack;
@@ -378,24 +373,6 @@ You can supply a custom namespace with a second argument, e.g.:
repo: repo:
namespace: 'llnl.comp' namespace: 'llnl.comp'
You can also create repositories with custom structure with the ``-d/--subdirectory``
argument, e.g.:
.. code-block:: console
$ spack repo create -d applications myrepo apps
==> Created repo with namespace 'apps'.
==> To register it with Spack, run this command:
spack repo add ~/myrepo
$ ls myrepo
applications/ repo.yaml
$ cat myrepo/repo.yaml
repo:
namespace: apps
subdirectory: applications
^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^
``spack repo add`` ``spack repo add``
^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^

View File

@@ -10,4 +10,3 @@ python-levenshtein
# https://stackoverflow.com/questions/67542699 # https://stackoverflow.com/questions/67542699
docutils <0.17 docutils <0.17
pygments <2.13 pygments <2.13
urllib3 <2

View File

@@ -1,4 +1,4 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other # Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details. # Spack Project Developers. See the top-level COPYRIGHT file for details.
# #
# SPDX-License-Identifier: (Apache-2.0 OR MIT) # SPDX-License-Identifier: (Apache-2.0 OR MIT)

185
lib/spack/env/cc vendored
View File

@@ -1,7 +1,7 @@
#!/bin/sh -f #!/bin/sh -f
# shellcheck disable=SC2034 # evals in this script fool shellcheck # shellcheck disable=SC2034 # evals in this script fool shellcheck
# #
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other # Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details. # Spack Project Developers. See the top-level COPYRIGHT file for details.
# #
# SPDX-License-Identifier: (Apache-2.0 OR MIT) # SPDX-License-Identifier: (Apache-2.0 OR MIT)
@@ -427,62 +427,6 @@ isystem_include_dirs_list=""
libs_list="" libs_list=""
other_args_list="" other_args_list=""
# Global state for keeping track of -Wl,-rpath -Wl,/path
wl_expect_rpath=no
# Same, but for -Xlinker -rpath -Xlinker /path
xlinker_expect_rpath=no
parse_Wl() {
while [ $# -ne 0 ]; do
if [ "$wl_expect_rpath" = yes ]; then
if system_dir "$1"; then
append system_rpath_dirs_list "$1"
else
append rpath_dirs_list "$1"
fi
wl_expect_rpath=no
else
case "$1" in
-rpath=*)
arg="${1#-rpath=}"
if [ -z "$arg" ]; then
shift; continue
elif system_dir "$arg"; then
append system_rpath_dirs_list "$arg"
else
append rpath_dirs_list "$arg"
fi
;;
--rpath=*)
arg="${1#--rpath=}"
if [ -z "$arg" ]; then
shift; continue
elif system_dir "$arg"; then
append system_rpath_dirs_list "$arg"
else
append rpath_dirs_list "$arg"
fi
;;
-rpath|--rpath)
wl_expect_rpath=yes
;;
"$dtags_to_strip")
;;
-Wl)
# Nested -Wl,-Wl means we're in NAG compiler territory, we don't support
# it.
return 1
;;
*)
append other_args_list "-Wl,$1"
;;
esac
fi
shift
done
}
while [ $# -ne 0 ]; do while [ $# -ne 0 ]; do
@@ -582,79 +526,88 @@ while [ $# -ne 0 ]; do
append other_args_list "-l$arg" append other_args_list "-l$arg"
;; ;;
-Wl,*) -Wl,*)
IFS=, arg="${1#-Wl,}"
if ! parse_Wl ${1#-Wl,}; then if [ -z "$arg" ]; then shift; arg="$1"; fi
append other_args_list "$1" case "$arg" in
fi -rpath=*) rp="${arg#-rpath=}" ;;
unset IFS --rpath=*) rp="${arg#--rpath=}" ;;
-rpath,*) rp="${arg#-rpath,}" ;;
--rpath,*) rp="${arg#--rpath,}" ;;
-rpath|--rpath)
shift; arg="$1"
case "$arg" in
-Wl,*)
rp="${arg#-Wl,}"
;;
*)
die "-Wl,-rpath was not followed by -Wl,*"
;;
esac
;;
"$dtags_to_strip")
: # We want to remove explicitly this flag
;;
*)
append other_args_list "-Wl,$arg"
;;
esac
;;
-Xlinker,*)
arg="${1#-Xlinker,}"
if [ -z "$arg" ]; then shift; arg="$1"; fi
case "$arg" in
-rpath=*) rp="${arg#-rpath=}" ;;
--rpath=*) rp="${arg#--rpath=}" ;;
-rpath|--rpath)
shift; arg="$1"
case "$arg" in
-Xlinker,*)
rp="${arg#-Xlinker,}"
;;
*)
die "-Xlinker,-rpath was not followed by -Xlinker,*"
;;
esac
;;
*)
append other_args_list "-Xlinker,$arg"
;;
esac
;; ;;
-Xlinker) -Xlinker)
shift if [ "$2" = "-rpath" ]; then
if [ $# -eq 0 ]; then if [ "$3" != "-Xlinker" ]; then
# -Xlinker without value: let the compiler error about it. die "-Xlinker,-rpath was not followed by -Xlinker,*"
append other_args_list -Xlinker
xlinker_expect_rpath=no
break
elif [ "$xlinker_expect_rpath" = yes ]; then
# Register the path of -Xlinker -rpath <other args> -Xlinker <path>
if system_dir "$1"; then
append system_rpath_dirs_list "$1"
else
append rpath_dirs_list "$1"
fi fi
xlinker_expect_rpath=no shift 3;
rp="$1"
elif [ "$2" = "$dtags_to_strip" ]; then
shift # We want to remove explicitly this flag
else else
case "$1" in append other_args_list "$1"
-rpath=*)
arg="${1#-rpath=}"
if system_dir "$arg"; then
append system_rpath_dirs_list "$arg"
else
append rpath_dirs_list "$arg"
fi fi
;; ;;
--rpath=*)
arg="${1#--rpath=}"
if system_dir "$arg"; then
append system_rpath_dirs_list "$arg"
else
append rpath_dirs_list "$arg"
fi
;;
-rpath|--rpath)
xlinker_expect_rpath=yes
;;
"$dtags_to_strip")
;;
*) *)
append other_args_list -Xlinker if [ "$1" = "$dtags_to_strip" ]; then
: # We want to remove explicitly this flag
else
append other_args_list "$1" append other_args_list "$1"
;;
esac
fi fi
;; ;;
"$dtags_to_strip")
;;
*)
append other_args_list "$1"
;;
esac esac
# test rpaths against system directories in one place.
if [ -n "$rp" ]; then
if system_dir "$rp"; then
append system_rpath_dirs_list "$rp"
else
append rpath_dirs_list "$rp"
fi
fi
shift shift
done done
# We found `-Xlinker -rpath` but no matching value `-Xlinker /path`. Just append
# `-Xlinker -rpath` again and let the compiler or linker handle the error during arg
# parsing.
if [ "$xlinker_expect_rpath" = yes ]; then
append other_args_list -Xlinker
append other_args_list -rpath
fi
# Same, but for -Wl flags.
if [ "$wl_expect_rpath" = yes ]; then
append other_args_list -Wl,-rpath
fi
# #
# Add flags from Spack's cppflags, cflags, cxxflags, fcflags, fflags, and # Add flags from Spack's cppflags, cflags, cxxflags, fcflags, fflags, and
# ldflags. We stick to the order that gmake puts the flags in by default. # ldflags. We stick to the order that gmake puts the flags in by default.
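
The two sides of this hunk differ mainly in how rpath flags are recognized: the state-machine variant (the - side here) consumes each comma-separated -Wl token one at a time and tracks a pending -rpath with wl_expect_rpath/xlinker_expect_rpath, while the other variant (+ side) peeks ahead at $2/$3 and funnels every hit through a single rp check. A rough Python model of the state-machine side, with invented names, just to make the control flow easier to follow:

def parse_wl(args, is_system_dir):
    """Model of parse_Wl above: classify tokens of an already-split -Wl,... argument."""
    rpath_dirs, system_rpath_dirs, other_args = [], [], []
    expect_rpath = False  # corresponds to wl_expect_rpath
    for arg in args:
        if expect_rpath:
            (system_rpath_dirs if is_system_dir(arg) else rpath_dirs).append(arg)
            expect_rpath = False
        elif arg.startswith(("-rpath=", "--rpath=")):
            path = arg.split("=", 1)[1]
            if path:
                (system_rpath_dirs if is_system_dir(path) else rpath_dirs).append(path)
        elif arg in ("-rpath", "--rpath"):
            expect_rpath = True  # the *next* token is the rpath value
        else:
            other_args.append("-Wl," + arg)
    if expect_rpath:
        other_args.append("-Wl,-rpath")  # dangling -rpath: hand it back to the linker
    return rpath_dirs, system_rpath_dirs, other_args

# "-Wl,-rpath,/opt/lib" is split on commas before parsing:
print(parse_wl(["-rpath", "/opt/lib"], lambda p: p.startswith("/usr")))
# -> (['/opt/lib'], [], [])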


@@ -1,4 +1,4 @@
-# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
+# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
@@ -18,7 +18,7 @@
 * Homepage: https://pypi.python.org/pypi/archspec
 * Usage: Labeling, comparison and detection of microarchitectures
-* Version: 0.2.1 (commit 4b1f21802a23b536bbcce73d3c631a566b20e8bd)
+* Version: 0.2.0 (commit e44bad9c7b6defac73696f64078b2fe634719b62)
 
 astunparse
 ----------------
@@ -101,7 +101,10 @@
 * Usage: Used for config files. Ruamel is based on PyYAML but is more
   actively maintained and has more features, including round-tripping
   comments read from config files.
-* Version: 0.17.21
+* Version: 0.11.15 (last version supporting Python 2.6)
+* Note: This package has been slightly modified to improve Python 2.6
+  compatibility -- some ``{}`` format strings were replaced, and the
+  import for ``OrderedDict`` was tweaked.
 
 six
 ---


@@ -1 +0,0 @@
from ruamel import *


@@ -1,57 +0,0 @@
# coding: utf-8
if False: # MYPY
from typing import Dict, Any # NOQA
_package_data = dict(
full_package_name='ruamel.yaml',
version_info=(0, 17, 21),
__version__='0.17.21',
version_timestamp='2022-02-12 09:49:22',
author='Anthon van der Neut',
author_email='a.van.der.neut@ruamel.eu',
description='ruamel.yaml is a YAML parser/emitter that supports roundtrip preservation of comments, seq/map flow style, and map key order', # NOQA
entry_points=None,
since=2014,
extras_require={
':platform_python_implementation=="CPython" and python_version<"3.11"': ['ruamel.yaml.clib>=0.2.6'], # NOQA
'jinja2': ['ruamel.yaml.jinja2>=0.2'],
'docs': ['ryd'],
},
classifiers=[
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
'Programming Language :: Python :: 3.10',
'Programming Language :: Python :: Implementation :: CPython',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Text Processing :: Markup',
'Typing :: Typed',
],
keywords='yaml 1.2 parser round-trip preserve quotes order config',
read_the_docs='yaml',
supported=[(3, 5)], # minimum
tox=dict(
env='*f', # f for 3.5
fl8excl='_test/lib',
),
# universal=True,
python_requires='>=3',
rtfd='yaml',
) # type: Dict[Any, Any]
version_info = _package_data['version_info']
__version__ = _package_data['__version__']
try:
from .cyaml import * # NOQA
__with_libyaml__ = True
except (ImportError, ValueError): # for Jython
__with_libyaml__ = False
from ruamel.yaml.main import * # NOQA


@@ -1,20 +0,0 @@
# coding: utf-8
if False: # MYPY
from typing import Any, Dict, Optional, List, Union, Optional, Iterator # NOQA
anchor_attrib = '_yaml_anchor'
class Anchor:
__slots__ = 'value', 'always_dump'
attrib = anchor_attrib
def __init__(self):
# type: () -> None
self.value = None
self.always_dump = False
def __repr__(self):
# type: () -> Any
ad = ', (always dump)' if self.always_dump else ""
return 'Anchor({!r}{})'.format(self.value, ad)
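
For context on what this removed Anchor attribute supports: in round-trip mode ruamel.yaml keeps YAML anchors and aliases on loaded objects so they survive a dump. A small usage sketch (assuming the usual public YAML API):

from io import StringIO
from ruamel.yaml import YAML

yaml = YAML()  # round-trip mode preserves anchors/aliases
data = yaml.load("a: &base {x: 1}\nb: *base\n")
print(data["a"].anchor.value)  # 'base', stored via the Anchor attribute above

buf = StringIO()
yaml.dump(data, buf)
print(buf.getvalue())  # the &base anchor and *base alias are re-emitted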

File diff suppressed because it is too large


@@ -1,268 +0,0 @@
# coding: utf-8
# partially from package six by Benjamin Peterson
import sys
import os
import io
import traceback
from abc import abstractmethod
import collections.abc
# fmt: off
if False: # MYPY
from typing import Any, Dict, Optional, List, Union, BinaryIO, IO, Text, Tuple # NOQA
from typing import Optional # NOQA
# fmt: on
_DEFAULT_YAML_VERSION = (1, 2)
try:
from collections import OrderedDict
except ImportError:
from ordereddict import OrderedDict # type: ignore
# to get the right name import ... as ordereddict doesn't do that
class ordereddict(OrderedDict): # type: ignore
if not hasattr(OrderedDict, 'insert'):
def insert(self, pos, key, value):
# type: (int, Any, Any) -> None
if pos >= len(self):
self[key] = value
return
od = ordereddict()
od.update(self)
for k in od:
del self[k]
for index, old_key in enumerate(od):
if pos == index:
self[key] = value
self[old_key] = od[old_key]
PY2 = sys.version_info[0] == 2
PY3 = sys.version_info[0] == 3
# replace with f-strings when 3.5 support is dropped
# ft = '42'
# assert _F('abc {ft!r}', ft=ft) == 'abc %r' % ft
# 'abc %r' % ft -> _F('abc {ft!r}' -> f'abc {ft!r}'
def _F(s, *superfluous, **kw):
# type: (Any, Any, Any) -> Any
if superfluous:
raise TypeError
return s.format(**kw)
StringIO = io.StringIO
BytesIO = io.BytesIO
if False: # MYPY
# StreamType = Union[BinaryIO, IO[str], IO[unicode], StringIO]
# StreamType = Union[BinaryIO, IO[str], StringIO] # type: ignore
StreamType = Any
StreamTextType = StreamType # Union[Text, StreamType]
VersionType = Union[List[int], str, Tuple[int, int]]
builtins_module = 'builtins'
def with_metaclass(meta, *bases):
# type: (Any, Any) -> Any
"""Create a base class with a metaclass."""
return meta('NewBase', bases, {})
DBG_TOKEN = 1
DBG_EVENT = 2
DBG_NODE = 4
_debug = None # type: Optional[int]
if 'RUAMELDEBUG' in os.environ:
_debugx = os.environ.get('RUAMELDEBUG')
if _debugx is None:
_debug = 0
else:
_debug = int(_debugx)
if bool(_debug):
class ObjectCounter:
def __init__(self):
# type: () -> None
self.map = {} # type: Dict[Any, Any]
def __call__(self, k):
# type: (Any) -> None
self.map[k] = self.map.get(k, 0) + 1
def dump(self):
# type: () -> None
for k in sorted(self.map):
sys.stdout.write('{} -> {}'.format(k, self.map[k]))
object_counter = ObjectCounter()
# used from yaml util when testing
def dbg(val=None):
# type: (Any) -> Any
global _debug
if _debug is None:
# set to true or false
_debugx = os.environ.get('YAMLDEBUG')
if _debugx is None:
_debug = 0
else:
_debug = int(_debugx)
if val is None:
return _debug
return _debug & val
class Nprint:
def __init__(self, file_name=None):
# type: (Any) -> None
self._max_print = None # type: Any
self._count = None # type: Any
self._file_name = file_name
def __call__(self, *args, **kw):
# type: (Any, Any) -> None
if not bool(_debug):
return
out = sys.stdout if self._file_name is None else open(self._file_name, 'a')
dbgprint = print # to fool checking for print statements by dv utility
kw1 = kw.copy()
kw1['file'] = out
dbgprint(*args, **kw1)
out.flush()
if self._max_print is not None:
if self._count is None:
self._count = self._max_print
self._count -= 1
if self._count == 0:
dbgprint('forced exit\n')
traceback.print_stack()
out.flush()
sys.exit(0)
if self._file_name:
out.close()
def set_max_print(self, i):
# type: (int) -> None
self._max_print = i
self._count = None
def fp(self, mode='a'):
# type: (str) -> Any
out = sys.stdout if self._file_name is None else open(self._file_name, mode)
return out
nprint = Nprint()
nprintf = Nprint('/var/tmp/ruamel.yaml.log')
# char checkers following production rules
def check_namespace_char(ch):
# type: (Any) -> bool
if '\x21' <= ch <= '\x7E': # ! to ~
return True
if '\xA0' <= ch <= '\uD7FF':
return True
if ('\uE000' <= ch <= '\uFFFD') and ch != '\uFEFF': # excl. byte order mark
return True
if '\U00010000' <= ch <= '\U0010FFFF':
return True
return False
def check_anchorname_char(ch):
# type: (Any) -> bool
if ch in ',[]{}':
return False
return check_namespace_char(ch)
def version_tnf(t1, t2=None):
# type: (Any, Any) -> Any
"""
return True if ruamel.yaml version_info < t1, None if t2 is specified and bigger else False
"""
from ruamel.yaml import version_info # NOQA
if version_info < t1:
return True
if t2 is not None and version_info < t2:
return None
return False
class MutableSliceableSequence(collections.abc.MutableSequence): # type: ignore
__slots__ = ()
def __getitem__(self, index):
# type: (Any) -> Any
if not isinstance(index, slice):
return self.__getsingleitem__(index)
return type(self)([self[i] for i in range(*index.indices(len(self)))]) # type: ignore
def __setitem__(self, index, value):
# type: (Any, Any) -> None
if not isinstance(index, slice):
return self.__setsingleitem__(index, value)
assert iter(value)
# nprint(index.start, index.stop, index.step, index.indices(len(self)))
if index.step is None:
del self[index.start : index.stop]
for elem in reversed(value):
self.insert(0 if index.start is None else index.start, elem)
else:
range_parms = index.indices(len(self))
nr_assigned_items = (range_parms[1] - range_parms[0] - 1) // range_parms[2] + 1
# need to test before changing, in case TypeError is caught
if nr_assigned_items < len(value):
raise TypeError(
'too many elements in value {} < {}'.format(nr_assigned_items, len(value))
)
elif nr_assigned_items > len(value):
raise TypeError(
'not enough elements in value {} > {}'.format(
nr_assigned_items, len(value)
)
)
for idx, i in enumerate(range(*range_parms)):
self[i] = value[idx]
def __delitem__(self, index):
# type: (Any) -> None
if not isinstance(index, slice):
return self.__delsingleitem__(index)
# nprint(index.start, index.stop, index.step, index.indices(len(self)))
for i in reversed(range(*index.indices(len(self)))):
del self[i]
@abstractmethod
def __getsingleitem__(self, index):
# type: (Any) -> Any
raise IndexError
@abstractmethod
def __setsingleitem__(self, index, value):
# type: (Any, Any) -> None
raise IndexError
@abstractmethod
def __delsingleitem__(self, index):
# type: (Any) -> None
raise IndexError
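
The MutableSliceableSequence above lets a subclass implement only the three single-item primitives while the base class expands slices into them (extended-slice deletes run right-to-left so indices stay valid). A self-contained toy version of the same pattern, not the vendored class itself:

import collections.abc

class SliceExpandingList(collections.abc.MutableSequence):
    """List-backed sequence that expands slices into single-item operations."""

    def __init__(self, data=()):
        self._data = list(data)

    def __len__(self):
        return len(self._data)

    def insert(self, pos, value):
        self._data.insert(pos, value)

    def __getitem__(self, index):
        if isinstance(index, slice):
            # expand the slice into single-item reads
            return type(self)([self._data[i] for i in range(*index.indices(len(self)))])
        return self._data[index]

    def __setitem__(self, index, value):
        self._data[index] = value  # single items only, for brevity

    def __delitem__(self, index):
        if isinstance(index, slice):
            # delete the highest index first so earlier positions stay valid
            for i in reversed(range(*index.indices(len(self)))):
                del self._data[i]
        else:
            del self._data[index]

s = SliceExpandingList(range(6))
del s[::2]           # extended slice handled item by item
print(list(s))       # [1, 3, 5]
print(list(s[0:2]))  # [1, 3]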


@@ -1,243 +0,0 @@
# coding: utf-8
import warnings
from ruamel.yaml.error import MarkedYAMLError, ReusedAnchorWarning
from ruamel.yaml.compat import _F, nprint, nprintf # NOQA
from ruamel.yaml.events import (
StreamStartEvent,
StreamEndEvent,
MappingStartEvent,
MappingEndEvent,
SequenceStartEvent,
SequenceEndEvent,
AliasEvent,
ScalarEvent,
)
from ruamel.yaml.nodes import MappingNode, ScalarNode, SequenceNode
if False: # MYPY
from typing import Any, Dict, Optional, List # NOQA
__all__ = ['Composer', 'ComposerError']
class ComposerError(MarkedYAMLError):
pass
class Composer:
def __init__(self, loader=None):
# type: (Any) -> None
self.loader = loader
if self.loader is not None and getattr(self.loader, '_composer', None) is None:
self.loader._composer = self
self.anchors = {} # type: Dict[Any, Any]
@property
def parser(self):
# type: () -> Any
if hasattr(self.loader, 'typ'):
self.loader.parser
return self.loader._parser
@property
def resolver(self):
# type: () -> Any
# assert self.loader._resolver is not None
if hasattr(self.loader, 'typ'):
self.loader.resolver
return self.loader._resolver
def check_node(self):
# type: () -> Any
# Drop the STREAM-START event.
if self.parser.check_event(StreamStartEvent):
self.parser.get_event()
# If there are more documents available?
return not self.parser.check_event(StreamEndEvent)
def get_node(self):
# type: () -> Any
# Get the root node of the next document.
if not self.parser.check_event(StreamEndEvent):
return self.compose_document()
def get_single_node(self):
# type: () -> Any
# Drop the STREAM-START event.
self.parser.get_event()
# Compose a document if the stream is not empty.
document = None # type: Any
if not self.parser.check_event(StreamEndEvent):
document = self.compose_document()
# Ensure that the stream contains no more documents.
if not self.parser.check_event(StreamEndEvent):
event = self.parser.get_event()
raise ComposerError(
'expected a single document in the stream',
document.start_mark,
'but found another document',
event.start_mark,
)
# Drop the STREAM-END event.
self.parser.get_event()
return document
def compose_document(self):
# type: (Any) -> Any
# Drop the DOCUMENT-START event.
self.parser.get_event()
# Compose the root node.
node = self.compose_node(None, None)
# Drop the DOCUMENT-END event.
self.parser.get_event()
self.anchors = {}
return node
def return_alias(self, a):
# type: (Any) -> Any
return a
def compose_node(self, parent, index):
# type: (Any, Any) -> Any
if self.parser.check_event(AliasEvent):
event = self.parser.get_event()
alias = event.anchor
if alias not in self.anchors:
raise ComposerError(
None,
None,
_F('found undefined alias {alias!r}', alias=alias),
event.start_mark,
)
return self.return_alias(self.anchors[alias])
event = self.parser.peek_event()
anchor = event.anchor
if anchor is not None: # have an anchor
if anchor in self.anchors:
# raise ComposerError(
# "found duplicate anchor %r; first occurrence"
# % (anchor), self.anchors[anchor].start_mark,
# "second occurrence", event.start_mark)
ws = (
'\nfound duplicate anchor {!r}\nfirst occurrence {}\nsecond occurrence '
'{}'.format((anchor), self.anchors[anchor].start_mark, event.start_mark)
)
warnings.warn(ws, ReusedAnchorWarning)
self.resolver.descend_resolver(parent, index)
if self.parser.check_event(ScalarEvent):
node = self.compose_scalar_node(anchor)
elif self.parser.check_event(SequenceStartEvent):
node = self.compose_sequence_node(anchor)
elif self.parser.check_event(MappingStartEvent):
node = self.compose_mapping_node(anchor)
self.resolver.ascend_resolver()
return node
def compose_scalar_node(self, anchor):
# type: (Any) -> Any
event = self.parser.get_event()
tag = event.tag
if tag is None or tag == '!':
tag = self.resolver.resolve(ScalarNode, event.value, event.implicit)
node = ScalarNode(
tag,
event.value,
event.start_mark,
event.end_mark,
style=event.style,
comment=event.comment,
anchor=anchor,
)
if anchor is not None:
self.anchors[anchor] = node
return node
def compose_sequence_node(self, anchor):
# type: (Any) -> Any
start_event = self.parser.get_event()
tag = start_event.tag
if tag is None or tag == '!':
tag = self.resolver.resolve(SequenceNode, None, start_event.implicit)
node = SequenceNode(
tag,
[],
start_event.start_mark,
None,
flow_style=start_event.flow_style,
comment=start_event.comment,
anchor=anchor,
)
if anchor is not None:
self.anchors[anchor] = node
index = 0
while not self.parser.check_event(SequenceEndEvent):
node.value.append(self.compose_node(node, index))
index += 1
end_event = self.parser.get_event()
if node.flow_style is True and end_event.comment is not None:
if node.comment is not None:
nprint(
'Warning: unexpected end_event commment in sequence '
'node {}'.format(node.flow_style)
)
node.comment = end_event.comment
node.end_mark = end_event.end_mark
self.check_end_doc_comment(end_event, node)
return node
def compose_mapping_node(self, anchor):
# type: (Any) -> Any
start_event = self.parser.get_event()
tag = start_event.tag
if tag is None or tag == '!':
tag = self.resolver.resolve(MappingNode, None, start_event.implicit)
node = MappingNode(
tag,
[],
start_event.start_mark,
None,
flow_style=start_event.flow_style,
comment=start_event.comment,
anchor=anchor,
)
if anchor is not None:
self.anchors[anchor] = node
while not self.parser.check_event(MappingEndEvent):
# key_event = self.parser.peek_event()
item_key = self.compose_node(node, None)
# if item_key in node.value:
# raise ComposerError("while composing a mapping",
# start_event.start_mark,
# "found duplicate key", key_event.start_mark)
item_value = self.compose_node(node, item_key)
# node.value[item_key] = item_value
node.value.append((item_key, item_value))
end_event = self.parser.get_event()
if node.flow_style is True and end_event.comment is not None:
node.comment = end_event.comment
node.end_mark = end_event.end_mark
self.check_end_doc_comment(end_event, node)
return node
def check_end_doc_comment(self, end_event, node):
# type: (Any, Any) -> None
if end_event.comment and end_event.comment[1]:
# pre comments on an end_event, no following to move to
if node.comment is None:
node.comment = [None, None]
assert not isinstance(node, ScalarEvent)
# this is a post comment on a mapping node, add as third element
# in the list
node.comment.append(end_event.comment[1])
end_event.comment[1] = None
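
The Composer removed here is the layer that turns parser events into the node graph, resolving aliases through self.anchors. That layer is still reachable from the public API; a sketch (assuming YAML.compose, which the 0.17 series exposes):

from ruamel.yaml import YAML

root = YAML(typ="safe").compose("a: &x 1\nb: *x\n")  # root of the composed node graph
print(type(root).__name__)  # MappingNode
for key_node, value_node in root.value:
    print(key_node.value, "->", value_node.value, value_node.tag)
# a -> 1 tag:yaml.org,2002:int
# b -> 1 tag:yaml.org,2002:int  (the *x alias returns the anchored node)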


@@ -1,14 +0,0 @@
# coding: utf-8
import warnings
from ruamel.yaml.util import configobj_walker as new_configobj_walker
if False: # MYPY
from typing import Any # NOQA
def configobj_walker(cfg):
# type: (Any) -> Any
warnings.warn('configobj_walker has moved to ruamel.yaml.util, please update your code')
return new_configobj_walker(cfg)

File diff suppressed because it is too large


@@ -1,183 +0,0 @@
# coding: utf-8
from _ruamel_yaml import CParser, CEmitter # type: ignore
from ruamel.yaml.constructor import Constructor, BaseConstructor, SafeConstructor
from ruamel.yaml.representer import Representer, SafeRepresenter, BaseRepresenter
from ruamel.yaml.resolver import Resolver, BaseResolver
if False: # MYPY
from typing import Any, Union, Optional # NOQA
from ruamel.yaml.compat import StreamTextType, StreamType, VersionType # NOQA
__all__ = ['CBaseLoader', 'CSafeLoader', 'CLoader', 'CBaseDumper', 'CSafeDumper', 'CDumper']
# this includes some hacks to solve the usage of resolver by lower level
# parts of the parser
class CBaseLoader(CParser, BaseConstructor, BaseResolver): # type: ignore
def __init__(self, stream, version=None, preserve_quotes=None):
# type: (StreamTextType, Optional[VersionType], Optional[bool]) -> None
CParser.__init__(self, stream)
self._parser = self._composer = self
BaseConstructor.__init__(self, loader=self)
BaseResolver.__init__(self, loadumper=self)
# self.descend_resolver = self._resolver.descend_resolver
# self.ascend_resolver = self._resolver.ascend_resolver
# self.resolve = self._resolver.resolve
class CSafeLoader(CParser, SafeConstructor, Resolver): # type: ignore
def __init__(self, stream, version=None, preserve_quotes=None):
# type: (StreamTextType, Optional[VersionType], Optional[bool]) -> None
CParser.__init__(self, stream)
self._parser = self._composer = self
SafeConstructor.__init__(self, loader=self)
Resolver.__init__(self, loadumper=self)
# self.descend_resolver = self._resolver.descend_resolver
# self.ascend_resolver = self._resolver.ascend_resolver
# self.resolve = self._resolver.resolve
class CLoader(CParser, Constructor, Resolver): # type: ignore
def __init__(self, stream, version=None, preserve_quotes=None):
# type: (StreamTextType, Optional[VersionType], Optional[bool]) -> None
CParser.__init__(self, stream)
self._parser = self._composer = self
Constructor.__init__(self, loader=self)
Resolver.__init__(self, loadumper=self)
# self.descend_resolver = self._resolver.descend_resolver
# self.ascend_resolver = self._resolver.ascend_resolver
# self.resolve = self._resolver.resolve
class CBaseDumper(CEmitter, BaseRepresenter, BaseResolver): # type: ignore
def __init__(
self,
stream,
default_style=None,
default_flow_style=None,
canonical=None,
indent=None,
width=None,
allow_unicode=None,
line_break=None,
encoding=None,
explicit_start=None,
explicit_end=None,
version=None,
tags=None,
block_seq_indent=None,
top_level_colon_align=None,
prefix_colon=None,
):
# type: (StreamType, Any, Any, Any, Optional[bool], Optional[int], Optional[int], Optional[bool], Any, Any, Optional[bool], Optional[bool], Any, Any, Any, Any, Any) -> None # NOQA
CEmitter.__init__(
self,
stream,
canonical=canonical,
indent=indent,
width=width,
encoding=encoding,
allow_unicode=allow_unicode,
line_break=line_break,
explicit_start=explicit_start,
explicit_end=explicit_end,
version=version,
tags=tags,
)
self._emitter = self._serializer = self._representer = self
BaseRepresenter.__init__(
self,
default_style=default_style,
default_flow_style=default_flow_style,
dumper=self,
)
BaseResolver.__init__(self, loadumper=self)
class CSafeDumper(CEmitter, SafeRepresenter, Resolver): # type: ignore
def __init__(
self,
stream,
default_style=None,
default_flow_style=None,
canonical=None,
indent=None,
width=None,
allow_unicode=None,
line_break=None,
encoding=None,
explicit_start=None,
explicit_end=None,
version=None,
tags=None,
block_seq_indent=None,
top_level_colon_align=None,
prefix_colon=None,
):
# type: (StreamType, Any, Any, Any, Optional[bool], Optional[int], Optional[int], Optional[bool], Any, Any, Optional[bool], Optional[bool], Any, Any, Any, Any, Any) -> None # NOQA
self._emitter = self._serializer = self._representer = self
CEmitter.__init__(
self,
stream,
canonical=canonical,
indent=indent,
width=width,
encoding=encoding,
allow_unicode=allow_unicode,
line_break=line_break,
explicit_start=explicit_start,
explicit_end=explicit_end,
version=version,
tags=tags,
)
self._emitter = self._serializer = self._representer = self
SafeRepresenter.__init__(
self, default_style=default_style, default_flow_style=default_flow_style
)
Resolver.__init__(self)
class CDumper(CEmitter, Representer, Resolver): # type: ignore
def __init__(
self,
stream,
default_style=None,
default_flow_style=None,
canonical=None,
indent=None,
width=None,
allow_unicode=None,
line_break=None,
encoding=None,
explicit_start=None,
explicit_end=None,
version=None,
tags=None,
block_seq_indent=None,
top_level_colon_align=None,
prefix_colon=None,
):
# type: (StreamType, Any, Any, Any, Optional[bool], Optional[int], Optional[int], Optional[bool], Any, Any, Optional[bool], Optional[bool], Any, Any, Any, Any, Any) -> None # NOQA
CEmitter.__init__(
self,
stream,
canonical=canonical,
indent=indent,
width=width,
encoding=encoding,
allow_unicode=allow_unicode,
line_break=line_break,
explicit_start=explicit_start,
explicit_end=explicit_end,
version=version,
tags=tags,
)
self._emitter = self._serializer = self._representer = self
Representer.__init__(
self, default_style=default_style, default_flow_style=default_flow_style
)
Resolver.__init__(self)


@@ -1,219 +0,0 @@
# coding: utf-8
from ruamel.yaml.emitter import Emitter
from ruamel.yaml.serializer import Serializer
from ruamel.yaml.representer import (
Representer,
SafeRepresenter,
BaseRepresenter,
RoundTripRepresenter,
)
from ruamel.yaml.resolver import Resolver, BaseResolver, VersionedResolver
if False: # MYPY
from typing import Any, Dict, List, Union, Optional # NOQA
from ruamel.yaml.compat import StreamType, VersionType # NOQA
__all__ = ['BaseDumper', 'SafeDumper', 'Dumper', 'RoundTripDumper']
class BaseDumper(Emitter, Serializer, BaseRepresenter, BaseResolver):
def __init__(
self,
stream,
default_style=None,
default_flow_style=None,
canonical=None,
indent=None,
width=None,
allow_unicode=None,
line_break=None,
encoding=None,
explicit_start=None,
explicit_end=None,
version=None,
tags=None,
block_seq_indent=None,
top_level_colon_align=None,
prefix_colon=None,
):
# type: (Any, StreamType, Any, Any, Optional[bool], Optional[int], Optional[int], Optional[bool], Any, Any, Optional[bool], Optional[bool], Any, Any, Any, Any, Any) -> None # NOQA
Emitter.__init__(
self,
stream,
canonical=canonical,
indent=indent,
width=width,
allow_unicode=allow_unicode,
line_break=line_break,
block_seq_indent=block_seq_indent,
dumper=self,
)
Serializer.__init__(
self,
encoding=encoding,
explicit_start=explicit_start,
explicit_end=explicit_end,
version=version,
tags=tags,
dumper=self,
)
BaseRepresenter.__init__(
self,
default_style=default_style,
default_flow_style=default_flow_style,
dumper=self,
)
BaseResolver.__init__(self, loadumper=self)
class SafeDumper(Emitter, Serializer, SafeRepresenter, Resolver):
def __init__(
self,
stream,
default_style=None,
default_flow_style=None,
canonical=None,
indent=None,
width=None,
allow_unicode=None,
line_break=None,
encoding=None,
explicit_start=None,
explicit_end=None,
version=None,
tags=None,
block_seq_indent=None,
top_level_colon_align=None,
prefix_colon=None,
):
# type: (StreamType, Any, Any, Optional[bool], Optional[int], Optional[int], Optional[bool], Any, Any, Optional[bool], Optional[bool], Any, Any, Any, Any, Any) -> None # NOQA
Emitter.__init__(
self,
stream,
canonical=canonical,
indent=indent,
width=width,
allow_unicode=allow_unicode,
line_break=line_break,
block_seq_indent=block_seq_indent,
dumper=self,
)
Serializer.__init__(
self,
encoding=encoding,
explicit_start=explicit_start,
explicit_end=explicit_end,
version=version,
tags=tags,
dumper=self,
)
SafeRepresenter.__init__(
self,
default_style=default_style,
default_flow_style=default_flow_style,
dumper=self,
)
Resolver.__init__(self, loadumper=self)
class Dumper(Emitter, Serializer, Representer, Resolver):
def __init__(
self,
stream,
default_style=None,
default_flow_style=None,
canonical=None,
indent=None,
width=None,
allow_unicode=None,
line_break=None,
encoding=None,
explicit_start=None,
explicit_end=None,
version=None,
tags=None,
block_seq_indent=None,
top_level_colon_align=None,
prefix_colon=None,
):
# type: (StreamType, Any, Any, Optional[bool], Optional[int], Optional[int], Optional[bool], Any, Any, Optional[bool], Optional[bool], Any, Any, Any, Any, Any) -> None # NOQA
Emitter.__init__(
self,
stream,
canonical=canonical,
indent=indent,
width=width,
allow_unicode=allow_unicode,
line_break=line_break,
block_seq_indent=block_seq_indent,
dumper=self,
)
Serializer.__init__(
self,
encoding=encoding,
explicit_start=explicit_start,
explicit_end=explicit_end,
version=version,
tags=tags,
dumper=self,
)
Representer.__init__(
self,
default_style=default_style,
default_flow_style=default_flow_style,
dumper=self,
)
Resolver.__init__(self, loadumper=self)
class RoundTripDumper(Emitter, Serializer, RoundTripRepresenter, VersionedResolver):
def __init__(
self,
stream,
default_style=None,
default_flow_style=None,
canonical=None,
indent=None,
width=None,
allow_unicode=None,
line_break=None,
encoding=None,
explicit_start=None,
explicit_end=None,
version=None,
tags=None,
block_seq_indent=None,
top_level_colon_align=None,
prefix_colon=None,
):
# type: (StreamType, Any, Optional[bool], Optional[int], Optional[int], Optional[int], Optional[bool], Any, Any, Optional[bool], Optional[bool], Any, Any, Any, Any, Any) -> None # NOQA
Emitter.__init__(
self,
stream,
canonical=canonical,
indent=indent,
width=width,
allow_unicode=allow_unicode,
line_break=line_break,
block_seq_indent=block_seq_indent,
top_level_colon_align=top_level_colon_align,
prefix_colon=prefix_colon,
dumper=self,
)
Serializer.__init__(
self,
encoding=encoding,
explicit_start=explicit_start,
explicit_end=explicit_end,
version=version,
tags=tags,
dumper=self,
)
RoundTripRepresenter.__init__(
self,
default_style=default_style,
default_flow_style=default_flow_style,
dumper=self,
)
VersionedResolver.__init__(self, loader=self)

File diff suppressed because it is too large


@@ -1,332 +0,0 @@
# coding: utf-8
import warnings
import textwrap
from ruamel.yaml.compat import _F
if False: # MYPY
from typing import Any, Dict, Optional, List, Text # NOQA
__all__ = [
'FileMark',
'StringMark',
'CommentMark',
'YAMLError',
'MarkedYAMLError',
'ReusedAnchorWarning',
'UnsafeLoaderWarning',
'MarkedYAMLWarning',
'MarkedYAMLFutureWarning',
]
class StreamMark:
__slots__ = 'name', 'index', 'line', 'column'
def __init__(self, name, index, line, column):
# type: (Any, int, int, int) -> None
self.name = name
self.index = index
self.line = line
self.column = column
def __str__(self):
# type: () -> Any
where = _F(
' in "{sname!s}", line {sline1:d}, column {scolumn1:d}',
sname=self.name,
sline1=self.line + 1,
scolumn1=self.column + 1,
)
return where
def __eq__(self, other):
# type: (Any) -> bool
if self.line != other.line or self.column != other.column:
return False
if self.name != other.name or self.index != other.index:
return False
return True
def __ne__(self, other):
# type: (Any) -> bool
return not self.__eq__(other)
class FileMark(StreamMark):
__slots__ = ()
class StringMark(StreamMark):
__slots__ = 'name', 'index', 'line', 'column', 'buffer', 'pointer'
def __init__(self, name, index, line, column, buffer, pointer):
# type: (Any, int, int, int, Any, Any) -> None
StreamMark.__init__(self, name, index, line, column)
self.buffer = buffer
self.pointer = pointer
def get_snippet(self, indent=4, max_length=75):
# type: (int, int) -> Any
if self.buffer is None: # always False
return None
head = ""
start = self.pointer
while start > 0 and self.buffer[start - 1] not in '\0\r\n\x85\u2028\u2029':
start -= 1
if self.pointer - start > max_length / 2 - 1:
head = ' ... '
start += 5
break
tail = ""
end = self.pointer
while end < len(self.buffer) and self.buffer[end] not in '\0\r\n\x85\u2028\u2029':
end += 1
if end - self.pointer > max_length / 2 - 1:
tail = ' ... '
end -= 5
break
snippet = self.buffer[start:end]
caret = '^'
caret = '^ (line: {})'.format(self.line + 1)
return (
' ' * indent
+ head
+ snippet
+ tail
+ '\n'
+ ' ' * (indent + self.pointer - start + len(head))
+ caret
)
def __str__(self):
# type: () -> Any
snippet = self.get_snippet()
where = _F(
' in "{sname!s}", line {sline1:d}, column {scolumn1:d}',
sname=self.name,
sline1=self.line + 1,
scolumn1=self.column + 1,
)
if snippet is not None:
where += ':\n' + snippet
return where
def __repr__(self):
# type: () -> Any
snippet = self.get_snippet()
where = _F(
' in "{sname!s}", line {sline1:d}, column {scolumn1:d}',
sname=self.name,
sline1=self.line + 1,
scolumn1=self.column + 1,
)
if snippet is not None:
where += ':\n' + snippet
return where
class CommentMark:
__slots__ = ('column',)
def __init__(self, column):
# type: (Any) -> None
self.column = column
class YAMLError(Exception):
pass
class MarkedYAMLError(YAMLError):
def __init__(
self,
context=None,
context_mark=None,
problem=None,
problem_mark=None,
note=None,
warn=None,
):
# type: (Any, Any, Any, Any, Any, Any) -> None
self.context = context
self.context_mark = context_mark
self.problem = problem
self.problem_mark = problem_mark
self.note = note
# warn is ignored
def __str__(self):
# type: () -> Any
lines = [] # type: List[str]
if self.context is not None:
lines.append(self.context)
if self.context_mark is not None and (
self.problem is None
or self.problem_mark is None
or self.context_mark.name != self.problem_mark.name
or self.context_mark.line != self.problem_mark.line
or self.context_mark.column != self.problem_mark.column
):
lines.append(str(self.context_mark))
if self.problem is not None:
lines.append(self.problem)
if self.problem_mark is not None:
lines.append(str(self.problem_mark))
if self.note is not None and self.note:
note = textwrap.dedent(self.note)
lines.append(note)
return '\n'.join(lines)
class YAMLStreamError(Exception):
pass
class YAMLWarning(Warning):
pass
class MarkedYAMLWarning(YAMLWarning):
def __init__(
self,
context=None,
context_mark=None,
problem=None,
problem_mark=None,
note=None,
warn=None,
):
# type: (Any, Any, Any, Any, Any, Any) -> None
self.context = context
self.context_mark = context_mark
self.problem = problem
self.problem_mark = problem_mark
self.note = note
self.warn = warn
def __str__(self):
# type: () -> Any
lines = [] # type: List[str]
if self.context is not None:
lines.append(self.context)
if self.context_mark is not None and (
self.problem is None
or self.problem_mark is None
or self.context_mark.name != self.problem_mark.name
or self.context_mark.line != self.problem_mark.line
or self.context_mark.column != self.problem_mark.column
):
lines.append(str(self.context_mark))
if self.problem is not None:
lines.append(self.problem)
if self.problem_mark is not None:
lines.append(str(self.problem_mark))
if self.note is not None and self.note:
note = textwrap.dedent(self.note)
lines.append(note)
if self.warn is not None and self.warn:
warn = textwrap.dedent(self.warn)
lines.append(warn)
return '\n'.join(lines)
class ReusedAnchorWarning(YAMLWarning):
pass
class UnsafeLoaderWarning(YAMLWarning):
text = """
The default 'Loader' for 'load(stream)' without further arguments can be unsafe.
Use 'load(stream, Loader=ruamel.yaml.Loader)' explicitly if that is OK.
Alternatively include the following in your code:
import warnings
warnings.simplefilter('ignore', ruamel.yaml.error.UnsafeLoaderWarning)
In most other cases you should consider using 'safe_load(stream)'"""
pass
warnings.simplefilter('once', UnsafeLoaderWarning)
class MantissaNoDotYAML1_1Warning(YAMLWarning):
def __init__(self, node, flt_str):
# type: (Any, Any) -> None
self.node = node
self.flt = flt_str
def __str__(self):
# type: () -> Any
line = self.node.start_mark.line
col = self.node.start_mark.column
return """
In YAML 1.1 floating point values should have a dot ('.') in their mantissa.
See the Floating-Point Language-Independent Type for YAML Version 1.1 specification
( http://yaml.org/type/float.html ). This dot is not required for JSON nor for YAML 1.2
Correct your float: "{}" on line: {}, column: {}
or alternatively include the following in your code:
import warnings
warnings.simplefilter('ignore', ruamel.yaml.error.MantissaNoDotYAML1_1Warning)
""".format(
self.flt, line, col
)
warnings.simplefilter('once', MantissaNoDotYAML1_1Warning)
class YAMLFutureWarning(Warning):
pass
class MarkedYAMLFutureWarning(YAMLFutureWarning):
def __init__(
self,
context=None,
context_mark=None,
problem=None,
problem_mark=None,
note=None,
warn=None,
):
# type: (Any, Any, Any, Any, Any, Any) -> None
self.context = context
self.context_mark = context_mark
self.problem = problem
self.problem_mark = problem_mark
self.note = note
self.warn = warn
def __str__(self):
# type: () -> Any
lines = [] # type: List[str]
if self.context is not None:
lines.append(self.context)
if self.context_mark is not None and (
self.problem is None
or self.problem_mark is None
or self.context_mark.name != self.problem_mark.name
or self.context_mark.line != self.problem_mark.line
or self.context_mark.column != self.problem_mark.column
):
lines.append(str(self.context_mark))
if self.problem is not None:
lines.append(self.problem)
if self.problem_mark is not None:
lines.append(str(self.problem_mark))
if self.note is not None and self.note:
note = textwrap.dedent(self.note)
lines.append(note)
if self.warn is not None and self.warn:
warn = textwrap.dedent(self.warn)
lines.append(warn)
return '\n'.join(lines)
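
These mark and error classes are what give ruamel diagnostics their "line X, column Y" text plus the caret snippet built by StringMark.get_snippet. An illustration with deliberately malformed YAML (output abridged and indicative only):

from ruamel.yaml import YAML
from ruamel.yaml.error import MarkedYAMLError

try:
    YAML(typ="safe").load("a: [1, 2\nb: 3\n")  # unclosed flow sequence
except MarkedYAMLError as exc:
    print(exc)
# e.g.: while parsing a flow sequence
#         in "<unicode string>", line 1, column 4
#       ...
#         in "<unicode string>", line 2, column 1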


@@ -1,196 +0,0 @@
# coding: utf-8
from ruamel.yaml.compat import _F
# Abstract classes.
if False: # MYPY
from typing import Any, Dict, Optional, List # NOQA
SHOW_LINES = False
def CommentCheck():
# type: () -> None
pass
class Event:
__slots__ = 'start_mark', 'end_mark', 'comment'
def __init__(self, start_mark=None, end_mark=None, comment=CommentCheck):
# type: (Any, Any, Any) -> None
self.start_mark = start_mark
self.end_mark = end_mark
# assert comment is not CommentCheck
if comment is CommentCheck:
comment = None
self.comment = comment
def __repr__(self):
# type: () -> Any
if True:
arguments = []
if hasattr(self, 'value'):
# if you use repr(getattr(self, 'value')) then flake8 complains about
# abuse of getattr with a constant. When you change to self.value
# then mypy throws an error
arguments.append(repr(self.value)) # type: ignore
for key in ['anchor', 'tag', 'implicit', 'flow_style', 'style']:
v = getattr(self, key, None)
if v is not None:
arguments.append(_F('{key!s}={v!r}', key=key, v=v))
if self.comment not in [None, CommentCheck]:
arguments.append('comment={!r}'.format(self.comment))
if SHOW_LINES:
arguments.append(
'({}:{}/{}:{})'.format(
self.start_mark.line,
self.start_mark.column,
self.end_mark.line,
self.end_mark.column,
)
)
arguments = ', '.join(arguments) # type: ignore
else:
attributes = [
key
for key in ['anchor', 'tag', 'implicit', 'value', 'flow_style', 'style']
if hasattr(self, key)
]
arguments = ', '.join(
[_F('{k!s}={attr!r}', k=key, attr=getattr(self, key)) for key in attributes]
)
if self.comment not in [None, CommentCheck]:
arguments += ', comment={!r}'.format(self.comment)
return _F(
'{self_class_name!s}({arguments!s})',
self_class_name=self.__class__.__name__,
arguments=arguments,
)
class NodeEvent(Event):
__slots__ = ('anchor',)
def __init__(self, anchor, start_mark=None, end_mark=None, comment=None):
# type: (Any, Any, Any, Any) -> None
Event.__init__(self, start_mark, end_mark, comment)
self.anchor = anchor
class CollectionStartEvent(NodeEvent):
__slots__ = 'tag', 'implicit', 'flow_style', 'nr_items'
def __init__(
self,
anchor,
tag,
implicit,
start_mark=None,
end_mark=None,
flow_style=None,
comment=None,
nr_items=None,
):
# type: (Any, Any, Any, Any, Any, Any, Any, Optional[int]) -> None
NodeEvent.__init__(self, anchor, start_mark, end_mark, comment)
self.tag = tag
self.implicit = implicit
self.flow_style = flow_style
self.nr_items = nr_items
class CollectionEndEvent(Event):
__slots__ = ()
# Implementations.
class StreamStartEvent(Event):
__slots__ = ('encoding',)
def __init__(self, start_mark=None, end_mark=None, encoding=None, comment=None):
# type: (Any, Any, Any, Any) -> None
Event.__init__(self, start_mark, end_mark, comment)
self.encoding = encoding
class StreamEndEvent(Event):
__slots__ = ()
class DocumentStartEvent(Event):
__slots__ = 'explicit', 'version', 'tags'
def __init__(
self,
start_mark=None,
end_mark=None,
explicit=None,
version=None,
tags=None,
comment=None,
):
# type: (Any, Any, Any, Any, Any, Any) -> None
Event.__init__(self, start_mark, end_mark, comment)
self.explicit = explicit
self.version = version
self.tags = tags
class DocumentEndEvent(Event):
__slots__ = ('explicit',)
def __init__(self, start_mark=None, end_mark=None, explicit=None, comment=None):
# type: (Any, Any, Any, Any) -> None
Event.__init__(self, start_mark, end_mark, comment)
self.explicit = explicit
class AliasEvent(NodeEvent):
__slots__ = 'style'
def __init__(self, anchor, start_mark=None, end_mark=None, style=None, comment=None):
# type: (Any, Any, Any, Any, Any) -> None
NodeEvent.__init__(self, anchor, start_mark, end_mark, comment)
self.style = style
class ScalarEvent(NodeEvent):
__slots__ = 'tag', 'implicit', 'value', 'style'
def __init__(
self,
anchor,
tag,
implicit,
value,
start_mark=None,
end_mark=None,
style=None,
comment=None,
):
# type: (Any, Any, Any, Any, Any, Any, Any, Any) -> None
NodeEvent.__init__(self, anchor, start_mark, end_mark, comment)
self.tag = tag
self.implicit = implicit
self.value = value
self.style = style
class SequenceStartEvent(CollectionStartEvent):
__slots__ = ()
class SequenceEndEvent(CollectionEndEvent):
__slots__ = ()
class MappingStartEvent(CollectionStartEvent):
__slots__ = ()
class MappingEndEvent(CollectionEndEvent):
__slots__ = ()
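
These event classes are the wire format between the scanner/parser and the composer or emitter. One way to watch the stream a document produces (a sketch; YAML.parse yields events in current ruamel releases):

from ruamel.yaml import YAML

for event in YAML(typ="safe").parse("a: 1\n"):
    print(event)
# Roughly: StreamStartEvent(), DocumentStartEvent(),
# MappingStartEvent(anchor=None, tag=None, implicit=True),
# ScalarEvent(value='a', ...), ScalarEvent(value='1', ...),
# MappingEndEvent(), DocumentEndEvent(), StreamEndEvent()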


@@ -1,75 +0,0 @@
# coding: utf-8
from ruamel.yaml.reader import Reader
from ruamel.yaml.scanner import Scanner, RoundTripScanner
from ruamel.yaml.parser import Parser, RoundTripParser
from ruamel.yaml.composer import Composer
from ruamel.yaml.constructor import (
BaseConstructor,
SafeConstructor,
Constructor,
RoundTripConstructor,
)
from ruamel.yaml.resolver import VersionedResolver
if False: # MYPY
from typing import Any, Dict, List, Union, Optional # NOQA
from ruamel.yaml.compat import StreamTextType, VersionType # NOQA
__all__ = ['BaseLoader', 'SafeLoader', 'Loader', 'RoundTripLoader']
class BaseLoader(Reader, Scanner, Parser, Composer, BaseConstructor, VersionedResolver):
def __init__(self, stream, version=None, preserve_quotes=None):
# type: (StreamTextType, Optional[VersionType], Optional[bool]) -> None
self.comment_handling = None
Reader.__init__(self, stream, loader=self)
Scanner.__init__(self, loader=self)
Parser.__init__(self, loader=self)
Composer.__init__(self, loader=self)
BaseConstructor.__init__(self, loader=self)
VersionedResolver.__init__(self, version, loader=self)
class SafeLoader(Reader, Scanner, Parser, Composer, SafeConstructor, VersionedResolver):
def __init__(self, stream, version=None, preserve_quotes=None):
# type: (StreamTextType, Optional[VersionType], Optional[bool]) -> None
self.comment_handling = None
Reader.__init__(self, stream, loader=self)
Scanner.__init__(self, loader=self)
Parser.__init__(self, loader=self)
Composer.__init__(self, loader=self)
SafeConstructor.__init__(self, loader=self)
VersionedResolver.__init__(self, version, loader=self)
class Loader(Reader, Scanner, Parser, Composer, Constructor, VersionedResolver):
def __init__(self, stream, version=None, preserve_quotes=None):
# type: (StreamTextType, Optional[VersionType], Optional[bool]) -> None
self.comment_handling = None
Reader.__init__(self, stream, loader=self)
Scanner.__init__(self, loader=self)
Parser.__init__(self, loader=self)
Composer.__init__(self, loader=self)
Constructor.__init__(self, loader=self)
VersionedResolver.__init__(self, version, loader=self)
class RoundTripLoader(
Reader,
RoundTripScanner,
RoundTripParser,
Composer,
RoundTripConstructor,
VersionedResolver,
):
def __init__(self, stream, version=None, preserve_quotes=None):
# type: (StreamTextType, Optional[VersionType], Optional[bool]) -> None
# self.reader = Reader.__init__(self, stream)
self.comment_handling = None # issue 385
Reader.__init__(self, stream, loader=self)
RoundTripScanner.__init__(self, loader=self)
RoundTripParser.__init__(self, loader=self)
Composer.__init__(self, loader=self)
RoundTripConstructor.__init__(self, preserve_quotes=preserve_quotes, loader=self)
VersionedResolver.__init__(self, version, loader=self)
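
Each loader here is only a mixin stack (Reader + Scanner + Parser + Composer + Constructor + Resolver), and the typ argument of the public YAML class selects which stack runs. A sketch of the observable difference (type names are illustrative of current ruamel behavior):

from ruamel.yaml import YAML

doc = "a: 1  # keep this comment\n"

rt_data = YAML().load(doc)              # typ='rt': RoundTrip* classes
safe_data = YAML(typ="safe").load(doc)  # SafeConstructor-based stack

print(type(rt_data).__name__)    # CommentedMap: comments and key order preserved
print(type(safe_data).__name__)  # plain mapping: the comment is dropped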

File diff suppressed because it is too large


@@ -1,135 +0,0 @@
# coding: utf-8
import sys
from ruamel.yaml.compat import _F
if False: # MYPY
from typing import Dict, Any, Text # NOQA
class Node:
__slots__ = 'tag', 'value', 'start_mark', 'end_mark', 'comment', 'anchor'
def __init__(self, tag, value, start_mark, end_mark, comment=None, anchor=None):
# type: (Any, Any, Any, Any, Any, Any) -> None
self.tag = tag
self.value = value
self.start_mark = start_mark
self.end_mark = end_mark
self.comment = comment
self.anchor = anchor
def __repr__(self):
# type: () -> Any
value = self.value
# if isinstance(value, list):
# if len(value) == 0:
# value = '<empty>'
# elif len(value) == 1:
# value = '<1 item>'
# else:
# value = f'<{len(value)} items>'
# else:
# if len(value) > 75:
# value = repr(value[:70]+' ... ')
# else:
# value = repr(value)
value = repr(value)
return _F(
'{class_name!s}(tag={self_tag!r}, value={value!s})',
class_name=self.__class__.__name__,
self_tag=self.tag,
value=value,
)
def dump(self, indent=0):
# type: (int) -> None
if isinstance(self.value, str):
sys.stdout.write(
'{}{}(tag={!r}, value={!r})\n'.format(
' ' * indent, self.__class__.__name__, self.tag, self.value
)
)
if self.comment:
sys.stdout.write(' {}comment: {})\n'.format(' ' * indent, self.comment))
return
sys.stdout.write(
'{}{}(tag={!r})\n'.format(' ' * indent, self.__class__.__name__, self.tag)
)
if self.comment:
sys.stdout.write(' {}comment: {})\n'.format(' ' * indent, self.comment))
for v in self.value:
if isinstance(v, tuple):
for v1 in v:
v1.dump(indent + 1)
elif isinstance(v, Node):
v.dump(indent + 1)
else:
sys.stdout.write('Node value type? {}\n'.format(type(v)))
class ScalarNode(Node):
"""
styles:
? -> set() ? key, no value
" -> double quoted
' -> single quoted
| -> literal style
> -> folding style
"""
__slots__ = ('style',)
id = 'scalar'
def __init__(
self, tag, value, start_mark=None, end_mark=None, style=None, comment=None, anchor=None
):
# type: (Any, Any, Any, Any, Any, Any, Any) -> None
Node.__init__(self, tag, value, start_mark, end_mark, comment=comment, anchor=anchor)
self.style = style
class CollectionNode(Node):
__slots__ = ('flow_style',)
def __init__(
self,
tag,
value,
start_mark=None,
end_mark=None,
flow_style=None,
comment=None,
anchor=None,
):
# type: (Any, Any, Any, Any, Any, Any, Any) -> None
Node.__init__(self, tag, value, start_mark, end_mark, comment=comment)
self.flow_style = flow_style
self.anchor = anchor
class SequenceNode(CollectionNode):
__slots__ = ()
id = 'sequence'
class MappingNode(CollectionNode):
__slots__ = ('merge',)
id = 'mapping'
def __init__(
self,
tag,
value,
start_mark=None,
end_mark=None,
flow_style=None,
comment=None,
anchor=None,
):
# type: (Any, Any, Any, Any, Any, Any, Any) -> None
CollectionNode.__init__(
self, tag, value, start_mark, end_mark, flow_style, comment, anchor
)
self.merge = None


@@ -1,884 +0,0 @@
# coding: utf-8
# The following YAML grammar is LL(1) and is parsed by a recursive descent
# parser.
#
# stream ::= STREAM-START implicit_document? explicit_document*
# STREAM-END
# implicit_document ::= block_node DOCUMENT-END*
# explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END*
# block_node_or_indentless_sequence ::=
# ALIAS
# | properties (block_content |
# indentless_block_sequence)?
# | block_content
# | indentless_block_sequence
# block_node ::= ALIAS
# | properties block_content?
# | block_content
# flow_node ::= ALIAS
# | properties flow_content?
# | flow_content
# properties ::= TAG ANCHOR? | ANCHOR TAG?
# block_content ::= block_collection | flow_collection | SCALAR
# flow_content ::= flow_collection | SCALAR
# block_collection ::= block_sequence | block_mapping
# flow_collection ::= flow_sequence | flow_mapping
# block_sequence ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)*
# BLOCK-END
# indentless_sequence ::= (BLOCK-ENTRY block_node?)+
# block_mapping ::= BLOCK-MAPPING_START
# ((KEY block_node_or_indentless_sequence?)?
# (VALUE block_node_or_indentless_sequence?)?)*
# BLOCK-END
# flow_sequence ::= FLOW-SEQUENCE-START
# (flow_sequence_entry FLOW-ENTRY)*
# flow_sequence_entry?
# FLOW-SEQUENCE-END
# flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
# flow_mapping ::= FLOW-MAPPING-START
# (flow_mapping_entry FLOW-ENTRY)*
# flow_mapping_entry?
# FLOW-MAPPING-END
# flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
#
# FIRST sets:
#
# stream: { STREAM-START <}
# explicit_document: { DIRECTIVE DOCUMENT-START }
# implicit_document: FIRST(block_node)
# block_node: { ALIAS TAG ANCHOR SCALAR BLOCK-SEQUENCE-START
# BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START }
# flow_node: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START }
# block_content: { BLOCK-SEQUENCE-START BLOCK-MAPPING-START
# FLOW-SEQUENCE-START FLOW-MAPPING-START SCALAR }
# flow_content: { FLOW-SEQUENCE-START FLOW-MAPPING-START SCALAR }
# block_collection: { BLOCK-SEQUENCE-START BLOCK-MAPPING-START }
# flow_collection: { FLOW-SEQUENCE-START FLOW-MAPPING-START }
# block_sequence: { BLOCK-SEQUENCE-START }
# block_mapping: { BLOCK-MAPPING-START }
# block_node_or_indentless_sequence: { ALIAS ANCHOR TAG SCALAR
# BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START
# FLOW-MAPPING-START BLOCK-ENTRY }
# indentless_sequence: { ENTRY }
# flow_collection: { FLOW-SEQUENCE-START FLOW-MAPPING-START }
# flow_sequence: { FLOW-SEQUENCE-START }
# flow_mapping: { FLOW-MAPPING-START }
# flow_sequence_entry: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START
# FLOW-MAPPING-START KEY }
# flow_mapping_entry: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START
# FLOW-MAPPING-START KEY }
# need to have full path with import, as pkg_resources tries to load parser.py in __init__.py
# only to not do anything with the package afterwards
# and for Jython too
from ruamel.yaml.error import MarkedYAMLError
from ruamel.yaml.tokens import * # NOQA
from ruamel.yaml.events import * # NOQA
from ruamel.yaml.scanner import Scanner, RoundTripScanner, ScannerError # NOQA
from ruamel.yaml.scanner import BlankLineComment
from ruamel.yaml.comments import C_PRE, C_POST, C_SPLIT_ON_FIRST_BLANK
from ruamel.yaml.compat import _F, nprint, nprintf # NOQA
if False: # MYPY
from typing import Any, Dict, Optional, List, Optional # NOQA
__all__ = ['Parser', 'RoundTripParser', 'ParserError']
def xprintf(*args, **kw):
# type: (Any, Any) -> Any
return nprintf(*args, **kw)
pass
class ParserError(MarkedYAMLError):
pass
class Parser:
# Since writing a recursive-descendant parser is a straightforward task, we
# do not give many comments here.
DEFAULT_TAGS = {'!': '!', '!!': 'tag:yaml.org,2002:'}
def __init__(self, loader):
# type: (Any) -> None
self.loader = loader
if self.loader is not None and getattr(self.loader, '_parser', None) is None:
self.loader._parser = self
self.reset_parser()
def reset_parser(self):
# type: () -> None
# Reset the state attributes (to clear self-references)
self.current_event = self.last_event = None
self.tag_handles = {} # type: Dict[Any, Any]
self.states = [] # type: List[Any]
self.marks = [] # type: List[Any]
self.state = self.parse_stream_start # type: Any
def dispose(self):
# type: () -> None
self.reset_parser()
@property
def scanner(self):
# type: () -> Any
if hasattr(self.loader, 'typ'):
return self.loader.scanner
return self.loader._scanner
@property
def resolver(self):
# type: () -> Any
if hasattr(self.loader, 'typ'):
return self.loader.resolver
return self.loader._resolver
def check_event(self, *choices):
# type: (Any) -> bool
# Check the type of the next event.
if self.current_event is None:
if self.state:
self.current_event = self.state()
if self.current_event is not None:
if not choices:
return True
for choice in choices:
if isinstance(self.current_event, choice):
return True
return False
def peek_event(self):
# type: () -> Any
# Get the next event.
if self.current_event is None:
if self.state:
self.current_event = self.state()
return self.current_event
def get_event(self):
# type: () -> Any
# Get the next event and proceed further.
if self.current_event is None:
if self.state:
self.current_event = self.state()
# assert self.current_event is not None
# if self.current_event.end_mark.line != self.peek_event().start_mark.line:
xprintf('get_event', repr(self.current_event), self.peek_event().start_mark.line)
self.last_event = value = self.current_event
self.current_event = None
return value
# stream ::= STREAM-START implicit_document? explicit_document*
# STREAM-END
# implicit_document ::= block_node DOCUMENT-END*
# explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END*
def parse_stream_start(self):
# type: () -> Any
# Parse the stream start.
token = self.scanner.get_token()
self.move_token_comment(token)
event = StreamStartEvent(token.start_mark, token.end_mark, encoding=token.encoding)
# Prepare the next state.
self.state = self.parse_implicit_document_start
return event
def parse_implicit_document_start(self):
# type: () -> Any
# Parse an implicit document.
if not self.scanner.check_token(DirectiveToken, DocumentStartToken, StreamEndToken):
self.tag_handles = self.DEFAULT_TAGS
token = self.scanner.peek_token()
start_mark = end_mark = token.start_mark
event = DocumentStartEvent(start_mark, end_mark, explicit=False)
# Prepare the next state.
self.states.append(self.parse_document_end)
self.state = self.parse_block_node
return event
else:
return self.parse_document_start()
def parse_document_start(self):
# type: () -> Any
# Parse any extra document end indicators.
while self.scanner.check_token(DocumentEndToken):
self.scanner.get_token()
# Parse an explicit document.
if not self.scanner.check_token(StreamEndToken):
version, tags = self.process_directives()
if not self.scanner.check_token(DocumentStartToken):
raise ParserError(
None,
None,
_F(
"expected '<document start>', but found {pt!r}",
pt=self.scanner.peek_token().id,
),
self.scanner.peek_token().start_mark,
)
token = self.scanner.get_token()
start_mark = token.start_mark
end_mark = token.end_mark
# if self.loader is not None and \
# end_mark.line != self.scanner.peek_token().start_mark.line:
# self.loader.scalar_after_indicator = False
event = DocumentStartEvent(
start_mark, end_mark, explicit=True, version=version, tags=tags,
comment=token.comment
) # type: Any
self.states.append(self.parse_document_end)
self.state = self.parse_document_content
else:
# Parse the end of the stream.
token = self.scanner.get_token()
event = StreamEndEvent(token.start_mark, token.end_mark, comment=token.comment)
assert not self.states
assert not self.marks
self.state = None
return event
def parse_document_end(self):
# type: () -> Any
# Parse the document end.
token = self.scanner.peek_token()
start_mark = end_mark = token.start_mark
explicit = False
if self.scanner.check_token(DocumentEndToken):
token = self.scanner.get_token()
end_mark = token.end_mark
explicit = True
event = DocumentEndEvent(start_mark, end_mark, explicit=explicit)
# Prepare the next state.
if self.resolver.processing_version == (1, 1):
self.state = self.parse_document_start
else:
self.state = self.parse_implicit_document_start
return event
def parse_document_content(self):
# type: () -> Any
if self.scanner.check_token(
DirectiveToken, DocumentStartToken, DocumentEndToken, StreamEndToken
):
event = self.process_empty_scalar(self.scanner.peek_token().start_mark)
self.state = self.states.pop()
return event
else:
return self.parse_block_node()
def process_directives(self):
# type: () -> Any
yaml_version = None
self.tag_handles = {}
while self.scanner.check_token(DirectiveToken):
token = self.scanner.get_token()
if token.name == 'YAML':
if yaml_version is not None:
raise ParserError(
None, None, 'found duplicate YAML directive', token.start_mark
)
major, minor = token.value
if major != 1:
raise ParserError(
None,
None,
'found incompatible YAML document (version 1.* is required)',
token.start_mark,
)
yaml_version = token.value
elif token.name == 'TAG':
handle, prefix = token.value
if handle in self.tag_handles:
raise ParserError(
None,
None,
_F('duplicate tag handle {handle!r}', handle=handle),
token.start_mark,
)
self.tag_handles[handle] = prefix
if bool(self.tag_handles):
value = yaml_version, self.tag_handles.copy() # type: Any
else:
value = yaml_version, None
if self.loader is not None and hasattr(self.loader, 'tags'):
self.loader.version = yaml_version
if self.loader.tags is None:
self.loader.tags = {}
for k in self.tag_handles:
self.loader.tags[k] = self.tag_handles[k]
for key in self.DEFAULT_TAGS:
if key not in self.tag_handles:
self.tag_handles[key] = self.DEFAULT_TAGS[key]
return value
# block_node_or_indentless_sequence ::= ALIAS
# | properties (block_content | indentless_block_sequence)?
# | block_content
# | indentless_block_sequence
# block_node ::= ALIAS
# | properties block_content?
# | block_content
# flow_node ::= ALIAS
# | properties flow_content?
# | flow_content
# properties ::= TAG ANCHOR? | ANCHOR TAG?
# block_content ::= block_collection | flow_collection | SCALAR
# flow_content ::= flow_collection | SCALAR
# block_collection ::= block_sequence | block_mapping
# flow_collection ::= flow_sequence | flow_mapping
def parse_block_node(self):
# type: () -> Any
return self.parse_node(block=True)
def parse_flow_node(self):
# type: () -> Any
return self.parse_node()
def parse_block_node_or_indentless_sequence(self):
# type: () -> Any
return self.parse_node(block=True, indentless_sequence=True)
def transform_tag(self, handle, suffix):
# type: (Any, Any) -> Any
return self.tag_handles[handle] + suffix
def parse_node(self, block=False, indentless_sequence=False):
# type: (bool, bool) -> Any
if self.scanner.check_token(AliasToken):
token = self.scanner.get_token()
event = AliasEvent(token.value, token.start_mark, token.end_mark) # type: Any
self.state = self.states.pop()
return event
anchor = None
tag = None
start_mark = end_mark = tag_mark = None
if self.scanner.check_token(AnchorToken):
token = self.scanner.get_token()
self.move_token_comment(token)
start_mark = token.start_mark
end_mark = token.end_mark
anchor = token.value
if self.scanner.check_token(TagToken):
token = self.scanner.get_token()
tag_mark = token.start_mark
end_mark = token.end_mark
tag = token.value
elif self.scanner.check_token(TagToken):
token = self.scanner.get_token()
start_mark = tag_mark = token.start_mark
end_mark = token.end_mark
tag = token.value
if self.scanner.check_token(AnchorToken):
token = self.scanner.get_token()
start_mark = tag_mark = token.start_mark
end_mark = token.end_mark
anchor = token.value
if tag is not None:
handle, suffix = tag
if handle is not None:
if handle not in self.tag_handles:
raise ParserError(
'while parsing a node',
start_mark,
_F('found undefined tag handle {handle!r}', handle=handle),
tag_mark,
)
tag = self.transform_tag(handle, suffix)
else:
tag = suffix
# if tag == '!':
# raise ParserError("while parsing a node", start_mark,
# "found non-specific tag '!'", tag_mark,
# "Please check 'http://pyyaml.org/wiki/YAMLNonSpecificTag'
# and share your opinion.")
if start_mark is None:
start_mark = end_mark = self.scanner.peek_token().start_mark
event = None
implicit = tag is None or tag == '!'
if indentless_sequence and self.scanner.check_token(BlockEntryToken):
comment = None
pt = self.scanner.peek_token()
if self.loader and self.loader.comment_handling is None:
if pt.comment and pt.comment[0]:
comment = [pt.comment[0], []]
pt.comment[0] = None
elif self.loader:
if pt.comment:
comment = pt.comment
end_mark = self.scanner.peek_token().end_mark
event = SequenceStartEvent(
anchor, tag, implicit, start_mark, end_mark, flow_style=False, comment=comment
)
self.state = self.parse_indentless_sequence_entry
return event
if self.scanner.check_token(ScalarToken):
token = self.scanner.get_token()
# self.scanner.peek_token_same_line_comment(token)
end_mark = token.end_mark
if (token.plain and tag is None) or tag == '!':
implicit = (True, False)
elif tag is None:
implicit = (False, True)
else:
implicit = (False, False)
# nprint('se', token.value, token.comment)
event = ScalarEvent(
anchor,
tag,
implicit,
token.value,
start_mark,
end_mark,
style=token.style,
comment=token.comment,
)
self.state = self.states.pop()
elif self.scanner.check_token(FlowSequenceStartToken):
pt = self.scanner.peek_token()
end_mark = pt.end_mark
event = SequenceStartEvent(
anchor,
tag,
implicit,
start_mark,
end_mark,
flow_style=True,
comment=pt.comment,
)
self.state = self.parse_flow_sequence_first_entry
elif self.scanner.check_token(FlowMappingStartToken):
pt = self.scanner.peek_token()
end_mark = pt.end_mark
event = MappingStartEvent(
anchor,
tag,
implicit,
start_mark,
end_mark,
flow_style=True,
comment=pt.comment,
)
self.state = self.parse_flow_mapping_first_key
elif block and self.scanner.check_token(BlockSequenceStartToken):
end_mark = self.scanner.peek_token().start_mark
# should inserting the comment be dependent on the
# indentation?
pt = self.scanner.peek_token()
comment = pt.comment
# nprint('pt0', type(pt))
if comment is None or comment[1] is None:
comment = pt.split_old_comment()
# nprint('pt1', comment)
event = SequenceStartEvent(
anchor, tag, implicit, start_mark, end_mark, flow_style=False, comment=comment
)
self.state = self.parse_block_sequence_first_entry
elif block and self.scanner.check_token(BlockMappingStartToken):
end_mark = self.scanner.peek_token().start_mark
comment = self.scanner.peek_token().comment
event = MappingStartEvent(
anchor, tag, implicit, start_mark, end_mark, flow_style=False, comment=comment
)
self.state = self.parse_block_mapping_first_key
elif anchor is not None or tag is not None:
# Empty scalars are allowed even if a tag or an anchor is
# specified.
event = ScalarEvent(anchor, tag, (implicit, False), "", start_mark, end_mark)
self.state = self.states.pop()
else:
if block:
node = 'block'
else:
node = 'flow'
token = self.scanner.peek_token()
raise ParserError(
_F('while parsing a {node!s} node', node=node),
start_mark,
_F('expected the node content, but found {token_id!r}', token_id=token.id),
token.start_mark,
)
return event
# block_sequence ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)*
# BLOCK-END
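# An illustrative sketch: the input
#
#   - one
#   - two
#
# is scanned as BLOCK-SEQUENCE-START, a BLOCK-ENTRY before each item and
# a closing BLOCK-END, matching the production above; an entry with no
# node after it is emitted as an empty scalar.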
def parse_block_sequence_first_entry(self):
# type: () -> Any
token = self.scanner.get_token()
# move any comment from start token
# self.move_token_comment(token)
self.marks.append(token.start_mark)
return self.parse_block_sequence_entry()
def parse_block_sequence_entry(self):
# type: () -> Any
if self.scanner.check_token(BlockEntryToken):
token = self.scanner.get_token()
self.move_token_comment(token)
if not self.scanner.check_token(BlockEntryToken, BlockEndToken):
self.states.append(self.parse_block_sequence_entry)
return self.parse_block_node()
else:
self.state = self.parse_block_sequence_entry
return self.process_empty_scalar(token.end_mark)
if not self.scanner.check_token(BlockEndToken):
token = self.scanner.peek_token()
raise ParserError(
'while parsing a block collection',
self.marks[-1],
_F('expected <block end>, but found {token_id!r}', token_id=token.id),
token.start_mark,
)
token = self.scanner.get_token() # BlockEndToken
event = SequenceEndEvent(token.start_mark, token.end_mark, comment=token.comment)
self.state = self.states.pop()
self.marks.pop()
return event
# indentless_sequence ::= (BLOCK-ENTRY block_node?)+
# indentless_sequence?
# sequence:
# - entry
# - nested
def parse_indentless_sequence_entry(self):
# type: () -> Any
if self.scanner.check_token(BlockEntryToken):
token = self.scanner.get_token()
self.move_token_comment(token)
if not self.scanner.check_token(
BlockEntryToken, KeyToken, ValueToken, BlockEndToken
):
self.states.append(self.parse_indentless_sequence_entry)
return self.parse_block_node()
else:
self.state = self.parse_indentless_sequence_entry
return self.process_empty_scalar(token.end_mark)
token = self.scanner.peek_token()
c = None
if self.loader and self.loader.comment_handling is None:
c = token.comment
start_mark = token.start_mark
else:
start_mark = self.last_event.end_mark # type: ignore
c = self.distribute_comment(token.comment, start_mark.line) # type: ignore
event = SequenceEndEvent(start_mark, start_mark, comment=c)
self.state = self.states.pop()
return event
# block_mapping ::= BLOCK-MAPPING_START
# ((KEY block_node_or_indentless_sequence?)?
# (VALUE block_node_or_indentless_sequence?)?)*
# BLOCK-END
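# An illustrative sketch: the input
#
#   key: value
#   other:
#
# is scanned as BLOCK-MAPPING-START, a KEY/VALUE token pair per entry and
# a closing BLOCK-END; the missing node after 'other:' is emitted as an
# empty scalar.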
def parse_block_mapping_first_key(self):
# type: () -> Any
token = self.scanner.get_token()
self.marks.append(token.start_mark)
return self.parse_block_mapping_key()
def parse_block_mapping_key(self):
# type: () -> Any
if self.scanner.check_token(KeyToken):
token = self.scanner.get_token()
self.move_token_comment(token)
if not self.scanner.check_token(KeyToken, ValueToken, BlockEndToken):
self.states.append(self.parse_block_mapping_value)
return self.parse_block_node_or_indentless_sequence()
else:
self.state = self.parse_block_mapping_value
return self.process_empty_scalar(token.end_mark)
if self.resolver.processing_version > (1, 1) and self.scanner.check_token(ValueToken):
self.state = self.parse_block_mapping_value
return self.process_empty_scalar(self.scanner.peek_token().start_mark)
if not self.scanner.check_token(BlockEndToken):
token = self.scanner.peek_token()
raise ParserError(
'while parsing a block mapping',
self.marks[-1],
_F('expected <block end>, but found {token_id!r}', token_id=token.id),
token.start_mark,
)
token = self.scanner.get_token()
self.move_token_comment(token)
event = MappingEndEvent(token.start_mark, token.end_mark, comment=token.comment)
self.state = self.states.pop()
self.marks.pop()
return event
def parse_block_mapping_value(self):
# type: () -> Any
if self.scanner.check_token(ValueToken):
token = self.scanner.get_token()
# the value token might carry a post comment; move it to e.g. the block
if self.scanner.check_token(ValueToken):
self.move_token_comment(token)
else:
if not self.scanner.check_token(KeyToken):
self.move_token_comment(token, empty=True)
# else: empty value for this key cannot move token.comment
if not self.scanner.check_token(KeyToken, ValueToken, BlockEndToken):
self.states.append(self.parse_block_mapping_key)
return self.parse_block_node_or_indentless_sequence()
else:
self.state = self.parse_block_mapping_key
comment = token.comment
if comment is None:
token = self.scanner.peek_token()
comment = token.comment
if comment:
token._comment = [None, comment[1]]
comment = [comment[0], None]
return self.process_empty_scalar(token.end_mark, comment=comment)
else:
self.state = self.parse_block_mapping_key
token = self.scanner.peek_token()
return self.process_empty_scalar(token.start_mark)
# flow_sequence ::= FLOW-SEQUENCE-START
# (flow_sequence_entry FLOW-ENTRY)*
# flow_sequence_entry?
# FLOW-SEQUENCE-END
# flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
#
# Note that while production rules for both flow_sequence_entry and
# flow_mapping_entry are equal, their interpretations are different.
# For `flow_sequence_entry`, the part `KEY flow_node? (VALUE flow_node?)?`
# generates an inline mapping (set syntax).
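# An illustrative sketch: in the flow sequence
#
#   [a, b: c, d]
#
# the middle entry matches 'KEY flow_node VALUE flow_node' and is emitted
# as a single-pair inline mapping between the two plain scalar entries.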
def parse_flow_sequence_first_entry(self):
# type: () -> Any
token = self.scanner.get_token()
self.marks.append(token.start_mark)
return self.parse_flow_sequence_entry(first=True)
def parse_flow_sequence_entry(self, first=False):
# type: (bool) -> Any
if not self.scanner.check_token(FlowSequenceEndToken):
if not first:
if self.scanner.check_token(FlowEntryToken):
self.scanner.get_token()
else:
token = self.scanner.peek_token()
raise ParserError(
'while parsing a flow sequence',
self.marks[-1],
_F("expected ',' or ']', but got {token_id!r}", token_id=token.id),
token.start_mark,
)
if self.scanner.check_token(KeyToken):
token = self.scanner.peek_token()
event = MappingStartEvent(
None, None, True, token.start_mark, token.end_mark, flow_style=True
) # type: Any
self.state = self.parse_flow_sequence_entry_mapping_key
return event
elif not self.scanner.check_token(FlowSequenceEndToken):
self.states.append(self.parse_flow_sequence_entry)
return self.parse_flow_node()
token = self.scanner.get_token()
event = SequenceEndEvent(token.start_mark, token.end_mark, comment=token.comment)
self.state = self.states.pop()
self.marks.pop()
return event
def parse_flow_sequence_entry_mapping_key(self):
# type: () -> Any
token = self.scanner.get_token()
if not self.scanner.check_token(ValueToken, FlowEntryToken, FlowSequenceEndToken):
self.states.append(self.parse_flow_sequence_entry_mapping_value)
return self.parse_flow_node()
else:
self.state = self.parse_flow_sequence_entry_mapping_value
return self.process_empty_scalar(token.end_mark)
def parse_flow_sequence_entry_mapping_value(self):
# type: () -> Any
if self.scanner.check_token(ValueToken):
token = self.scanner.get_token()
if not self.scanner.check_token(FlowEntryToken, FlowSequenceEndToken):
self.states.append(self.parse_flow_sequence_entry_mapping_end)
return self.parse_flow_node()
else:
self.state = self.parse_flow_sequence_entry_mapping_end
return self.process_empty_scalar(token.end_mark)
else:
self.state = self.parse_flow_sequence_entry_mapping_end
token = self.scanner.peek_token()
return self.process_empty_scalar(token.start_mark)
def parse_flow_sequence_entry_mapping_end(self):
# type: () -> Any
self.state = self.parse_flow_sequence_entry
token = self.scanner.peek_token()
return MappingEndEvent(token.start_mark, token.start_mark)
# flow_mapping ::= FLOW-MAPPING-START
# (flow_mapping_entry FLOW-ENTRY)*
# flow_mapping_entry?
# FLOW-MAPPING-END
# flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
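# An illustrative sketch: the input
#
#   {a: 1, b}
#
# is scanned as FLOW-MAPPING-START, a KEY/VALUE pair for 'a: 1', a lone
# KEY for 'b' (whose value becomes an empty scalar) and FLOW-MAPPING-END.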
def parse_flow_mapping_first_key(self):
# type: () -> Any
token = self.scanner.get_token()
self.marks.append(token.start_mark)
return self.parse_flow_mapping_key(first=True)
def parse_flow_mapping_key(self, first=False):
# type: (Any) -> Any
if not self.scanner.check_token(FlowMappingEndToken):
if not first:
if self.scanner.check_token(FlowEntryToken):
self.scanner.get_token()
else:
token = self.scanner.peek_token()
raise ParserError(
'while parsing a flow mapping',
self.marks[-1],
_F("expected ',' or '}}', but got {token_id!r}", token_id=token.id),
token.start_mark,
)
if self.scanner.check_token(KeyToken):
token = self.scanner.get_token()
if not self.scanner.check_token(
ValueToken, FlowEntryToken, FlowMappingEndToken
):
self.states.append(self.parse_flow_mapping_value)
return self.parse_flow_node()
else:
self.state = self.parse_flow_mapping_value
return self.process_empty_scalar(token.end_mark)
elif self.resolver.processing_version > (1, 1) and self.scanner.check_token(
ValueToken
):
self.state = self.parse_flow_mapping_value
return self.process_empty_scalar(self.scanner.peek_token().end_mark)
elif not self.scanner.check_token(FlowMappingEndToken):
self.states.append(self.parse_flow_mapping_empty_value)
return self.parse_flow_node()
token = self.scanner.get_token()
event = MappingEndEvent(token.start_mark, token.end_mark, comment=token.comment)
self.state = self.states.pop()
self.marks.pop()
return event
def parse_flow_mapping_value(self):
# type: () -> Any
if self.scanner.check_token(ValueToken):
token = self.scanner.get_token()
if not self.scanner.check_token(FlowEntryToken, FlowMappingEndToken):
self.states.append(self.parse_flow_mapping_key)
return self.parse_flow_node()
else:
self.state = self.parse_flow_mapping_key
return self.process_empty_scalar(token.end_mark)
else:
self.state = self.parse_flow_mapping_key
token = self.scanner.peek_token()
return self.process_empty_scalar(token.start_mark)
def parse_flow_mapping_empty_value(self):
# type: () -> Any
self.state = self.parse_flow_mapping_key
return self.process_empty_scalar(self.scanner.peek_token().start_mark)
def process_empty_scalar(self, mark, comment=None):
# type: (Any, Any) -> Any
return ScalarEvent(None, None, (True, False), "", mark, mark, comment=comment)
def move_token_comment(self, token, nt=None, empty=False):
# type: (Any, Optional[Any], Optional[bool]) -> Any
pass
class RoundTripParser(Parser):
"""roundtrip is a safe loader, that wants to see the unmangled tag"""
def transform_tag(self, handle, suffix):
# type: (Any, Any) -> Any
# return self.tag_handles[handle]+suffix
if handle == '!!' and suffix in (
'null',
'bool',
'int',
'float',
'binary',
'timestamp',
'omap',
'pairs',
'set',
'str',
'seq',
'map',
):
return Parser.transform_tag(self, handle, suffix)
return handle + suffix
def move_token_comment(self, token, nt=None, empty=False):
# type: (Any, Optional[Any], Optional[bool]) -> Any
token.move_old_comment(self.scanner.peek_token() if nt is None else nt, empty=empty)
class RoundTripParserSC(RoundTripParser):
"""roundtrip is a safe loader, that wants to see the unmangled tag"""
# some of the differences are based on the superclass testing
# if self.loader.comment_handling is not None
def move_token_comment(self, token, nt=None, empty=False):
# type: (Any, Optional[Any], Optional[bool]) -> None
token.move_new_comment(self.scanner.peek_token() if nt is None else nt, empty=empty)
def distribute_comment(self, comment, line):
# type: (Any, Any) -> Any
# ToDo, look at indentation of the comment to determine attachment
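# Sketch of the behaviour below, inferred from the code: depending on the
# two low bits of loader.comment_handling, a comment run either stays
# with the previous node (C_POST), moves wholesale to the following node
# (C_PRE), or is split at the first blank-line comment
# (C_SPLIT_ON_FIRST_BLANK).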
if comment is None:
return None
if not comment[0]:
return None
if comment[0][0] != line + 1:
nprintf('>>>dcxxx', comment, line)
assert comment[0][0] == line + 1
# if comment[0] - line > 1:
# return
typ = self.loader.comment_handling & 0b11
# nprintf('>>>dca', comment, line, typ)
if typ == C_POST:
return None
if typ == C_PRE:
c = [None, None, comment[0]]
comment[0] = None
return c
# nprintf('>>>dcb', comment[0])
for _idx, cmntidx in enumerate(comment[0]):
# nprintf('>>>dcb', cmntidx)
if isinstance(self.scanner.comments[cmntidx], BlankLineComment):
break
else:
return None # no space found
if _idx == 0:
return None # first line was blank
# nprintf('>>>dcc', idx)
if typ == C_SPLIT_ON_FIRST_BLANK:
c = [None, None, comment[0][:_idx]]
comment[0] = comment[0][_idx:]
return c
raise NotImplementedError # reserved

View File

@@ -1,302 +0,0 @@
# coding: utf-8
# This module contains abstractions for the input stream. You don't have to
# look further; there is no pretty code here.
#
# We define two classes here.
#
# Mark(source, line, column)
# It's just a record and its only use is producing nice error messages.
# Parser does not use it for any other purposes.
#
# Reader(source, data)
# Reader determines the encoding of `data` and converts it to unicode.
# Reader provides the following methods and attributes:
# reader.peek(index=0) - return the character `index` positions ahead
# of the current position, without moving it.
# reader.forward(length=1) - move the current position forward by
# `length` characters.
# reader.index - the index of the current character.
# reader.line, reader.column - the line and the column of the current
# character.
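# A minimal usage sketch (loaders normally construct the Reader for you;
# building one directly is only for illustration):
#
#   reader = Reader('a: 1\n')
#   reader.peek() # -> 'a', does not advance
#   reader.prefix(4) # -> 'a: 1'
#   reader.forward(3) # index is now 3, still on line 0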
import codecs
from ruamel.yaml.error import YAMLError, FileMark, StringMark, YAMLStreamError
from ruamel.yaml.compat import _F # NOQA
from ruamel.yaml.util import RegExp
if False: # MYPY
from typing import Any, Dict, Optional, List, Union, Text, Tuple # NOQA
# from ruamel.yaml.compat import StreamTextType # NOQA
__all__ = ['Reader', 'ReaderError']
class ReaderError(YAMLError):
def __init__(self, name, position, character, encoding, reason):
# type: (Any, Any, Any, Any, Any) -> None
self.name = name
self.character = character
self.position = position
self.encoding = encoding
self.reason = reason
def __str__(self):
# type: () -> Any
if isinstance(self.character, bytes):
return _F(
"'{self_encoding!s}' codec can't decode byte #x{ord_self_character:02x}: "
'{self_reason!s}\n'
' in "{self_name!s}", position {self_position:d}',
self_encoding=self.encoding,
ord_self_character=ord(self.character),
self_reason=self.reason,
self_name=self.name,
self_position=self.position,
)
else:
return _F(
'unacceptable character #x{self_character:04x}: {self_reason!s}\n'
' in "{self_name!s}", position {self_position:d}',
self_character=self.character,
self_reason=self.reason,
self_name=self.name,
self_position=self.position,
)
class Reader:
# Reader:
# - determines the data encoding and converts it to a unicode string,
# - checks if characters are in allowed range,
# - adds '\0' to the end.
# Reader accepts
# - a `bytes` object,
# - a `str` object,
# - a file-like object with its `read` method returning `bytes`,
# - a file-like object with its `read` method returning `str`.
# Yeah, it's ugly and slow.
def __init__(self, stream, loader=None):
# type: (Any, Any) -> None
self.loader = loader
if self.loader is not None and getattr(self.loader, '_reader', None) is None:
self.loader._reader = self
self.reset_reader()
self.stream = stream # type: Any # as .read is called
def reset_reader(self):
# type: () -> None
self.name = None # type: Any
self.stream_pointer = 0
self.eof = True
self.buffer = ""
self.pointer = 0
self.raw_buffer = None # type: Any
self.raw_decode = None
self.encoding = None # type: Optional[Text]
self.index = 0
self.line = 0
self.column = 0
@property
def stream(self):
# type: () -> Any
try:
return self._stream
except AttributeError:
raise YAMLStreamError('input stream needs to be specified')
@stream.setter
def stream(self, val):
# type: (Any) -> None
if val is None:
return
self._stream = None
if isinstance(val, str):
self.name = '<unicode string>'
self.check_printable(val)
self.buffer = val + '\0'
elif isinstance(val, bytes):
self.name = '<byte string>'
self.raw_buffer = val
self.determine_encoding()
else:
if not hasattr(val, 'read'):
raise YAMLStreamError('stream argument needs to have a read() method')
self._stream = val
self.name = getattr(self.stream, 'name', '<file>')
self.eof = False
self.raw_buffer = None
self.determine_encoding()
def peek(self, index=0):
# type: (int) -> Text
try:
return self.buffer[self.pointer + index]
except IndexError:
self.update(index + 1)
return self.buffer[self.pointer + index]
def prefix(self, length=1):
# type: (int) -> Any
if self.pointer + length >= len(self.buffer):
self.update(length)
return self.buffer[self.pointer : self.pointer + length]
def forward_1_1(self, length=1):
# type: (int) -> None
if self.pointer + length + 1 >= len(self.buffer):
self.update(length + 1)
while length != 0:
ch = self.buffer[self.pointer]
self.pointer += 1
self.index += 1
if ch in '\n\x85\u2028\u2029' or (
ch == '\r' and self.buffer[self.pointer] != '\n'
):
self.line += 1
self.column = 0
elif ch != '\uFEFF':
self.column += 1
length -= 1
def forward(self, length=1):
# type: (int) -> None
if self.pointer + length + 1 >= len(self.buffer):
self.update(length + 1)
while length != 0:
ch = self.buffer[self.pointer]
self.pointer += 1
self.index += 1
if ch == '\n' or (ch == '\r' and self.buffer[self.pointer] != '\n'):
self.line += 1
self.column = 0
elif ch != '\uFEFF':
self.column += 1
length -= 1
def get_mark(self):
# type: () -> Any
if self.stream is None:
return StringMark(
self.name, self.index, self.line, self.column, self.buffer, self.pointer
)
else:
return FileMark(self.name, self.index, self.line, self.column)
def determine_encoding(self):
# type: () -> None
while not self.eof and (self.raw_buffer is None or len(self.raw_buffer) < 2):
self.update_raw()
if isinstance(self.raw_buffer, bytes):
if self.raw_buffer.startswith(codecs.BOM_UTF16_LE):
self.raw_decode = codecs.utf_16_le_decode # type: ignore
self.encoding = 'utf-16-le'
elif self.raw_buffer.startswith(codecs.BOM_UTF16_BE):
self.raw_decode = codecs.utf_16_be_decode # type: ignore
self.encoding = 'utf-16-be'
else:
self.raw_decode = codecs.utf_8_decode # type: ignore
self.encoding = 'utf-8'
self.update(1)
NON_PRINTABLE = RegExp(
'[^\x09\x0A\x0D\x20-\x7E\x85' '\xA0-\uD7FF' '\uE000-\uFFFD' '\U00010000-\U0010FFFF' ']'
)
_printable_ascii = ('\x09\x0A\x0D' + "".join(map(chr, range(0x20, 0x7F)))).encode('ascii')
@classmethod
def _get_non_printable_ascii(cls, data): # type: ignore
# type: (Text, bytes) -> Optional[Tuple[int, Text]]
ascii_bytes = data.encode('ascii') # type: ignore
non_printables = ascii_bytes.translate(None, cls._printable_ascii) # type: ignore
if not non_printables:
return None
non_printable = non_printables[:1]
return ascii_bytes.index(non_printable), non_printable.decode('ascii')
@classmethod
def _get_non_printable_regex(cls, data):
# type: (Text) -> Optional[Tuple[int, Text]]
match = cls.NON_PRINTABLE.search(data)
if not bool(match):
return None
return match.start(), match.group()
@classmethod
def _get_non_printable(cls, data):
# type: (Text) -> Optional[Tuple[int, Text]]
try:
return cls._get_non_printable_ascii(data) # type: ignore
except UnicodeEncodeError:
return cls._get_non_printable_regex(data)
def check_printable(self, data):
# type: (Any) -> None
non_printable_match = self._get_non_printable(data)
if non_printable_match is not None:
start, character = non_printable_match
position = self.index + (len(self.buffer) - self.pointer) + start
raise ReaderError(
self.name,
position,
ord(character),
'unicode',
'special characters are not allowed',
)
def update(self, length):
# type: (int) -> None
if self.raw_buffer is None:
return
self.buffer = self.buffer[self.pointer :]
self.pointer = 0
while len(self.buffer) < length:
if not self.eof:
self.update_raw()
if self.raw_decode is not None:
try:
data, converted = self.raw_decode(self.raw_buffer, 'strict', self.eof)
except UnicodeDecodeError as exc:
character = self.raw_buffer[exc.start]
if self.stream is not None:
position = self.stream_pointer - len(self.raw_buffer) + exc.start
else:
position = exc.start
raise ReaderError(self.name, position, character, exc.encoding, exc.reason)
else:
data = self.raw_buffer
converted = len(data)
self.check_printable(data)
self.buffer += data
self.raw_buffer = self.raw_buffer[converted:]
if self.eof:
self.buffer += '\0'
self.raw_buffer = None
break
def update_raw(self, size=None):
# type: (Optional[int]) -> None
if size is None:
size = 4096
data = self.stream.read(size)
if self.raw_buffer is None:
self.raw_buffer = data
else:
self.raw_buffer += data
self.stream_pointer += len(data)
if not data:
self.eof = True
# try:
# import psyco
# psyco.bind(Reader)
# except ImportError:
# pass

File diff suppressed because it is too large

Some files were not shown because too many files have changed in this diff