Compare commits
1 commit (disinherit...revert-341)

| Author | SHA1 | Date |
|---|---|---|
|  | 4583161224 |  |
.github/workflows/audit.yaml (vendored): 4 changes

@@ -19,8 +19,8 @@ jobs:
   package-audits:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # @v2
-      - uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435 # @v2
+      - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2
+      - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 # @v2
         with:
           python-version: ${{inputs.python_version}}
       - name: Install Python packages
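Every hunk in this comparison swaps one pinned action SHA for another. For readers unfamiliar with the pattern, here is a minimal sketch of the pinning convention these workflows use; the job name and Python version below are illustrative assumptions, not lines from the diff.

```yaml
jobs:
  example-job:
    runs-on: ubuntu-latest
    steps:
      # Pinning to a full 40-character commit SHA makes the action's code
      # immutable, unlike a mutable tag such as @v2; the human-readable tag
      # is kept in a trailing comment purely for reviewers.
      - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2
      - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 # @v2
        with:
          python-version: '3.11'  # illustrative value
```

Updating a pin therefore means replacing the SHA and, ideally, the comment together, which is exactly the edit repeated throughout this compare view.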
.github/workflows/bootstrap.yml (vendored): 22 changes

@@ -24,7 +24,7 @@ jobs:
           make patch unzip which xz python3 python3-devel tree \
           cmake bison bison-devel libstdc++-static
       - name: Checkout
-        uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c
+        uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8
         with:
           fetch-depth: 0
       - name: Setup non-root user

@@ -62,7 +62,7 @@ jobs:
           make patch unzip xz-utils python3 python3-dev tree \
           cmake bison
       - name: Checkout
-        uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c
+        uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8
         with:
           fetch-depth: 0
       - name: Setup non-root user

@@ -99,7 +99,7 @@ jobs:
           bzip2 curl file g++ gcc gfortran git gnupg2 gzip \
           make patch unzip xz-utils python3 python3-dev tree
       - name: Checkout
-        uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c
+        uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8
         with:
           fetch-depth: 0
       - name: Setup non-root user

@@ -133,7 +133,7 @@ jobs:
           make patch unzip which xz python3 python3-devel tree \
           cmake bison
       - name: Checkout
-        uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c
+        uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8
         with:
           fetch-depth: 0
       - name: Setup repo

@@ -158,7 +158,7 @@ jobs:
         run: |
           brew install cmake bison@2.7 tree
       - name: Checkout
-        uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c
+        uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8
       - name: Bootstrap clingo
         run: |
           source share/spack/setup-env.sh

@@ -179,7 +179,7 @@ jobs:
         run: |
           brew install tree
       - name: Checkout
-        uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c
+        uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8
       - name: Bootstrap clingo
         run: |
           set -ex

@@ -204,7 +204,7 @@ jobs:
     runs-on: ubuntu-20.04
     steps:
       - name: Checkout
-        uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c
+        uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8
         with:
           fetch-depth: 0
       - name: Setup repo

@@ -247,7 +247,7 @@ jobs:
           bzip2 curl file g++ gcc patchelf gfortran git gzip \
           make patch unzip xz-utils python3 python3-dev tree
       - name: Checkout
-        uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c
+        uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8
         with:
           fetch-depth: 0
       - name: Setup non-root user

@@ -283,7 +283,7 @@ jobs:
           make patch unzip xz-utils python3 python3-dev tree \
           gawk
       - name: Checkout
-        uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c
+        uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8
         with:
           fetch-depth: 0
       - name: Setup non-root user

@@ -316,7 +316,7 @@ jobs:
           # Remove GnuPG since we want to bootstrap it
           sudo rm -rf /usr/local/bin/gpg
       - name: Checkout
-        uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c
+        uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8
       - name: Bootstrap GnuPG
         run: |
           source share/spack/setup-env.sh

@@ -333,7 +333,7 @@ jobs:
           # Remove GnuPG since we want to bootstrap it
           sudo rm -rf /usr/local/bin/gpg
       - name: Checkout
-        uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c
+        uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8
       - name: Bootstrap GnuPG
         run: |
           source share/spack/setup-env.sh
.github/workflows/build-containers.yml (vendored): 6 changes

@@ -50,7 +50,7 @@ jobs:
     if: github.repository == 'spack/spack'
     steps:
       - name: Checkout
-        uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # @v2
+        uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2

       - name: Set Container Tag Normal (Nightly)
         run: |

@@ -80,7 +80,7 @@ jobs:
           fi

       - name: Upload Dockerfile
-        uses: actions/upload-artifact@0b7f8abb1508181956e8e162db84b466c27e18ce
+        uses: actions/upload-artifact@83fd05a356d7e2593de66fc9913b3002723633cb
         with:
           name: dockerfiles
           path: dockerfiles

@@ -106,7 +106,7 @@ jobs:
           password: ${{ secrets.DOCKERHUB_TOKEN }}

       - name: Build & Deploy ${{ matrix.dockerfile[0] }}
-        uses: docker/build-push-action@37abcedcc1da61a57767b7588cb9d03eb57e28b3 # @v2
+        uses: docker/build-push-action@c56af957549030174b10d6867f20e78cfd7debc5 # @v2
         with:
           context: dockerfiles/${{ matrix.dockerfile[0] }}
           platforms: ${{ matrix.dockerfile[1] }}
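As a hedged sketch of how the pinned ``docker/build-push-action`` step above is typically filled in: the ``context``, ``platforms``, and ``push`` values here are illustrative assumptions standing in for the matrix values, not lines from the workflow.

```yaml
- name: Build & Deploy
  uses: docker/build-push-action@c56af957549030174b10d6867f20e78cfd7debc5 # @v2
  with:
    context: dockerfiles/ubuntu-22.04    # assumed example of matrix.dockerfile[0]
    platforms: linux/amd64,linux/arm64   # assumed example of matrix.dockerfile[1]
    push: true                           # publish the image after a successful build
```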
.github/workflows/ci.yaml (vendored): 2 changes

@@ -35,7 +35,7 @@ jobs:
       core: ${{ steps.filter.outputs.core }}
       packages: ${{ steps.filter.outputs.packages }}
     steps:
-      - uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # @v2
+      - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2
        if: ${{ github.event_name == 'push' }}
        with:
          fetch-depth: 0
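The ``core`` and ``packages`` lines in this hunk are job-level outputs fed by a path-filter step. A minimal sketch of how a downstream job would consume them; the job names and the stand-in filter step are assumptions for illustration, not the real workflow.

```yaml
jobs:
  changes:
    runs-on: ubuntu-latest
    outputs:
      core: ${{ steps.filter.outputs.core }}  # "true" when core files changed
    steps:
      - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2
      - id: filter
        # Stand-in for the real path-filter step that inspects changed files.
        run: echo "core=true" >> $GITHUB_OUTPUT
  unit-tests:
    needs: changes
    # Skip the expensive test job entirely when no core files changed.
    if: ${{ needs.changes.outputs.core == 'true' }}
    runs-on: ubuntu-latest
    steps:
      - run: echo "run the unit tests here"
```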
.github/workflows/setup_git.ps1 (vendored): 8 changes

@@ -1,9 +1,15 @@
-# (c) 2022 Lawrence Livermore National Laboratory
+# (c) 2021 Lawrence Livermore National Laboratory
+
+Set-Location spack

 git config --global user.email "spack@example.com"
 git config --global user.name "Test User"
 git config --global core.longpaths true

+# See https://github.com/git/git/security/advisories/GHSA-3wp6-j8xr-qw85 (CVE-2022-39253)
+# This is needed to let some fixture in our unit-test suite run
+git config --global protocol.file.allow always
+
 if ($(git branch --show-current) -ne "develop")
 {
     git branch develop origin/develop
.github/workflows/setup_git.sh (vendored): 4 changes

@@ -2,6 +2,10 @@
 git config --global user.email "spack@example.com"
 git config --global user.name "Test User"

+# See https://github.com/git/git/security/advisories/GHSA-3wp6-j8xr-qw85 (CVE-2022-39253)
+# This is needed to let some fixture in our unit-test suite run
+git config --global protocol.file.allow always
+
 # create a local pr base branch
 if [[ -n $GITHUB_BASE_REF ]]; then
     git fetch origin "${GITHUB_BASE_REF}:${GITHUB_BASE_REF}"
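Both setup scripts above add the same Git setting. Git 2.38.1 tightened ``protocol.file.allow`` in response to CVE-2022-39253, which broke test fixtures that clone submodules over the ``file://`` protocol. A sketch of a workflow step applying the same workaround directly; the step name is an illustrative assumption.

```yaml
steps:
  - name: Allow file:// git transport for test fixtures
    run: |
      # Git >= 2.38.1 defaults protocol.file.allow to "user", which blocks
      # the file://-based submodule fixtures used by the unit tests.
      git config --global protocol.file.allow always
```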
.github/workflows/unit_tests.yaml (vendored): 20 changes

@@ -47,10 +47,10 @@ jobs:
             on_develop: false

     steps:
-      - uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # @v2
+      - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2
        with:
          fetch-depth: 0
-      - uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435 # @v2
+      - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 # @v2
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install System packages

@@ -94,10 +94,10 @@ jobs:
        shell:
    runs-on: ubuntu-latest
    steps:
-      - uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # @v2
+      - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2
        with:
          fetch-depth: 0
-      - uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435 # @v2
+      - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 # @v2
        with:
          python-version: '3.11'
      - name: Install System packages

@@ -133,7 +133,7 @@ jobs:
          dnf install -y \
              bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
              make patch tcl unzip which xz
-      - uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # @v2
+      - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2
      - name: Setup repo and non-root user
        run: |
          git --version

@@ -145,16 +145,16 @@ jobs:
        shell: runuser -u spack-test -- bash {0}
        run: |
          source share/spack/setup-env.sh
          spack -d bootstrap now --dev
          spack -d solve zlib
          spack unit-test -k 'not cvs and not svn and not hg' -x --verbose
  # Test for the clingo based solver (using clingo-cffi)
  clingo-cffi:
    runs-on: ubuntu-latest
    steps:
-      - uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # @v2
+      - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2
        with:
          fetch-depth: 0
-      - uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435 # @v2
+      - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 # @v2
        with:
          python-version: '3.11'
      - name: Install System packages

@@ -185,10 +185,10 @@ jobs:
      matrix:
        python-version: ["3.10"]
    steps:
-      - uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # @v2
+      - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2
        with:
          fetch-depth: 0
-      - uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435 # @v2
+      - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 # @v2
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install Python packages
.github/workflows/valid-style.yml (vendored): 8 changes

@@ -18,8 +18,8 @@ jobs:
  validate:
    runs-on: ubuntu-latest
    steps:
-      - uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # @v2
-      - uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435 # @v2
+      - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2
+      - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 # @v2
        with:
          python-version: '3.11'
          cache: 'pip'

@@ -35,10 +35,10 @@ jobs:
  style:
    runs-on: ubuntu-latest
    steps:
-      - uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # @v2
+      - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2
        with:
          fetch-depth: 0
-      - uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435 # @v2
+      - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 # @v2
        with:
          python-version: '3.11'
          cache: 'pip'
.github/workflows/windows_python.yml (vendored): 175 changes

@@ -10,15 +10,15 @@ concurrency:
 defaults:
   run:
     shell:
-      powershell Invoke-Expression -Command "./share/spack/qa/windows_test_setup.ps1"; {0}
+      powershell Invoke-Expression -Command ".\share\spack\qa\windows_test_setup.ps1"; {0}
 jobs:
   unit-tests:
     runs-on: windows-latest
     steps:
-      - uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c
+      - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8
        with:
          fetch-depth: 0
-      - uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435
+      - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984
        with:
          python-version: 3.9
      - name: Install Python packages

@@ -26,11 +26,13 @@ jobs:
        python -m pip install --upgrade pip six pywin32 setuptools codecov pytest-cov clingo
    - name: Create local develop
      run: |
-        ./.github/workflows/setup_git.ps1
+        .\spack\.github\workflows\setup_git.ps1
    - name: Unit Test
      run: |
+        echo F|xcopy .\spack\share\spack\qa\configuration\windows_config.yaml $env:USERPROFILE\.spack\windows\config.yaml
+        cd spack
        dir
        spack unit-test -x --verbose --cov --cov-config=pyproject.toml --ignore=lib/spack/spack/test/cmd
        ./share/spack/qa/validate_last_exit.ps1
        coverage combine -a
        coverage xml
    - uses: codecov/codecov-action@d9f34f8cd5cb3b3eb79b3e4b5dae3a16df499a70

@@ -39,10 +41,10 @@ jobs:
  unit-tests-cmd:
    runs-on: windows-latest
    steps:
-      - uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c
+      - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8
        with:
          fetch-depth: 0
-      - uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435
+      - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984
        with:
          python-version: 3.9
      - name: Install Python packages

@@ -50,11 +52,12 @@ jobs:
        python -m pip install --upgrade pip six pywin32 setuptools codecov coverage pytest-cov clingo
    - name: Create local develop
      run: |
-        ./.github/workflows/setup_git.ps1
+        .\spack\.github\workflows\setup_git.ps1
    - name: Command Unit Test
      run: |
+        echo F|xcopy .\spack\share\spack\qa\configuration\windows_config.yaml $env:USERPROFILE\.spack\windows\config.yaml
+        cd spack
        spack unit-test -x --verbose --cov --cov-config=pyproject.toml lib/spack/spack/test/cmd
        ./share/spack/qa/validate_last_exit.ps1
        coverage combine -a
        coverage xml
    - uses: codecov/codecov-action@d9f34f8cd5cb3b3eb79b3e4b5dae3a16df499a70

@@ -63,10 +66,10 @@ jobs:
  build-abseil:
    runs-on: windows-latest
    steps:
-      - uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c
+      - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8
        with:
          fetch-depth: 0
-      - uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435
+      - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984
        with:
          python-version: 3.9
      - name: Install Python packages

@@ -75,81 +78,81 @@ jobs:
    - name: Build Test
      run: |
        spack compiler find
+        echo F|xcopy .\spack\share\spack\qa\configuration\windows_config.yaml $env:USERPROFILE\.spack\windows\config.yaml
        spack external find cmake
        spack external find ninja
        spack -d install abseil-cpp
-  # TODO: johnwparent - reduce the size of the installer operations
-  # make-installer:
-  #   runs-on: windows-latest
-  #   steps:
-  #   - name: Disable Windows Symlinks
-  #     run: |
-  #       git config --global core.symlinks false
-  #     shell:
-  #       powershell
-  #   - uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c
-  #     with:
-  #       fetch-depth: 0
-  #   - uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435
-  #     with:
-  #       python-version: 3.9
-  #   - name: Install Python packages
-  #     run: |
-  #       python -m pip install --upgrade pip six pywin32 setuptools
-  #   - name: Add Light and Candle to Path
-  #     run: |
-  #       $env:WIX >> $GITHUB_PATH
-  #   - name: Run Installer
-  #     run: |
-  #       ./share/spack/qa/setup_spack_installer.ps1
-  #       spack make-installer -s . -g SILENT pkg
-  #       echo "installer_root=$((pwd).Path)" | Out-File -FilePath $Env:GITHUB_ENV -Encoding utf8 -Append
-  #     env:
-  #       ProgressPreference: SilentlyContinue
-  #   - uses: actions/upload-artifact@83fd05a356d7e2593de66fc9913b3002723633cb
-  #     with:
-  #       name: Windows Spack Installer Bundle
-  #       path: ${{ env.installer_root }}\pkg\Spack.exe
-  #   - uses: actions/upload-artifact@83fd05a356d7e2593de66fc9913b3002723633cb
-  #     with:
-  #       name: Windows Spack Installer
-  #       path: ${{ env.installer_root}}\pkg\Spack.msi
-  # execute-installer:
-  #   needs: make-installer
-  #   runs-on: windows-latest
-  #   defaults:
-  #     run:
-  #       shell: pwsh
-  #   steps:
-  #   - uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435
-  #     with:
-  #       python-version: 3.9
-  #   - name: Install Python packages
-  #     run: |
-  #       python -m pip install --upgrade pip six pywin32 setuptools
-  #   - name: Setup installer directory
-  #     run: |
-  #       mkdir -p spack_installer
-  #       echo "spack_installer=$((pwd).Path)\spack_installer" | Out-File -FilePath $Env:GITHUB_ENV -Encoding utf8 -Append
-  #   - uses: actions/download-artifact@v3
-  #     with:
-  #       name: Windows Spack Installer Bundle
-  #       path: ${{ env.spack_installer }}
-  #   - name: Execute Bundled Installer
-  #     run: |
-  #       $proc = Start-Process ${{ env.spack_installer }}\spack.exe "/install /quiet" -Passthru
-  #       $handle = $proc.Handle # cache proc.Handle
-  #       $proc.WaitForExit();
-  #       $LASTEXITCODE
-  #     env:
-  #       ProgressPreference: SilentlyContinue
-  #   - uses: actions/download-artifact@v3
-  #     with:
-  #       name: Windows Spack Installer
-  #       path: ${{ env.spack_installer }}
-  #   - name: Execute MSI
-  #     run: |
-  #       $proc = Start-Process ${{ env.spack_installer }}\spack.msi "/quiet" -Passthru
-  #       $handle = $proc.Handle # cache proc.Handle
-  #       $proc.WaitForExit();
-  #       $LASTEXITCODE
+  make-installer:
+    runs-on: windows-latest
+    steps:
+    - name: Disable Windows Symlinks
+      run: |
+        git config --global core.symlinks false
+      shell:
+        powershell
+    - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8
+      with:
+        fetch-depth: 0
+    - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984
+      with:
+        python-version: 3.9
+    - name: Install Python packages
+      run: |
+        python -m pip install --upgrade pip six pywin32 setuptools
+    - name: Add Light and Candle to Path
+      run: |
+        $env:WIX >> $GITHUB_PATH
+    - name: Run Installer
+      run: |
+        .\spack\share\spack\qa\setup_spack.ps1
+        spack make-installer -s spack -g SILENT pkg
+        echo "installer_root=$((pwd).Path)" | Out-File -FilePath $Env:GITHUB_ENV -Encoding utf8 -Append
+      env:
+        ProgressPreference: SilentlyContinue
+    - uses: actions/upload-artifact@83fd05a356d7e2593de66fc9913b3002723633cb
+      with:
+        name: Windows Spack Installer Bundle
+        path: ${{ env.installer_root }}\pkg\Spack.exe
+    - uses: actions/upload-artifact@83fd05a356d7e2593de66fc9913b3002723633cb
+      with:
+        name: Windows Spack Installer
+        path: ${{ env.installer_root}}\pkg\Spack.msi
+  execute-installer:
+    needs: make-installer
+    runs-on: windows-latest
+    defaults:
+      run:
+        shell: pwsh
+    steps:
+    - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984
+      with:
+        python-version: 3.9
+    - name: Install Python packages
+      run: |
+        python -m pip install --upgrade pip six pywin32 setuptools
+    - name: Setup installer directory
+      run: |
+        mkdir -p spack_installer
+        echo "spack_installer=$((pwd).Path)\spack_installer" | Out-File -FilePath $Env:GITHUB_ENV -Encoding utf8 -Append
+    - uses: actions/download-artifact@v3
+      with:
+        name: Windows Spack Installer Bundle
+        path: ${{ env.spack_installer }}
+    - name: Execute Bundled Installer
+      run: |
+        $proc = Start-Process ${{ env.spack_installer }}\spack.exe "/install /quiet" -Passthru
+        $handle = $proc.Handle # cache proc.Handle
+        $proc.WaitForExit();
+        $LASTEXITCODE
+      env:
+        ProgressPreference: SilentlyContinue
+    - uses: actions/download-artifact@v3
+      with:
+        name: Windows Spack Installer
+        path: ${{ env.spack_installer }}
+    - name: Execute MSI
+      run: |
+        $proc = Start-Process ${{ env.spack_installer }}\spack.msi "/quiet" -Passthru
+        $handle = $proc.Handle # cache proc.Handle
+        $proc.WaitForExit();
+        $LASTEXITCODE
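The first hunk in this file changes the default shell template. In GitHub Actions, ``defaults.run.shell`` accepts a command template in which ``{0}`` is replaced by each step's script, which is how every step here gets the Windows test setup sourced first. A minimal sketch of the mechanism; the demo job and echo step are illustrative assumptions.

```yaml
defaults:
  run:
    # Each step's `run:` script is substituted for {0}, so every step first
    # dot-sources the Windows test setup before executing its own commands.
    shell: powershell Invoke-Expression -Command ".\share\spack\qa\windows_test_setup.ps1"; {0}

jobs:
  demo:
    runs-on: windows-latest
    steps:
      - run: echo "runs after windows_test_setup.ps1"  # illustrative step
```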
@@ -10,7 +10,6 @@ def getpywin():
     try:
         import win32con  # noqa: F401
     except ImportError:
         print("pyWin32 not installed but is required...\nInstalling via pip:")
-        subprocess.check_call([sys.executable, "-m", "pip", "-q", "install", "--upgrade", "pip"])
         subprocess.check_call([sys.executable, "-m", "pip", "-q", "install", "pywin32"])
@@ -52,6 +52,7 @@ if defined py_path (

 if defined py_exe (
   "%py_exe%" "%SPACK_ROOT%\bin\haspywin.py"
+  "%py_exe%" "%SPACK_ROOT%\bin\spack" external find python >NUL
 )

 set "EDITOR=notepad"
@@ -54,11 +54,6 @@ config:
   # are that it precludes its use as a system package and its ability to be
   # pip installable.
   #
-  # In Spack environment files, chaining onto existing system Spack
-  # installations, the $env variable can be used to download, cache and build
-  # into user-writable paths that are relative to the currently active
-  # environment.
-  #
   # In any case, if the username is not already in the path, Spack will append
   # the value of `$user` in an attempt to avoid potential conflicts between
   # users in shared temporary spaces.
@@ -1,21 +0,0 @@
-# -------------------------------------------------------------------------
-# This file controls default concretization preferences for Spack.
-#
-# Settings here are versioned with Spack and are intended to provide
-# sensible defaults out of the box. Spack maintainers should edit this
-# file to keep it current.
-#
-# Users can override these settings by editing the following files.
-#
-# Per-spack-instance settings (overrides defaults):
-#   $SPACK_ROOT/etc/spack/packages.yaml
-#
-# Per-user settings (overrides default and site settings):
-#   ~/.spack/packages.yaml
-# -------------------------------------------------------------------------
-packages:
-  all:
-    compiler:
-    - msvc
-    providers:
-      mpi: [msmpi]
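The header of the deleted file spells out the override chain. As an illustration, a per-user file at ``~/.spack/packages.yaml`` overriding the same keys could look like the sketch below; the values simply mirror the deleted defaults.

```yaml
# ~/.spack/packages.yaml -- per-user override, wins over the shipped defaults
packages:
  all:
    compiler:
    - msvc          # prefer the MSVC compiler for every package
    providers:
      mpi: [msmpi]  # satisfy the mpi virtual with MS-MPI
```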
lib/spack/docs/analyze.rst (new file): 162 additions

@@ -0,0 +1,162 @@
.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
   Spack Project Developers. See the top-level COPYRIGHT file for details.

   SPDX-License-Identifier: (Apache-2.0 OR MIT)

.. _analyze:

=======
Analyze
=======


The analyze command is a front-end to various tools that let us analyze
package installations. Each analyzer is a module for a different kind
of analysis that can be done on a package installation, including (but not
limited to) binary, log, or text analysis. Thus, the analyze command group
allows you to take an existing package install, choose an analyzer,
and extract some output for the package using it.


-----------------
Analyzer Metadata
-----------------

For all analyzers, we write to an ``analyzers`` folder in ``~/.spack``, or the
value that you specify in your spack config at ``config:analyzers_dir``.
For example, here we see the results of running an analysis on zlib:

.. code-block:: console

   $ tree ~/.spack/analyzers/
   └── linux-ubuntu20.04-skylake
       └── gcc-9.3.0
           └── zlib-1.2.11-sl7m27mzkbejtkrajigj3a3m37ygv4u2
               ├── environment_variables
               │   └── spack-analyzer-environment-variables.json
               ├── install_files
               │   └── spack-analyzer-install-files.json
               └── libabigail
                   └── spack-analyzer-libabigail-libz.so.1.2.11.xml


This means that you can always find analyzer output in this folder, and it
is organized with the same logic as the package install it was run for.
If you want to customize this top level folder, simply provide the ``--path``
argument to ``spack analyze run``. The nested organization will be maintained
within your custom root.

-----------------
Listing Analyzers
-----------------

If you aren't familiar with Spack's analyzers, you can quickly list those that
are available:

.. code-block:: console

   $ spack analyze list-analyzers
   install_files            : install file listing read from install_manifest.json
   environment_variables    : environment variables parsed from spack-build-env.txt
   config_args              : config args loaded from spack-configure-args.txt
   libabigail               : Application Binary Interface (ABI) features for objects


In the above, the first three are fairly simple - parsing metadata files from
a package install directory to save

-------------------
Analyzing a Package
-------------------

The analyze command, akin to install, will accept a package spec to perform
an analysis for. The package must be installed. Let's walk through an example
with zlib. We first ask to analyze it. However, since we have more than one
install, we are asked to disambiguate:

.. code-block:: console

   $ spack analyze run zlib
   ==> Error: zlib matches multiple packages.
     Matching packages:
       fz2bs56 zlib@1.2.11%gcc@7.5.0 arch=linux-ubuntu18.04-skylake
       sl7m27m zlib@1.2.11%gcc@9.3.0 arch=linux-ubuntu20.04-skylake
     Use a more specific spec.


We can then specify the spec version that we want to analyze:

.. code-block:: console

   $ spack analyze run zlib/fz2bs56

If you don't provide any specific analyzer names, by default all analyzers
(shown in the ``list-analyzers`` subcommand list) will be run. If an analyzer does not
have any result, it will be skipped. For example, here is a result running for
zlib:

.. code-block:: console

   $ ls ~/.spack/analyzers/linux-ubuntu20.04-skylake/gcc-9.3.0/zlib-1.2.11-sl7m27mzkbejtkrajigj3a3m37ygv4u2/
   spack-analyzer-environment-variables.json
   spack-analyzer-install-files.json
   spack-analyzer-libabigail-libz.so.1.2.11.xml

If you want to run a specific analyzer, ask for it with `--analyzer`. Here we run
spack analyze on libabigail (already installed) _using_ libabigail1

.. code-block:: console

   $ spack analyze run --analyzer abigail libabigail


.. _analyze_monitoring:

----------------------
Monitoring An Analysis
----------------------

For any kind of analysis, you can
use a `spack monitor <https://github.com/spack/spack-monitor>`_ "Spackmon"
as a server to upload the same run metadata to. You can
follow the instructions in the `spack monitor documentation <https://spack-monitor.readthedocs.org>`_
to first create a server along with a username and token for yourself.
You can then use this guide to interact with the server.

You should first export our spack monitor token and username to the environment:

.. code-block:: console

   $ export SPACKMON_TOKEN=50445263afd8f67e59bd79bff597836ee6c05438
   $ export SPACKMON_USER=spacky


By default, the host for your server is expected to be at ``http://127.0.0.1``
with a prefix of ``ms1``, and if this is the case, you can simply add the
``--monitor`` flag to the install command:

.. code-block:: console

   $ spack analyze run --monitor wget

If you need to customize the host or the prefix, you can do that as well:

.. code-block:: console

   $ spack analyze run --monitor --monitor-prefix monitor --monitor-host https://monitor-service.io wget

If your server doesn't have authentication, you can skip it:

.. code-block:: console

   $ spack analyze run --monitor --monitor-disable-auth wget

Regardless of your choice, when you run analyze on an installed package (whether
it was installed with ``--monitor`` or not, you'll see the results generating as they did
before, and a message that the monitor server was pinged:

.. code-block:: console

   $ spack analyze --monitor wget
   ...
   ==> Sending result for wget bin/wget to monitor.
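The new page mentions that the output root is taken from ``config:analyzers_dir``. A hedged sketch of setting it in a Spack ``config.yaml``; the custom path is an assumed example, not a documented default.

```yaml
# config.yaml -- redirect analyzer output away from the ~/.spack default
config:
  analyzers_dir: /scratch/$user/spack-analyzers  # assumed example path
```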
@@ -13,51 +13,49 @@ Some sites may encourage users to set up their own test environments
 before carrying out central installations, or some users may prefer to set
 up these environments on their own motivation. To reduce the load of
 recompiling otherwise identical package specs in different installations,
-installed packages can be put into build cache tarballs, pushed to
+installed packages can be put into build cache tarballs, uploaded to
 your Spack mirror and then downloaded and installed by others.

-Whenever a mirror provides prebuilt packages, Spack will take these packages
-into account during concretization and installation, making ``spack install``
-signficantly faster.
-
---------------------------
-Creating build cache files
---------------------------
-
-.. note::
-
-   We use the terms "build cache" and "mirror" often interchangeably. Mirrors
-   are used during installation both for sources and prebuilt packages. Build
-   caches refer to mirrors that provide prebuilt packages.
-
-
-----------------------
-Creating a build cache
-----------------------
+A compressed tarball of an installed package is created. Tarballs are created
+for all of its link and run dependency packages as well. Compressed tarballs are
+signed with gpg and signature and tarball and put in a ``.spack`` file. Optionally,
+the rpaths (and ids and deps on macOS) can be changed to paths relative to
+the Spack install tree before the tarball is created.

 Build caches are created via:

 .. code-block:: console

-   $ spack buildcache create <path/url/mirror name> <spec>
+   $ spack buildcache create <spec>

-This command takes the locally installed spec and its dependencies, and
-creates tarballs of their install prefixes. It also generates metadata files,
-signed with GPG. These tarballs and metadata files are then pushed to the
-provided binary cache, which can be a local directory or a remote URL.
-
-Here is an example where a build cache is created in a local directory named
-"spack-cache", to which we push the "ninja" spec:
+If you wanted to create a build cache in a local directory, you would provide
+the ``-d`` argument to target that directory, again also specifying the spec.
+Here is an example creating a local directory, "spack-cache" and creating
+build cache files for the "ninja" spec:

 .. code-block:: console

-   $ spack buildcache create --allow-root ./spack-cache ninja
-   ==> Pushing binary packages to file:///home/spackuser/spack/spack-cache/build_cache
+   $ mkdir -p ./spack-cache
+   $ spack buildcache create -d ./spack-cache ninja
+   ==> Buildcache files will be output to file:///home/spackuser/spack/spack-cache/build_cache
    gpgconf: socketdir is '/run/user/1000/gnupg'
    gpg: using "E6DF6A8BD43208E4D6F392F23777740B7DBD643D" as default secret key for signing

-Not that ``ninja`` must be installed locally for this to work.
+Note that the targeted spec must already be installed. Once you have a build cache,
+you can add it as a mirror, discussed next.

-We're using the ``--allow-root`` flag to tell Spack that is OK when any of
-the binaries we're pushing contain references to the local Spack install
-directory.
+.. warning::

-Once you have a build cache, you can add it as a mirror, discussed next.
+   Spack improved the format used for binary caches in v0.18. The entire v0.18 series
+   will be able to verify and install binary caches both in the new and in the old format.
+   Support for using the old format is expected to end in v0.19, so we advise users to
+   recreate relevant buildcaches using Spack v0.18 or higher.

 ---------------------------------------
 Finding or installing build cache files

@@ -68,10 +66,10 @@ with:

 .. code-block:: console

-   $ spack mirror add <name> <url or path>
+   $ spack mirror add <name> <url>


-Both web URLs and local paths on the filesystem can be specified. In the previous
+Note that the url can be a web url _or_ a local filesystem location. In the previous
 example, you might add the directory "spack-cache" and call it ``mymirror``:

@@ -96,7 +94,7 @@ this new build cache as follows:

 .. code-block:: console

-   $ spack buildcache update-index ./spack-cache
+   $ spack buildcache update-index -d spack-cache/

 Now you can use list:

@@ -107,38 +105,46 @@ Now you can use list:
    -- linux-ubuntu20.04-skylake / gcc@9.3.0 ------------------------
    ninja@1.10.2

-With ``mymirror`` configured and an index available, Spack will automatically
-use it during concretization and installation. That means that you can expect
-``spack install ninja`` to fetch prebuilt packages from the mirror. Let's
-verify by re-installing ninja:
+Great! So now let's say you have a different spack installation, or perhaps just
+a different environment for the same one, and you want to install a package from
+that build cache. Let's first uninstall the actual library "ninja" to see if we can
+re-install it from the cache.

 .. code-block:: console

    $ spack uninstall ninja
-   $ spack install ninja
-   ==> Installing ninja-1.11.1-yxferyhmrjkosgta5ei6b4lqf6bxbscz
-   ==> Fetching file:///home/spackuser/spack/spack-cache/build_cache/linux-ubuntu20.04-skylake-gcc-9.3.0-ninja-1.10.2-yxferyhmrjkosgta5ei6b4lqf6bxbscz.spec.json.sig
-   gpg: Signature made Do 12 Jan 2023 16:01:04 CET
-   gpg: using RSA key 61B82B2B2350E171BD17A1744E3A689061D57BF6
-   gpg: Good signature from "example (GPG created for Spack) <example@example.com>" [ultimate]
-   ==> Fetching file:///home/spackuser/spack/spack-cache/build_cache/linux-ubuntu20.04-skylake/gcc-9.3.0/ninja-1.10.2/linux-ubuntu20.04-skylake-gcc-9.3.0-ninja-1.10.2-yxferyhmrjkosgta5ei6b4lqf6bxbscz.spack
-   ==> Extracting ninja-1.10.2-yxferyhmrjkosgta5ei6b4lqf6bxbscz from binary cache
-   ==> ninja: Successfully installed ninja-1.11.1-yxferyhmrjkosgta5ei6b4lqf6bxbscz
-   Search: 0.00s. Fetch: 0.17s. Install: 0.12s. Total: 0.29s
-   [+] /home/harmen/spack/opt/spack/linux-ubuntu20.04-skylake/gcc-9.3.0/ninja-1.11.1-yxferyhmrjkosgta5ei6b4lqf6bxbscz
-
-It worked! You've just completed a full example of creating a build cache with
-a spec of interest, adding it as a mirror, updating its index, listing the contents,
-and finally, installing from it.
-
-By default Spack falls back to building from sources when the mirror is not available
-or when the package is simply not already available. To force Spack to only install
-prebuilt packages, you can use
+And now reinstall from the buildcache

 .. code-block:: console

-   $ spack install --use-buildcache only <package>
+   $ spack buildcache install ninja
+   ==> buildcache spec(s) matching ninja
+   ==> Fetching file:///home/spackuser/spack/spack-cache/build_cache/linux-ubuntu20.04-skylake/gcc-9.3.0/ninja-1.10.2/linux-ubuntu20.04-skylake-gcc-9.3.0-ninja-1.10.2-i4e5luour7jxdpc3bkiykd4imke3mkym.spack
+   ####################################################################################################################################### 100.0%
+   ==> Installing buildcache for spec ninja@1.10.2%gcc@9.3.0 arch=linux-ubuntu20.04-skylake
+   gpgconf: socketdir is '/run/user/1000/gnupg'
+   gpg: Signature made Tue 23 Mar 2021 10:16:29 PM MDT
+   gpg: using RSA key E6DF6A8BD43208E4D6F392F23777740B7DBD643D
+   gpg: Good signature from "spackuser (GPG created for Spack) <spackuser@noreply.users.github.com>" [ultimate]
+
+It worked! You've just completed a full example of creating a build cache with
+a spec of interest, adding it as a mirror, updating it's index, listing the contents,
+and finally, installing from it.
+
+Note that the above command is intended to install a particular package to a
+build cache you have created, and not to install a package from a build cache.
+For the latter, once a mirror is added, by default when you do ``spack install`` the ``--use-cache``
+flag is set, and you will install a package from a build cache if it is available.
+If you want to always use the cache, you can do:
+
+.. code-block:: console
+
+   $ spack install --cache-only <package>

 For example, to combine all of the commands above to add the E4S build cache
 and then install from it exclusively, you would do:

@@ -147,7 +153,7 @@ and then install from it exclusively, you would do:

    $ spack mirror add E4S https://cache.e4s.io
    $ spack buildcache keys --install --trust
-   $ spack install --use-buildache only <package>
+   $ spack install --cache-only <package>

 We use ``--install`` and ``--trust`` to say that we are installing keys to our
 keyring, and trusting all downloaded keys.
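``spack mirror add`` ultimately records an entry in ``mirrors.yaml``. As a hedged sketch, the directory-based mirror from the example above would correspond to something like the following; the file placement and exact schema details are assumptions.

```yaml
# mirrors.yaml -- equivalent of `spack mirror add mymirror ./spack-cache`
mirrors:
  mymirror: file:///home/spackuser/spack/spack-cache  # local build cache directory
```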
@@ -272,7 +272,7 @@ Selection of the target microarchitectures
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

 The options under the ``targets`` attribute control which targets are considered during a solve.
-Currently the options in this section are only configurable from the ``concretizer.yaml`` file
+Currently the options in this section are only configurable from the ``concretization.yaml`` file
 and there are no corresponding command line arguments to enable them for a single solve.

 The ``granularity`` option can take two possible values: ``microarchitectures`` and ``generic``.
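For context, a sketch of the configuration block this hunk documents, assuming the ``targets`` attribute sits under a top-level ``concretizer`` section as the surrounding text suggests; the key placement is an assumption.

```yaml
concretizer:
  targets:
    # "microarchitectures" lets the solver pick specific targets such as
    # skylake; "generic" restricts it to family-level targets such as x86_64.
    granularity: microarchitectures
```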
@@ -36,7 +36,7 @@
 if not os.path.exists(link_name):
     os.symlink(os.path.abspath("../../.."), link_name, target_is_directory=True)
 sys.path.insert(0, os.path.abspath("_spack_root/lib/spack/external"))
-sys.path.insert(0, os.path.abspath("_spack_root/lib/spack/external/_vendoring"))
+sys.path.insert(0, os.path.abspath("_spack_root/lib/spack/external/pytest-fallback"))
 sys.path.append(os.path.abspath("_spack_root/lib/spack/"))

 # Add the Spack bin directory to the path so that we can use its output in docs.

@@ -74,16 +74,8 @@
     "--force",  # Overwrite existing files
     "--no-toc",  # Don't create a table of contents file
     "--output-dir=.",  # Directory to place all output
+    "--module-first",  # emit module docs before submodule docs
 ]
-sphinx_apidoc(
-    apidoc_args
-    + [
-        "_spack_root/lib/spack/spack",
-        "_spack_root/lib/spack/spack/test/*.py",
-        "_spack_root/lib/spack/spack/test/cmd/*.py",
-    ]
-)
+sphinx_apidoc(apidoc_args + ["_spack_root/lib/spack/spack"])
 sphinx_apidoc(apidoc_args + ["_spack_root/lib/spack/llnl"])

 # Enable todo items

@@ -208,14 +200,12 @@ def setup(sphinx):
     ("py:class", "_frozen_importlib_external.SourceFileLoader"),
     ("py:class", "clingo.Control"),
+    ("py:class", "six.moves.urllib.parse.ParseResult"),
+    ("py:class", "TextIO"),
     # Spack classes that are private and we don't want to expose
     ("py:class", "spack.provider_index._IndexBase"),
     ("py:class", "spack.repo._PrependFileLoader"),
-    ("py:class", "spack.build_systems._checks.BaseBuilder"),
-    # Spack classes that intersphinx is unable to resolve
-    ("py:class", "spack.version.VersionBase"),
-    ("py:class", "spack.spec.DependencySpec"),
 ]

 # The reST default role (used for this markup: `text`) to use for all documents.
@@ -394,7 +394,7 @@ are indicated at the start of the path with ``~`` or ``~user``.
 Spack-specific variables
 ^^^^^^^^^^^^^^^^^^^^^^^^

-Spack understands over a dozen special variables. These are:
+Spack understands several special variables. These are:

 * ``$env``: name of the currently active :ref:`environment <environments>`
 * ``$spack``: path to the prefix of this Spack installation
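A hedged sketch of the two variables named in this hunk being used inside a ``config.yaml``; the keys shown are assumed examples of where such path variables commonly appear, not documented defaults.

```yaml
config:
  # $spack expands to the prefix of this Spack installation
  source_cache: $spack/var/spack/cache
  # $env refers to the currently active environment (see the list above)
  build_stage: $env/stage  # assumed example placement
```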
@@ -175,11 +175,14 @@ Spec-related modules
 ^^^^^^^^^^^^^^^^^^^^

 :mod:`spack.spec`
-  Contains :class:`~spack.spec.Spec`. Also implements most of the logic for concretization
+  Contains :class:`~spack.spec.Spec` and :class:`~spack.spec.SpecParser`.
+  Also implements most of the logic for normalization and concretization
   of specs.

-:mod:`spack.parser`
-  Contains :class:`~spack.parser.SpecParser` and functions related to parsing specs.
+:mod:`spack.parse`
+  Contains some base classes for implementing simple recursive descent
+  parsers: :class:`~spack.parse.Parser` and :class:`~spack.parse.Lexer`.
+  Used by :class:`~spack.spec.SpecParser`.

 :mod:`spack.concretize`
   Contains :class:`~spack.concretize.Concretizer` implementation,

@@ -232,7 +235,7 @@ Spack Subcommands
 Unit tests
 ^^^^^^^^^^

-``spack.test``
+:mod:`spack.test`
   Implements Spack's test suite. Add a module and put its name in
   the test suite in ``__init__.py`` to add more unit tests.
@@ -67,6 +67,7 @@ or refer to the full manual below.
    build_settings
    environments
    containers
+   monitoring
    mirrors
    module_file_support
    repositories

@@ -77,6 +78,12 @@ or refer to the full manual below.
    extensions
    pipelines

+.. toctree::
+   :maxdepth: 2
+   :caption: Research
+
+   analyze
+
 .. toctree::
    :maxdepth: 2
    :caption: Contributing
lib/spack/docs/monitoring.rst (new file): 265 additions

@@ -0,0 +1,265 @@
.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
   Spack Project Developers. See the top-level COPYRIGHT file for details.

   SPDX-License-Identifier: (Apache-2.0 OR MIT)

.. _monitoring:

==========
Monitoring
==========

You can use a `spack monitor <https://github.com/spack/spack-monitor>`_ "Spackmon"
server to store a database of your packages, builds, and associated metadata
for provenance, research, or some other kind of development. You should
follow the instructions in the `spack monitor documentation <https://spack-monitor.readthedocs.org>`_
to first create a server along with a username and token for yourself.
You can then use this guide to interact with the server.

-------------------
Analysis Monitoring
-------------------

To read about how to monitor an analysis (meaning you want to send analysis results
to a server) see :ref:`analyze_monitoring`.

---------------------
Monitoring An Install
---------------------

Since an install is typically when you build packages, we logically want
to tell spack to monitor during this step. Let's start with an example
where we want to monitor the install of hdf5. Unless you have disabled authentication
for the server, we first want to export our spack monitor token and username to the environment:

.. code-block:: console

   $ export SPACKMON_TOKEN=50445263afd8f67e59bd79bff597836ee6c05438
   $ export SPACKMON_USER=spacky


By default, the host for your server is expected to be at ``http://127.0.0.1``
with a prefix of ``ms1``, and if this is the case, you can simply add the
``--monitor`` flag to the install command:

.. code-block:: console

   $ spack install --monitor hdf5


If you need to customize the host or the prefix, you can do that as well:

.. code-block:: console

   $ spack install --monitor --monitor-prefix monitor --monitor-host https://monitor-service.io hdf5


As a precaution, we cut out early in the spack client if you have not provided
authentication credentials. For example, if you run the command above without
exporting your username or token, you'll see:

.. code-block:: console

   ==> Error: You are required to export SPACKMON_TOKEN and SPACKMON_USER

This extra check is to ensure that we don't start any builds,
and then discover that you forgot to export your token. However, if
your monitoring server has authentication disabled, you can tell this to
the client to skip this step:

.. code-block:: console

   $ spack install --monitor --monitor-disable-auth hdf5

If the service is not running, you'll cleanly exit early - the install will
not continue if you've asked it to monitor and there is no service.
For example, here is what you'll see if the monitoring service is not running:

.. code-block:: console

   [Errno 111] Connection refused


If you want to continue builds (and stop monitoring) you can set the ``--monitor-keep-going``
flag.

.. code-block:: console

   $ spack install --monitor --monitor-keep-going hdf5

This could mean that if a request fails, you only have partial or no data
added to your monitoring database. This setting will not be applied to the
first request to check if the server is running, but to subsequent requests.
If you don't have a monitor server running and you want to build, simply
don't provide the ``--monitor`` flag! Finally, if you want to provide one or
more tags to your build, you can do:

.. code-block:: console

   # Add one tag, "pizza"
   $ spack install --monitor --monitor-tags pizza hdf5

   # Add two tags, "pizza" and "pasta"
   $ spack install --monitor --monitor-tags pizza,pasta hdf5


----------------------------
Monitoring with Containerize
----------------------------

The same argument group is available to add to a containerize command.

^^^^^^
Docker
^^^^^^

To add monitoring to a Docker container recipe generation using the defaults,
and assuming a monitor server running on localhost, you would
start with a spack.yaml in your present working directory:

.. code-block:: yaml

   spack:
     specs:
       - samtools

And then do:

.. code-block:: console

   # preview first
   spack containerize --monitor

   # and then write to a Dockerfile
   spack containerize --monitor > Dockerfile


The install command will be edited to include commands for enabling monitoring.
However, getting secrets into the container for your monitor server is something
that should be done carefully. Specifically you should:

- Never try to define secrets as ENV, ARG, or using ``--build-arg``
- Do not try to get the secret into the container via a "temporary" file that you remove (it in fact will still exist in a layer)

Instead, it's recommended to use buildkit `as explained here <https://pythonspeed.com/articles/docker-build-secrets/>`_.
You'll need to again export environment variables for your spack monitor server:

.. code-block:: console

   $ export SPACKMON_TOKEN=50445263afd8f67e59bd79bff597836ee6c05438
   $ export SPACKMON_USER=spacky

And then use buildkit along with your build and identifying the name of the secret:

.. code-block:: console

   $ DOCKER_BUILDKIT=1 docker build --secret id=st,env=SPACKMON_TOKEN --secret id=su,env=SPACKMON_USER -t spack/container .

The secrets are expected to come from your environment, and then will be temporarily mounted and available
at ``/run/secrets/<name>``. If you forget to supply them (and authentication is required) the build
will fail. If you need to build on your host (and interact with a spack monitor at localhost) you'll
need to tell Docker to use the host network:

.. code-block:: console

   $ DOCKER_BUILDKIT=1 docker build --network="host" --secret id=st,env=SPACKMON_TOKEN --secret id=su,env=SPACKMON_USER -t spack/container .


^^^^^^^^^^^
Singularity
^^^^^^^^^^^

To add monitoring to a Singularity container build, the spack.yaml needs to
be modified slightly to specify wanting a different format:


.. code-block:: yaml

   spack:
     specs:
       - samtools
     container:
       format: singularity


Again, generate the recipe:


.. code-block:: console

   # preview first
   $ spack containerize --monitor

   # then write to a Singularity recipe
   $ spack containerize --monitor > Singularity


Singularity doesn't have a direct way to define secrets at build time, so we have
to do a bit of a manual command to add a file, source secrets in it, and remove it.
Since Singularity doesn't have layers like Docker, deleting a file will truly
remove it from the container and history. So let's say we have this file,
``secrets.sh``:

.. code-block:: console

   # secrets.sh
   export SPACKMON_USER=spack
   export SPACKMON_TOKEN=50445263afd8f67e59bd79bff597836ee6c05438


We would then generate the Singularity recipe, and add a files section,
a source of that file at the start of ``%post``, and **importantly**
a removal of the final at the end of that same section.

.. code-block::

   Bootstrap: docker
   From: spack/ubuntu-bionic:latest
   Stage: build

   %files
       secrets.sh /opt/secrets.sh

   %post
       . /opt/secrets.sh

       # spack install commands are here
       ...

       # Don't forget to remove here!
       rm /opt/secrets.sh


You can then build the container as your normally would.

.. code-block:: console

   $ sudo singularity build container.sif Singularity


------------------
Monitoring Offline
------------------

In the case that you want to save monitor results to your filesystem
and then upload them later (perhaps you are in an environment where you don't
have credentials or it isn't safe to use them) you can use the ``--monitor-save-local``
flag.

.. code-block:: console

   $ spack install --monitor --monitor-save-local hdf5

This will save results in a subfolder, "monitor" in your designated spack
reports folder, which defaults to ``$HOME/.spack/reports/monitor``. When
you are ready to upload them to a spack monitor server:


.. code-block:: console

   $ spack monitor upload ~/.spack/reports/monitor


You can choose the root directory of results as shown above, or a specific
subdirectory. The command accepts other arguments to specify configuration
for the monitor.
@@ -34,15 +34,6 @@ ubiquitous in the scientific software community. Second, it's a modern
 language and has many powerful features to help make package writing
 easy.

-.. warning::
-
-   As a general rule, packages should install the software *from source*.
-   The only exception is for proprietary software (e.g., vendor compilers).
-
-   If a special build system needs to be added in order to support building
-   a package from source, then the associated code and recipe need to be added
-   first.
-

 .. _installation_procedure:
@@ -2406,15 +2397,13 @@ this because uninstalling the dependency would break the package.

 ``build``, ``link``, and ``run`` dependencies all affect the hash of Spack
 packages (along with ``sha256`` sums of patches and archives used to build the
-package, and a `canonical hash <https://github.com/spack/spack/pull/28156>`_ of
+package, and a [canonical hash](https://github.com/spack/spack/pull/28156) of
 the ``package.py`` recipes). ``test`` dependencies do not affect the package
 hash, as they are only used to construct a test environment *after* building and
 installing a given package installation. Older versions of Spack did not include
 build dependencies in the hash, but this has been
-`fixed <https://github.com/spack/spack/pull/28504>`_ as of |Spack v0.18|_.
-
-.. |Spack v0.18| replace:: Spack ``v0.18``
-.. _Spack v0.18: https://github.com/spack/spack/releases/tag/v0.18.0
+[fixed](https://github.com/spack/spack/pull/28504) as of [Spack
+``v0.18``](https://github.com/spack/spack/releases/tag/v0.18.0)

 If the dependency type is not specified, Spack uses a default of
 ``('build', 'link')``. This is the common case for compiler languages.
@@ -3604,70 +3593,6 @@ In the example above ``Cp2k`` inherits all the conflicts and variants that ``Cud

.. _install-environment:

--------------------------------
Package Inheritance
--------------------------------

Spack packages are Python classes, and you can use inheritance with them just as you can
with any Python class. This is common when you have your own package :ref:`repository
<repositories>` with packages that extend Spack's ``builtin`` packages.

You can extend a ``builtin`` package like this:

.. code-block:: python

   from spack.pkg.builtin.mpich import Mpich

   class MyPackage(Mpich):
       version("1.0", "0209444070d9c8af9b62c94095a217e3bc6843692d1e3fdc1ff5371e03aac47c")
       version("2.0", "5dda192154047d6296ba14a4ab2d869c6926fd7f44dce8ce94f63aae2e359c5b")

Every repository registered with Spack ends up in a submodule of ``spack.pkg`` with a
name corresponding to its :ref:`namespace <namespaces>`. So, if you have a different
repository with namespace ``myrepo`` that you want to import packages from, you might write:

.. code-block:: python

   from spack.pkg.myrepo.my_package import MyPackage

   class NewPackage(MyPackage):
       version("3.0", "08721a102fefcea2ae4add8c9cc548df77e9224f5385ad0872a9150fdd26a415")
       version("4.0", "9cc39dd33dd4227bb82301d285437588d705290846d22ab6b8791c7e631ce385")

^^^^^^^^^^^^^^^^^^^^^^^^
``disinherit``
^^^^^^^^^^^^^^^^^^^^^^^^

When you inherit from a package in Spack, you inherit all the metadata from its
directives, including ``version``, ``provides``, ``depends_on``, ``conflicts``, etc. For
example, ``NewPackage`` above will have four versions: ``1.0`` and ``2.0`` inherited
from ``MyPackage``, as well as ``3.0`` and ``4.0`` defined in ``NewPackage``.

If you do not want your package to define all the same things as its base class, you can
use the ``disinherit`` directive to start fresh in your subclass:

.. code-block:: python

   from spack.pkg.myrepo.my_package import MyPackage

   class NewerPackage(MyPackage):
       disinherit("versions")  # don't inherit any versions from MyPackage

       version("5.0", "08721a102fefcea2ae4add8c9cc548df77e9224f5385ad0872a9150fdd26a415")
       version("6.0", "9cc39dd33dd4227bb82301d285437588d705290846d22ab6b8791c7e631ce385")

Now, ``NewerPackage`` will have **only** versions ``5.0`` and ``6.0``, and will not
inherit ``1.0`` or ``2.0`` from ``MyPackage``. You can ``disinherit`` many different
properties from base packages. The full list of options is (a short sketch follows
the list):

* ``conflicts``
* ``dependencies``
* ``extendees``
* ``patches``
* ``provided``
* ``resources``
* ``variants``
* ``versions``
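As a minimal sketch of disinheriting another property (reusing the hypothetical
``MyPackage`` from above), the same pattern works for any option in the list,
e.g. variants:

.. code-block:: python

   from spack.pkg.myrepo.my_package import MyPackage

   class LeanPackage(MyPackage):
       disinherit("variants")  # keep versions, dependencies, etc.; define variants fresh

       variant("shared", default=True, description="Build shared libraries")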

-----------------------
The build environment
-----------------------
@@ -184,48 +184,13 @@ simply run the following commands:
.. code-block:: console

   $ spack env activate myenv
   $ spack concretize --fresh --force
   $ spack concretize --force
   $ spack install

The ``--fresh`` flag tells Spack to use the latest version of every package
where possible instead of trying to optimize for reuse of existing installed
packages.

The ``--force`` flag in addition tells Spack to overwrite its previous
concretization decisions, allowing you to choose a new version of Python.
If any of the new packages like Bash are already installed, ``spack install``
won't re-install them; it will keep the symlinks in place.

-----------------------------------
Updating & Cleaning Up Old Packages
-----------------------------------

If you're looking to mimic the behavior of Homebrew, you may also want to
clean up out-of-date packages from your environment after an upgrade. To
upgrade your entire software stack within an environment and clean up old
package versions, simply run the following commands:

.. code-block:: console

   $ spack env activate myenv
   $ spack mark -i --all
   $ spack concretize --fresh --force
   $ spack install
   $ spack gc

Running ``spack mark -i --all`` tells Spack to mark all of the existing
packages within an environment as "implicitly" installed. This tells
Spack's garbage collection system that these packages should be cleaned up.

Don't worry, however: this will not remove your entire environment.
Running ``spack install`` will reexamine your Spack environment after
a fresh concretization and will re-mark any packages that should remain
installed as "explicitly" installed.

**Note:** if you use multiple Spack environments, you should re-run ``spack install``
in each of your environments prior to running ``spack gc`` to prevent Spack
from uninstalling any shared packages that are no longer required by the
environment you just upgraded.
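A minimal sketch of that workflow, assuming two hypothetical environments named
``env1`` and ``env2``:

.. code-block:: console

   $ spack env activate env1
   $ spack install
   $ spack env deactivate
   $ spack env activate env2
   $ spack install
   $ spack env deactivate
   $ spack gc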
The ``--force`` flag tells Spack to overwrite its previous concretization
decisions, allowing you to choose a new version of Python. If any of the new
packages like Bash are already installed, ``spack install`` won't re-install
them; it will keep the symlinks in place.

--------------
Uninstallation
@@ -91,8 +91,6 @@ packages and use the first valid file:
to eventually support URLs in ``repos.yaml``, so that you can easily
point to remote package repositories, but that is not yet implemented.

.. _namespaces:

---------------------
Namespaces
---------------------
@@ -428,3 +426,36 @@ By path:

   $ spack repo list
   ==> 1 package repository.
   builtin    ~/spack/var/spack/repos/builtin

--------------------------------
Repo namespaces and Python
--------------------------------

You may have noticed that namespace notation for repositories is similar
to the notation for namespaces in Python. As it turns out, you *can*
treat Spack repositories like Python packages; this is how they are
implemented.

You could, for example, extend a ``builtin`` package in your own
repository:

.. code-block:: python

   from spack.pkg.builtin.mpich import Mpich

   class MyPackage(Mpich):
       ...

Spack repo namespaces are actually Python namespaces tacked on under
``spack.pkg``. The search semantics of ``repos.yaml`` are implemented
using Python's built-in `sys.path
<https://docs.python.org/2/library/sys.html#sys.path>`_ search. The
:py:mod:`spack.repo` module implements a custom `Python importer
<https://docs.python.org/2/library/imp.html>`_.

.. warning::

   The mechanism for extending packages is not yet extensively tested,
   and extending packages across repositories imposes inter-repo
   dependencies, which may be hard to manage. Use this feature at your
   own risk, but let us know if you have a use case for it.
57
lib/spack/external/__init__.py
vendored
@@ -11,14 +11,25 @@

* Homepage: https://altgraph.readthedocs.io/en/latest/index.html
* Usage: dependency of macholib
* Version: 0.17.3
* Version: 0.17.2

archspec
--------

* Homepage: https://pypi.python.org/pypi/archspec
* Usage: Labeling, comparison and detection of microarchitectures
* Version: 0.2.0 (commit e44bad9c7b6defac73696f64078b2fe634719b62)
* Version: 0.2.0 (commit 77640e572725ad97f18e63a04857155752ace045)

argparse
--------

* Homepage: https://pypi.python.org/pypi/argparse
* Usage: We include our own version to be Python 3.X compatible.
* Version: 1.4.0
* Note: This package has been slightly modified to improve
  error message formatting. See the following commit if the
  vendored copy ever needs to be updated again:
  https://github.com/spack/spack/pull/6786/commits/dfcef577b77249106ea4e4c69a6cd9e64fa6c418

astunparse
----------------
@@ -41,7 +52,7 @@

* Homepage: https://github.com/python-attrs/attrs
* Usage: Needed by jsonschema.
* Version: 22.1.0
* Version: 21.2.0 (83d3cd70f90a3f4d19ee8b508e58d1c58821c0ad)

ctest_log_parser
----------------
@@ -56,14 +67,21 @@

* Homepage: https://pypi.python.org/pypi/distro
* Usage: Provides a more stable Linux distribution detection.
* Version: 1.8.0
* Version: 1.6.0 (64946a1e2a9ff529047070657728600e006c99ff)
* Note: Last version supporting Python 2.7

functools32
-----------
* Homepage: https://github.com/MiCHiLU/python-functools32
* Usage: Needed by jsonschema when using Python 2.7.
* Version: 3.2.3-2

jinja2
------

* Homepage: https://pypi.python.org/pypi/Jinja2
* Usage: A modern and designer-friendly templating language for Python.
* Version: 3.0.3 (last version supporting Python 3.6)
* Version: 2.11.3 (last version supporting Python 2.7)

jsonschema
----------
@@ -78,21 +96,44 @@

* Homepage: https://macholib.readthedocs.io/en/latest/index.html#
* Usage: Manipulation of Mach-o binaries for relocating macOS buildcaches on Linux
* Version: 1.16.2
* Version: 1.15.2

markupsafe
----------

* Homepage: https://pypi.python.org/pypi/MarkupSafe
* Usage: Implements an XML/HTML/XHTML Markup safe string for Python.
* Version: 2.0.1 (last version supporting Python 3.6)
* Version: 1.1.1 (last version supporting Python 2.7)

py
--

* Homepage: https://pypi.python.org/pypi/py
* Usage: Needed by pytest. Library with cross-python path,
  ini-parsing, io, code, and log facilities.
* Version: 1.4.34 (last version supporting Python 2.6)
* Note: This package has been modified:
  * https://github.com/pytest-dev/py/pull/186 was backported

pyrsistent
----------

* Homepage: http://github.com/tobgu/pyrsistent/
* Usage: Needed by `jsonschema`
* Version: 0.18.0
* Version: 0.16.1 (last version supporting Python 2.7)
* Note: We only include the parts needed for `jsonschema`.

pytest
------

* Homepage: https://pypi.python.org/pypi/pytest
* Usage: Testing framework used by Spack.
* Version: 3.2.5 (last version supporting Python 2.6)
* Note: This package has been slightly modified:
  * We improve Python 2.6 compatibility. See:
    https://github.com/spack/spack/pull/6801.
  * We have patched pytest not to depend on setuptools. See:
    https://github.com/spack/spack/pull/15612

ruamel.yaml
------
@@ -1 +0,0 @@
__version__ = '0.18.0'
@@ -1 +0,0 @@
from _pyrsistent_version import *
1
lib/spack/external/_vendoring/altgraph.pyi
vendored
@@ -1 +0,0 @@
from altgraph import *
18
lib/spack/external/_vendoring/altgraph/LICENSE
vendored
@@ -1,18 +0,0 @@
Copyright (c) 2004 Istvan Albert unless otherwise noted.
Copyright (c) 2006-2010 Bob Ippolito
Copyright (2) 2010-2020 Ronald Oussoren, et. al.

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to
deal in the Software without restriction, including without limitation the
rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the
Software is furnished to do so.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
IN THE SOFTWARE.
486
lib/spack/external/_vendoring/attr/__init__.pyi
vendored
@@ -1,486 +0,0 @@
import sys

from typing import (
    Any,
    Callable,
    ClassVar,
    Dict,
    Generic,
    List,
    Mapping,
    Optional,
    Protocol,
    Sequence,
    Tuple,
    Type,
    TypeVar,
    Union,
    overload,
)

# `import X as X` is required to make these public
from . import converters as converters
from . import exceptions as exceptions
from . import filters as filters
from . import setters as setters
from . import validators as validators
from ._cmp import cmp_using as cmp_using
from ._version_info import VersionInfo

__version__: str
__version_info__: VersionInfo
__title__: str
__description__: str
__url__: str
__uri__: str
__author__: str
__email__: str
__license__: str
__copyright__: str

_T = TypeVar("_T")
_C = TypeVar("_C", bound=type)

_EqOrderType = Union[bool, Callable[[Any], Any]]
_ValidatorType = Callable[[Any, Attribute[_T], _T], Any]
_ConverterType = Callable[[Any], Any]
_FilterType = Callable[[Attribute[_T], _T], bool]
_ReprType = Callable[[Any], str]
_ReprArgType = Union[bool, _ReprType]
_OnSetAttrType = Callable[[Any, Attribute[Any], Any], Any]
_OnSetAttrArgType = Union[
    _OnSetAttrType, List[_OnSetAttrType], setters._NoOpType
]
_FieldTransformer = Callable[
    [type, List[Attribute[Any]]], List[Attribute[Any]]
]
# FIXME: in reality, if multiple validators are passed they must be in a list
# or tuple, but those are invariant and so would prevent subtypes of
# _ValidatorType from working when passed in a list or tuple.
_ValidatorArgType = Union[_ValidatorType[_T], Sequence[_ValidatorType[_T]]]

# A protocol to be able to statically accept an attrs class.
class AttrsInstance(Protocol):
    __attrs_attrs__: ClassVar[Any]

# _make --

NOTHING: object

# NOTE: Factory lies about its return type to make this possible:
# `x: List[int] # = Factory(list)`
# Work around mypy issue #4554 in the common case by using an overload.
if sys.version_info >= (3, 8):
    from typing import Literal
    @overload
    def Factory(factory: Callable[[], _T]) -> _T: ...
    @overload
    def Factory(
        factory: Callable[[Any], _T],
        takes_self: Literal[True],
    ) -> _T: ...
    @overload
    def Factory(
        factory: Callable[[], _T],
        takes_self: Literal[False],
    ) -> _T: ...

else:
    @overload
    def Factory(factory: Callable[[], _T]) -> _T: ...
    @overload
    def Factory(
        factory: Union[Callable[[Any], _T], Callable[[], _T]],
        takes_self: bool = ...,
    ) -> _T: ...

# Static type inference support via __dataclass_transform__ implemented as per:
# https://github.com/microsoft/pyright/blob/1.1.135/specs/dataclass_transforms.md
# This annotation must be applied to all overloads of "define" and "attrs"
#
# NOTE: This is a typing construct and does not exist at runtime. Extensions
# wrapping attrs decorators should declare a separate __dataclass_transform__
# signature in the extension module using the specification linked above to
# provide pyright support.
def __dataclass_transform__(
    *,
    eq_default: bool = True,
    order_default: bool = False,
    kw_only_default: bool = False,
    field_descriptors: Tuple[Union[type, Callable[..., Any]], ...] = (()),
) -> Callable[[_T], _T]: ...

class Attribute(Generic[_T]):
    name: str
    default: Optional[_T]
    validator: Optional[_ValidatorType[_T]]
    repr: _ReprArgType
    cmp: _EqOrderType
    eq: _EqOrderType
    order: _EqOrderType
    hash: Optional[bool]
    init: bool
    converter: Optional[_ConverterType]
    metadata: Dict[Any, Any]
    type: Optional[Type[_T]]
    kw_only: bool
    on_setattr: _OnSetAttrType
    def evolve(self, **changes: Any) -> "Attribute[Any]": ...

# NOTE: We had several choices for the annotation to use for type arg:
# 1) Type[_T]
#   - Pros: Handles simple cases correctly
#   - Cons: Might produce less informative errors in the case of conflicting
#     TypeVars e.g. `attr.ib(default='bad', type=int)`
# 2) Callable[..., _T]
#   - Pros: Better error messages than #1 for conflicting TypeVars
#   - Cons: Terrible error messages for validator checks.
#     e.g. attr.ib(type=int, validator=validate_str)
#          -> error: Cannot infer function type argument
# 3) type (and do all of the work in the mypy plugin)
#   - Pros: Simple here, and we could customize the plugin with our own errors.
#   - Cons: Would need to write mypy plugin code to handle all the cases.
# We chose option #1.

# `attr` lies about its return type to make the following possible:
#     attr()    -> Any
#     attr(8)   -> int
#     attr(validator=<some callable>)  -> Whatever the callable expects.
# This makes this type of assignments possible:
#     x: int = attr(8)
#
# This form catches explicit None or no default but with no other arguments
# returns Any.
@overload
def attrib(
    default: None = ...,
    validator: None = ...,
    repr: _ReprArgType = ...,
    cmp: Optional[_EqOrderType] = ...,
    hash: Optional[bool] = ...,
    init: bool = ...,
    metadata: Optional[Mapping[Any, Any]] = ...,
    type: None = ...,
    converter: None = ...,
    factory: None = ...,
    kw_only: bool = ...,
    eq: Optional[_EqOrderType] = ...,
    order: Optional[_EqOrderType] = ...,
    on_setattr: Optional[_OnSetAttrArgType] = ...,
) -> Any: ...

# This form catches an explicit None or no default and infers the type from the
# other arguments.
@overload
def attrib(
    default: None = ...,
    validator: Optional[_ValidatorArgType[_T]] = ...,
    repr: _ReprArgType = ...,
    cmp: Optional[_EqOrderType] = ...,
    hash: Optional[bool] = ...,
    init: bool = ...,
    metadata: Optional[Mapping[Any, Any]] = ...,
    type: Optional[Type[_T]] = ...,
    converter: Optional[_ConverterType] = ...,
    factory: Optional[Callable[[], _T]] = ...,
    kw_only: bool = ...,
    eq: Optional[_EqOrderType] = ...,
    order: Optional[_EqOrderType] = ...,
    on_setattr: Optional[_OnSetAttrArgType] = ...,
) -> _T: ...

# This form catches an explicit default argument.
@overload
def attrib(
    default: _T,
    validator: Optional[_ValidatorArgType[_T]] = ...,
    repr: _ReprArgType = ...,
    cmp: Optional[_EqOrderType] = ...,
    hash: Optional[bool] = ...,
    init: bool = ...,
    metadata: Optional[Mapping[Any, Any]] = ...,
    type: Optional[Type[_T]] = ...,
    converter: Optional[_ConverterType] = ...,
    factory: Optional[Callable[[], _T]] = ...,
    kw_only: bool = ...,
    eq: Optional[_EqOrderType] = ...,
    order: Optional[_EqOrderType] = ...,
    on_setattr: Optional[_OnSetAttrArgType] = ...,
) -> _T: ...

# This form covers type=non-Type: e.g. forward references (str), Any
@overload
def attrib(
    default: Optional[_T] = ...,
    validator: Optional[_ValidatorArgType[_T]] = ...,
    repr: _ReprArgType = ...,
    cmp: Optional[_EqOrderType] = ...,
    hash: Optional[bool] = ...,
    init: bool = ...,
    metadata: Optional[Mapping[Any, Any]] = ...,
    type: object = ...,
    converter: Optional[_ConverterType] = ...,
    factory: Optional[Callable[[], _T]] = ...,
    kw_only: bool = ...,
    eq: Optional[_EqOrderType] = ...,
    order: Optional[_EqOrderType] = ...,
    on_setattr: Optional[_OnSetAttrArgType] = ...,
) -> Any: ...
@overload
def field(
    *,
    default: None = ...,
    validator: None = ...,
    repr: _ReprArgType = ...,
    hash: Optional[bool] = ...,
    init: bool = ...,
    metadata: Optional[Mapping[Any, Any]] = ...,
    converter: None = ...,
    factory: None = ...,
    kw_only: bool = ...,
    eq: Optional[bool] = ...,
    order: Optional[bool] = ...,
    on_setattr: Optional[_OnSetAttrArgType] = ...,
) -> Any: ...

# This form catches an explicit None or no default and infers the type from the
# other arguments.
@overload
def field(
    *,
    default: None = ...,
    validator: Optional[_ValidatorArgType[_T]] = ...,
    repr: _ReprArgType = ...,
    hash: Optional[bool] = ...,
    init: bool = ...,
    metadata: Optional[Mapping[Any, Any]] = ...,
    converter: Optional[_ConverterType] = ...,
    factory: Optional[Callable[[], _T]] = ...,
    kw_only: bool = ...,
    eq: Optional[_EqOrderType] = ...,
    order: Optional[_EqOrderType] = ...,
    on_setattr: Optional[_OnSetAttrArgType] = ...,
) -> _T: ...

# This form catches an explicit default argument.
@overload
def field(
    *,
    default: _T,
    validator: Optional[_ValidatorArgType[_T]] = ...,
    repr: _ReprArgType = ...,
    hash: Optional[bool] = ...,
    init: bool = ...,
    metadata: Optional[Mapping[Any, Any]] = ...,
    converter: Optional[_ConverterType] = ...,
    factory: Optional[Callable[[], _T]] = ...,
    kw_only: bool = ...,
    eq: Optional[_EqOrderType] = ...,
    order: Optional[_EqOrderType] = ...,
    on_setattr: Optional[_OnSetAttrArgType] = ...,
) -> _T: ...

# This form covers type=non-Type: e.g. forward references (str), Any
@overload
def field(
    *,
    default: Optional[_T] = ...,
    validator: Optional[_ValidatorArgType[_T]] = ...,
    repr: _ReprArgType = ...,
    hash: Optional[bool] = ...,
    init: bool = ...,
    metadata: Optional[Mapping[Any, Any]] = ...,
    converter: Optional[_ConverterType] = ...,
    factory: Optional[Callable[[], _T]] = ...,
    kw_only: bool = ...,
    eq: Optional[_EqOrderType] = ...,
    order: Optional[_EqOrderType] = ...,
    on_setattr: Optional[_OnSetAttrArgType] = ...,
) -> Any: ...
@overload
@__dataclass_transform__(order_default=True, field_descriptors=(attrib, field))
def attrs(
    maybe_cls: _C,
    these: Optional[Dict[str, Any]] = ...,
    repr_ns: Optional[str] = ...,
    repr: bool = ...,
    cmp: Optional[_EqOrderType] = ...,
    hash: Optional[bool] = ...,
    init: bool = ...,
    slots: bool = ...,
    frozen: bool = ...,
    weakref_slot: bool = ...,
    str: bool = ...,
    auto_attribs: bool = ...,
    kw_only: bool = ...,
    cache_hash: bool = ...,
    auto_exc: bool = ...,
    eq: Optional[_EqOrderType] = ...,
    order: Optional[_EqOrderType] = ...,
    auto_detect: bool = ...,
    collect_by_mro: bool = ...,
    getstate_setstate: Optional[bool] = ...,
    on_setattr: Optional[_OnSetAttrArgType] = ...,
    field_transformer: Optional[_FieldTransformer] = ...,
    match_args: bool = ...,
) -> _C: ...
@overload
@__dataclass_transform__(order_default=True, field_descriptors=(attrib, field))
def attrs(
    maybe_cls: None = ...,
    these: Optional[Dict[str, Any]] = ...,
    repr_ns: Optional[str] = ...,
    repr: bool = ...,
    cmp: Optional[_EqOrderType] = ...,
    hash: Optional[bool] = ...,
    init: bool = ...,
    slots: bool = ...,
    frozen: bool = ...,
    weakref_slot: bool = ...,
    str: bool = ...,
    auto_attribs: bool = ...,
    kw_only: bool = ...,
    cache_hash: bool = ...,
    auto_exc: bool = ...,
    eq: Optional[_EqOrderType] = ...,
    order: Optional[_EqOrderType] = ...,
    auto_detect: bool = ...,
    collect_by_mro: bool = ...,
    getstate_setstate: Optional[bool] = ...,
    on_setattr: Optional[_OnSetAttrArgType] = ...,
    field_transformer: Optional[_FieldTransformer] = ...,
    match_args: bool = ...,
) -> Callable[[_C], _C]: ...
@overload
@__dataclass_transform__(field_descriptors=(attrib, field))
def define(
    maybe_cls: _C,
    *,
    these: Optional[Dict[str, Any]] = ...,
    repr: bool = ...,
    hash: Optional[bool] = ...,
    init: bool = ...,
    slots: bool = ...,
    frozen: bool = ...,
    weakref_slot: bool = ...,
    str: bool = ...,
    auto_attribs: bool = ...,
    kw_only: bool = ...,
    cache_hash: bool = ...,
    auto_exc: bool = ...,
    eq: Optional[bool] = ...,
    order: Optional[bool] = ...,
    auto_detect: bool = ...,
    getstate_setstate: Optional[bool] = ...,
    on_setattr: Optional[_OnSetAttrArgType] = ...,
    field_transformer: Optional[_FieldTransformer] = ...,
    match_args: bool = ...,
) -> _C: ...
@overload
@__dataclass_transform__(field_descriptors=(attrib, field))
def define(
    maybe_cls: None = ...,
    *,
    these: Optional[Dict[str, Any]] = ...,
    repr: bool = ...,
    hash: Optional[bool] = ...,
    init: bool = ...,
    slots: bool = ...,
    frozen: bool = ...,
    weakref_slot: bool = ...,
    str: bool = ...,
    auto_attribs: bool = ...,
    kw_only: bool = ...,
    cache_hash: bool = ...,
    auto_exc: bool = ...,
    eq: Optional[bool] = ...,
    order: Optional[bool] = ...,
    auto_detect: bool = ...,
    getstate_setstate: Optional[bool] = ...,
    on_setattr: Optional[_OnSetAttrArgType] = ...,
    field_transformer: Optional[_FieldTransformer] = ...,
    match_args: bool = ...,
) -> Callable[[_C], _C]: ...

mutable = define
frozen = define  # they differ only in their defaults

def fields(cls: Type[AttrsInstance]) -> Any: ...
def fields_dict(cls: Type[AttrsInstance]) -> Dict[str, Attribute[Any]]: ...
def validate(inst: AttrsInstance) -> None: ...
def resolve_types(
    cls: _C,
    globalns: Optional[Dict[str, Any]] = ...,
    localns: Optional[Dict[str, Any]] = ...,
    attribs: Optional[List[Attribute[Any]]] = ...,
) -> _C: ...

# TODO: add support for returning a proper attrs class from the mypy plugin
# we use Any instead of _CountingAttr so that e.g. `make_class('Foo',
# [attr.ib()])` is valid
def make_class(
    name: str,
    attrs: Union[List[str], Tuple[str, ...], Dict[str, Any]],
    bases: Tuple[type, ...] = ...,
    repr_ns: Optional[str] = ...,
    repr: bool = ...,
    cmp: Optional[_EqOrderType] = ...,
    hash: Optional[bool] = ...,
    init: bool = ...,
    slots: bool = ...,
    frozen: bool = ...,
    weakref_slot: bool = ...,
    str: bool = ...,
    auto_attribs: bool = ...,
    kw_only: bool = ...,
    cache_hash: bool = ...,
    auto_exc: bool = ...,
    eq: Optional[_EqOrderType] = ...,
    order: Optional[_EqOrderType] = ...,
    collect_by_mro: bool = ...,
    on_setattr: Optional[_OnSetAttrArgType] = ...,
    field_transformer: Optional[_FieldTransformer] = ...,
) -> type: ...

# _funcs --

# TODO: add support for returning TypedDict from the mypy plugin
# FIXME: asdict/astuple do not honor their factory args. Waiting on one of
# these:
# https://github.com/python/mypy/issues/4236
# https://github.com/python/typing/issues/253
# XXX: remember to fix attrs.asdict/astuple too!
def asdict(
    inst: AttrsInstance,
    recurse: bool = ...,
    filter: Optional[_FilterType[Any]] = ...,
    dict_factory: Type[Mapping[Any, Any]] = ...,
    retain_collection_types: bool = ...,
    value_serializer: Optional[
        Callable[[type, Attribute[Any], Any], Any]
    ] = ...,
    tuple_keys: Optional[bool] = ...,
) -> Dict[str, Any]: ...

# TODO: add support for returning NamedTuple from the mypy plugin
def astuple(
    inst: AttrsInstance,
    recurse: bool = ...,
    filter: Optional[_FilterType[Any]] = ...,
    tuple_factory: Type[Sequence[Any]] = ...,
    retain_collection_types: bool = ...,
) -> Tuple[Any, ...]: ...
def has(cls: type) -> bool: ...
def assoc(inst: _T, **changes: Any) -> _T: ...
def evolve(inst: _T, **changes: Any) -> _T: ...

# _config --

def set_run_validators(run: bool) -> None: ...
def get_run_validators() -> bool: ...

# aliases --

s = attributes = attrs
ib = attr = attrib
dataclass = attrs  # Technically, partial(attrs, auto_attribs=True) ;)
13
lib/spack/external/_vendoring/attr/_cmp.pyi
vendored
@@ -1,13 +0,0 @@
from typing import Any, Callable, Optional, Type

_CompareWithType = Callable[[Any, Any], bool]

def cmp_using(
    eq: Optional[_CompareWithType],
    lt: Optional[_CompareWithType],
    le: Optional[_CompareWithType],
    gt: Optional[_CompareWithType],
    ge: Optional[_CompareWithType],
    require_same_type: bool,
    class_name: str,
) -> Type: ...
185
lib/spack/external/_vendoring/attr/_compat.py
vendored
@@ -1,185 +0,0 @@
# SPDX-License-Identifier: MIT


import inspect
import platform
import sys
import threading
import types
import warnings

from collections.abc import Mapping, Sequence  # noqa


PYPY = platform.python_implementation() == "PyPy"
PY36 = sys.version_info[:2] >= (3, 6)
HAS_F_STRINGS = PY36
PY310 = sys.version_info[:2] >= (3, 10)


if PYPY or PY36:
    ordered_dict = dict
else:
    from collections import OrderedDict

    ordered_dict = OrderedDict


def just_warn(*args, **kw):
    warnings.warn(
        "Running interpreter doesn't sufficiently support code object "
        "introspection. Some features like bare super() or accessing "
        "__class__ will not work with slotted classes.",
        RuntimeWarning,
        stacklevel=2,
    )


class _AnnotationExtractor:
    """
    Extract type annotations from a callable, returning None whenever there
    is none.
    """

    __slots__ = ["sig"]

    def __init__(self, callable):
        try:
            self.sig = inspect.signature(callable)
        except (ValueError, TypeError):  # inspect failed
            self.sig = None

    def get_first_param_type(self):
        """
        Return the type annotation of the first argument if it's not empty.
        """
        if not self.sig:
            return None

        params = list(self.sig.parameters.values())
        if params and params[0].annotation is not inspect.Parameter.empty:
            return params[0].annotation

        return None

    def get_return_type(self):
        """
        Return the return type if it's not empty.
        """
        if (
            self.sig
            and self.sig.return_annotation is not inspect.Signature.empty
        ):
            return self.sig.return_annotation

        return None


def make_set_closure_cell():
    """Return a function of two arguments (cell, value) which sets
    the value stored in the closure cell `cell` to `value`.
    """
    # pypy makes this easy. (It also supports the logic below, but
    # why not do the easy/fast thing?)
    if PYPY:

        def set_closure_cell(cell, value):
            cell.__setstate__((value,))

        return set_closure_cell

    # Otherwise gotta do it the hard way.

    # Create a function that will set its first cellvar to `value`.
    def set_first_cellvar_to(value):
        x = value
        return

    # This function will be eliminated as dead code, but
    # not before its reference to `x` forces `x` to be
    # represented as a closure cell rather than a local.
    def force_x_to_be_a_cell():  # pragma: no cover
        return x

    try:
        # Extract the code object and make sure our assumptions about
        # the closure behavior are correct.
        co = set_first_cellvar_to.__code__
        if co.co_cellvars != ("x",) or co.co_freevars != ():
            raise AssertionError  # pragma: no cover

        # Convert this code object to a code object that sets the
        # function's first _freevar_ (not cellvar) to the argument.
        if sys.version_info >= (3, 8):

            def set_closure_cell(cell, value):
                cell.cell_contents = value

        else:
            args = [co.co_argcount]
            args.append(co.co_kwonlyargcount)
            args.extend(
                [
                    co.co_nlocals,
                    co.co_stacksize,
                    co.co_flags,
                    co.co_code,
                    co.co_consts,
                    co.co_names,
                    co.co_varnames,
                    co.co_filename,
                    co.co_name,
                    co.co_firstlineno,
                    co.co_lnotab,
                    # These two arguments are reversed:
                    co.co_cellvars,
                    co.co_freevars,
                ]
            )
            set_first_freevar_code = types.CodeType(*args)

            def set_closure_cell(cell, value):
                # Create a function using the set_first_freevar_code,
                # whose first closure cell is `cell`. Calling it will
                # change the value of that cell.
                setter = types.FunctionType(
                    set_first_freevar_code, {}, "setter", (), (cell,)
                )
                # And call it to set the cell.
                setter(value)

        # Make sure it works on this interpreter:
        def make_func_with_cell():
            x = None

            def func():
                return x  # pragma: no cover

            return func

        cell = make_func_with_cell().__closure__[0]
        set_closure_cell(cell, 100)
        if cell.cell_contents != 100:
            raise AssertionError  # pragma: no cover

    except Exception:
        return just_warn
    else:
        return set_closure_cell


set_closure_cell = make_set_closure_cell()

# Thread-local global to track attrs instances which are already being repr'd.
# This is needed because there is no other (thread-safe) way to pass info
# about the instances that are already being repr'd through the call stack
# in order to ensure we don't perform infinite recursion.
#
# For instance, if an instance contains a dict which contains that instance,
# we need to know that we're already repr'ing the outside instance from within
# the dict's repr() call.
#
# This lives here rather than in _make.py so that the functions in _make.py
# don't have a direct reference to the thread-local in their globals dict.
# If they have such a reference, it breaks cloudpickle.
repr_context = threading.local()
@@ -1,9 +0,0 @@
class VersionInfo:
    @property
    def year(self) -> int: ...
    @property
    def minor(self) -> int: ...
    @property
    def micro(self) -> int: ...
    @property
    def releaselevel(self) -> str: ...
@@ -1,13 +0,0 @@
from typing import Callable, Optional, TypeVar, overload

from . import _ConverterType

_T = TypeVar("_T")

def pipe(*validators: _ConverterType) -> _ConverterType: ...
def optional(converter: _ConverterType) -> _ConverterType: ...
@overload
def default_if_none(default: _T) -> _ConverterType: ...
@overload
def default_if_none(*, factory: Callable[[], _T]) -> _ConverterType: ...
def to_bool(val: str) -> bool: ...
@@ -1,17 +0,0 @@
from typing import Any

class FrozenError(AttributeError):
    msg: str = ...

class FrozenInstanceError(FrozenError): ...
class FrozenAttributeError(FrozenError): ...
class AttrsAttributeNotFoundError(ValueError): ...
class NotAnAttrsClassError(ValueError): ...
class DefaultAlreadySetError(RuntimeError): ...
class UnannotatedAttributeError(RuntimeError): ...
class PythonTooOldError(RuntimeError): ...

class NotCallableError(TypeError):
    msg: str = ...
    value: Any = ...
    def __init__(self, msg: str, value: Any) -> None: ...
@@ -1,6 +0,0 @@
from typing import Any, Union

from . import Attribute, _FilterType

def include(*what: Union[type, Attribute[Any]]) -> _FilterType[Any]: ...
def exclude(*what: Union[type, Attribute[Any]]) -> _FilterType[Any]: ...
19
lib/spack/external/_vendoring/attr/setters.pyi
vendored
@@ -1,19 +0,0 @@
from typing import Any, NewType, NoReturn, TypeVar, cast

from . import Attribute, _OnSetAttrType

_T = TypeVar("_T")

def frozen(
    instance: Any, attribute: Attribute[Any], new_value: Any
) -> NoReturn: ...
def pipe(*setters: _OnSetAttrType) -> _OnSetAttrType: ...
def validate(instance: Any, attribute: Attribute[_T], new_value: _T) -> _T: ...

# convert is allowed to return Any, because they can be chained using pipe.
def convert(
    instance: Any, attribute: Attribute[Any], new_value: Any
) -> Any: ...

_NoOpType = NewType("_NoOpType", object)
NO_OP: _NoOpType
@@ -1,80 +0,0 @@
from typing import (
    Any,
    AnyStr,
    Callable,
    Container,
    ContextManager,
    Iterable,
    List,
    Mapping,
    Match,
    Optional,
    Pattern,
    Tuple,
    Type,
    TypeVar,
    Union,
    overload,
)

from . import _ValidatorType
from . import _ValidatorArgType

_T = TypeVar("_T")
_T1 = TypeVar("_T1")
_T2 = TypeVar("_T2")
_T3 = TypeVar("_T3")
_I = TypeVar("_I", bound=Iterable)
_K = TypeVar("_K")
_V = TypeVar("_V")
_M = TypeVar("_M", bound=Mapping)

def set_disabled(run: bool) -> None: ...
def get_disabled() -> bool: ...
def disabled() -> ContextManager[None]: ...

# To be more precise on instance_of use some overloads.
# If there are more than 3 items in the tuple then we fall back to Any
@overload
def instance_of(type: Type[_T]) -> _ValidatorType[_T]: ...
@overload
def instance_of(type: Tuple[Type[_T]]) -> _ValidatorType[_T]: ...
@overload
def instance_of(
    type: Tuple[Type[_T1], Type[_T2]]
) -> _ValidatorType[Union[_T1, _T2]]: ...
@overload
def instance_of(
    type: Tuple[Type[_T1], Type[_T2], Type[_T3]]
) -> _ValidatorType[Union[_T1, _T2, _T3]]: ...
@overload
def instance_of(type: Tuple[type, ...]) -> _ValidatorType[Any]: ...
def provides(interface: Any) -> _ValidatorType[Any]: ...
def optional(
    validator: Union[_ValidatorType[_T], List[_ValidatorType[_T]]]
) -> _ValidatorType[Optional[_T]]: ...
def in_(options: Container[_T]) -> _ValidatorType[_T]: ...
def and_(*validators: _ValidatorType[_T]) -> _ValidatorType[_T]: ...
def matches_re(
    regex: Union[Pattern[AnyStr], AnyStr],
    flags: int = ...,
    func: Optional[
        Callable[[AnyStr, AnyStr, int], Optional[Match[AnyStr]]]
    ] = ...,
) -> _ValidatorType[AnyStr]: ...
def deep_iterable(
    member_validator: _ValidatorArgType[_T],
    iterable_validator: Optional[_ValidatorType[_I]] = ...,
) -> _ValidatorType[_I]: ...
def deep_mapping(
    key_validator: _ValidatorType[_K],
    value_validator: _ValidatorType[_V],
    mapping_validator: Optional[_ValidatorType[_M]] = ...,
) -> _ValidatorType[_M]: ...
def is_callable() -> _ValidatorType[_T]: ...
def lt(val: _T) -> _ValidatorType[_T]: ...
def le(val: _T) -> _ValidatorType[_T]: ...
def ge(val: _T) -> _ValidatorType[_T]: ...
def gt(val: _T) -> _ValidatorType[_T]: ...
def max_len(length: int) -> _ValidatorType[_T]: ...
def min_len(length: int) -> _ValidatorType[_T]: ...
70
lib/spack/external/_vendoring/attrs/__init__.py
vendored
@@ -1,70 +0,0 @@
# SPDX-License-Identifier: MIT

from attr import (
    NOTHING,
    Attribute,
    Factory,
    __author__,
    __copyright__,
    __description__,
    __doc__,
    __email__,
    __license__,
    __title__,
    __url__,
    __version__,
    __version_info__,
    assoc,
    cmp_using,
    define,
    evolve,
    field,
    fields,
    fields_dict,
    frozen,
    has,
    make_class,
    mutable,
    resolve_types,
    validate,
)
from attr._next_gen import asdict, astuple

from . import converters, exceptions, filters, setters, validators


__all__ = [
    "__author__",
    "__copyright__",
    "__description__",
    "__doc__",
    "__email__",
    "__license__",
    "__title__",
    "__url__",
    "__version__",
    "__version_info__",
    "asdict",
    "assoc",
    "astuple",
    "Attribute",
    "cmp_using",
    "converters",
    "define",
    "evolve",
    "exceptions",
    "Factory",
    "field",
    "fields_dict",
    "fields",
    "filters",
    "frozen",
    "has",
    "make_class",
    "mutable",
    "NOTHING",
    "resolve_types",
    "setters",
    "validate",
    "validators",
]
66
lib/spack/external/_vendoring/attrs/__init__.pyi
vendored
@@ -1,66 +0,0 @@
from typing import (
    Any,
    Callable,
    Dict,
    Mapping,
    Optional,
    Sequence,
    Tuple,
    Type,
)

# Because we need to type our own stuff, we have to make everything from
# attr explicitly public too.
from attr import __author__ as __author__
from attr import __copyright__ as __copyright__
from attr import __description__ as __description__
from attr import __email__ as __email__
from attr import __license__ as __license__
from attr import __title__ as __title__
from attr import __url__ as __url__
from attr import __version__ as __version__
from attr import __version_info__ as __version_info__
from attr import _FilterType
from attr import assoc as assoc
from attr import Attribute as Attribute
from attr import cmp_using as cmp_using
from attr import converters as converters
from attr import define as define
from attr import evolve as evolve
from attr import exceptions as exceptions
from attr import Factory as Factory
from attr import field as field
from attr import fields as fields
from attr import fields_dict as fields_dict
from attr import filters as filters
from attr import frozen as frozen
from attr import has as has
from attr import make_class as make_class
from attr import mutable as mutable
from attr import NOTHING as NOTHING
from attr import resolve_types as resolve_types
from attr import setters as setters
from attr import validate as validate
from attr import validators as validators

# TODO: see definition of attr.asdict/astuple
def asdict(
    inst: Any,
    recurse: bool = ...,
    filter: Optional[_FilterType[Any]] = ...,
    dict_factory: Type[Mapping[Any, Any]] = ...,
    retain_collection_types: bool = ...,
    value_serializer: Optional[
        Callable[[type, Attribute[Any], Any], Any]
    ] = ...,
    tuple_keys: bool = ...,
) -> Dict[str, Any]: ...

# TODO: add support for returning NamedTuple from the mypy plugin
def astuple(
    inst: Any,
    recurse: bool = ...,
    filter: Optional[_FilterType[Any]] = ...,
    tuple_factory: Type[Sequence[Any]] = ...,
    retain_collection_types: bool = ...,
) -> Tuple[Any, ...]: ...
@@ -1,3 +0,0 @@
# SPDX-License-Identifier: MIT

from attr.converters import *  # noqa
@@ -1,3 +0,0 @@
# SPDX-License-Identifier: MIT

from attr.exceptions import *  # noqa
@@ -1,3 +0,0 @@
# SPDX-License-Identifier: MIT

from attr.filters import *  # noqa
@@ -1,3 +0,0 @@
# SPDX-License-Identifier: MIT

from attr.setters import *  # noqa
@@ -1,3 +0,0 @@
# SPDX-License-Identifier: MIT

from attr.validators import *  # noqa
202
lib/spack/external/_vendoring/distro/LICENSE
vendored
@@ -1,202 +0,0 @@
                                 Apache License
                           Version 2.0, January 2004
                        http://www.apache.org/licenses/

   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION

   1. Definitions.

      "License" shall mean the terms and conditions for use, reproduction,
      and distribution as defined by Sections 1 through 9 of this document.

      "Licensor" shall mean the copyright owner or entity authorized by
      the copyright owner that is granting the License.

      "Legal Entity" shall mean the union of the acting entity and all
      other entities that control, are controlled by, or are under common
      control with that entity. For the purposes of this definition,
      "control" means (i) the power, direct or indirect, to cause the
      direction or management of such entity, whether by contract or
      otherwise, or (ii) ownership of fifty percent (50%) or more of the
      outstanding shares, or (iii) beneficial ownership of such entity.

      "You" (or "Your") shall mean an individual or Legal Entity
      exercising permissions granted by this License.

      "Source" form shall mean the preferred form for making modifications,
      including but not limited to software source code, documentation
      source, and configuration files.

      "Object" form shall mean any form resulting from mechanical
      transformation or translation of a Source form, including but
      not limited to compiled object code, generated documentation,
      and conversions to other media types.

      "Work" shall mean the work of authorship, whether in Source or
      Object form, made available under the License, as indicated by a
      copyright notice that is included in or attached to the work
      (an example is provided in the Appendix below).

      "Derivative Works" shall mean any work, whether in Source or Object
      form, that is based on (or derived from) the Work and for which the
      editorial revisions, annotations, elaborations, or other modifications
      represent, as a whole, an original work of authorship. For the purposes
      of this License, Derivative Works shall not include works that remain
      separable from, or merely link (or bind by name) to the interfaces of,
      the Work and Derivative Works thereof.

      "Contribution" shall mean any work of authorship, including
      the original version of the Work and any modifications or additions
      to that Work or Derivative Works thereof, that is intentionally
      submitted to Licensor for inclusion in the Work by the copyright owner
      or by an individual or Legal Entity authorized to submit on behalf of
      the copyright owner. For the purposes of this definition, "submitted"
      means any form of electronic, verbal, or written communication sent
      to the Licensor or its representatives, including but not limited to
      communication on electronic mailing lists, source code control systems,
      and issue tracking systems that are managed by, or on behalf of, the
      Licensor for the purpose of discussing and improving the Work, but
      excluding communication that is conspicuously marked or otherwise
      designated in writing by the copyright owner as "Not a Contribution."

      "Contributor" shall mean Licensor and any individual or Legal Entity
      on behalf of whom a Contribution has been received by Licensor and
      subsequently incorporated within the Work.

   2. Grant of Copyright License. Subject to the terms and conditions of
      this License, each Contributor hereby grants to You a perpetual,
      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
      copyright license to reproduce, prepare Derivative Works of,
      publicly display, publicly perform, sublicense, and distribute the
      Work and such Derivative Works in Source or Object form.

   3. Grant of Patent License. Subject to the terms and conditions of
      this License, each Contributor hereby grants to You a perpetual,
      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
      (except as stated in this section) patent license to make, have made,
      use, offer to sell, sell, import, and otherwise transfer the Work,
      where such license applies only to those patent claims licensable
      by such Contributor that are necessarily infringed by their
      Contribution(s) alone or by combination of their Contribution(s)
      with the Work to which such Contribution(s) was submitted. If You
      institute patent litigation against any entity (including a
      cross-claim or counterclaim in a lawsuit) alleging that the Work
      or a Contribution incorporated within the Work constitutes direct
      or contributory patent infringement, then any patent licenses
      granted to You under this License for that Work shall terminate
      as of the date such litigation is filed.

   4. Redistribution. You may reproduce and distribute copies of the
      Work or Derivative Works thereof in any medium, with or without
      modifications, and in Source or Object form, provided that You
      meet the following conditions:

      (a) You must give any other recipients of the Work or
          Derivative Works a copy of this License; and

      (b) You must cause any modified files to carry prominent notices
          stating that You changed the files; and

      (c) You must retain, in the Source form of any Derivative Works
          that You distribute, all copyright, patent, trademark, and
          attribution notices from the Source form of the Work,
          excluding those notices that do not pertain to any part of
          the Derivative Works; and

      (d) If the Work includes a "NOTICE" text file as part of its
          distribution, then any Derivative Works that You distribute must
          include a readable copy of the attribution notices contained
          within such NOTICE file, excluding those notices that do not
          pertain to any part of the Derivative Works, in at least one
          of the following places: within a NOTICE text file distributed
          as part of the Derivative Works; within the Source form or
          documentation, if provided along with the Derivative Works; or,
          within a display generated by the Derivative Works, if and
          wherever such third-party notices normally appear. The contents
          of the NOTICE file are for informational purposes only and
          do not modify the License. You may add Your own attribution
          notices within Derivative Works that You distribute, alongside
          or as an addendum to the NOTICE text from the Work, provided
          that such additional attribution notices cannot be construed
          as modifying the License.

      You may add Your own copyright statement to Your modifications and
      may provide additional or different license terms and conditions
      for use, reproduction, or distribution of Your modifications, or
      for any such Derivative Works as a whole, provided Your use,
      reproduction, and distribution of the Work otherwise complies with
      the conditions stated in this License.

   5. Submission of Contributions. Unless You explicitly state otherwise,
      any Contribution intentionally submitted for inclusion in the Work
      by You to the Licensor shall be under the terms and conditions of
      this License, without any additional terms or conditions.
      Notwithstanding the above, nothing herein shall supersede or modify
      the terms of any separate license agreement you may have executed
      with Licensor regarding such Contributions.

   6. Trademarks. This License does not grant permission to use the trade
      names, trademarks, service marks, or product names of the Licensor,
      except as required for reasonable and customary use in describing the
      origin of the Work and reproducing the content of the NOTICE file.

   7. Disclaimer of Warranty. Unless required by applicable law or
      agreed to in writing, Licensor provides the Work (and each
      Contributor provides its Contributions) on an "AS IS" BASIS,
      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
      implied, including, without limitation, any warranties or conditions
      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
      PARTICULAR PURPOSE. You are solely responsible for determining the
      appropriateness of using or redistributing the Work and assume any
      risks associated with Your exercise of permissions under this License.

   8. Limitation of Liability. In no event and under no legal theory,
      whether in tort (including negligence), contract, or otherwise,
      unless required by applicable law (such as deliberate and grossly
      negligent acts) or agreed to in writing, shall any Contributor be
      liable to You for damages, including any direct, indirect, special,
      incidental, or consequential damages of any character arising as a
      result of this License or out of the use or inability to use the
      Work (including but not limited to damages for loss of goodwill,
      work stoppage, computer failure or malfunction, or any and all
      other commercial damages or losses), even if such Contributor
      has been advised of the possibility of such damages.

   9. Accepting Warranty or Additional Liability. While redistributing
      the Work or Derivative Works thereof, You may choose to offer,
      and charge a fee for, acceptance of support, warranty, indemnity,
      or other liability obligations and/or rights consistent with this
      License. However, in accepting such obligations, You may act only
      on Your own behalf and on Your sole responsibility, not on behalf
      of any other Contributor, and only if You agree to indemnify,
      defend, and hold each Contributor harmless for any liability
      incurred by, or claims asserted against, such Contributor by reason
      of your accepting any such warranty or additional liability.

   END OF TERMS AND CONDITIONS

   APPENDIX: How to apply the Apache License to your work.

      To apply the Apache License to your work, attach the following
      boilerplate notice, with the fields enclosed by brackets "{}"
      replaced with your own identifying information. (Don't include
      the brackets!) The text should be enclosed in the appropriate
      comment syntax for the file format. We also recommend that a
      file or class name and description of purpose be included on the
      same "printed page" as the copyright notice for easier
      identification within third-party archives.

   Copyright {yyyy} {name of copyright owner}

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
54
lib/spack/external/_vendoring/distro/__init__.py
vendored
54
lib/spack/external/_vendoring/distro/__init__.py
vendored
@@ -1,54 +0,0 @@
from .distro import (
    NORMALIZED_DISTRO_ID,
    NORMALIZED_LSB_ID,
    NORMALIZED_OS_ID,
    LinuxDistribution,
    __version__,
    build_number,
    codename,
    distro_release_attr,
    distro_release_info,
    id,
    info,
    like,
    linux_distribution,
    lsb_release_attr,
    lsb_release_info,
    major_version,
    minor_version,
    name,
    os_release_attr,
    os_release_info,
    uname_attr,
    uname_info,
    version,
    version_parts,
)

__all__ = [
    "NORMALIZED_DISTRO_ID",
    "NORMALIZED_LSB_ID",
    "NORMALIZED_OS_ID",
    "LinuxDistribution",
    "build_number",
    "codename",
    "distro_release_attr",
    "distro_release_info",
    "id",
    "info",
    "like",
    "linux_distribution",
    "lsb_release_attr",
    "lsb_release_info",
    "major_version",
    "minor_version",
    "name",
    "os_release_attr",
    "os_release_info",
    "uname_attr",
    "uname_info",
    "version",
    "version_parts",
]

__version__ = __version__
@@ -1,4 +0,0 @@
from .distro import main

if __name__ == "__main__":
    main()
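
For context, a minimal usage sketch of the standalone distro package that this vendored copy mirrors (illustrative, not part of the diff; assumes `distro` is installed from PyPI):

    import distro

    print(distro.id())               # e.g. "ubuntu"
    print(distro.version())          # e.g. "20.04"
    print(distro.name(pretty=True))  # e.g. "Ubuntu 20.04 LTS"
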
45
lib/spack/external/_vendoring/jinja2/__init__.py
vendored
45
lib/spack/external/_vendoring/jinja2/__init__.py
vendored
@@ -1,45 +0,0 @@
"""Jinja is a template engine written in pure Python. It provides a
non-XML syntax that supports inline expressions and an optional
sandboxed environment.
"""
from .bccache import BytecodeCache as BytecodeCache
from .bccache import FileSystemBytecodeCache as FileSystemBytecodeCache
from .bccache import MemcachedBytecodeCache as MemcachedBytecodeCache
from .environment import Environment as Environment
from .environment import Template as Template
from .exceptions import TemplateAssertionError as TemplateAssertionError
from .exceptions import TemplateError as TemplateError
from .exceptions import TemplateNotFound as TemplateNotFound
from .exceptions import TemplateRuntimeError as TemplateRuntimeError
from .exceptions import TemplatesNotFound as TemplatesNotFound
from .exceptions import TemplateSyntaxError as TemplateSyntaxError
from .exceptions import UndefinedError as UndefinedError
from .filters import contextfilter
from .filters import environmentfilter
from .filters import evalcontextfilter
from .loaders import BaseLoader as BaseLoader
from .loaders import ChoiceLoader as ChoiceLoader
from .loaders import DictLoader as DictLoader
from .loaders import FileSystemLoader as FileSystemLoader
from .loaders import FunctionLoader as FunctionLoader
from .loaders import ModuleLoader as ModuleLoader
from .loaders import PackageLoader as PackageLoader
from .loaders import PrefixLoader as PrefixLoader
from .runtime import ChainableUndefined as ChainableUndefined
from .runtime import DebugUndefined as DebugUndefined
from .runtime import make_logging_undefined as make_logging_undefined
from .runtime import StrictUndefined as StrictUndefined
from .runtime import Undefined as Undefined
from .utils import clear_caches as clear_caches
from .utils import contextfunction
from .utils import environmentfunction
from .utils import escape
from .utils import evalcontextfunction
from .utils import is_undefined as is_undefined
from .utils import Markup
from .utils import pass_context as pass_context
from .utils import pass_environment as pass_environment
from .utils import pass_eval_context as pass_eval_context
from .utils import select_autoescape as select_autoescape

__version__ = "3.0.3"
@@ -1,75 +0,0 @@
import inspect
import typing as t
from functools import wraps

from .utils import _PassArg
from .utils import pass_eval_context

V = t.TypeVar("V")


def async_variant(normal_func):  # type: ignore
    def decorator(async_func):  # type: ignore
        pass_arg = _PassArg.from_obj(normal_func)
        need_eval_context = pass_arg is None

        if pass_arg is _PassArg.environment:

            def is_async(args: t.Any) -> bool:
                return t.cast(bool, args[0].is_async)

        else:

            def is_async(args: t.Any) -> bool:
                return t.cast(bool, args[0].environment.is_async)

        @wraps(normal_func)
        def wrapper(*args, **kwargs):  # type: ignore
            b = is_async(args)

            if need_eval_context:
                args = args[1:]

            if b:
                return async_func(*args, **kwargs)

            return normal_func(*args, **kwargs)

        if need_eval_context:
            wrapper = pass_eval_context(wrapper)

        wrapper.jinja_async_variant = True
        return wrapper

    return decorator


_common_primitives = {int, float, bool, str, list, dict, tuple, type(None)}


async def auto_await(value: t.Union[t.Awaitable["V"], "V"]) -> "V":
    # Avoid a costly call to isawaitable
    if type(value) in _common_primitives:
        return t.cast("V", value)

    if inspect.isawaitable(value):
        return await t.cast("t.Awaitable[V]", value)

    return t.cast("V", value)


async def auto_aiter(
    iterable: "t.Union[t.AsyncIterable[V], t.Iterable[V]]",
) -> "t.AsyncIterator[V]":
    if hasattr(iterable, "__aiter__"):
        async for item in t.cast("t.AsyncIterable[V]", iterable):
            yield item
    else:
        for item in t.cast("t.Iterable[V]", iterable):
            yield item


async def auto_to_list(
    value: "t.Union[t.AsyncIterable[V], t.Iterable[V]]",
) -> t.List["V"]:
    return [x async for x in auto_aiter(value)]
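
A minimal sketch of what these async helpers enable at the user level, assuming a standard jinja2 3.x install (illustrative, not part of the diff):

    import asyncio
    from jinja2 import Environment

    env = Environment(enable_async=True)
    tmpl = env.from_string("Hello {{ name }}!")

    async def main() -> None:
        # render_async drives the template through auto_await/auto_aiter
        print(await tmpl.render_async(name="world"))

    asyncio.run(main())
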
124
lib/spack/external/_vendoring/jinja2/nativetypes.py
vendored
124
lib/spack/external/_vendoring/jinja2/nativetypes.py
vendored
@@ -1,124 +0,0 @@
import typing as t
from ast import literal_eval
from ast import parse
from itertools import chain
from itertools import islice

from . import nodes
from .compiler import CodeGenerator
from .compiler import Frame
from .compiler import has_safe_repr
from .environment import Environment
from .environment import Template


def native_concat(values: t.Iterable[t.Any]) -> t.Optional[t.Any]:
    """Return a native Python type from the list of compiled nodes. If
    the result is a single node, its value is returned. Otherwise, the
    nodes are concatenated as strings. If the result can be parsed with
    :func:`ast.literal_eval`, the parsed value is returned. Otherwise,
    the string is returned.

    :param values: Iterable of outputs to concatenate.
    """
    head = list(islice(values, 2))

    if not head:
        return None

    if len(head) == 1:
        raw = head[0]
        if not isinstance(raw, str):
            return raw
    else:
        raw = "".join([str(v) for v in chain(head, values)])

    try:
        return literal_eval(
            # In Python 3.10+ ast.literal_eval removes leading spaces/tabs
            # from the given string. For backwards compatibility we need to
            # parse the string ourselves without removing leading spaces/tabs.
            parse(raw, mode="eval")
        )
    except (ValueError, SyntaxError, MemoryError):
        return raw


class NativeCodeGenerator(CodeGenerator):
    """A code generator which renders Python types by not adding
    ``str()`` around output nodes.
    """

    @staticmethod
    def _default_finalize(value: t.Any) -> t.Any:
        return value

    def _output_const_repr(self, group: t.Iterable[t.Any]) -> str:
        return repr("".join([str(v) for v in group]))

    def _output_child_to_const(
        self, node: nodes.Expr, frame: Frame, finalize: CodeGenerator._FinalizeInfo
    ) -> t.Any:
        const = node.as_const(frame.eval_ctx)

        if not has_safe_repr(const):
            raise nodes.Impossible()

        if isinstance(node, nodes.TemplateData):
            return const

        return finalize.const(const)  # type: ignore

    def _output_child_pre(
        self, node: nodes.Expr, frame: Frame, finalize: CodeGenerator._FinalizeInfo
    ) -> None:
        if finalize.src is not None:
            self.write(finalize.src)

    def _output_child_post(
        self, node: nodes.Expr, frame: Frame, finalize: CodeGenerator._FinalizeInfo
    ) -> None:
        if finalize.src is not None:
            self.write(")")


class NativeEnvironment(Environment):
    """An environment that renders templates to native Python types."""

    code_generator_class = NativeCodeGenerator


class NativeTemplate(Template):
    environment_class = NativeEnvironment

    def render(self, *args: t.Any, **kwargs: t.Any) -> t.Any:
        """Render the template to produce a native Python type. If the
        result is a single node, its value is returned. Otherwise, the
        nodes are concatenated as strings. If the result can be parsed
        with :func:`ast.literal_eval`, the parsed value is returned.
        Otherwise, the string is returned.
        """
        ctx = self.new_context(dict(*args, **kwargs))

        try:
            return native_concat(self.root_render_func(ctx))  # type: ignore
        except Exception:
            return self.environment.handle_exception()

    async def render_async(self, *args: t.Any, **kwargs: t.Any) -> t.Any:
        if not self.environment.is_async:
            raise RuntimeError(
                "The environment was not created with async mode enabled."
            )

        ctx = self.new_context(dict(*args, **kwargs))

        try:
            return native_concat(
                [n async for n in self.root_render_func(ctx)]  # type: ignore
            )
        except Exception:
            return self.environment.handle_exception()


NativeEnvironment.template_class = NativeTemplate
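
A short sketch of the behavior nativetypes.py provides, assuming jinja2 3.x (illustrative, not part of the diff):

    from jinja2.nativetypes import NativeEnvironment

    env = NativeEnvironment()
    result = env.from_string("{{ x + y }}").render(x=1, y=2)
    print(result, type(result))  # 3 <class 'int'>, not the string "3"
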
854
lib/spack/external/_vendoring/jinja2/utils.py
vendored
854
lib/spack/external/_vendoring/jinja2/utils.py
vendored
@@ -1,854 +0,0 @@
import enum
import json
import os
import re
import typing as t
import warnings
from collections import abc
from collections import deque
from random import choice
from random import randrange
from threading import Lock
from types import CodeType
from urllib.parse import quote_from_bytes

import markupsafe

if t.TYPE_CHECKING:
    import typing_extensions as te

F = t.TypeVar("F", bound=t.Callable[..., t.Any])

# special singleton representing missing values for the runtime
missing: t.Any = type("MissingType", (), {"__repr__": lambda x: "missing"})()

internal_code: t.MutableSet[CodeType] = set()

concat = "".join


def pass_context(f: F) -> F:
    """Pass the :class:`~jinja2.runtime.Context` as the first argument
    to the decorated function when called while rendering a template.

    Can be used on functions, filters, and tests.

    If only ``Context.eval_context`` is needed, use
    :func:`pass_eval_context`. If only ``Context.environment`` is
    needed, use :func:`pass_environment`.

    .. versionadded:: 3.0.0
        Replaces ``contextfunction`` and ``contextfilter``.
    """
    f.jinja_pass_arg = _PassArg.context  # type: ignore
    return f


def pass_eval_context(f: F) -> F:
    """Pass the :class:`~jinja2.nodes.EvalContext` as the first argument
    to the decorated function when called while rendering a template.
    See :ref:`eval-context`.

    Can be used on functions, filters, and tests.

    If only ``EvalContext.environment`` is needed, use
    :func:`pass_environment`.

    .. versionadded:: 3.0.0
        Replaces ``evalcontextfunction`` and ``evalcontextfilter``.
    """
    f.jinja_pass_arg = _PassArg.eval_context  # type: ignore
    return f


def pass_environment(f: F) -> F:
    """Pass the :class:`~jinja2.Environment` as the first argument to
    the decorated function when called while rendering a template.

    Can be used on functions, filters, and tests.

    .. versionadded:: 3.0.0
        Replaces ``environmentfunction`` and ``environmentfilter``.
    """
    f.jinja_pass_arg = _PassArg.environment  # type: ignore
    return f


class _PassArg(enum.Enum):
    context = enum.auto()
    eval_context = enum.auto()
    environment = enum.auto()

    @classmethod
    def from_obj(cls, obj: F) -> t.Optional["_PassArg"]:
        if hasattr(obj, "jinja_pass_arg"):
            return obj.jinja_pass_arg  # type: ignore

        for prefix in "context", "eval_context", "environment":
            squashed = prefix.replace("_", "")

            for name in f"{squashed}function", f"{squashed}filter":
                if getattr(obj, name, False) is True:
                    warnings.warn(
                        f"{name!r} is deprecated and will stop working"
                        f" in Jinja 3.1. Use 'pass_{prefix}' instead.",
                        DeprecationWarning,
                        stacklevel=2,
                    )
                    return cls[prefix]

        return None


def contextfunction(f: F) -> F:
    """Pass the context as the first argument to the decorated function.

    .. deprecated:: 3.0
        Will be removed in Jinja 3.1. Use :func:`~jinja2.pass_context`
        instead.
    """
    warnings.warn(
        "'contextfunction' is renamed to 'pass_context', the old name"
        " will be removed in Jinja 3.1.",
        DeprecationWarning,
        stacklevel=2,
    )
    return pass_context(f)


def evalcontextfunction(f: F) -> F:
    """Pass the eval context as the first argument to the decorated
    function.

    .. deprecated:: 3.0
        Will be removed in Jinja 3.1. Use
        :func:`~jinja2.pass_eval_context` instead.

    .. versionadded:: 2.4
    """
    warnings.warn(
        "'evalcontextfunction' is renamed to 'pass_eval_context', the"
        " old name will be removed in Jinja 3.1.",
        DeprecationWarning,
        stacklevel=2,
    )
    return pass_eval_context(f)


def environmentfunction(f: F) -> F:
    """Pass the environment as the first argument to the decorated
    function.

    .. deprecated:: 3.0
        Will be removed in Jinja 3.1. Use
        :func:`~jinja2.pass_environment` instead.
    """
    warnings.warn(
        "'environmentfunction' is renamed to 'pass_environment', the"
        " old name will be removed in Jinja 3.1.",
        DeprecationWarning,
        stacklevel=2,
    )
    return pass_environment(f)


def internalcode(f: F) -> F:
    """Marks the function as internally used"""
    internal_code.add(f.__code__)
    return f


def is_undefined(obj: t.Any) -> bool:
    """Check if the object passed is undefined. This does nothing more than
    performing an instance check against :class:`Undefined` but looks nicer.
    This can be used for custom filters or tests that want to react to
    undefined variables. For example a custom default filter can look like
    this::

        def default(var, default=''):
            if is_undefined(var):
                return default
            return var
    """
    from .runtime import Undefined

    return isinstance(obj, Undefined)


def consume(iterable: t.Iterable[t.Any]) -> None:
    """Consumes an iterable without doing anything with it."""
    for _ in iterable:
        pass


def clear_caches() -> None:
    """Jinja keeps internal caches for environments and lexers. These are
    used so that Jinja doesn't have to recreate environments and lexers all
    the time. Normally you don't have to care about that but if you are
    measuring memory consumption you may want to clean the caches.
    """
    from .environment import get_spontaneous_environment
    from .lexer import _lexer_cache

    get_spontaneous_environment.cache_clear()
    _lexer_cache.clear()


def import_string(import_name: str, silent: bool = False) -> t.Any:
    """Imports an object based on a string. This is useful if you want to
    use import paths as endpoints or something similar. An import path can
    be specified either in dotted notation (``xml.sax.saxutils.escape``)
    or with a colon as object delimiter (``xml.sax.saxutils:escape``).

    If `silent` is True the return value will be `None` if the import
    fails.

    :return: imported object
    """
    try:
        if ":" in import_name:
            module, obj = import_name.split(":", 1)
        elif "." in import_name:
            module, _, obj = import_name.rpartition(".")
        else:
            return __import__(import_name)
        return getattr(__import__(module, None, None, [obj]), obj)
    except (ImportError, AttributeError):
        if not silent:
            raise


def open_if_exists(filename: str, mode: str = "rb") -> t.Optional[t.IO]:
    """Returns a file descriptor for the filename if that file exists,
    otherwise ``None``.
    """
    if not os.path.isfile(filename):
        return None

    return open(filename, mode)


def object_type_repr(obj: t.Any) -> str:
    """Returns the name of the object's type. For some recognized
    singletons the name of the object is returned instead. (For
    example for `None` and `Ellipsis`).
    """
    if obj is None:
        return "None"
    elif obj is Ellipsis:
        return "Ellipsis"

    cls = type(obj)

    if cls.__module__ == "builtins":
        return f"{cls.__name__} object"

    return f"{cls.__module__}.{cls.__name__} object"


def pformat(obj: t.Any) -> str:
    """Format an object using :func:`pprint.pformat`."""
    from pprint import pformat  # type: ignore

    return pformat(obj)


_http_re = re.compile(
    r"""
    ^
    (
        (https?://|www\.)  # scheme or www
        (([\w%-]+\.)+)?  # subdomain
        (
            [a-z]{2,63}  # basic tld
        |
            xn--[\w%]{2,59}  # idna tld
        )
    |
        ([\w%-]{2,63}\.)+  # basic domain
        (com|net|int|edu|gov|org|info|mil)  # basic tld
    |
        (https?://)  # scheme
        (
            (([\d]{1,3})(\.[\d]{1,3}){3})  # IPv4
        |
            (\[([\da-f]{0,4}:){2}([\da-f]{0,4}:?){1,6}])  # IPv6
        )
    )
    (?::[\d]{1,5})?  # port
    (?:[/?#]\S*)?  # path, query, and fragment
    $
    """,
    re.IGNORECASE | re.VERBOSE,
)
_email_re = re.compile(r"^\S+@\w[\w.-]*\.\w+$")


def urlize(
    text: str,
    trim_url_limit: t.Optional[int] = None,
    rel: t.Optional[str] = None,
    target: t.Optional[str] = None,
    extra_schemes: t.Optional[t.Iterable[str]] = None,
) -> str:
    """Convert URLs in text into clickable links.

    This may not recognize links in some situations. Usually, a more
    comprehensive formatter, such as a Markdown library, is a better
    choice.

    Works on ``http://``, ``https://``, ``www.``, ``mailto:``, and email
    addresses. Links with trailing punctuation (periods, commas, closing
    parentheses) and leading punctuation (opening parentheses) are
    recognized excluding the punctuation. Email addresses that include
    header fields are not recognized (for example,
    ``mailto:address@example.com?cc=copy@example.com``).

    :param text: Original text containing URLs to link.
    :param trim_url_limit: Shorten displayed URL values to this length.
    :param target: Add the ``target`` attribute to links.
    :param rel: Add the ``rel`` attribute to links.
    :param extra_schemes: Recognize URLs that start with these schemes
        in addition to the default behavior.

    .. versionchanged:: 3.0
        The ``extra_schemes`` parameter was added.

    .. versionchanged:: 3.0
        Generate ``https://`` links for URLs without a scheme.

    .. versionchanged:: 3.0
        The parsing rules were updated. Recognize email addresses with
        or without the ``mailto:`` scheme. Validate IP addresses. Ignore
        parentheses and brackets in more cases.
    """
    if trim_url_limit is not None:

        def trim_url(x: str) -> str:
            if len(x) > trim_url_limit:  # type: ignore
                return f"{x[:trim_url_limit]}..."

            return x

    else:

        def trim_url(x: str) -> str:
            return x

    words = re.split(r"(\s+)", str(markupsafe.escape(text)))
    rel_attr = f' rel="{markupsafe.escape(rel)}"' if rel else ""
    target_attr = f' target="{markupsafe.escape(target)}"' if target else ""

    for i, word in enumerate(words):
        head, middle, tail = "", word, ""
        match = re.match(r"^([(<]|&lt;)+", middle)

        if match:
            head = match.group()
            middle = middle[match.end() :]

        # Unlike lead, which is anchored to the start of the string,
        # need to check that the string ends with any of the characters
        # before trying to match all of them, to avoid backtracking.
        if middle.endswith((")", ">", ".", ",", "\n", "&gt;")):
            match = re.search(r"([)>.,\n]|&gt;)+$", middle)

            if match:
                tail = match.group()
                middle = middle[: match.start()]

        # Prefer balancing parentheses in URLs instead of ignoring a
        # trailing character.
        for start_char, end_char in ("(", ")"), ("<", ">"), ("&lt;", "&gt;"):
            start_count = middle.count(start_char)

            if start_count <= middle.count(end_char):
                # Balanced, or lighter on the left
                continue

            # Move as many as possible from the tail to balance
            for _ in range(min(start_count, tail.count(end_char))):
                end_index = tail.index(end_char) + len(end_char)
                # Move anything in the tail before the end char too
                middle += tail[:end_index]
                tail = tail[end_index:]

        if _http_re.match(middle):
            if middle.startswith("https://") or middle.startswith("http://"):
                middle = (
                    f'<a href="{middle}"{rel_attr}{target_attr}>{trim_url(middle)}</a>'
                )
            else:
                middle = (
                    f'<a href="https://{middle}"{rel_attr}{target_attr}>'
                    f"{trim_url(middle)}</a>"
                )

        elif middle.startswith("mailto:") and _email_re.match(middle[7:]):
            middle = f'<a href="{middle}">{middle[7:]}</a>'

        elif (
            "@" in middle
            and not middle.startswith("www.")
            and ":" not in middle
            and _email_re.match(middle)
        ):
            middle = f'<a href="mailto:{middle}">{middle}</a>'

        elif extra_schemes is not None:
            for scheme in extra_schemes:
                if middle != scheme and middle.startswith(scheme):
                    middle = f'<a href="{middle}"{rel_attr}{target_attr}>{middle}</a>'

        words[i] = f"{head}{middle}{tail}"

    return "".join(words)


def generate_lorem_ipsum(
    n: int = 5, html: bool = True, min: int = 20, max: int = 100
) -> str:
    """Generate some lorem ipsum for the template."""
    from .constants import LOREM_IPSUM_WORDS

    words = LOREM_IPSUM_WORDS.split()
    result = []

    for _ in range(n):
        next_capitalized = True
        last_comma = last_fullstop = 0
        word = None
        last = None
        p = []

        # each paragraph contains 20 to 100 words.
        for idx, _ in enumerate(range(randrange(min, max))):
            while True:
                word = choice(words)
                if word != last:
                    last = word
                    break
            if next_capitalized:
                word = word.capitalize()
                next_capitalized = False
            # add commas
            if idx - randrange(3, 8) > last_comma:
                last_comma = idx
                last_fullstop += 2
                word += ","
            # add end of sentences
            if idx - randrange(10, 20) > last_fullstop:
                last_comma = last_fullstop = idx
                word += "."
                next_capitalized = True
            p.append(word)

        # ensure that the paragraph ends with a dot.
        p_str = " ".join(p)

        if p_str.endswith(","):
            p_str = p_str[:-1] + "."
        elif not p_str.endswith("."):
            p_str += "."

        result.append(p_str)

    if not html:
        return "\n\n".join(result)
    return markupsafe.Markup(
        "\n".join(f"<p>{markupsafe.escape(x)}</p>" for x in result)
    )


def url_quote(obj: t.Any, charset: str = "utf-8", for_qs: bool = False) -> str:
    """Quote a string for use in a URL using the given charset.

    :param obj: String or bytes to quote. Other types are converted to
        string then encoded to bytes using the given charset.
    :param charset: Encode text to bytes using this charset.
    :param for_qs: Quote "/" and use "+" for spaces.
    """
    if not isinstance(obj, bytes):
        if not isinstance(obj, str):
            obj = str(obj)

        obj = obj.encode(charset)

    safe = b"" if for_qs else b"/"
    rv = quote_from_bytes(obj, safe)

    if for_qs:
        rv = rv.replace("%20", "+")

    return rv


def unicode_urlencode(obj: t.Any, charset: str = "utf-8", for_qs: bool = False) -> str:
    import warnings

    warnings.warn(
        "'unicode_urlencode' has been renamed to 'url_quote'. The old"
        " name will be removed in Jinja 3.1.",
        DeprecationWarning,
        stacklevel=2,
    )
    return url_quote(obj, charset=charset, for_qs=for_qs)


@abc.MutableMapping.register
class LRUCache:
    """A simple LRU Cache implementation."""

    # this is fast for small capacities (something below 1000) but doesn't
    # scale. But as long as it's only used as storage for templates this
    # won't do any harm.

    def __init__(self, capacity: int) -> None:
        self.capacity = capacity
        self._mapping: t.Dict[t.Any, t.Any] = {}
        self._queue: "te.Deque[t.Any]" = deque()
        self._postinit()

    def _postinit(self) -> None:
        # alias all queue methods for faster lookup
        self._popleft = self._queue.popleft
        self._pop = self._queue.pop
        self._remove = self._queue.remove
        self._wlock = Lock()
        self._append = self._queue.append

    def __getstate__(self) -> t.Mapping[str, t.Any]:
        return {
            "capacity": self.capacity,
            "_mapping": self._mapping,
            "_queue": self._queue,
        }

    def __setstate__(self, d: t.Mapping[str, t.Any]) -> None:
        self.__dict__.update(d)
        self._postinit()

    def __getnewargs__(self) -> t.Tuple:
        return (self.capacity,)

    def copy(self) -> "LRUCache":
        """Return a shallow copy of the instance."""
        rv = self.__class__(self.capacity)
        rv._mapping.update(self._mapping)
        rv._queue.extend(self._queue)
        return rv

    def get(self, key: t.Any, default: t.Any = None) -> t.Any:
        """Return an item from the cache dict or `default`"""
        try:
            return self[key]
        except KeyError:
            return default

    def setdefault(self, key: t.Any, default: t.Any = None) -> t.Any:
        """Set `default` if the key is not in the cache otherwise
        leave unchanged. Return the value of this key.
        """
        try:
            return self[key]
        except KeyError:
            self[key] = default
            return default

    def clear(self) -> None:
        """Clear the cache."""
        with self._wlock:
            self._mapping.clear()
            self._queue.clear()

    def __contains__(self, key: t.Any) -> bool:
        """Check if a key exists in this cache."""
        return key in self._mapping

    def __len__(self) -> int:
        """Return the current size of the cache."""
        return len(self._mapping)

    def __repr__(self) -> str:
        return f"<{type(self).__name__} {self._mapping!r}>"

    def __getitem__(self, key: t.Any) -> t.Any:
        """Get an item from the cache. Moves the item up so that it has the
        highest priority then.

        Raise a `KeyError` if it does not exist.
        """
        with self._wlock:
            rv = self._mapping[key]

            if self._queue[-1] != key:
                try:
                    self._remove(key)
                except ValueError:
                    # if something removed the key from the container
                    # when we read, ignore the ValueError that we would
                    # get otherwise.
                    pass

                self._append(key)

            return rv

    def __setitem__(self, key: t.Any, value: t.Any) -> None:
        """Sets the value for an item. Moves the item up so that it
        has the highest priority then.
        """
        with self._wlock:
            if key in self._mapping:
                self._remove(key)
            elif len(self._mapping) == self.capacity:
                del self._mapping[self._popleft()]

            self._append(key)
            self._mapping[key] = value

    def __delitem__(self, key: t.Any) -> None:
        """Remove an item from the cache dict.
        Raise a `KeyError` if it does not exist.
        """
        with self._wlock:
            del self._mapping[key]

            try:
                self._remove(key)
            except ValueError:
                pass

    def items(self) -> t.Iterable[t.Tuple[t.Any, t.Any]]:
        """Return a list of items."""
        result = [(key, self._mapping[key]) for key in list(self._queue)]
        result.reverse()
        return result

    def values(self) -> t.Iterable[t.Any]:
        """Return a list of all values."""
        return [x[1] for x in self.items()]

    def keys(self) -> t.Iterable[t.Any]:
        """Return a list of all keys ordered by most recent usage."""
        return list(self)

    def __iter__(self) -> t.Iterator[t.Any]:
        return reversed(tuple(self._queue))

    def __reversed__(self) -> t.Iterator[t.Any]:
        """Iterate over the keys in the cache dict, oldest items
        coming first.
        """
        return iter(tuple(self._queue))

    __copy__ = copy


def select_autoescape(
    enabled_extensions: t.Collection[str] = ("html", "htm", "xml"),
    disabled_extensions: t.Collection[str] = (),
    default_for_string: bool = True,
    default: bool = False,
) -> t.Callable[[t.Optional[str]], bool]:
    """Intelligently sets the initial value of autoescaping based on the
    filename of the template. This is the recommended way to configure
    autoescaping if you do not want to write a custom function yourself.

    If you want to enable it for all templates created from strings or
    for all templates with `.html` and `.xml` extensions::

        from jinja2 import Environment, select_autoescape
        env = Environment(autoescape=select_autoescape(
            enabled_extensions=('html', 'xml'),
            default_for_string=True,
        ))

    Example configuration to turn it on at all times except if the template
    ends with `.txt`::

        from jinja2 import Environment, select_autoescape
        env = Environment(autoescape=select_autoescape(
            disabled_extensions=('txt',),
            default_for_string=True,
            default=True,
        ))

    The `enabled_extensions` is an iterable of all the extensions that
    autoescaping should be enabled for. Likewise `disabled_extensions` is
    a list of all templates it should be disabled for. If a template is
    loaded from a string then the default from `default_for_string` is used.
    If nothing matches then the initial value of autoescaping is set to the
    value of `default`.

    For security reasons this function operates case insensitive.

    .. versionadded:: 2.9
    """
    enabled_patterns = tuple(f".{x.lstrip('.').lower()}" for x in enabled_extensions)
    disabled_patterns = tuple(f".{x.lstrip('.').lower()}" for x in disabled_extensions)

    def autoescape(template_name: t.Optional[str]) -> bool:
        if template_name is None:
            return default_for_string
        template_name = template_name.lower()
        if template_name.endswith(enabled_patterns):
            return True
        if template_name.endswith(disabled_patterns):
            return False
        return default

    return autoescape


def htmlsafe_json_dumps(
    obj: t.Any, dumps: t.Optional[t.Callable[..., str]] = None, **kwargs: t.Any
) -> markupsafe.Markup:
    """Serialize an object to a string of JSON with :func:`json.dumps`,
    then replace HTML-unsafe characters with Unicode escapes and mark
    the result safe with :class:`~markupsafe.Markup`.

    This is available in templates as the ``|tojson`` filter.

    The following characters are escaped: ``<``, ``>``, ``&``, ``'``.

    The returned string is safe to render in HTML documents and
    ``<script>`` tags. The exception is in HTML attributes that are
    double quoted; either use single quotes or the ``|forceescape``
    filter.

    :param obj: The object to serialize to JSON.
    :param dumps: The ``dumps`` function to use. Defaults to
        ``env.policies["json.dumps_function"]``, which defaults to
        :func:`json.dumps`.
    :param kwargs: Extra arguments to pass to ``dumps``. Merged onto
        ``env.policies["json.dumps_kwargs"]``.

    .. versionchanged:: 3.0
        The ``dumper`` parameter is renamed to ``dumps``.

    .. versionadded:: 2.9
    """
    if dumps is None:
        dumps = json.dumps

    return markupsafe.Markup(
        dumps(obj, **kwargs)
        .replace("<", "\\u003c")
        .replace(">", "\\u003e")
        .replace("&", "\\u0026")
        .replace("'", "\\u0027")
    )


class Cycler:
    """Cycle through values by yielding them one at a time, then restarting
    once the end is reached. Available as ``cycler`` in templates.

    Similar to ``loop.cycle``, but can be used outside loops or across
    multiple loops. For example, render a list of folders and files in a
    list, alternating giving them "odd" and "even" classes.

    .. code-block:: html+jinja

        {% set row_class = cycler("odd", "even") %}
        <ul class="browser">
            {% for folder in folders %}
              <li class="folder {{ row_class.next() }}">{{ folder }}
            {% endfor %}
            {% for file in files %}
              <li class="file {{ row_class.next() }}">{{ file }}
            {% endfor %}
        </ul>

    :param items: Each positional argument will be yielded in the order
        given for each cycle.

    .. versionadded:: 2.1
    """

    def __init__(self, *items: t.Any) -> None:
        if not items:
            raise RuntimeError("at least one item has to be provided")
        self.items = items
        self.pos = 0

    def reset(self) -> None:
        """Resets the current item to the first item."""
        self.pos = 0

    @property
    def current(self) -> t.Any:
        """Return the current item. Equivalent to the item that will be
        returned next time :meth:`next` is called.
        """
        return self.items[self.pos]

    def next(self) -> t.Any:
        """Return the current item, then advance :attr:`current` to the
        next item.
        """
        rv = self.current
        self.pos = (self.pos + 1) % len(self.items)
        return rv

    __next__ = next


class Joiner:
    """A joining helper for templates."""

    def __init__(self, sep: str = ", ") -> None:
        self.sep = sep
        self.used = False

    def __call__(self) -> str:
        if not self.used:
            self.used = True
            return ""
        return self.sep


class Namespace:
    """A namespace object that can hold arbitrary attributes. It may be
    initialized from a dictionary or with keyword arguments."""

    def __init__(*args: t.Any, **kwargs: t.Any) -> None:  # noqa: B902
        self, args = args[0], args[1:]
        self.__attrs = dict(*args, **kwargs)

    def __getattribute__(self, name: str) -> t.Any:
        # __class__ is needed for the awaitable check in async mode
        if name in {"_Namespace__attrs", "__class__"}:
            return object.__getattribute__(self, name)
        try:
            return self.__attrs[name]
        except KeyError:
            raise AttributeError(name) from None

    def __setitem__(self, name: str, value: t.Any) -> None:
        self.__attrs[name] = value

    def __repr__(self) -> str:
        return f"<Namespace {self.__attrs!r}>"


class Markup(markupsafe.Markup):
    def __new__(cls, base="", encoding=None, errors="strict"):  # type: ignore
        warnings.warn(
            "'jinja2.Markup' is deprecated and will be removed in Jinja"
            " 3.1. Import 'markupsafe.Markup' instead.",
            DeprecationWarning,
            stacklevel=2,
        )
        return super().__new__(cls, base, encoding, errors)


def escape(s: t.Any) -> str:
    warnings.warn(
        "'jinja2.escape' is deprecated and will be removed in Jinja"
        " 3.1. Import 'markupsafe.escape' instead.",
        DeprecationWarning,
        stacklevel=2,
    )
    return markupsafe.escape(s)
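
A minimal sketch of the LRUCache eviction order defined above, assuming jinja2 3.x (illustrative, not part of the diff):

    from jinja2.utils import LRUCache

    cache = LRUCache(capacity=2)
    cache["a"] = 1
    cache["b"] = 2
    _ = cache["a"]       # touching "a" makes it most recently used
    cache["c"] = 3       # evicts "b", the least recently used key
    print("b" in cache)  # False
    print(cache.keys())  # ["c", "a"], most recent first
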
1
lib/spack/external/_vendoring/jsonschema.pyi
vendored
1
lib/spack/external/_vendoring/jsonschema.pyi
vendored
@@ -1 +0,0 @@
from jsonschema import *
@@ -1,5 +0,0 @@
"""
Benchmarks for validation.

This package is *not* public API.
"""
@@ -1,26 +0,0 @@
#!/usr/bin/env python
"""
A performance benchmark using the example from issue #232.

See https://github.com/Julian/jsonschema/pull/232.
"""
from twisted.python.filepath import FilePath
from pyperf import Runner
from pyrsistent import m

from jsonschema.tests._suite import Version
import jsonschema


issue232 = Version(
    path=FilePath(__file__).sibling("issue232"),
    remotes=m(),
    name="issue232",
)


if __name__ == "__main__":
    issue232.benchmark(
        runner=Runner(),
        Validator=jsonschema.Draft4Validator,
    )
@@ -1,14 +0,0 @@
#!/usr/bin/env python
"""
A performance benchmark using the official test suite.

This benchmarks jsonschema using every valid example in the
JSON-Schema-Test-Suite. It will take some time to complete.
"""
from pyperf import Runner

from jsonschema.tests._suite import Suite


if __name__ == "__main__":
    Suite().benchmark(runner=Runner())
@@ -1,5 +0,0 @@
def bug(issue=None):
    message = "A known bug."
    if issue is not None:
        message += " See issue #{issue}.".format(issue=issue)
    return message
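
The removed helpers above and below exercise the public jsonschema API; a minimal sketch of that API, assuming jsonschema is installed (illustrative, not part of the diff):

    from jsonschema import Draft4Validator, ValidationError, validate

    schema = {"properties": {"n": {"minimum": 2}}}
    Draft4Validator.check_schema(schema)  # raises SchemaError if the schema is bad

    try:
        validate(instance={"n": 1}, schema=schema, cls=Draft4Validator)
    except ValidationError as err:
        print(err.message)  # "1 is less than the minimum of 2"
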
@@ -1,239 +0,0 @@
"""
Python representations of the JSON Schema Test Suite tests.
"""

from functools import partial
import json
import os
import re
import subprocess
import sys
import unittest

from twisted.python.filepath import FilePath
import attr

from jsonschema.compat import PY3
from jsonschema.validators import validators
import jsonschema


def _find_suite():
    root = os.environ.get("JSON_SCHEMA_TEST_SUITE")
    if root is not None:
        return FilePath(root)

    root = FilePath(jsonschema.__file__).parent().sibling("json")
    if not root.isdir():  # pragma: no cover
        raise ValueError(
            (
                "Can't find the JSON-Schema-Test-Suite directory. "
                "Set the 'JSON_SCHEMA_TEST_SUITE' environment "
                "variable or run the tests from alongside a checkout "
                "of the suite."
            ),
        )
    return root


@attr.s(hash=True)
class Suite(object):

    _root = attr.ib(default=attr.Factory(_find_suite))

    def _remotes(self):
        jsonschema_suite = self._root.descendant(["bin", "jsonschema_suite"])
        remotes = subprocess.check_output(
            [sys.executable, jsonschema_suite.path, "remotes"],
        )
        return {
            "http://localhost:1234/" + name: schema
            for name, schema in json.loads(remotes.decode("utf-8")).items()
        }

    def benchmark(self, runner):  # pragma: no cover
        for name in validators:
            self.version(name=name).benchmark(runner=runner)

    def version(self, name):
        return Version(
            name=name,
            path=self._root.descendant(["tests", name]),
            remotes=self._remotes(),
        )


@attr.s(hash=True)
class Version(object):

    _path = attr.ib()
    _remotes = attr.ib()

    name = attr.ib()

    def benchmark(self, runner, **kwargs):  # pragma: no cover
        for suite in self.tests():
            for test in suite:
                runner.bench_func(
                    test.fully_qualified_name,
                    partial(test.validate_ignoring_errors, **kwargs),
                )

    def tests(self):
        return (
            test
            for child in self._path.globChildren("*.json")
            for test in self._tests_in(
                subject=child.basename()[:-5],
                path=child,
            )
        )

    def format_tests(self):
        path = self._path.descendant(["optional", "format"])
        return (
            test
            for child in path.globChildren("*.json")
            for test in self._tests_in(
                subject=child.basename()[:-5],
                path=child,
            )
        )

    def tests_of(self, name):
        return self._tests_in(
            subject=name,
            path=self._path.child(name + ".json"),
        )

    def optional_tests_of(self, name):
        return self._tests_in(
            subject=name,
            path=self._path.descendant(["optional", name + ".json"]),
        )

    def to_unittest_testcase(self, *suites, **kwargs):
        name = kwargs.pop("name", "Test" + self.name.title())
        methods = {
            test.method_name: test.to_unittest_method(**kwargs)
            for suite in suites
            for tests in suite
            for test in tests
        }
        cls = type(name, (unittest.TestCase,), methods)

        try:
            cls.__module__ = _someone_save_us_the_module_of_the_caller()
        except Exception:  # pragma: no cover
            # We're doing crazy things, so if they go wrong, like a function
            # behaving differently on some other interpreter, just make them
            # not happen.
            pass

        return cls

    def _tests_in(self, subject, path):
        for each in json.loads(path.getContent().decode("utf-8")):
            yield (
                _Test(
                    version=self,
                    subject=subject,
                    case_description=each["description"],
                    schema=each["schema"],
                    remotes=self._remotes,
                    **test
                ) for test in each["tests"]
            )


@attr.s(hash=True, repr=False)
class _Test(object):

    version = attr.ib()

    subject = attr.ib()
    case_description = attr.ib()
    description = attr.ib()

    data = attr.ib()
    schema = attr.ib(repr=False)

    valid = attr.ib()

    _remotes = attr.ib()

    def __repr__(self):  # pragma: no cover
        return "<Test {}>".format(self.fully_qualified_name)

    @property
    def fully_qualified_name(self):  # pragma: no cover
        return " > ".join(
            [
                self.version.name,
                self.subject,
                self.case_description,
                self.description,
            ]
        )

    @property
    def method_name(self):
        delimiters = r"[\W\- ]+"
        name = "test_%s_%s_%s" % (
            re.sub(delimiters, "_", self.subject),
            re.sub(delimiters, "_", self.case_description),
            re.sub(delimiters, "_", self.description),
        )

        if not PY3:  # pragma: no cover
            name = name.encode("utf-8")
        return name

    def to_unittest_method(self, skip=lambda test: None, **kwargs):
        if self.valid:
            def fn(this):
                self.validate(**kwargs)
        else:
            def fn(this):
                with this.assertRaises(jsonschema.ValidationError):
                    self.validate(**kwargs)

        fn.__name__ = self.method_name
        reason = skip(self)
        return unittest.skipIf(reason is not None, reason)(fn)

    def validate(self, Validator, **kwargs):
        resolver = jsonschema.RefResolver.from_schema(
            schema=self.schema,
            store=self._remotes,
            id_of=Validator.ID_OF,
        )
        jsonschema.validate(
            instance=self.data,
            schema=self.schema,
            cls=Validator,
            resolver=resolver,
            **kwargs
        )

    def validate_ignoring_errors(self, Validator):  # pragma: no cover
        try:
            self.validate(Validator=Validator)
        except jsonschema.ValidationError:
            pass


def _someone_save_us_the_module_of_the_caller():
    """
    The FQON of the module 2nd stack frames up from here.

    This is intended to allow us to dynamically return test case classes that
    are indistinguishable from being defined in the module that wants them.

    Otherwise, trial will mis-print the FQON, and copy pasting it won't re-run
    the class that really is running.

    Save us all, this is all so so so so so terrible.
    """

    return sys._getframe(2).f_globals["__name__"]
@@ -1,151 +0,0 @@
from unittest import TestCase
import json
import subprocess
import sys

from jsonschema import Draft4Validator, ValidationError, cli, __version__
from jsonschema.compat import NativeIO
from jsonschema.exceptions import SchemaError


def fake_validator(*errors):
    errors = list(reversed(errors))

    class FakeValidator(object):
        def __init__(self, *args, **kwargs):
            pass

        def iter_errors(self, instance):
            if errors:
                return errors.pop()
            return []

        def check_schema(self, schema):
            pass

    return FakeValidator


class TestParser(TestCase):

    FakeValidator = fake_validator()
    instance_file = "foo.json"
    schema_file = "schema.json"

    def setUp(self):
        cli.open = self.fake_open
        self.addCleanup(delattr, cli, "open")

    def fake_open(self, path):
        if path == self.instance_file:
            contents = ""
        elif path == self.schema_file:
            contents = {}
        else:  # pragma: no cover
            self.fail("What is {!r}".format(path))
        return NativeIO(json.dumps(contents))

    def test_find_validator_by_fully_qualified_object_name(self):
        arguments = cli.parse_args(
            [
                "--validator",
                "jsonschema.tests.test_cli.TestParser.FakeValidator",
                "--instance", self.instance_file,
                self.schema_file,
            ]
        )
        self.assertIs(arguments["validator"], self.FakeValidator)

    def test_find_validator_in_jsonschema(self):
        arguments = cli.parse_args(
            [
                "--validator", "Draft4Validator",
                "--instance", self.instance_file,
                self.schema_file,
            ]
        )
        self.assertIs(arguments["validator"], Draft4Validator)


class TestCLI(TestCase):
    def test_draft3_schema_draft4_validator(self):
        stdout, stderr = NativeIO(), NativeIO()
        with self.assertRaises(SchemaError):
            cli.run(
                {
                    "validator": Draft4Validator,
                    "schema": {
                        "anyOf": [
                            {"minimum": 20},
                            {"type": "string"},
                            {"required": True},
                        ],
                    },
                    "instances": [1],
                    "error_format": "{error.message}",
                },
                stdout=stdout,
                stderr=stderr,
            )

    def test_successful_validation(self):
        stdout, stderr = NativeIO(), NativeIO()
        exit_code = cli.run(
            {
                "validator": fake_validator(),
                "schema": {},
                "instances": [1],
                "error_format": "{error.message}",
            },
            stdout=stdout,
            stderr=stderr,
        )
        self.assertFalse(stdout.getvalue())
        self.assertFalse(stderr.getvalue())
        self.assertEqual(exit_code, 0)

    def test_unsuccessful_validation(self):
        error = ValidationError("I am an error!", instance=1)
        stdout, stderr = NativeIO(), NativeIO()
        exit_code = cli.run(
            {
                "validator": fake_validator([error]),
                "schema": {},
                "instances": [1],
                "error_format": "{error.instance} - {error.message}",
            },
            stdout=stdout,
            stderr=stderr,
        )
        self.assertFalse(stdout.getvalue())
        self.assertEqual(stderr.getvalue(), "1 - I am an error!")
        self.assertEqual(exit_code, 1)

    def test_unsuccessful_validation_multiple_instances(self):
        first_errors = [
            ValidationError("9", instance=1),
            ValidationError("8", instance=1),
        ]
        second_errors = [ValidationError("7", instance=2)]
        stdout, stderr = NativeIO(), NativeIO()
        exit_code = cli.run(
            {
                "validator": fake_validator(first_errors, second_errors),
                "schema": {},
                "instances": [1, 2],
                "error_format": "{error.instance} - {error.message}\t",
            },
            stdout=stdout,
            stderr=stderr,
        )
        self.assertFalse(stdout.getvalue())
        self.assertEqual(stderr.getvalue(), "1 - 9\t1 - 8\t2 - 7\t")
        self.assertEqual(exit_code, 1)

    def test_version(self):
        version = subprocess.check_output(
            [sys.executable, "-m", "jsonschema", "--version"],
            stderr=subprocess.STDOUT,
        )
        version = version.decode("utf-8").strip()
        self.assertEqual(version, __version__)
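
A minimal sketch of exceptions.best_match, which the removed TestBestMatch cases below assert on (illustrative, not part of the diff):

    from jsonschema import Draft4Validator, exceptions

    validator = Draft4Validator({"minProperties": 2})
    best = exceptions.best_match(validator.iter_errors({}))
    print(best.validator)  # "minProperties"
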
@@ -1,462 +0,0 @@
from unittest import TestCase
import textwrap

from jsonschema import Draft4Validator, exceptions
from jsonschema.compat import PY3


class TestBestMatch(TestCase):
    def best_match(self, errors):
        errors = list(errors)
        best = exceptions.best_match(errors)
        reversed_best = exceptions.best_match(reversed(errors))
        msg = "Didn't return a consistent best match!\nGot: {0}\n\nThen: {1}"
        self.assertEqual(
            best._contents(), reversed_best._contents(),
            msg=msg.format(best, reversed_best),
        )
        return best

    def test_shallower_errors_are_better_matches(self):
        validator = Draft4Validator(
            {
                "properties": {
                    "foo": {
                        "minProperties": 2,
                        "properties": {"bar": {"type": "object"}},
                    },
                },
            },
        )
        best = self.best_match(validator.iter_errors({"foo": {"bar": []}}))
        self.assertEqual(best.validator, "minProperties")

    def test_oneOf_and_anyOf_are_weak_matches(self):
        """
        A property you *must* match is probably better than one you have to
        match a part of.
        """

        validator = Draft4Validator(
            {
                "minProperties": 2,
                "anyOf": [{"type": "string"}, {"type": "number"}],
                "oneOf": [{"type": "string"}, {"type": "number"}],
            }
        )
        best = self.best_match(validator.iter_errors({}))
        self.assertEqual(best.validator, "minProperties")

    def test_if_the_most_relevant_error_is_anyOf_it_is_traversed(self):
        """
        If the most relevant error is an anyOf, then we traverse its context
        and select the otherwise *least* relevant error, since in this case
        that means the most specific, deep, error inside the instance.

        I.e. since only one of the schemas must match, we look for the most
        relevant one.
        """

        validator = Draft4Validator(
            {
                "properties": {
                    "foo": {
                        "anyOf": [
                            {"type": "string"},
                            {"properties": {"bar": {"type": "array"}}},
                        ],
                    },
                },
            },
        )
        best = self.best_match(validator.iter_errors({"foo": {"bar": 12}}))
        self.assertEqual(best.validator_value, "array")

    def test_if_the_most_relevant_error_is_oneOf_it_is_traversed(self):
        """
        If the most relevant error is an oneOf, then we traverse its context
        and select the otherwise *least* relevant error, since in this case
        that means the most specific, deep, error inside the instance.

        I.e. since only one of the schemas must match, we look for the most
        relevant one.
        """

        validator = Draft4Validator(
            {
                "properties": {
                    "foo": {
                        "oneOf": [
                            {"type": "string"},
                            {"properties": {"bar": {"type": "array"}}},
                        ],
                    },
                },
            },
        )
        best = self.best_match(validator.iter_errors({"foo": {"bar": 12}}))
        self.assertEqual(best.validator_value, "array")

    def test_if_the_most_relevant_error_is_allOf_it_is_traversed(self):
        """
        Now, if the error is allOf, we traverse but select the *most* relevant
        error from the context, because all schemas here must match anyways.
        """

        validator = Draft4Validator(
            {
                "properties": {
                    "foo": {
                        "allOf": [
                            {"type": "string"},
                            {"properties": {"bar": {"type": "array"}}},
                        ],
                    },
                },
            },
        )
        best = self.best_match(validator.iter_errors({"foo": {"bar": 12}}))
        self.assertEqual(best.validator_value, "string")

    def test_nested_context_for_oneOf(self):
        validator = Draft4Validator(
            {
                "properties": {
                    "foo": {
                        "oneOf": [
                            {"type": "string"},
                            {
                                "oneOf": [
                                    {"type": "string"},
                                    {
                                        "properties": {
                                            "bar": {"type": "array"},
                                        },
                                    },
                                ],
                            },
                        ],
                    },
                },
            },
        )
        best = self.best_match(validator.iter_errors({"foo": {"bar": 12}}))
        self.assertEqual(best.validator_value, "array")

    def test_one_error(self):
        validator = Draft4Validator({"minProperties": 2})
        error, = validator.iter_errors({})
        self.assertEqual(
            exceptions.best_match(validator.iter_errors({})).validator,
            "minProperties",
        )

    def test_no_errors(self):
        validator = Draft4Validator({})
        self.assertIsNone(exceptions.best_match(validator.iter_errors({})))


class TestByRelevance(TestCase):
    def test_short_paths_are_better_matches(self):
        shallow = exceptions.ValidationError("Oh no!", path=["baz"])
        deep = exceptions.ValidationError("Oh yes!", path=["foo", "bar"])
        match = max([shallow, deep], key=exceptions.relevance)
        self.assertIs(match, shallow)

        match = max([deep, shallow], key=exceptions.relevance)
        self.assertIs(match, shallow)

    def test_global_errors_are_even_better_matches(self):
        shallow = exceptions.ValidationError("Oh no!", path=[])
        deep = exceptions.ValidationError("Oh yes!", path=["foo"])

        errors = sorted([shallow, deep], key=exceptions.relevance)
        self.assertEqual(
            [list(error.path) for error in errors],
            [["foo"], []],
        )

        errors = sorted([deep, shallow], key=exceptions.relevance)
        self.assertEqual(
            [list(error.path) for error in errors],
            [["foo"], []],
        )

    def test_weak_validators_are_lower_priority(self):
        weak = exceptions.ValidationError("Oh no!", path=[], validator="a")
        normal = exceptions.ValidationError("Oh yes!", path=[], validator="b")

        best_match = exceptions.by_relevance(weak="a")

        match = max([weak, normal], key=best_match)
        self.assertIs(match, normal)

        match = max([normal, weak], key=best_match)
        self.assertIs(match, normal)

    def test_strong_validators_are_higher_priority(self):
        weak = exceptions.ValidationError("Oh no!", path=[], validator="a")
        normal = exceptions.ValidationError("Oh yes!", path=[], validator="b")
        strong = exceptions.ValidationError("Oh fine!", path=[], validator="c")

        best_match = exceptions.by_relevance(weak="a", strong="c")

        match = max([weak, normal, strong], key=best_match)
        self.assertIs(match, strong)

        match = max([strong, normal, weak], key=best_match)
        self.assertIs(match, strong)


class TestErrorTree(TestCase):
    def test_it_knows_how_many_total_errors_it_contains(self):
        # FIXME: https://github.com/Julian/jsonschema/issues/442
        errors = [
            exceptions.ValidationError("Something", validator=i)
            for i in range(8)
        ]
        tree = exceptions.ErrorTree(errors)
        self.assertEqual(tree.total_errors, 8)

    def test_it_contains_an_item_if_the_item_had_an_error(self):
        errors = [exceptions.ValidationError("a message", path=["bar"])]
        tree = exceptions.ErrorTree(errors)
        self.assertIn("bar", tree)

    def test_it_does_not_contain_an_item_if_the_item_had_no_error(self):
        errors = [exceptions.ValidationError("a message", path=["bar"])]
        tree = exceptions.ErrorTree(errors)
        self.assertNotIn("foo", tree)

    def test_validators_that_failed_appear_in_errors_dict(self):
        error = exceptions.ValidationError("a message", validator="foo")
        tree = exceptions.ErrorTree([error])
        self.assertEqual(tree.errors, {"foo": error})

    def test_it_creates_a_child_tree_for_each_nested_path(self):
        errors = [
            exceptions.ValidationError("a bar message", path=["bar"]),
            exceptions.ValidationError("a bar -> 0 message", path=["bar", 0]),
        ]
        tree = exceptions.ErrorTree(errors)
        self.assertIn(0, tree["bar"])
        self.assertNotIn(1, tree["bar"])

    def test_children_have_their_errors_dicts_built(self):
        e1, e2 = (
            exceptions.ValidationError("1", validator="foo", path=["bar", 0]),
            exceptions.ValidationError("2", validator="quux", path=["bar", 0]),
        )
        tree = exceptions.ErrorTree([e1, e2])
        self.assertEqual(tree["bar"][0].errors, {"foo": e1, "quux": e2})

    def test_multiple_errors_with_instance(self):
|
||||
e1, e2 = (
|
||||
exceptions.ValidationError(
|
||||
"1",
|
||||
validator="foo",
|
||||
path=["bar", "bar2"],
|
||||
instance="i1"),
|
||||
exceptions.ValidationError(
|
||||
"2",
|
||||
validator="quux",
|
||||
path=["foobar", 2],
|
||||
instance="i2"),
|
||||
)
|
||||
exceptions.ErrorTree([e1, e2])
|
||||
|
||||
def test_it_does_not_contain_subtrees_that_are_not_in_the_instance(self):
|
||||
error = exceptions.ValidationError("123", validator="foo", instance=[])
|
||||
tree = exceptions.ErrorTree([error])
|
||||
|
||||
with self.assertRaises(IndexError):
|
||||
tree[0]
|
||||
|
||||
def test_if_its_in_the_tree_anyhow_it_does_not_raise_an_error(self):
|
||||
"""
|
||||
If a validator is dumb (like :validator:`required` in draft 3) and
|
||||
refers to a path that isn't in the instance, the tree still properly
|
||||
returns a subtree for that path.
|
||||
"""
|
||||
|
||||
error = exceptions.ValidationError(
|
||||
"a message", validator="foo", instance={}, path=["foo"],
|
||||
)
|
||||
tree = exceptions.ErrorTree([error])
|
||||
self.assertIsInstance(tree["foo"], exceptions.ErrorTree)
|
||||
|
||||
|
||||
class TestErrorInitReprStr(TestCase):
|
||||
def make_error(self, **kwargs):
|
||||
defaults = dict(
|
||||
message=u"hello",
|
||||
validator=u"type",
|
||||
validator_value=u"string",
|
||||
instance=5,
|
||||
schema={u"type": u"string"},
|
||||
)
|
||||
defaults.update(kwargs)
|
||||
return exceptions.ValidationError(**defaults)
|
||||
|
||||
def assertShows(self, expected, **kwargs):
|
||||
if PY3: # pragma: no cover
|
||||
expected = expected.replace("u'", "'")
|
||||
expected = textwrap.dedent(expected).rstrip("\n")
|
||||
|
||||
error = self.make_error(**kwargs)
|
||||
message_line, _, rest = str(error).partition("\n")
|
||||
self.assertEqual(message_line, error.message)
|
||||
self.assertEqual(rest, expected)
|
||||
|
||||
def test_it_calls_super_and_sets_args(self):
|
||||
error = self.make_error()
|
||||
self.assertGreater(len(error.args), 1)
|
||||
|
||||
def test_repr(self):
|
||||
self.assertEqual(
|
||||
repr(exceptions.ValidationError(message="Hello!")),
|
||||
"<ValidationError: %r>" % "Hello!",
|
||||
)
|
||||
|
||||
def test_unset_error(self):
|
||||
error = exceptions.ValidationError("message")
|
||||
self.assertEqual(str(error), "message")
|
||||
|
||||
kwargs = {
|
||||
"validator": "type",
|
||||
"validator_value": "string",
|
||||
"instance": 5,
|
||||
"schema": {"type": "string"},
|
||||
}
|
||||
# Just the message should show if any of the attributes are unset
|
||||
for attr in kwargs:
|
||||
k = dict(kwargs)
|
||||
del k[attr]
|
||||
error = exceptions.ValidationError("message", **k)
|
||||
self.assertEqual(str(error), "message")
|
||||
|
||||
def test_empty_paths(self):
|
||||
self.assertShows(
|
||||
"""
|
||||
Failed validating u'type' in schema:
|
||||
{u'type': u'string'}
|
||||
|
||||
On instance:
|
||||
5
|
||||
""",
|
||||
path=[],
|
||||
schema_path=[],
|
||||
)
|
||||
|
||||
def test_one_item_paths(self):
|
||||
self.assertShows(
|
||||
"""
|
||||
Failed validating u'type' in schema:
|
||||
{u'type': u'string'}
|
||||
|
||||
On instance[0]:
|
||||
5
|
||||
""",
|
||||
path=[0],
|
||||
schema_path=["items"],
|
||||
)
|
||||
|
||||
def test_multiple_item_paths(self):
|
||||
self.assertShows(
|
||||
"""
|
||||
Failed validating u'type' in schema[u'items'][0]:
|
||||
{u'type': u'string'}
|
||||
|
||||
On instance[0][u'a']:
|
||||
5
|
||||
""",
|
||||
path=[0, u"a"],
|
||||
schema_path=[u"items", 0, 1],
|
||||
)
|
||||
|
||||
def test_uses_pprint(self):
|
||||
self.assertShows(
|
||||
"""
|
||||
Failed validating u'maxLength' in schema:
|
||||
{0: 0,
|
||||
1: 1,
|
||||
2: 2,
|
||||
3: 3,
|
||||
4: 4,
|
||||
5: 5,
|
||||
6: 6,
|
||||
7: 7,
|
||||
8: 8,
|
||||
9: 9,
|
||||
10: 10,
|
||||
11: 11,
|
||||
12: 12,
|
||||
13: 13,
|
||||
14: 14,
|
||||
15: 15,
|
||||
16: 16,
|
||||
17: 17,
|
||||
18: 18,
|
||||
19: 19}
|
||||
|
||||
On instance:
|
||||
[0,
|
||||
1,
|
||||
2,
|
||||
3,
|
||||
4,
|
||||
5,
|
||||
6,
|
||||
7,
|
||||
8,
|
||||
9,
|
||||
10,
|
||||
11,
|
||||
12,
|
||||
13,
|
||||
14,
|
||||
15,
|
||||
16,
|
||||
17,
|
||||
18,
|
||||
19,
|
||||
20,
|
||||
21,
|
||||
22,
|
||||
23,
|
||||
24]
|
||||
""",
|
||||
instance=list(range(25)),
|
||||
schema=dict(zip(range(20), range(20))),
|
||||
validator=u"maxLength",
|
||||
)
|
||||
|
||||
def test_str_works_with_instances_having_overriden_eq_operator(self):
|
||||
"""
|
||||
Check for https://github.com/Julian/jsonschema/issues/164 which
|
||||
rendered exceptions unusable when a `ValidationError` involved
|
||||
instances with an `__eq__` method that returned truthy values.
|
||||
"""
|
||||
|
||||
class DontEQMeBro(object):
|
||||
def __eq__(this, other): # pragma: no cover
|
||||
self.fail("Don't!")
|
||||
|
||||
def __ne__(this, other): # pragma: no cover
|
||||
self.fail("Don't!")
|
||||
|
||||
instance = DontEQMeBro()
|
||||
error = exceptions.ValidationError(
|
||||
"a message",
|
||||
validator="foo",
|
||||
instance=instance,
|
||||
validator_value="some",
|
||||
schema="schema",
|
||||
)
|
||||
self.assertIn(repr(instance), str(error))
|
||||
|
||||
|
||||
class TestHashable(TestCase):
|
||||
def test_hashable(self):
|
||||
set([exceptions.ValidationError("")])
|
||||
set([exceptions.SchemaError("")])
|
||||
@@ -1,89 +0,0 @@
|
||||
"""
|
||||
Tests for the parts of jsonschema related to the :validator:`format` property.
|
||||
"""
|
||||
|
||||
from unittest import TestCase
|
||||
|
||||
from jsonschema import FormatError, ValidationError, FormatChecker
|
||||
from jsonschema.validators import Draft4Validator
|
||||
|
||||
|
||||
BOOM = ValueError("Boom!")
|
||||
BANG = ZeroDivisionError("Bang!")
|
||||
|
||||
|
||||
def boom(thing):
|
||||
if thing == "bang":
|
||||
raise BANG
|
||||
raise BOOM
|
||||
|
||||
|
||||
class TestFormatChecker(TestCase):
|
||||
def test_it_can_validate_no_formats(self):
|
||||
checker = FormatChecker(formats=())
|
||||
self.assertFalse(checker.checkers)
|
||||
|
||||
def test_it_raises_a_key_error_for_unknown_formats(self):
|
||||
with self.assertRaises(KeyError):
|
||||
FormatChecker(formats=["o noes"])
|
||||
|
||||
def test_it_can_register_cls_checkers(self):
|
||||
original = dict(FormatChecker.checkers)
|
||||
self.addCleanup(FormatChecker.checkers.pop, "boom")
|
||||
FormatChecker.cls_checks("boom")(boom)
|
||||
self.assertEqual(
|
||||
FormatChecker.checkers,
|
||||
dict(original, boom=(boom, ())),
|
||||
)
|
||||
|
||||
def test_it_can_register_checkers(self):
|
||||
checker = FormatChecker()
|
||||
checker.checks("boom")(boom)
|
||||
self.assertEqual(
|
||||
checker.checkers,
|
||||
dict(FormatChecker.checkers, boom=(boom, ()))
|
||||
)
|
||||
|
||||
def test_it_catches_registered_errors(self):
|
||||
checker = FormatChecker()
|
||||
checker.checks("boom", raises=type(BOOM))(boom)
|
||||
|
||||
with self.assertRaises(FormatError) as cm:
|
||||
checker.check(instance=12, format="boom")
|
||||
|
||||
self.assertIs(cm.exception.cause, BOOM)
|
||||
self.assertIs(cm.exception.__cause__, BOOM)
|
||||
|
||||
# Unregistered errors should not be caught
|
||||
with self.assertRaises(type(BANG)):
|
||||
checker.check(instance="bang", format="boom")
|
||||
|
||||
def test_format_error_causes_become_validation_error_causes(self):
|
||||
checker = FormatChecker()
|
||||
checker.checks("boom", raises=ValueError)(boom)
|
||||
validator = Draft4Validator({"format": "boom"}, format_checker=checker)
|
||||
|
||||
with self.assertRaises(ValidationError) as cm:
|
||||
validator.validate("BOOM")
|
||||
|
||||
self.assertIs(cm.exception.cause, BOOM)
|
||||
self.assertIs(cm.exception.__cause__, BOOM)
|
||||
|
||||
def test_format_checkers_come_with_defaults(self):
|
||||
# This is bad :/ but relied upon.
|
||||
# The docs for quite awhile recommended people do things like
|
||||
# validate(..., format_checker=FormatChecker())
|
||||
# We should change that, but we can't without deprecation...
|
||||
checker = FormatChecker()
|
||||
with self.assertRaises(FormatError):
|
||||
checker.check(instance="not-an-ipv4", format="ipv4")
|
||||
|
||||
def test_repr(self):
|
||||
checker = FormatChecker(formats=())
|
||||
checker.checks("foo")(lambda thing: True)
|
||||
checker.checks("bar")(lambda thing: True)
|
||||
checker.checks("baz")(lambda thing: True)
|
||||
self.assertEqual(
|
||||
repr(checker),
|
||||
"<FormatChecker checkers=['bar', 'baz', 'foo']>",
|
||||
)
|
||||
@@ -1,277 +0,0 @@
|
||||
"""
|
||||
Test runner for the JSON Schema official test suite
|
||||
|
||||
Tests comprehensive correctness of each draft's validator.
|
||||
|
||||
See https://github.com/json-schema-org/JSON-Schema-Test-Suite for details.
|
||||
"""
|
||||
|
||||
import sys
|
||||
import warnings
|
||||
|
||||
from jsonschema import (
|
||||
Draft3Validator,
|
||||
Draft4Validator,
|
||||
Draft6Validator,
|
||||
Draft7Validator,
|
||||
draft3_format_checker,
|
||||
draft4_format_checker,
|
||||
draft6_format_checker,
|
||||
draft7_format_checker,
|
||||
)
|
||||
from jsonschema.tests._helpers import bug
|
||||
from jsonschema.tests._suite import Suite
|
||||
from jsonschema.validators import _DEPRECATED_DEFAULT_TYPES, create
|
||||
|
||||
|
||||
SUITE = Suite()
|
||||
DRAFT3 = SUITE.version(name="draft3")
|
||||
DRAFT4 = SUITE.version(name="draft4")
|
||||
DRAFT6 = SUITE.version(name="draft6")
|
||||
DRAFT7 = SUITE.version(name="draft7")
|
||||
|
||||
|
||||
def skip(message, **kwargs):
|
||||
def skipper(test):
|
||||
if all(value == getattr(test, attr) for attr, value in kwargs.items()):
|
||||
return message
|
||||
return skipper
|
||||
|
||||
|
||||
def missing_format(checker):
|
||||
def missing_format(test):
|
||||
schema = test.schema
|
||||
if schema is True or schema is False or "format" not in schema:
|
||||
return
|
||||
|
||||
if schema["format"] not in checker.checkers:
|
||||
return "Format checker {0!r} not found.".format(schema["format"])
|
||||
return missing_format
|
||||
|
||||
|
||||
is_narrow_build = sys.maxunicode == 2 ** 16 - 1
|
||||
if is_narrow_build: # pragma: no cover
|
||||
message = "Not running surrogate Unicode case, this Python is narrow."
|
||||
|
||||
def narrow_unicode_build(test): # pragma: no cover
|
||||
return skip(
|
||||
message=message,
|
||||
description="one supplementary Unicode code point is not long enough",
|
||||
)(test) or skip(
|
||||
message=message,
|
||||
description="two supplementary Unicode code points is long enough",
|
||||
)(test)
|
||||
else:
|
||||
def narrow_unicode_build(test): # pragma: no cover
|
||||
return
|
||||
|
||||
|
||||
TestDraft3 = DRAFT3.to_unittest_testcase(
|
||||
DRAFT3.tests(),
|
||||
DRAFT3.optional_tests_of(name="bignum"),
|
||||
DRAFT3.optional_tests_of(name="format"),
|
||||
DRAFT3.optional_tests_of(name="zeroTerminatedFloats"),
|
||||
Validator=Draft3Validator,
|
||||
format_checker=draft3_format_checker,
|
||||
skip=lambda test: (
|
||||
narrow_unicode_build(test)
|
||||
or missing_format(draft3_format_checker)(test)
|
||||
or skip(
|
||||
message="Upstream bug in strict_rfc3339",
|
||||
subject="format",
|
||||
description="case-insensitive T and Z",
|
||||
)(test)
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
TestDraft4 = DRAFT4.to_unittest_testcase(
|
||||
DRAFT4.tests(),
|
||||
DRAFT4.optional_tests_of(name="bignum"),
|
||||
DRAFT4.optional_tests_of(name="format"),
|
||||
DRAFT4.optional_tests_of(name="zeroTerminatedFloats"),
|
||||
Validator=Draft4Validator,
|
||||
format_checker=draft4_format_checker,
|
||||
skip=lambda test: (
|
||||
narrow_unicode_build(test)
|
||||
or missing_format(draft4_format_checker)(test)
|
||||
or skip(
|
||||
message=bug(),
|
||||
subject="ref",
|
||||
case_description="Recursive references between schemas",
|
||||
)(test)
|
||||
or skip(
|
||||
message=bug(371),
|
||||
subject="ref",
|
||||
case_description="Location-independent identifier",
|
||||
)(test)
|
||||
or skip(
|
||||
message=bug(371),
|
||||
subject="ref",
|
||||
case_description=(
|
||||
"Location-independent identifier with absolute URI"
|
||||
),
|
||||
)(test)
|
||||
or skip(
|
||||
message=bug(371),
|
||||
subject="ref",
|
||||
case_description=(
|
||||
"Location-independent identifier with base URI change in subschema"
|
||||
),
|
||||
)(test)
|
||||
or skip(
|
||||
message=bug(),
|
||||
subject="refRemote",
|
||||
case_description="base URI change - change folder in subschema",
|
||||
)(test)
|
||||
or skip(
|
||||
message="Upstream bug in strict_rfc3339",
|
||||
subject="format",
|
||||
description="case-insensitive T and Z",
|
||||
)(test)
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
TestDraft6 = DRAFT6.to_unittest_testcase(
|
||||
DRAFT6.tests(),
|
||||
DRAFT6.optional_tests_of(name="bignum"),
|
||||
DRAFT6.optional_tests_of(name="format"),
|
||||
DRAFT6.optional_tests_of(name="zeroTerminatedFloats"),
|
||||
Validator=Draft6Validator,
|
||||
format_checker=draft6_format_checker,
|
||||
skip=lambda test: (
|
||||
narrow_unicode_build(test)
|
||||
or missing_format(draft6_format_checker)(test)
|
||||
or skip(
|
||||
message=bug(),
|
||||
subject="ref",
|
||||
case_description="Recursive references between schemas",
|
||||
)(test)
|
||||
or skip(
|
||||
message=bug(371),
|
||||
subject="ref",
|
||||
case_description="Location-independent identifier",
|
||||
)(test)
|
||||
or skip(
|
||||
message=bug(371),
|
||||
subject="ref",
|
||||
case_description=(
|
||||
"Location-independent identifier with absolute URI"
|
||||
),
|
||||
)(test)
|
||||
or skip(
|
||||
message=bug(371),
|
||||
subject="ref",
|
||||
case_description=(
|
||||
"Location-independent identifier with base URI change in subschema"
|
||||
),
|
||||
)(test)
|
||||
or skip(
|
||||
message=bug(),
|
||||
subject="refRemote",
|
||||
case_description="base URI change - change folder in subschema",
|
||||
)(test)
|
||||
or skip(
|
||||
message="Upstream bug in strict_rfc3339",
|
||||
subject="format",
|
||||
description="case-insensitive T and Z",
|
||||
)(test)
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
TestDraft7 = DRAFT7.to_unittest_testcase(
|
||||
DRAFT7.tests(),
|
||||
DRAFT7.format_tests(),
|
||||
DRAFT7.optional_tests_of(name="bignum"),
|
||||
DRAFT7.optional_tests_of(name="content"),
|
||||
DRAFT7.optional_tests_of(name="zeroTerminatedFloats"),
|
||||
Validator=Draft7Validator,
|
||||
format_checker=draft7_format_checker,
|
||||
skip=lambda test: (
|
||||
narrow_unicode_build(test)
|
||||
or missing_format(draft7_format_checker)(test)
|
||||
or skip(
|
||||
message=bug(),
|
||||
subject="ref",
|
||||
case_description="Recursive references between schemas",
|
||||
)(test)
|
||||
or skip(
|
||||
message=bug(371),
|
||||
subject="ref",
|
||||
case_description="Location-independent identifier",
|
||||
)(test)
|
||||
or skip(
|
||||
message=bug(371),
|
||||
subject="ref",
|
||||
case_description=(
|
||||
"Location-independent identifier with absolute URI"
|
||||
),
|
||||
)(test)
|
||||
or skip(
|
||||
message=bug(371),
|
||||
subject="ref",
|
||||
case_description=(
|
||||
"Location-independent identifier with base URI change in subschema"
|
||||
),
|
||||
)(test)
|
||||
or skip(
|
||||
message=bug(),
|
||||
subject="refRemote",
|
||||
case_description="base URI change - change folder in subschema",
|
||||
)(test)
|
||||
or skip(
|
||||
message="Upstream bug in strict_rfc3339",
|
||||
subject="date-time",
|
||||
description="case-insensitive T and Z",
|
||||
)(test)
|
||||
or skip(
|
||||
message=bug(593),
|
||||
subject="content",
|
||||
case_description=(
|
||||
"validation of string-encoded content based on media type"
|
||||
),
|
||||
)(test)
|
||||
or skip(
|
||||
message=bug(593),
|
||||
subject="content",
|
||||
case_description="validation of binary string-encoding",
|
||||
)(test)
|
||||
or skip(
|
||||
message=bug(593),
|
||||
subject="content",
|
||||
case_description=(
|
||||
"validation of binary-encoded media type documents"
|
||||
),
|
||||
)(test)
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
with warnings.catch_warnings():
|
||||
warnings.simplefilter("ignore", DeprecationWarning)
|
||||
|
||||
TestDraft3LegacyTypeCheck = DRAFT3.to_unittest_testcase(
|
||||
# Interestingly the any part couldn't really be done w/the old API.
|
||||
(
|
||||
(test for test in each if test.schema != {"type": "any"})
|
||||
for each in DRAFT3.tests_of(name="type")
|
||||
),
|
||||
name="TestDraft3LegacyTypeCheck",
|
||||
Validator=create(
|
||||
meta_schema=Draft3Validator.META_SCHEMA,
|
||||
validators=Draft3Validator.VALIDATORS,
|
||||
default_types=_DEPRECATED_DEFAULT_TYPES,
|
||||
),
|
||||
)
|
||||
|
||||
TestDraft4LegacyTypeCheck = DRAFT4.to_unittest_testcase(
|
||||
DRAFT4.tests_of(name="type"),
|
||||
name="TestDraft4LegacyTypeCheck",
|
||||
Validator=create(
|
||||
meta_schema=Draft4Validator.META_SCHEMA,
|
||||
validators=Draft4Validator.VALIDATORS,
|
||||
default_types=_DEPRECATED_DEFAULT_TYPES,
|
||||
),
|
||||
)
|
||||
@@ -1,190 +0,0 @@
|
||||
"""
|
||||
Tests on the new type interface. The actual correctness of the type checking
|
||||
is handled in test_jsonschema_test_suite; these tests check that TypeChecker
|
||||
functions correctly and can facilitate extensions to type checking
|
||||
"""
|
||||
from collections import namedtuple
|
||||
from unittest import TestCase
|
||||
|
||||
from jsonschema import ValidationError, _validators
|
||||
from jsonschema._types import TypeChecker
|
||||
from jsonschema.exceptions import UndefinedTypeCheck
|
||||
from jsonschema.validators import Draft4Validator, extend
|
||||
|
||||
|
||||
def equals_2(checker, instance):
|
||||
return instance == 2
|
||||
|
||||
|
||||
def is_namedtuple(instance):
|
||||
return isinstance(instance, tuple) and getattr(instance, "_fields", None)
|
||||
|
||||
|
||||
def is_object_or_named_tuple(checker, instance):
|
||||
if Draft4Validator.TYPE_CHECKER.is_type(instance, "object"):
|
||||
return True
|
||||
return is_namedtuple(instance)
|
||||
|
||||
|
||||
def coerce_named_tuple(fn):
|
||||
def coerced(validator, value, instance, schema):
|
||||
if is_namedtuple(instance):
|
||||
instance = instance._asdict()
|
||||
return fn(validator, value, instance, schema)
|
||||
return coerced
|
||||
|
||||
|
||||
required = coerce_named_tuple(_validators.required)
|
||||
properties = coerce_named_tuple(_validators.properties)
|
||||
|
||||
|
||||
class TestTypeChecker(TestCase):
|
||||
def test_is_type(self):
|
||||
checker = TypeChecker({"two": equals_2})
|
||||
self.assertEqual(
|
||||
(
|
||||
checker.is_type(instance=2, type="two"),
|
||||
checker.is_type(instance="bar", type="two"),
|
||||
),
|
||||
(True, False),
|
||||
)
|
||||
|
||||
def test_is_unknown_type(self):
|
||||
with self.assertRaises(UndefinedTypeCheck) as context:
|
||||
TypeChecker().is_type(4, "foobar")
|
||||
self.assertIn("foobar", str(context.exception))
|
||||
|
||||
def test_checks_can_be_added_at_init(self):
|
||||
checker = TypeChecker({"two": equals_2})
|
||||
self.assertEqual(checker, TypeChecker().redefine("two", equals_2))
|
||||
|
||||
def test_redefine_existing_type(self):
|
||||
self.assertEqual(
|
||||
TypeChecker().redefine("two", object()).redefine("two", equals_2),
|
||||
TypeChecker().redefine("two", equals_2),
|
||||
)
|
||||
|
||||
def test_remove(self):
|
||||
self.assertEqual(
|
||||
TypeChecker({"two": equals_2}).remove("two"),
|
||||
TypeChecker(),
|
||||
)
|
||||
|
||||
def test_remove_unknown_type(self):
|
||||
with self.assertRaises(UndefinedTypeCheck) as context:
|
||||
TypeChecker().remove("foobar")
|
||||
self.assertIn("foobar", str(context.exception))
|
||||
|
||||
def test_redefine_many(self):
|
||||
self.assertEqual(
|
||||
TypeChecker().redefine_many({"foo": int, "bar": str}),
|
||||
TypeChecker().redefine("foo", int).redefine("bar", str),
|
||||
)
|
||||
|
||||
def test_remove_multiple(self):
|
||||
self.assertEqual(
|
||||
TypeChecker({"foo": int, "bar": str}).remove("foo", "bar"),
|
||||
TypeChecker(),
|
||||
)
|
||||
|
||||
def test_type_check_can_raise_key_error(self):
|
||||
"""
|
||||
Make sure no one writes:
|
||||
|
||||
try:
|
||||
self._type_checkers[type](...)
|
||||
except KeyError:
|
||||
|
||||
ignoring the fact that the function itself can raise that.
|
||||
"""
|
||||
|
||||
error = KeyError("Stuff")
|
||||
|
||||
def raises_keyerror(checker, instance):
|
||||
raise error
|
||||
|
||||
with self.assertRaises(KeyError) as context:
|
||||
TypeChecker({"foo": raises_keyerror}).is_type(4, "foo")
|
||||
|
||||
self.assertIs(context.exception, error)
|
||||
|
||||
|
||||
class TestCustomTypes(TestCase):
|
||||
def test_simple_type_can_be_extended(self):
|
||||
def int_or_str_int(checker, instance):
|
||||
if not isinstance(instance, (int, str)):
|
||||
return False
|
||||
try:
|
||||
int(instance)
|
||||
except ValueError:
|
||||
return False
|
||||
return True
|
||||
|
||||
CustomValidator = extend(
|
||||
Draft4Validator,
|
||||
type_checker=Draft4Validator.TYPE_CHECKER.redefine(
|
||||
"integer", int_or_str_int,
|
||||
),
|
||||
)
|
||||
validator = CustomValidator({"type": "integer"})
|
||||
|
||||
validator.validate(4)
|
||||
validator.validate("4")
|
||||
|
||||
with self.assertRaises(ValidationError):
|
||||
validator.validate(4.4)
|
||||
|
||||
def test_object_can_be_extended(self):
|
||||
schema = {"type": "object"}
|
||||
|
||||
Point = namedtuple("Point", ["x", "y"])
|
||||
|
||||
type_checker = Draft4Validator.TYPE_CHECKER.redefine(
|
||||
u"object", is_object_or_named_tuple,
|
||||
)
|
||||
|
||||
CustomValidator = extend(Draft4Validator, type_checker=type_checker)
|
||||
validator = CustomValidator(schema)
|
||||
|
||||
validator.validate(Point(x=4, y=5))
|
||||
|
||||
def test_object_extensions_require_custom_validators(self):
|
||||
schema = {"type": "object", "required": ["x"]}
|
||||
|
||||
type_checker = Draft4Validator.TYPE_CHECKER.redefine(
|
||||
u"object", is_object_or_named_tuple,
|
||||
)
|
||||
|
||||
CustomValidator = extend(Draft4Validator, type_checker=type_checker)
|
||||
validator = CustomValidator(schema)
|
||||
|
||||
Point = namedtuple("Point", ["x", "y"])
|
||||
# Cannot handle required
|
||||
with self.assertRaises(ValidationError):
|
||||
validator.validate(Point(x=4, y=5))
|
||||
|
||||
def test_object_extensions_can_handle_custom_validators(self):
|
||||
schema = {
|
||||
"type": "object",
|
||||
"required": ["x"],
|
||||
"properties": {"x": {"type": "integer"}},
|
||||
}
|
||||
|
||||
type_checker = Draft4Validator.TYPE_CHECKER.redefine(
|
||||
u"object", is_object_or_named_tuple,
|
||||
)
|
||||
|
||||
CustomValidator = extend(
|
||||
Draft4Validator,
|
||||
type_checker=type_checker,
|
||||
validators={"required": required, "properties": properties},
|
||||
)
|
||||
|
||||
validator = CustomValidator(schema)
|
||||
|
||||
Point = namedtuple("Point", ["x", "y"])
|
||||
# Can now process required and properties
|
||||
validator.validate(Point(x=4, y=5))
|
||||
|
||||
with self.assertRaises(ValidationError):
|
||||
validator.validate(Point(x="not an integer", y=5))
|
||||
File diff suppressed because it is too large
1
lib/spack/external/_vendoring/macholib.pyi
vendored
@@ -1 +0,0 @@
from macholib import *
21
lib/spack/external/_vendoring/macholib/LICENSE
vendored
@@ -1,21 +0,0 @@
Copyright 2006-2010 - Bob Ippolito
Copyright 2010-2020 - Ronald Oussoren

Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject
to the following conditions:

The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
288
lib/spack/external/_vendoring/markupsafe/__init__.py
vendored
@@ -1,288 +0,0 @@
import functools
import re
import string
import typing as t

if t.TYPE_CHECKING:
    import typing_extensions as te

    class HasHTML(te.Protocol):
        def __html__(self) -> str:
            pass


__version__ = "2.0.1"

_striptags_re = re.compile(r"(<!--.*?-->|<[^>]*>)")


def _simple_escaping_wrapper(name: str) -> t.Callable[..., "Markup"]:
    orig = getattr(str, name)

    @functools.wraps(orig)
    def wrapped(self: "Markup", *args: t.Any, **kwargs: t.Any) -> "Markup":
        args = _escape_argspec(list(args), enumerate(args), self.escape)  # type: ignore
        _escape_argspec(kwargs, kwargs.items(), self.escape)
        return self.__class__(orig(self, *args, **kwargs))

    return wrapped


class Markup(str):
    """A string that is ready to be safely inserted into an HTML or XML
    document, either because it was escaped or because it was marked
    safe.

    Passing an object to the constructor converts it to text and wraps
    it to mark it safe without escaping. To escape the text, use the
    :meth:`escape` class method instead.

    >>> Markup("Hello, <em>World</em>!")
    Markup('Hello, <em>World</em>!')
    >>> Markup(42)
    Markup('42')
    >>> Markup.escape("Hello, <em>World</em>!")
    Markup('Hello, &lt;em&gt;World&lt;/em&gt;!')

    This implements the ``__html__()`` interface that some frameworks
    use. Passing an object that implements ``__html__()`` will wrap the
    output of that method, marking it safe.

    >>> class Foo:
    ...     def __html__(self):
    ...         return '<a href="/foo">foo</a>'
    ...
    >>> Markup(Foo())
    Markup('<a href="/foo">foo</a>')

    This is a subclass of :class:`str`. It has the same methods, but
    escapes their arguments and returns a ``Markup`` instance.

    >>> Markup("<em>%s</em>") % ("foo & bar",)
    Markup('<em>foo &amp; bar</em>')
    >>> Markup("<em>Hello</em> ") + "<foo>"
    Markup('<em>Hello</em> &lt;foo&gt;')
    """

    __slots__ = ()

    def __new__(
        cls, base: t.Any = "", encoding: t.Optional[str] = None, errors: str = "strict"
    ) -> "Markup":
        if hasattr(base, "__html__"):
            base = base.__html__()

        if encoding is None:
            return super().__new__(cls, base)

        return super().__new__(cls, base, encoding, errors)

    def __html__(self) -> "Markup":
        return self

    def __add__(self, other: t.Union[str, "HasHTML"]) -> "Markup":
        if isinstance(other, str) or hasattr(other, "__html__"):
            return self.__class__(super().__add__(self.escape(other)))

        return NotImplemented

    def __radd__(self, other: t.Union[str, "HasHTML"]) -> "Markup":
        if isinstance(other, str) or hasattr(other, "__html__"):
            return self.escape(other).__add__(self)

        return NotImplemented

    def __mul__(self, num: int) -> "Markup":
        if isinstance(num, int):
            return self.__class__(super().__mul__(num))

        return NotImplemented  # type: ignore

    __rmul__ = __mul__

    def __mod__(self, arg: t.Any) -> "Markup":
        if isinstance(arg, tuple):
            arg = tuple(_MarkupEscapeHelper(x, self.escape) for x in arg)
        else:
            arg = _MarkupEscapeHelper(arg, self.escape)

        return self.__class__(super().__mod__(arg))

    def __repr__(self) -> str:
        return f"{self.__class__.__name__}({super().__repr__()})"

    def join(self, seq: t.Iterable[t.Union[str, "HasHTML"]]) -> "Markup":
        return self.__class__(super().join(map(self.escape, seq)))

    join.__doc__ = str.join.__doc__

    def split(  # type: ignore
        self, sep: t.Optional[str] = None, maxsplit: int = -1
    ) -> t.List["Markup"]:
        return [self.__class__(v) for v in super().split(sep, maxsplit)]

    split.__doc__ = str.split.__doc__

    def rsplit(  # type: ignore
        self, sep: t.Optional[str] = None, maxsplit: int = -1
    ) -> t.List["Markup"]:
        return [self.__class__(v) for v in super().rsplit(sep, maxsplit)]

    rsplit.__doc__ = str.rsplit.__doc__

    def splitlines(self, keepends: bool = False) -> t.List["Markup"]:  # type: ignore
        return [self.__class__(v) for v in super().splitlines(keepends)]

    splitlines.__doc__ = str.splitlines.__doc__

    def unescape(self) -> str:
        """Convert escaped markup back into a text string. This replaces
        HTML entities with the characters they represent.

        >>> Markup("Main &raquo; <em>About</em>").unescape()
        'Main » <em>About</em>'
        """
        from html import unescape

        return unescape(str(self))

    def striptags(self) -> str:
        """:meth:`unescape` the markup, remove tags, and normalize
        whitespace to single spaces.

        >>> Markup("Main &raquo;\t<em>About</em>").striptags()
        'Main » About'
        """
        stripped = " ".join(_striptags_re.sub("", self).split())
        return Markup(stripped).unescape()

    @classmethod
    def escape(cls, s: t.Any) -> "Markup":
        """Escape a string. Calls :func:`escape` and ensures that for
        subclasses the correct type is returned.
        """
        rv = escape(s)

        if rv.__class__ is not cls:
            return cls(rv)

        return rv

    for method in (
        "__getitem__",
        "capitalize",
        "title",
        "lower",
        "upper",
        "replace",
        "ljust",
        "rjust",
        "lstrip",
        "rstrip",
        "center",
        "strip",
        "translate",
        "expandtabs",
        "swapcase",
        "zfill",
    ):
        locals()[method] = _simple_escaping_wrapper(method)

    del method

    def partition(self, sep: str) -> t.Tuple["Markup", "Markup", "Markup"]:
        l, s, r = super().partition(self.escape(sep))
        cls = self.__class__
        return cls(l), cls(s), cls(r)

    def rpartition(self, sep: str) -> t.Tuple["Markup", "Markup", "Markup"]:
        l, s, r = super().rpartition(self.escape(sep))
        cls = self.__class__
        return cls(l), cls(s), cls(r)

    def format(self, *args: t.Any, **kwargs: t.Any) -> "Markup":
        formatter = EscapeFormatter(self.escape)
        return self.__class__(formatter.vformat(self, args, kwargs))

    def __html_format__(self, format_spec: str) -> "Markup":
        if format_spec:
            raise ValueError("Unsupported format specification for Markup.")

        return self


class EscapeFormatter(string.Formatter):
    __slots__ = ("escape",)

    def __init__(self, escape: t.Callable[[t.Any], Markup]) -> None:
        self.escape = escape
        super().__init__()

    def format_field(self, value: t.Any, format_spec: str) -> str:
        if hasattr(value, "__html_format__"):
            rv = value.__html_format__(format_spec)
        elif hasattr(value, "__html__"):
            if format_spec:
                raise ValueError(
                    f"Format specifier {format_spec} given, but {type(value)} does not"
                    " define __html_format__. A class that defines __html__ must define"
                    " __html_format__ to work with format specifiers."
                )
            rv = value.__html__()
        else:
            # We need to make sure the format spec is str here as
            # otherwise the wrong callback methods are invoked.
            rv = string.Formatter.format_field(self, value, str(format_spec))
        return str(self.escape(rv))


_ListOrDict = t.TypeVar("_ListOrDict", list, dict)


def _escape_argspec(
    obj: _ListOrDict, iterable: t.Iterable[t.Any], escape: t.Callable[[t.Any], Markup]
) -> _ListOrDict:
    """Helper for various string-wrapped functions."""
    for key, value in iterable:
        if isinstance(value, str) or hasattr(value, "__html__"):
            obj[key] = escape(value)

    return obj


class _MarkupEscapeHelper:
    """Helper for :meth:`Markup.__mod__`."""

    __slots__ = ("obj", "escape")

    def __init__(self, obj: t.Any, escape: t.Callable[[t.Any], Markup]) -> None:
        self.obj = obj
        self.escape = escape

    def __getitem__(self, item: t.Any) -> "_MarkupEscapeHelper":
        return _MarkupEscapeHelper(self.obj[item], self.escape)

    def __str__(self) -> str:
        return str(self.escape(self.obj))

    def __repr__(self) -> str:
        return str(self.escape(repr(self.obj)))

    def __int__(self) -> int:
        return int(self.obj)

    def __float__(self) -> float:
        return float(self.obj)


# circular import
try:
    from ._speedups import escape as escape
    from ._speedups import escape_silent as escape_silent
    from ._speedups import soft_str as soft_str
    from ._speedups import soft_unicode
except ImportError:
    from ._native import escape as escape
    from ._native import escape_silent as escape_silent  # noqa: F401
    from ._native import soft_str as soft_str  # noqa: F401
    from ._native import soft_unicode  # noqa: F401
339
lib/spack/external/_vendoring/markupsafe/_speedups.c
vendored
@@ -1,339 +0,0 @@
#include <Python.h>

static PyObject* markup;

static int
init_constants(void)
{
    PyObject *module;

    /* import markup type so that we can mark the return value */
    module = PyImport_ImportModule("markupsafe");
    if (!module)
        return 0;
    markup = PyObject_GetAttrString(module, "Markup");
    Py_DECREF(module);

    return 1;
}

#define GET_DELTA(inp, inp_end, delta) \
    while (inp < inp_end) { \
        switch (*inp++) { \
        case '"': \
        case '\'': \
        case '&': \
            delta += 4; \
            break; \
        case '<': \
        case '>': \
            delta += 3; \
            break; \
        } \
    }

#define DO_ESCAPE(inp, inp_end, outp) \
    { \
        Py_ssize_t ncopy = 0; \
        while (inp < inp_end) { \
            switch (*inp) { \
            case '"': \
                memcpy(outp, inp-ncopy, sizeof(*outp)*ncopy); \
                outp += ncopy; ncopy = 0; \
                *outp++ = '&'; \
                *outp++ = '#'; \
                *outp++ = '3'; \
                *outp++ = '4'; \
                *outp++ = ';'; \
                break; \
            case '\'': \
                memcpy(outp, inp-ncopy, sizeof(*outp)*ncopy); \
                outp += ncopy; ncopy = 0; \
                *outp++ = '&'; \
                *outp++ = '#'; \
                *outp++ = '3'; \
                *outp++ = '9'; \
                *outp++ = ';'; \
                break; \
            case '&': \
                memcpy(outp, inp-ncopy, sizeof(*outp)*ncopy); \
                outp += ncopy; ncopy = 0; \
                *outp++ = '&'; \
                *outp++ = 'a'; \
                *outp++ = 'm'; \
                *outp++ = 'p'; \
                *outp++ = ';'; \
                break; \
            case '<': \
                memcpy(outp, inp-ncopy, sizeof(*outp)*ncopy); \
                outp += ncopy; ncopy = 0; \
                *outp++ = '&'; \
                *outp++ = 'l'; \
                *outp++ = 't'; \
                *outp++ = ';'; \
                break; \
            case '>': \
                memcpy(outp, inp-ncopy, sizeof(*outp)*ncopy); \
                outp += ncopy; ncopy = 0; \
                *outp++ = '&'; \
                *outp++ = 'g'; \
                *outp++ = 't'; \
                *outp++ = ';'; \
                break; \
            default: \
                ncopy++; \
            } \
            inp++; \
        } \
        memcpy(outp, inp-ncopy, sizeof(*outp)*ncopy); \
    }

static PyObject*
escape_unicode_kind1(PyUnicodeObject *in)
{
    Py_UCS1 *inp = PyUnicode_1BYTE_DATA(in);
    Py_UCS1 *inp_end = inp + PyUnicode_GET_LENGTH(in);
    Py_UCS1 *outp;
    PyObject *out;
    Py_ssize_t delta = 0;

    GET_DELTA(inp, inp_end, delta);
    if (!delta) {
        Py_INCREF(in);
        return (PyObject*)in;
    }

    out = PyUnicode_New(PyUnicode_GET_LENGTH(in) + delta,
                        PyUnicode_IS_ASCII(in) ? 127 : 255);
    if (!out)
        return NULL;

    inp = PyUnicode_1BYTE_DATA(in);
    outp = PyUnicode_1BYTE_DATA(out);
    DO_ESCAPE(inp, inp_end, outp);
    return out;
}

static PyObject*
escape_unicode_kind2(PyUnicodeObject *in)
{
    Py_UCS2 *inp = PyUnicode_2BYTE_DATA(in);
    Py_UCS2 *inp_end = inp + PyUnicode_GET_LENGTH(in);
    Py_UCS2 *outp;
    PyObject *out;
    Py_ssize_t delta = 0;

    GET_DELTA(inp, inp_end, delta);
    if (!delta) {
        Py_INCREF(in);
        return (PyObject*)in;
    }

    out = PyUnicode_New(PyUnicode_GET_LENGTH(in) + delta, 65535);
    if (!out)
        return NULL;

    inp = PyUnicode_2BYTE_DATA(in);
    outp = PyUnicode_2BYTE_DATA(out);
    DO_ESCAPE(inp, inp_end, outp);
    return out;
}


static PyObject*
escape_unicode_kind4(PyUnicodeObject *in)
{
    Py_UCS4 *inp = PyUnicode_4BYTE_DATA(in);
    Py_UCS4 *inp_end = inp + PyUnicode_GET_LENGTH(in);
    Py_UCS4 *outp;
    PyObject *out;
    Py_ssize_t delta = 0;

    GET_DELTA(inp, inp_end, delta);
    if (!delta) {
        Py_INCREF(in);
        return (PyObject*)in;
    }

    out = PyUnicode_New(PyUnicode_GET_LENGTH(in) + delta, 1114111);
    if (!out)
        return NULL;

    inp = PyUnicode_4BYTE_DATA(in);
    outp = PyUnicode_4BYTE_DATA(out);
    DO_ESCAPE(inp, inp_end, outp);
    return out;
}

static PyObject*
escape_unicode(PyUnicodeObject *in)
{
    if (PyUnicode_READY(in))
        return NULL;

    switch (PyUnicode_KIND(in)) {
    case PyUnicode_1BYTE_KIND:
        return escape_unicode_kind1(in);
    case PyUnicode_2BYTE_KIND:
        return escape_unicode_kind2(in);
    case PyUnicode_4BYTE_KIND:
        return escape_unicode_kind4(in);
    }

    assert(0); /* shouldn't happen */
    return NULL;
}

static PyObject*
escape(PyObject *self, PyObject *text)
{
    static PyObject *id_html;
    PyObject *s = NULL, *rv = NULL, *html;

    if (id_html == NULL) {
        id_html = PyUnicode_InternFromString("__html__");
        if (id_html == NULL) {
            return NULL;
        }
    }

    /* we don't have to escape integers, bools or floats */
    if (PyLong_CheckExact(text) ||
        PyFloat_CheckExact(text) || PyBool_Check(text) ||
        text == Py_None)
        return PyObject_CallFunctionObjArgs(markup, text, NULL);

    /* if the object has an __html__ method that performs the escaping */
    html = PyObject_GetAttr(text ,id_html);
    if (html) {
        s = PyObject_CallObject(html, NULL);
        Py_DECREF(html);
        if (s == NULL) {
            return NULL;
        }
        /* Convert to Markup object */
        rv = PyObject_CallFunctionObjArgs(markup, (PyObject*)s, NULL);
        Py_DECREF(s);
        return rv;
    }

    /* otherwise make the object unicode if it isn't, then escape */
    PyErr_Clear();
    if (!PyUnicode_Check(text)) {
        PyObject *unicode = PyObject_Str(text);
        if (!unicode)
            return NULL;
        s = escape_unicode((PyUnicodeObject*)unicode);
        Py_DECREF(unicode);
    }
    else
        s = escape_unicode((PyUnicodeObject*)text);

    /* convert the unicode string into a markup object. */
    rv = PyObject_CallFunctionObjArgs(markup, (PyObject*)s, NULL);
    Py_DECREF(s);
    return rv;
}


static PyObject*
escape_silent(PyObject *self, PyObject *text)
{
    if (text != Py_None)
        return escape(self, text);
    return PyObject_CallFunctionObjArgs(markup, NULL);
}


static PyObject*
soft_str(PyObject *self, PyObject *s)
{
    if (!PyUnicode_Check(s))
        return PyObject_Str(s);
    Py_INCREF(s);
    return s;
}


static PyObject*
soft_unicode(PyObject *self, PyObject *s)
{
    PyErr_WarnEx(
        PyExc_DeprecationWarning,
        "'soft_unicode' has been renamed to 'soft_str'. The old name"
        " will be removed in MarkupSafe 2.1.",
        2
    );
    return soft_str(self, s);
}


static PyMethodDef module_methods[] = {
    {
        "escape",
        (PyCFunction)escape,
        METH_O,
        "Replace the characters ``&``, ``<``, ``>``, ``'``, and ``\"`` in"
        " the string with HTML-safe sequences. Use this if you need to display"
        " text that might contain such characters in HTML.\n\n"
        "If the object has an ``__html__`` method, it is called and the"
        " return value is assumed to already be safe for HTML.\n\n"
        ":param s: An object to be converted to a string and escaped.\n"
        ":return: A :class:`Markup` string with the escaped text.\n"
    },
    {
        "escape_silent",
        (PyCFunction)escape_silent,
        METH_O,
        "Like :func:`escape` but treats ``None`` as the empty string."
        " Useful with optional values, as otherwise you get the string"
        " ``'None'`` when the value is ``None``.\n\n"
        ">>> escape(None)\n"
        "Markup('None')\n"
        ">>> escape_silent(None)\n"
        "Markup('')\n"
    },
    {
        "soft_str",
        (PyCFunction)soft_str,
        METH_O,
        "Convert an object to a string if it isn't already. This preserves"
        " a :class:`Markup` string rather than converting it back to a basic"
        " string, so it will still be marked as safe and won't be escaped"
        " again.\n\n"
        ">>> value = escape(\"<User 1>\")\n"
        ">>> value\n"
        "Markup('&lt;User 1&gt;')\n"
        ">>> escape(str(value))\n"
        "Markup('&amp;lt;User 1&amp;gt;')\n"
        ">>> escape(soft_str(value))\n"
        "Markup('&lt;User 1&gt;')\n"
    },
    {
        "soft_unicode",
        (PyCFunction)soft_unicode,
        METH_O,
        ""
    },
    {NULL, NULL, 0, NULL} /* Sentinel */
};

static struct PyModuleDef module_definition = {
    PyModuleDef_HEAD_INIT,
    "markupsafe._speedups",
    NULL,
    -1,
    module_methods,
    NULL,
    NULL,
    NULL,
    NULL
};

PyMODINIT_FUNC
PyInit__speedups(void)
{
    if (!init_constants())
        return NULL;

    return PyModule_Create(&module_definition);
}
@@ -1,9 +0,0 @@
from typing import Any
from typing import Optional

from . import Markup

def escape(s: Any) -> Markup: ...
def escape_silent(s: Optional[Any]) -> Markup: ...
def soft_str(s: Any) -> str: ...
def soft_unicode(s: Any) -> str: ...
@@ -1,47 +0,0 @@
# -*- coding: utf-8 -*-

from pyrsistent._pmap import pmap, m, PMap

from pyrsistent._pvector import pvector, v, PVector

from pyrsistent._pset import pset, s, PSet

from pyrsistent._pbag import pbag, b, PBag

from pyrsistent._plist import plist, l, PList

from pyrsistent._pdeque import pdeque, dq, PDeque

from pyrsistent._checked_types import (
    CheckedPMap, CheckedPVector, CheckedPSet, InvariantException, CheckedKeyTypeError,
    CheckedValueTypeError, CheckedType, optional)

from pyrsistent._field_common import (
    field, PTypeError, pset_field, pmap_field, pvector_field)

from pyrsistent._precord import PRecord

from pyrsistent._pclass import PClass, PClassMeta

from pyrsistent._immutable import immutable

from pyrsistent._helpers import freeze, thaw, mutant

from pyrsistent._transformations import inc, discard, rex, ny

from pyrsistent._toolz import get_in


__all__ = ('pmap', 'm', 'PMap',
           'pvector', 'v', 'PVector',
           'pset', 's', 'PSet',
           'pbag', 'b', 'PBag',
           'plist', 'l', 'PList',
           'pdeque', 'dq', 'PDeque',
           'CheckedPMap', 'CheckedPVector', 'CheckedPSet', 'InvariantException', 'CheckedKeyTypeError', 'CheckedValueTypeError', 'CheckedType', 'optional',
           'PRecord', 'field', 'pset_field', 'pmap_field', 'pvector_field',
           'PClass', 'PClassMeta',
           'immutable',
           'freeze', 'thaw', 'mutant',
           'get_in',
           'inc', 'discard', 'rex', 'ny')
@@ -1,213 +0,0 @@
|
||||
# flake8: noqa: E704
|
||||
# from https://gist.github.com/WuTheFWasThat/091a17d4b5cab597dfd5d4c2d96faf09
|
||||
# Stubs for pyrsistent (Python 3.6)
|
||||
|
||||
from typing import Any
|
||||
from typing import AnyStr
|
||||
from typing import Callable
|
||||
from typing import Iterable
|
||||
from typing import Iterator
|
||||
from typing import List
|
||||
from typing import Optional
|
||||
from typing import Mapping
|
||||
from typing import MutableMapping
|
||||
from typing import Sequence
|
||||
from typing import Set
|
||||
from typing import Union
|
||||
from typing import Tuple
|
||||
from typing import Type
|
||||
from typing import TypeVar
|
||||
from typing import overload
|
||||
|
||||
# see commit 08519aa for explanation of the re-export
|
||||
from pyrsistent.typing import CheckedKeyTypeError as CheckedKeyTypeError
|
||||
from pyrsistent.typing import CheckedPMap as CheckedPMap
|
||||
from pyrsistent.typing import CheckedPSet as CheckedPSet
|
||||
from pyrsistent.typing import CheckedPVector as CheckedPVector
|
||||
from pyrsistent.typing import CheckedType as CheckedType
|
||||
from pyrsistent.typing import CheckedValueTypeError as CheckedValueTypeError
|
||||
from pyrsistent.typing import InvariantException as InvariantException
|
||||
from pyrsistent.typing import PClass as PClass
|
||||
from pyrsistent.typing import PBag as PBag
|
||||
from pyrsistent.typing import PDeque as PDeque
|
||||
from pyrsistent.typing import PList as PList
|
||||
from pyrsistent.typing import PMap as PMap
|
||||
from pyrsistent.typing import PMapEvolver as PMapEvolver
|
||||
from pyrsistent.typing import PSet as PSet
|
||||
from pyrsistent.typing import PSetEvolver as PSetEvolver
|
||||
from pyrsistent.typing import PTypeError as PTypeError
|
||||
from pyrsistent.typing import PVector as PVector
|
||||
from pyrsistent.typing import PVectorEvolver as PVectorEvolver
|
||||
|
||||
T = TypeVar('T')
|
||||
KT = TypeVar('KT')
|
||||
VT = TypeVar('VT')
|
||||
|
||||
def pmap(initial: Union[Mapping[KT, VT], Iterable[Tuple[KT, VT]]] = {}, pre_size: int = 0) -> PMap[KT, VT]: ...
|
||||
def m(**kwargs: VT) -> PMap[str, VT]: ...
|
||||
|
||||
def pvector(iterable: Iterable[T] = ...) -> PVector[T]: ...
|
||||
def v(*iterable: T) -> PVector[T]: ...
|
||||
|
||||
def pset(iterable: Iterable[T] = (), pre_size: int = 8) -> PSet[T]: ...
|
||||
def s(*iterable: T) -> PSet[T]: ...
|
||||
|
||||
# see class_test.py for use cases
|
||||
Invariant = Tuple[bool, Optional[Union[str, Callable[[], str]]]]
|
||||
|
||||
@overload
|
||||
def field(
|
||||
type: Union[Type[T], Sequence[Type[T]]] = ...,
|
||||
invariant: Callable[[Any], Union[Invariant, Iterable[Invariant]]] = lambda _: (True, None),
|
||||
initial: Any = object(),
|
||||
mandatory: bool = False,
|
||||
factory: Callable[[Any], T] = lambda x: x,
|
||||
serializer: Callable[[Any, T], Any] = lambda _, value: value,
|
||||
) -> T: ...
|
||||
# The actual return value (_PField) is irrelevant after a PRecord has been instantiated,
|
||||
# see https://github.com/tobgu/pyrsistent/blob/master/pyrsistent/_precord.py#L10
|
||||
@overload
|
||||
def field(
|
||||
type: Any = ...,
|
||||
invariant: Callable[[Any], Union[Invariant, Iterable[Invariant]]] = lambda _: (True, None),
|
||||
initial: Any = object(),
|
||||
mandatory: bool = False,
|
||||
factory: Callable[[Any], Any] = lambda x: x,
|
||||
serializer: Callable[[Any, Any], Any] = lambda _, value: value,
|
||||
) -> Any: ...
|
||||
|
||||
# Use precise types for the simplest use cases, but fall back to Any for
|
||||
# everything else. See record_test.py for the wide range of possible types for
|
||||
# item_type
|
||||
@overload
|
||||
def pset_field(
|
||||
item_type: Type[T],
|
||||
optional: bool = False,
|
||||
initial: Iterable[T] = ...,
|
||||
) -> PSet[T]: ...
|
||||
@overload
|
||||
def pset_field(
|
||||
item_type: Any,
|
||||
optional: bool = False,
|
||||
initial: Any = (),
|
||||
) -> PSet[Any]: ...
|
||||
|
||||
@overload
|
||||
def pmap_field(
|
||||
key_type: Type[KT],
|
||||
value_type: Type[VT],
|
||||
optional: bool = False,
|
||||
invariant: Callable[[Any], Tuple[bool, Optional[str]]] = lambda _: (True, None),
|
||||
) -> PMap[KT, VT]: ...
@overload
def pmap_field(
    key_type: Any,
    value_type: Any,
    optional: bool = False,
    invariant: Callable[[Any], Tuple[bool, Optional[str]]] = lambda _: (True, None),
) -> PMap[Any, Any]: ...

@overload
def pvector_field(
    item_type: Type[T],
    optional: bool = False,
    initial: Iterable[T] = ...,
) -> PVector[T]: ...
@overload
def pvector_field(
    item_type: Any,
    optional: bool = False,
    initial: Any = (),
) -> PVector[Any]: ...

def pbag(elements: Iterable[T]) -> PBag[T]: ...
def b(*elements: T) -> PBag[T]: ...

def plist(iterable: Iterable[T] = (), reverse: bool = False) -> PList[T]: ...
def l(*elements: T) -> PList[T]: ...

def pdeque(iterable: Optional[Iterable[T]] = None, maxlen: Optional[int] = None) -> PDeque[T]: ...
def dq(*iterable: T) -> PDeque[T]: ...

@overload
def optional(type: T) -> Tuple[T, Type[None]]: ...
@overload
def optional(*typs: Any) -> Tuple[Any, ...]: ...

T_PRecord = TypeVar('T_PRecord', bound='PRecord')
class PRecord(PMap[AnyStr, Any]):
    _precord_fields: Mapping
    _precord_initial_values: Mapping

    def __hash__(self) -> int: ...
    def __init__(self, **kwargs: Any) -> None: ...
    def __iter__(self) -> Iterator[Any]: ...
    def __len__(self) -> int: ...
    @classmethod
    def create(
        cls: Type[T_PRecord],
        kwargs: Mapping,
        _factory_fields: Optional[Iterable] = None,
        ignore_extra: bool = False,
    ) -> T_PRecord: ...
    # This is OK because T_PRecord is a concrete type
    def discard(self: T_PRecord, key: KT) -> T_PRecord: ...
    def remove(self: T_PRecord, key: KT) -> T_PRecord: ...

    def serialize(self, format: Optional[Any] = ...) -> MutableMapping: ...

    # From pyrsistent documentation:
    # This set function differs slightly from that in the PMap
    # class. First of all it accepts key-value pairs. Second it accepts multiple key-value
    # pairs to perform one, atomic, update of multiple fields.
    @overload
    def set(self, key: KT, val: VT) -> Any: ...
    @overload
    def set(self, **kwargs: VT) -> Any: ...

def immutable(
    members: Union[str, Iterable[str]] = '',
    name: str = 'Immutable',
    verbose: bool = False,
) -> Tuple: ...  # actually a namedtuple

# ignore mypy warning "Overloaded function signatures 1 and 5 overlap with
# incompatible return types"
@overload
def freeze(o: Mapping[KT, VT]) -> PMap[KT, VT]: ...  # type: ignore
@overload
def freeze(o: List[T]) -> PVector[T]: ...  # type: ignore
@overload
def freeze(o: Tuple[T, ...]) -> Tuple[T, ...]: ...
@overload
def freeze(o: Set[T]) -> PSet[T]: ...  # type: ignore
@overload
def freeze(o: T) -> T: ...


@overload
def thaw(o: PMap[KT, VT]) -> MutableMapping[KT, VT]: ...  # type: ignore
@overload
def thaw(o: PVector[T]) -> List[T]: ...  # type: ignore
@overload
def thaw(o: Tuple[T, ...]) -> Tuple[T, ...]: ...
# collections.abc.MutableSet is kind of garbage:
# https://stackoverflow.com/questions/24977898/why-does-collections-mutableset-not-bestow-an-update-method
@overload
def thaw(o: PSet[T]) -> Set[T]: ...  # type: ignore
@overload
def thaw(o: T) -> T: ...

def mutant(fn: Callable) -> Callable: ...

def inc(x: int) -> int: ...
@overload
def discard(evolver: PMapEvolver[KT, VT], key: KT) -> None: ...
@overload
def discard(evolver: PVectorEvolver[T], key: int) -> None: ...
@overload
def discard(evolver: PSetEvolver[T], key: T) -> None: ...
def rex(expr: str) -> Callable[[Any], bool]: ...
def ny(_: Any) -> bool: ...

def get_in(keys: Iterable, coll: Mapping, default: Optional[Any] = None, no_default: bool = False) -> Any: ...
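
# A minimal sketch (an illustration, not part of the vendored stub) of how the
# freeze/thaw overloads above resolve under a type checker such as mypy:
#
#     from pyrsistent import freeze, thaw
#     frozen = freeze([1, 2, 3])   # matches the List[T] overload -> PVector[int]
#     plain = thaw(frozen)         # matches the PVector[T] overload -> List[int]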
@@ -1,542 +0,0 @@
from enum import Enum

from abc import abstractmethod, ABCMeta
from collections.abc import Iterable

from pyrsistent._pmap import PMap, pmap
from pyrsistent._pset import PSet, pset
from pyrsistent._pvector import PythonPVector, python_pvector


class CheckedType(object):
    """
    Marker class to enable creation and serialization of checked object graphs.
    """
    __slots__ = ()

    @classmethod
    @abstractmethod
    def create(cls, source_data, _factory_fields=None):
        raise NotImplementedError()

    @abstractmethod
    def serialize(self, format=None):
        raise NotImplementedError()


def _restore_pickle(cls, data):
    return cls.create(data, _factory_fields=set())


class InvariantException(Exception):
    """
    Exception raised from a :py:class:`CheckedType` when invariant tests fail or when a mandatory
    field is missing.

    Contains two fields of interest:
    invariant_errors, a tuple of error data for the failing invariants
    missing_fields, a tuple of strings specifying the missing names
    """

    def __init__(self, error_codes=(), missing_fields=(), *args, **kwargs):
        self.invariant_errors = tuple(e() if callable(e) else e for e in error_codes)
        self.missing_fields = missing_fields
        super(InvariantException, self).__init__(*args, **kwargs)

    def __str__(self):
        return super(InvariantException, self).__str__() + \
            ", invariant_errors=[{invariant_errors}], missing_fields=[{missing_fields}]".format(
            invariant_errors=', '.join(str(e) for e in self.invariant_errors),
            missing_fields=', '.join(self.missing_fields))


_preserved_iterable_types = (
    Enum,
)
"""Some types are themselves iterable, but we want to use the type itself and
not its members for the type specification. This defines a set of such types
that we explicitly preserve.

Note that strings are not such types because the string inputs we pass in are
values, not types.
"""


def maybe_parse_user_type(t):
    """Try to coerce a user-supplied type directive into a list of types.

    This function should be used in all places where a user specifies a type,
    for consistency.

    The policy for what defines valid user input should be clear from the implementation.
    """
    is_type = isinstance(t, type)
    is_preserved = isinstance(t, type) and issubclass(t, _preserved_iterable_types)
    is_string = isinstance(t, str)
    is_iterable = isinstance(t, Iterable)

    if is_preserved:
        return [t]
    elif is_string:
        return [t]
    elif is_type and not is_iterable:
        return [t]
    elif is_iterable:
        # Recur to validate contained types as well.
        ts = t
        return tuple(e for t in ts for e in maybe_parse_user_type(t))
    else:
        # If this raises because `t` cannot be formatted, so be it.
        raise TypeError(
            'Type specifications must be types or strings. Input: {}'.format(t)
        )


def maybe_parse_many_user_types(ts):
    # Just a different name to communicate that you're parsing multiple user
    # inputs. `maybe_parse_user_type` handles the iterable case anyway.
    return maybe_parse_user_type(ts)
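
# A minimal sketch of the parsing policy above; illustrative only, and the
# dotted name 'mypkg.MyClass' is a hypothetical example (it is resolved lazily
# by get_type further down):
assert maybe_parse_user_type(int) == [int]                          # plain type
assert maybe_parse_user_type('mypkg.MyClass') == ['mypkg.MyClass']  # dotted name kept as a string
assert maybe_parse_many_user_types((int, float)) == (int, float)    # iterable of types, flattened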


def _store_types(dct, bases, destination_name, source_name):
    maybe_types = maybe_parse_many_user_types([
        d[source_name]
        for d in ([dct] + [b.__dict__ for b in bases]) if source_name in d
    ])

    dct[destination_name] = maybe_types


def _merge_invariant_results(result):
    verdict = True
    data = []
    for verd, dat in result:
        if not verd:
            verdict = False
            data.append(dat)

    return verdict, tuple(data)


def wrap_invariant(invariant):
    # Invariant functions may return the outcome of several tests
    # In those cases the results have to be merged before being passed
    # back to the client.
    def f(*args, **kwargs):
        result = invariant(*args, **kwargs)
        if isinstance(result[0], bool):
            return result

        return _merge_invariant_results(result)

    return f
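
# A minimal sketch of the merging behaviour above; illustrative only:
multi = wrap_invariant(lambda x: ((x > 0, 'non-positive'), (x % 2 == 0, 'odd')))
assert multi(4) == (True, ())
assert multi(-3) == (False, ('non-positive', 'odd'))

single = wrap_invariant(lambda x: (x > 0, 'non-positive'))
assert single(1) == (True, 'non-positive')  # a single boolean result passes through unmerged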


def _all_dicts(bases, seen=None):
    """
    Yield the ``__dict__`` of each class in ``bases`` and of each of their base classes.
    """
    if seen is None:
        seen = set()
    for cls in bases:
        if cls in seen:
            continue
        seen.add(cls)
        yield cls.__dict__
        for b in _all_dicts(cls.__bases__, seen):
            yield b


def store_invariants(dct, bases, destination_name, source_name):
    # Invariants are inherited
    invariants = []
    for ns in [dct] + list(_all_dicts(bases)):
        try:
            invariant = ns[source_name]
        except KeyError:
            continue
        invariants.append(invariant)

    if not all(callable(invariant) for invariant in invariants):
        raise TypeError('Invariants must be callable')
    dct[destination_name] = tuple(wrap_invariant(inv) for inv in invariants)


class _CheckedTypeMeta(ABCMeta):
    def __new__(mcs, name, bases, dct):
        _store_types(dct, bases, '_checked_types', '__type__')
        store_invariants(dct, bases, '_checked_invariants', '__invariant__')

        def default_serializer(self, _, value):
            if isinstance(value, CheckedType):
                return value.serialize()
            return value

        dct.setdefault('__serializer__', default_serializer)

        dct['__slots__'] = ()

        return super(_CheckedTypeMeta, mcs).__new__(mcs, name, bases, dct)


class CheckedTypeError(TypeError):
    def __init__(self, source_class, expected_types, actual_type, actual_value, *args, **kwargs):
        super(CheckedTypeError, self).__init__(*args, **kwargs)
        self.source_class = source_class
        self.expected_types = expected_types
        self.actual_type = actual_type
        self.actual_value = actual_value


class CheckedKeyTypeError(CheckedTypeError):
    """
    Raised when trying to set a value using a key with a type that doesn't match the declared key type.

    Attributes:
    source_class -- The class of the collection
    expected_types -- Allowed types
    actual_type -- The non-matching type
    actual_value -- Value of the variable with the non-matching type
    """
    pass


class CheckedValueTypeError(CheckedTypeError):
    """
    Raised when trying to set a value with a type that doesn't match the declared value type.

    Attributes:
    source_class -- The class of the collection
    expected_types -- Allowed types
    actual_type -- The non-matching type
    actual_value -- Value of the variable with the non-matching type
    """
    pass


def _get_class(type_name):
    module_name, class_name = type_name.rsplit('.', 1)
    module = __import__(module_name, fromlist=[class_name])
    return getattr(module, class_name)


def get_type(typ):
    if isinstance(typ, type):
        return typ

    return _get_class(typ)


def get_types(typs):
    return [get_type(typ) for typ in typs]


def _check_types(it, expected_types, source_class, exception_type=CheckedValueTypeError):
    if expected_types:
        for e in it:
            if not any(isinstance(e, get_type(t)) for t in expected_types):
                actual_type = type(e)
                msg = "Type {source_class} can only be used with {expected_types}, not {actual_type}".format(
                    source_class=source_class.__name__,
                    expected_types=tuple(get_type(et).__name__ for et in expected_types),
                    actual_type=actual_type.__name__)
                raise exception_type(source_class, expected_types, actual_type, e, msg)


def _invariant_errors(elem, invariants):
    return [data for valid, data in (invariant(elem) for invariant in invariants) if not valid]


def _invariant_errors_iterable(it, invariants):
    return sum([_invariant_errors(elem, invariants) for elem in it], [])


def optional(*typs):
    """ Convenience function to specify that a value may be of any of the types in 'typs', or None """
    return tuple(typs) + (type(None),)


def _checked_type_create(cls, source_data, _factory_fields=None, ignore_extra=False):
    if isinstance(source_data, cls):
        return source_data

    # Recursively apply create methods of checked types if the types of the supplied data
    # do not match any of the valid types.
    types = get_types(cls._checked_types)
    checked_type = next((t for t in types if issubclass(t, CheckedType)), None)
    if checked_type:
        return cls([checked_type.create(data, ignore_extra=ignore_extra)
                    if not any(isinstance(data, t) for t in types) else data
                    for data in source_data])

    return cls(source_data)


class CheckedPVector(PythonPVector, CheckedType, metaclass=_CheckedTypeMeta):
    """
    A CheckedPVector is a PVector which allows specifying type and invariant checks.

    >>> class Positives(CheckedPVector):
    ...     __type__ = (int, float)
    ...     __invariant__ = lambda n: (n >= 0, 'Negative')
    ...
    >>> Positives([1, 2, 3])
    Positives([1, 2, 3])
    """

    __slots__ = ()

    def __new__(cls, initial=()):
        if type(initial) == PythonPVector:
            return super(CheckedPVector, cls).__new__(cls, initial._count, initial._shift, initial._root, initial._tail)

        return CheckedPVector.Evolver(cls, python_pvector()).extend(initial).persistent()

    def set(self, key, value):
        return self.evolver().set(key, value).persistent()

    def append(self, val):
        return self.evolver().append(val).persistent()

    def extend(self, it):
        return self.evolver().extend(it).persistent()

    create = classmethod(_checked_type_create)

    def serialize(self, format=None):
        serializer = self.__serializer__
        return list(serializer(format, v) for v in self)

    def __reduce__(self):
        # Pickling support
        return _restore_pickle, (self.__class__, list(self),)

    class Evolver(PythonPVector.Evolver):
        __slots__ = ('_destination_class', '_invariant_errors')

        def __init__(self, destination_class, vector):
            super(CheckedPVector.Evolver, self).__init__(vector)
            self._destination_class = destination_class
            self._invariant_errors = []

        def _check(self, it):
            _check_types(it, self._destination_class._checked_types, self._destination_class)
            error_data = _invariant_errors_iterable(it, self._destination_class._checked_invariants)
            self._invariant_errors.extend(error_data)

        def __setitem__(self, key, value):
            self._check([value])
            return super(CheckedPVector.Evolver, self).__setitem__(key, value)

        def append(self, elem):
            self._check([elem])
            return super(CheckedPVector.Evolver, self).append(elem)

        def extend(self, it):
            it = list(it)
            self._check(it)
            return super(CheckedPVector.Evolver, self).extend(it)

        def persistent(self):
            if self._invariant_errors:
                raise InvariantException(error_codes=self._invariant_errors)

            result = self._orig_pvector
            if self.is_dirty() or (self._destination_class != type(self._orig_pvector)):
                pv = super(CheckedPVector.Evolver, self).persistent().extend(self._extra_tail)
                result = self._destination_class(pv)
                self._reset(result)

            return result

    def __repr__(self):
        return self.__class__.__name__ + "({0})".format(self.tolist())

    __str__ = __repr__

    def evolver(self):
        return CheckedPVector.Evolver(self.__class__, self)
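
# A minimal usage sketch reusing the Positives example from the docstring
# above; illustrative only:
class Positives(CheckedPVector):
    __type__ = (int, float)
    __invariant__ = lambda n: (n >= 0, 'Negative')

try:
    Positives([1, -2])
except InvariantException as e:
    assert e.invariant_errors == ('Negative',)

try:
    Positives([1, 'two'])
except CheckedValueTypeError as e:
    assert e.actual_type is str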


class CheckedPSet(PSet, CheckedType, metaclass=_CheckedTypeMeta):
    """
    A CheckedPSet is a PSet which allows specifying type and invariant checks.

    >>> class Positives(CheckedPSet):
    ...     __type__ = (int, float)
    ...     __invariant__ = lambda n: (n >= 0, 'Negative')
    ...
    >>> Positives([1, 2, 3])
    Positives([1, 2, 3])
    """

    __slots__ = ()

    def __new__(cls, initial=()):
        if type(initial) is PMap:
            return super(CheckedPSet, cls).__new__(cls, initial)

        evolver = CheckedPSet.Evolver(cls, pset())
        for e in initial:
            evolver.add(e)

        return evolver.persistent()

    def __repr__(self):
        return self.__class__.__name__ + super(CheckedPSet, self).__repr__()[4:]

    def __str__(self):
        return self.__repr__()

    def serialize(self, format=None):
        serializer = self.__serializer__
        return set(serializer(format, v) for v in self)

    create = classmethod(_checked_type_create)

    def __reduce__(self):
        # Pickling support
        return _restore_pickle, (self.__class__, list(self),)

    def evolver(self):
        return CheckedPSet.Evolver(self.__class__, self)

    class Evolver(PSet._Evolver):
        __slots__ = ('_destination_class', '_invariant_errors')

        def __init__(self, destination_class, original_set):
            super(CheckedPSet.Evolver, self).__init__(original_set)
            self._destination_class = destination_class
            self._invariant_errors = []

        def _check(self, it):
            _check_types(it, self._destination_class._checked_types, self._destination_class)
            error_data = _invariant_errors_iterable(it, self._destination_class._checked_invariants)
            self._invariant_errors.extend(error_data)

        def add(self, element):
            self._check([element])
            self._pmap_evolver[element] = True
            return self

        def persistent(self):
            if self._invariant_errors:
                raise InvariantException(error_codes=self._invariant_errors)

            if self.is_dirty() or self._destination_class != type(self._original_pset):
                return self._destination_class(self._pmap_evolver.persistent())

            return self._original_pset


class _CheckedMapTypeMeta(type):
    def __new__(mcs, name, bases, dct):
        _store_types(dct, bases, '_checked_key_types', '__key_type__')
        _store_types(dct, bases, '_checked_value_types', '__value_type__')
        store_invariants(dct, bases, '_checked_invariants', '__invariant__')

        def default_serializer(self, _, key, value):
            sk = key
            if isinstance(key, CheckedType):
                sk = key.serialize()

            sv = value
            if isinstance(value, CheckedType):
                sv = value.serialize()

            return sk, sv

        dct.setdefault('__serializer__', default_serializer)

        dct['__slots__'] = ()

        return super(_CheckedMapTypeMeta, mcs).__new__(mcs, name, bases, dct)

# Marker object
_UNDEFINED_CHECKED_PMAP_SIZE = object()


class CheckedPMap(PMap, CheckedType, metaclass=_CheckedMapTypeMeta):
    """
    A CheckedPMap is a PMap which allows specifying type and invariant checks.

    >>> class IntToFloatMap(CheckedPMap):
    ...     __key_type__ = int
    ...     __value_type__ = float
    ...     __invariant__ = lambda k, v: (int(v) == k, 'Invalid mapping')
    ...
    >>> IntToFloatMap({1: 1.5, 2: 2.25})
    IntToFloatMap({1: 1.5, 2: 2.25})
    """

    __slots__ = ()

    def __new__(cls, initial={}, size=_UNDEFINED_CHECKED_PMAP_SIZE):
        if size is not _UNDEFINED_CHECKED_PMAP_SIZE:
            return super(CheckedPMap, cls).__new__(cls, size, initial)

        evolver = CheckedPMap.Evolver(cls, pmap())
        for k, v in initial.items():
            evolver.set(k, v)

        return evolver.persistent()

    def evolver(self):
        return CheckedPMap.Evolver(self.__class__, self)

    def __repr__(self):
        return self.__class__.__name__ + "({0})".format(str(dict(self)))

    __str__ = __repr__

    def serialize(self, format=None):
        serializer = self.__serializer__
        return dict(serializer(format, k, v) for k, v in self.items())

    @classmethod
    def create(cls, source_data, _factory_fields=None):
        if isinstance(source_data, cls):
            return source_data

        # Recursively apply create methods of checked types if the types of the supplied data
        # do not match any of the valid types.
        key_types = get_types(cls._checked_key_types)
        checked_key_type = next((t for t in key_types if issubclass(t, CheckedType)), None)
        value_types = get_types(cls._checked_value_types)
        checked_value_type = next((t for t in value_types if issubclass(t, CheckedType)), None)

        if checked_key_type or checked_value_type:
            return cls(dict((checked_key_type.create(key) if checked_key_type and not any(isinstance(key, t) for t in key_types) else key,
                             checked_value_type.create(value) if checked_value_type and not any(isinstance(value, t) for t in value_types) else value)
                            for key, value in source_data.items()))

        return cls(source_data)

    def __reduce__(self):
        # Pickling support
        return _restore_pickle, (self.__class__, dict(self),)

    class Evolver(PMap._Evolver):
        __slots__ = ('_destination_class', '_invariant_errors')

        def __init__(self, destination_class, original_map):
            super(CheckedPMap.Evolver, self).__init__(original_map)
            self._destination_class = destination_class
            self._invariant_errors = []

        def set(self, key, value):
            _check_types([key], self._destination_class._checked_key_types, self._destination_class, CheckedKeyTypeError)
            _check_types([value], self._destination_class._checked_value_types, self._destination_class)
            self._invariant_errors.extend(data for valid, data in (invariant(key, value)
                                                                   for invariant in self._destination_class._checked_invariants)
                                          if not valid)

            return super(CheckedPMap.Evolver, self).set(key, value)

        def persistent(self):
            if self._invariant_errors:
                raise InvariantException(error_codes=self._invariant_errors)

            if self.is_dirty() or type(self._original_pmap) != self._destination_class:
                return self._destination_class(self._buckets_evolver.persistent(), self._size)

            return self._original_pmap
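
# A minimal usage sketch reusing the IntToFloatMap example from the docstring
# above; illustrative only:
class IntToFloatMap(CheckedPMap):
    __key_type__ = int
    __value_type__ = float

m = IntToFloatMap({1: 1.5})
m2 = m.set(2, 2.25)  # returns a new checked map; m is unchanged
assert dict(m2) == {1: 1.5, 2: 2.25}
assert m2.serialize() == {1: 1.5, 2: 2.25}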
@@ -1,329 +0,0 @@
import sys

from pyrsistent._checked_types import (
    CheckedPMap,
    CheckedPSet,
    CheckedPVector,
    CheckedType,
    InvariantException,
    _restore_pickle,
    get_type,
    maybe_parse_user_type,
    maybe_parse_many_user_types,
)
from pyrsistent._checked_types import optional as optional_type
from pyrsistent._checked_types import wrap_invariant
import inspect

PY2 = sys.version_info[0] < 3


def set_fields(dct, bases, name):
    dct[name] = dict(sum([list(b.__dict__.get(name, {}).items()) for b in bases], []))

    for k, v in list(dct.items()):
        if isinstance(v, _PField):
            dct[name][k] = v
            del dct[k]


def check_global_invariants(subject, invariants):
    error_codes = tuple(error_code for is_ok, error_code in
                        (invariant(subject) for invariant in invariants) if not is_ok)
    if error_codes:
        raise InvariantException(error_codes, (), 'Global invariant failed')


def serialize(serializer, format, value):
    if isinstance(value, CheckedType) and serializer is PFIELD_NO_SERIALIZER:
        return value.serialize(format)

    return serializer(format, value)


def check_type(destination_cls, field, name, value):
    if field.type and not any(isinstance(value, get_type(t)) for t in field.type):
        actual_type = type(value)
        message = "Invalid type for field {0}.{1}, was {2}".format(destination_cls.__name__, name, actual_type.__name__)
        raise PTypeError(destination_cls, name, field.type, actual_type, message)


def is_type_cls(type_cls, field_type):
    if type(field_type) is set:
        return True
    types = tuple(field_type)
    if len(types) == 0:
        return False
    return issubclass(get_type(types[0]), type_cls)


def is_field_ignore_extra_complaint(type_cls, field, ignore_extra):
    # The ignore_extra param defaults to False; for speed, there is no need to propagate False.
    if not ignore_extra:
        return False

    if not is_type_cls(type_cls, field.type):
        return False

    if PY2:
        return 'ignore_extra' in inspect.getargspec(field.factory).args
    else:
        return 'ignore_extra' in inspect.signature(field.factory).parameters


class _PField(object):
    __slots__ = ('type', 'invariant', 'initial', 'mandatory', '_factory', 'serializer')

    def __init__(self, type, invariant, initial, mandatory, factory, serializer):
        self.type = type
        self.invariant = invariant
        self.initial = initial
        self.mandatory = mandatory
        self._factory = factory
        self.serializer = serializer

    @property
    def factory(self):
        # If no factory is specified and the type is another CheckedType use the factory method of that CheckedType
        if self._factory is PFIELD_NO_FACTORY and len(self.type) == 1:
            typ = get_type(tuple(self.type)[0])
            if issubclass(typ, CheckedType):
                return typ.create

        return self._factory

PFIELD_NO_TYPE = ()
PFIELD_NO_INVARIANT = lambda _: (True, None)
PFIELD_NO_FACTORY = lambda x: x
PFIELD_NO_INITIAL = object()
PFIELD_NO_SERIALIZER = lambda _, value: value


def field(type=PFIELD_NO_TYPE, invariant=PFIELD_NO_INVARIANT, initial=PFIELD_NO_INITIAL,
          mandatory=False, factory=PFIELD_NO_FACTORY, serializer=PFIELD_NO_SERIALIZER):
    """
    Field specification factory for :py:class:`PRecord`.

    :param type: a type or iterable with types that are allowed for this field
    :param invariant: a function specifying an invariant that must hold for the field
    :param initial: value of field if not specified when instantiating the record
    :param mandatory: boolean specifying if the field is mandatory or not
    :param factory: function called when field is set.
    :param serializer: function that returns a serialized version of the field
    """

    # NB: We have to check this predicate separately from the predicates in
    # `maybe_parse_user_type` et al. because this one is related to supporting
    # the argspec for `field`, while those are related to supporting the valid
    # ways to specify types.

    # Multiple types must be passed in one of the following containers. Note
    # that a type that is a subclass of one of these containers, like a
    # `collections.namedtuple`, will work as expected, since we check
    # `isinstance` and not `issubclass`.
    if isinstance(type, (list, set, tuple)):
        types = set(maybe_parse_many_user_types(type))
    else:
        types = set(maybe_parse_user_type(type))

    invariant_function = wrap_invariant(invariant) if invariant != PFIELD_NO_INVARIANT and callable(invariant) else invariant
    field = _PField(type=types, invariant=invariant_function, initial=initial,
                    mandatory=mandatory, factory=factory, serializer=serializer)

    _check_field_parameters(field)

    return field
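
# A minimal sketch of a field specification; the Person record is illustrative
# only, assuming PRecord is importable from pyrsistent:
from pyrsistent import PRecord

class Person(PRecord):
    name = field(type=str, mandatory=True)
    age = field(type=int, invariant=lambda a: (a >= 0, 'negative age'), initial=0)

p = Person(name='Ada')
assert p.age == 0
assert p.set(age=36).age == 36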


def _check_field_parameters(field):
    for t in field.type:
        if not isinstance(t, type) and not isinstance(t, str):
            raise TypeError('Type parameter expected, not {0}'.format(type(t)))

    if field.initial is not PFIELD_NO_INITIAL and \
            not callable(field.initial) and \
            field.type and not any(isinstance(field.initial, t) for t in field.type):
        raise TypeError('Initial has invalid type {0}'.format(type(field.initial)))

    if not callable(field.invariant):
        raise TypeError('Invariant must be callable')

    if not callable(field.factory):
        raise TypeError('Factory must be callable')

    if not callable(field.serializer):
        raise TypeError('Serializer must be callable')


class PTypeError(TypeError):
    """
    Raised when trying to assign a value with a type that doesn't match the declared type.

    Attributes:
    source_class -- The class of the record
    field -- Field name
    expected_types -- Types allowed for the field
    actual_type -- The non-matching type
    """
    def __init__(self, source_class, field, expected_types, actual_type, *args, **kwargs):
        super(PTypeError, self).__init__(*args, **kwargs)
        self.source_class = source_class
        self.field = field
        self.expected_types = expected_types
        self.actual_type = actual_type


SEQ_FIELD_TYPE_SUFFIXES = {
    CheckedPVector: "PVector",
    CheckedPSet: "PSet",
}

# Global dictionary to hold auto-generated field types: used for unpickling
_seq_field_types = {}

def _restore_seq_field_pickle(checked_class, item_type, data):
    """Unpickling function for auto-generated PVec/PSet field types."""
    type_ = _seq_field_types[checked_class, item_type]
    return _restore_pickle(type_, data)

def _types_to_names(types):
    """Convert a tuple of types to a human-readable string."""
    return "".join(get_type(typ).__name__.capitalize() for typ in types)

def _make_seq_field_type(checked_class, item_type):
    """Create a subclass of the given checked class with the given item type."""
    type_ = _seq_field_types.get((checked_class, item_type))
    if type_ is not None:
        return type_

    class TheType(checked_class):
        __type__ = item_type

        def __reduce__(self):
            return (_restore_seq_field_pickle,
                    (checked_class, item_type, list(self)))

    suffix = SEQ_FIELD_TYPE_SUFFIXES[checked_class]
    TheType.__name__ = _types_to_names(TheType._checked_types) + suffix
    _seq_field_types[checked_class, item_type] = TheType
    return TheType

def _sequence_field(checked_class, item_type, optional, initial):
    """
    Create checked field for either ``PSet`` or ``PVector``.

    :param checked_class: ``CheckedPSet`` or ``CheckedPVector``.
    :param item_type: The required type for the items in the set.
    :param optional: If true, ``None`` can be used as a value for
        this field.
    :param initial: Initial value to pass to factory.

    :return: A ``field`` containing a checked class.
    """
    TheType = _make_seq_field_type(checked_class, item_type)

    if optional:
        def factory(argument, _factory_fields=None, ignore_extra=False):
            if argument is None:
                return None
            else:
                return TheType.create(argument, _factory_fields=_factory_fields, ignore_extra=ignore_extra)
    else:
        factory = TheType.create

    return field(type=optional_type(TheType) if optional else TheType,
                 factory=factory, mandatory=True,
                 initial=factory(initial))


def pset_field(item_type, optional=False, initial=()):
    """
    Create checked ``PSet`` field.

    :param item_type: The required type for the items in the set.
    :param optional: If true, ``None`` can be used as a value for
        this field.
    :param initial: Initial value to pass to factory if no value is given
        for the field.

    :return: A ``field`` containing a ``CheckedPSet`` of the given type.
    """
    return _sequence_field(CheckedPSet, item_type, optional,
                           initial)


def pvector_field(item_type, optional=False, initial=()):
    """
    Create checked ``PVector`` field.

    :param item_type: The required type for the items in the vector.
    :param optional: If true, ``None`` can be used as a value for
        this field.
    :param initial: Initial value to pass to factory if no value is given
        for the field.

    :return: A ``field`` containing a ``CheckedPVector`` of the given type.
    """
    return _sequence_field(CheckedPVector, item_type, optional,
                           initial)
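
# A minimal sketch of a sequence field in use; the Team record is illustrative
# only, assuming PRecord is importable from pyrsistent:
from pyrsistent import PRecord

class Team(PRecord):
    members = pvector_field(str)

t = Team()                      # mandatory, but initialized to an empty checked vector
assert list(t.members) == []
t2 = t.set(members=['a', 'b'])  # the field factory coerces the plain list
assert list(t2.members) == ['a', 'b']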


_valid = lambda item: (True, "")


# Global dictionary to hold auto-generated field types: used for unpickling
_pmap_field_types = {}

def _restore_pmap_field_pickle(key_type, value_type, data):
    """Unpickling function for auto-generated PMap field types."""
    type_ = _pmap_field_types[key_type, value_type]
    return _restore_pickle(type_, data)

def _make_pmap_field_type(key_type, value_type):
    """Create a subclass of CheckedPMap with the given key and value types."""
    type_ = _pmap_field_types.get((key_type, value_type))
    if type_ is not None:
        return type_

    class TheMap(CheckedPMap):
        __key_type__ = key_type
        __value_type__ = value_type

        def __reduce__(self):
            return (_restore_pmap_field_pickle,
                    (self.__key_type__, self.__value_type__, dict(self)))

    TheMap.__name__ = "{0}To{1}PMap".format(
        _types_to_names(TheMap._checked_key_types),
        _types_to_names(TheMap._checked_value_types))
    _pmap_field_types[key_type, value_type] = TheMap
    return TheMap


def pmap_field(key_type, value_type, optional=False, invariant=PFIELD_NO_INVARIANT):
    """
    Create a checked ``PMap`` field.

    :param key_type: The required type for the keys of the map.
    :param value_type: The required type for the values of the map.
    :param optional: If true, ``None`` can be used as a value for
        this field.
    :param invariant: Pass-through to ``field``.

    :return: A ``field`` containing a ``CheckedPMap``.
    """
    TheMap = _make_pmap_field_type(key_type, value_type)

    if optional:
        def factory(argument):
            if argument is None:
                return None
            else:
                return TheMap.create(argument)
    else:
        factory = TheMap.create

    return field(mandatory=True, initial=TheMap(),
                 type=optional_type(TheMap) if optional else TheMap,
                 factory=factory, invariant=invariant)
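
# A minimal sketch of a map field in use; the Inventory record is illustrative
# only, assuming PRecord is importable from pyrsistent:
from pyrsistent import PRecord

class Inventory(PRecord):
    counts = pmap_field(str, int)

inv = Inventory(counts={'apple': 3})
assert inv.counts['apple'] == 3
assert type(inv.counts).__name__ == 'StrToIntPMap'  # the auto-generated subclass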
@@ -1,97 +0,0 @@
from functools import wraps
from pyrsistent._pmap import PMap, pmap
from pyrsistent._pset import PSet, pset
from pyrsistent._pvector import PVector, pvector

def freeze(o, strict=True):
    """
    Recursively convert simple Python containers into pyrsistent versions
    of those containers.

    - list is converted to pvector, recursively
    - dict is converted to pmap, recursively on values (but not keys)
    - set is converted to pset, but not recursively
    - tuple is converted to tuple, recursively.

    If strict == True (default):

    - freeze is called on elements of pvectors
    - freeze is called on values of pmaps

    Sets and dict keys are not recursively frozen because they do not contain
    mutable data by convention. The main exception to this rule is that
    dict keys and set elements are often instances of mutable objects that
    support hash-by-id, which this function can't convert anyway.

    >>> freeze(set([1, 2]))
    pset([1, 2])
    >>> freeze([1, {'a': 3}])
    pvector([1, pmap({'a': 3})])
    >>> freeze((1, []))
    (1, pvector([]))
    """
    typ = type(o)
    if typ is dict or (strict and isinstance(o, PMap)):
        return pmap({k: freeze(v, strict) for k, v in o.items()})
    if typ is list or (strict and isinstance(o, PVector)):
        curried_freeze = lambda x: freeze(x, strict)
        return pvector(map(curried_freeze, o))
    if typ is tuple:
        curried_freeze = lambda x: freeze(x, strict)
        return tuple(map(curried_freeze, o))
    if typ is set:
        # impossible to have anything that needs freezing inside a set or pset
        return pset(o)
    return o


def thaw(o, strict=True):
    """
    Recursively convert pyrsistent containers into simple Python containers.

    - pvector is converted to list, recursively
    - pmap is converted to dict, recursively on values (but not keys)
    - pset is converted to set, but not recursively
    - tuple is converted to tuple, recursively.

    If strict == True (the default):

    - thaw is called on elements of lists
    - thaw is called on values in dicts

    >>> from pyrsistent import s, m, v
    >>> thaw(s(1, 2))
    {1, 2}
    >>> thaw(v(1, m(a=3)))
    [1, {'a': 3}]
    >>> thaw((1, v()))
    (1, [])
    """
    typ = type(o)
    if isinstance(o, PVector) or (strict and typ is list):
        curried_thaw = lambda x: thaw(x, strict)
        return list(map(curried_thaw, o))
    if isinstance(o, PMap) or (strict and typ is dict):
        return {k: thaw(v, strict) for k, v in o.items()}
    if typ is tuple:
        curried_thaw = lambda x: thaw(x, strict)
        return tuple(map(curried_thaw, o))
    if isinstance(o, PSet):
        # impossible to thaw inside psets or sets
        return set(o)
    return o


def mutant(fn):
    """
    Convenience decorator to isolate mutation to within the decorated function (with respect
    to the input arguments).

    All arguments to the decorated function will be frozen so that they are guaranteed not to change.
    The return value is also frozen.
    """
    @wraps(fn)
    def inner_f(*args, **kwargs):
        return freeze(fn(*[freeze(e) for e in args], **dict(freeze(item) for item in kwargs.items())))

    return inner_f
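
# A minimal sketch of the mutant decorator; illustrative only:
@mutant
def add_defaults(config):
    config = thaw(config)  # the argument arrives frozen; thaw to mutate locally
    config.setdefault('retries', 3)
    return config

result = add_defaults({'host': 'example.org'})  # returns a frozen pmap
assert result == pmap({'host': 'example.org', 'retries': 3})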
@@ -1,103 +0,0 @@
import sys


def immutable(members='', name='Immutable', verbose=False):
    """
    Produces a class that either can be used standalone or as a base class for persistent classes.

    This is a thin wrapper around a named tuple.

    Constructing a type and using it to instantiate objects:

    >>> Point = immutable('x, y', name='Point')
    >>> p = Point(1, 2)
    >>> p2 = p.set(x=3)
    >>> p
    Point(x=1, y=2)
    >>> p2
    Point(x=3, y=2)

    Inheriting from a constructed type. In this case no type name needs to be supplied:

    >>> class PositivePoint(immutable('x, y')):
    ...     __slots__ = tuple()
    ...     def __new__(cls, x, y):
    ...         if x > 0 and y > 0:
    ...             return super(PositivePoint, cls).__new__(cls, x, y)
    ...         raise Exception('Coordinates must be positive!')
    ...
    >>> p = PositivePoint(1, 2)
    >>> p.set(x=3)
    PositivePoint(x=3, y=2)
    >>> p.set(y=-3)
    Traceback (most recent call last):
    Exception: Coordinates must be positive!

    The persistent class also supports the notion of frozen members. The value of a frozen member
    cannot be updated. For example it could be used to implement an ID that should remain the same
    over time. A frozen member is denoted by a trailing underscore.

    >>> Point = immutable('x, y, id_', name='Point')
    >>> p = Point(1, 2, id_=17)
    >>> p.set(x=3)
    Point(x=3, y=2, id_=17)
    >>> p.set(id_=18)
    Traceback (most recent call last):
    AttributeError: Cannot set frozen members id_
    """

    if isinstance(members, str):
        members = members.replace(',', ' ').split()

    def frozen_member_test():
        frozen_members = ["'%s'" % f for f in members if f.endswith('_')]
        if frozen_members:
            return """
        frozen_fields = fields_to_modify & set([{frozen_members}])
        if frozen_fields:
            raise AttributeError('Cannot set frozen members %s' % ', '.join(frozen_fields))
            """.format(frozen_members=', '.join(frozen_members))

        return ''

    verbose_string = ""
    if sys.version_info < (3, 7):
        # Verbose is no longer supported in Python 3.7
        verbose_string = ", verbose={verbose}".format(verbose=verbose)

    quoted_members = ', '.join("'%s'" % m for m in members)
    template = """
class {class_name}(namedtuple('ImmutableBase', [{quoted_members}]{verbose_string})):
    __slots__ = tuple()

    def __repr__(self):
        return super({class_name}, self).__repr__().replace('ImmutableBase', self.__class__.__name__)

    def set(self, **kwargs):
        if not kwargs:
            return self

        fields_to_modify = set(kwargs.keys())
        if not fields_to_modify <= {member_set}:
            raise AttributeError("'%s' is not a member" % ', '.join(fields_to_modify - {member_set}))

        {frozen_member_test}

        return self.__class__.__new__(self.__class__, *map(kwargs.pop, [{quoted_members}], self))
""".format(quoted_members=quoted_members,
           member_set="set([%s])" % quoted_members if quoted_members else 'set()',
           frozen_member_test=frozen_member_test(),
           verbose_string=verbose_string,
           class_name=name)

    if verbose:
        print(template)

    from collections import namedtuple
    namespace = dict(namedtuple=namedtuple, __name__='pyrsistent_immutable')
    try:
        exec(template, namespace)
    except SyntaxError as e:
        raise SyntaxError(str(e) + ':\n' + template) from e

    return namespace[name]
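
# A minimal sketch showing that the generated type behaves like a namedtuple;
# the Point type mirrors the docstring example:
Point = immutable('x, y', name='Point')
p = Point(1, 2)
x, y = p  # tuple unpacking works, since the class wraps a namedtuple
assert (x, y) == (1, 2)
assert p.set(y=5) == Point(1, 5)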
267
lib/spack/external/_vendoring/pyrsistent/_pbag.py
vendored
@@ -1,267 +0,0 @@
from collections.abc import Container, Iterable, Sized, Hashable
from functools import reduce
from pyrsistent._pmap import pmap


def _add_to_counters(counters, element):
    return counters.set(element, counters.get(element, 0) + 1)


class PBag(object):
    """
    A persistent bag/multiset type.

    Requires elements to be hashable, and allows duplicates, but has no
    ordering. Bags are hashable.

    Do not instantiate directly, instead use the factory functions :py:func:`b`
    or :py:func:`pbag` to create an instance.

    Some examples:

    >>> s = pbag([1, 2, 3, 1])
    >>> s2 = s.add(4)
    >>> s3 = s2.remove(1)
    >>> s
    pbag([1, 1, 2, 3])
    >>> s2
    pbag([1, 1, 2, 3, 4])
    >>> s3
    pbag([1, 2, 3, 4])
    """

    __slots__ = ('_counts', '__weakref__')

    def __init__(self, counts):
        self._counts = counts

    def add(self, element):
        """
        Add an element to the bag.

        >>> s = pbag([1])
        >>> s2 = s.add(1)
        >>> s3 = s.add(2)
        >>> s2
        pbag([1, 1])
        >>> s3
        pbag([1, 2])
        """
        return PBag(_add_to_counters(self._counts, element))

    def update(self, iterable):
        """
        Update bag with all elements in iterable.

        >>> s = pbag([1])
        >>> s.update([1, 2])
        pbag([1, 1, 2])
        """
        if iterable:
            return PBag(reduce(_add_to_counters, iterable, self._counts))

        return self

    def remove(self, element):
        """
        Remove an element from the bag.

        >>> s = pbag([1, 1, 2])
        >>> s2 = s.remove(1)
        >>> s3 = s.remove(2)
        >>> s2
        pbag([1, 2])
        >>> s3
        pbag([1, 1])
        """
        if element not in self._counts:
            raise KeyError(element)
        elif self._counts[element] == 1:
            newc = self._counts.remove(element)
        else:
            newc = self._counts.set(element, self._counts[element] - 1)
        return PBag(newc)

    def count(self, element):
        """
        Return the number of times an element appears.

        >>> pbag([]).count('non-existent')
        0
        >>> pbag([1, 1, 2]).count(1)
        2
        """
        return self._counts.get(element, 0)

    def __len__(self):
        """
        Return the length including duplicates.

        >>> len(pbag([1, 1, 2]))
        3
        """
        return sum(self._counts.itervalues())

    def __iter__(self):
        """
        Return an iterator of all elements, including duplicates.

        >>> list(pbag([1, 1, 2]))
        [1, 1, 2]
        >>> list(pbag([1, 2]))
        [1, 2]
        """
        for elt, count in self._counts.iteritems():
            for i in range(count):
                yield elt

    def __contains__(self, elt):
        """
        Check if an element is in the bag.

        >>> 1 in pbag([1, 1, 2])
        True
        >>> 0 in pbag([1, 2])
        False
        """
        return elt in self._counts

    def __repr__(self):
        return "pbag({0})".format(list(self))

    def __eq__(self, other):
        """
        Check if two bags are equivalent, honoring the number of duplicates,
        and ignoring insertion order.

        >>> pbag([1, 1, 2]) == pbag([1, 2])
        False
        >>> pbag([2, 1, 0]) == pbag([0, 1, 2])
        True
        """
        if type(other) is not PBag:
            raise TypeError("Can only compare PBag with PBags")
        return self._counts == other._counts

    def __lt__(self, other):
        raise TypeError('PBags are not orderable')

    __le__ = __lt__
    __gt__ = __lt__
    __ge__ = __lt__

    # Multiset-style operations similar to collections.Counter

    def __add__(self, other):
        """
        Combine elements from two PBags.

        >>> pbag([1, 2, 2]) + pbag([2, 3, 3])
        pbag([1, 2, 2, 2, 3, 3])
        """
        if not isinstance(other, PBag):
            return NotImplemented
        result = self._counts.evolver()
        for elem, other_count in other._counts.iteritems():
            result[elem] = self.count(elem) + other_count
        return PBag(result.persistent())

    def __sub__(self, other):
        """
        Remove elements from one PBag that are present in another.

        >>> pbag([1, 2, 2, 2, 3]) - pbag([2, 3, 3, 4])
        pbag([1, 2, 2])
        """
        if not isinstance(other, PBag):
            return NotImplemented
        result = self._counts.evolver()
        for elem, other_count in other._counts.iteritems():
            newcount = self.count(elem) - other_count
            if newcount > 0:
                result[elem] = newcount
            elif elem in self:
                result.remove(elem)
        return PBag(result.persistent())

    def __or__(self, other):
        """
        Union: Keep elements that are present in either of two PBags.

        >>> pbag([1, 2, 2, 2]) | pbag([2, 3, 3])
        pbag([1, 2, 2, 2, 3, 3])
        """
        if not isinstance(other, PBag):
            return NotImplemented
        result = self._counts.evolver()
        for elem, other_count in other._counts.iteritems():
            count = self.count(elem)
            newcount = max(count, other_count)
            result[elem] = newcount
        return PBag(result.persistent())

    def __and__(self, other):
        """
        Intersection: Only keep elements that are present in both PBags.

        >>> pbag([1, 2, 2, 2]) & pbag([2, 3, 3])
        pbag([2])
        """
        if not isinstance(other, PBag):
            return NotImplemented
        result = pmap().evolver()
        for elem, count in self._counts.iteritems():
            newcount = min(count, other.count(elem))
            if newcount > 0:
                result[elem] = newcount
        return PBag(result.persistent())

    def __hash__(self):
        """
        Hash based on value of elements.

        >>> m = pmap({pbag([1, 2]): "it's here!"})
        >>> m[pbag([2, 1])]
        "it's here!"
        >>> pbag([1, 1, 2]) in m
        False
        """
        return hash(self._counts)


Container.register(PBag)
Iterable.register(PBag)
Sized.register(PBag)
Hashable.register(PBag)


def b(*elements):
    """
    Construct a persistent bag.

    Takes an arbitrary number of arguments to insert into the new persistent
    bag.

    >>> b(1, 2, 3, 2)
    pbag([1, 2, 2, 3])
    """
    return pbag(elements)


def pbag(elements):
    """
    Convert an iterable to a persistent bag.

    Takes an iterable with elements to insert.

    >>> pbag([1, 2, 3, 2])
    pbag([1, 2, 2, 3])
    """
    if not elements:
        return _EMPTY_PBAG
    return PBag(reduce(_add_to_counters, elements, pmap()))


_EMPTY_PBAG = PBag(pmap())
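
# A minimal sketch of the Counter-style operators above; illustrative only:
bag1 = pbag([1, 2, 2])
bag2 = pbag([2, 3])
assert bag1 + bag2 == pbag([1, 2, 2, 2, 3])
assert bag1 - bag2 == pbag([1, 2])
assert bag1 | bag2 == pbag([1, 2, 2, 3])
assert bag1 & bag2 == pbag([2])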
262
lib/spack/external/_vendoring/pyrsistent/_pclass.py
vendored
@@ -1,262 +0,0 @@
from pyrsistent._checked_types import (InvariantException, CheckedType, _restore_pickle, store_invariants)
from pyrsistent._field_common import (
    set_fields, check_type, is_field_ignore_extra_complaint, PFIELD_NO_INITIAL, serialize, check_global_invariants
)
from pyrsistent._transformations import transform


def _is_pclass(bases):
    return len(bases) == 1 and bases[0] == CheckedType


class PClassMeta(type):
    def __new__(mcs, name, bases, dct):
        set_fields(dct, bases, name='_pclass_fields')
        store_invariants(dct, bases, '_pclass_invariants', '__invariant__')
        dct['__slots__'] = ('_pclass_frozen',) + tuple(key for key in dct['_pclass_fields'])

        # There must only be one __weakref__ entry in the inheritance hierarchy,
        # let's put it on the top level class.
        if _is_pclass(bases):
            dct['__slots__'] += ('__weakref__',)

        return super(PClassMeta, mcs).__new__(mcs, name, bases, dct)

_MISSING_VALUE = object()


def _check_and_set_attr(cls, field, name, value, result, invariant_errors):
    check_type(cls, field, name, value)
    is_ok, error_code = field.invariant(value)
    if not is_ok:
        invariant_errors.append(error_code)
    else:
        setattr(result, name, value)


class PClass(CheckedType, metaclass=PClassMeta):
    """
    A PClass is a python class with a fixed set of specified fields. PClasses are declared as python classes inheriting
    from PClass. It is defined the same way that PRecords are and behaves like a PRecord in all aspects except that it
    is not a PMap and hence not a collection but rather a plain Python object.

    More documentation and examples of PClass usage are available at https://github.com/tobgu/pyrsistent
    """
    def __new__(cls, **kwargs):  # Support *args?
        result = super(PClass, cls).__new__(cls)
        factory_fields = kwargs.pop('_factory_fields', None)
        ignore_extra = kwargs.pop('ignore_extra', None)
        missing_fields = []
        invariant_errors = []
        for name, field in cls._pclass_fields.items():
            if name in kwargs:
                if factory_fields is None or name in factory_fields:
                    if is_field_ignore_extra_complaint(PClass, field, ignore_extra):
                        value = field.factory(kwargs[name], ignore_extra=ignore_extra)
                    else:
                        value = field.factory(kwargs[name])
                else:
                    value = kwargs[name]
                _check_and_set_attr(cls, field, name, value, result, invariant_errors)
                del kwargs[name]
            elif field.initial is not PFIELD_NO_INITIAL:
                initial = field.initial() if callable(field.initial) else field.initial
                _check_and_set_attr(
                    cls, field, name, initial, result, invariant_errors)
            elif field.mandatory:
                missing_fields.append('{0}.{1}'.format(cls.__name__, name))

        if invariant_errors or missing_fields:
            raise InvariantException(tuple(invariant_errors), tuple(missing_fields), 'Field invariant failed')

        if kwargs:
            raise AttributeError("'{0}' are not among the specified fields for {1}".format(
                ', '.join(kwargs), cls.__name__))

        check_global_invariants(result, cls._pclass_invariants)

        result._pclass_frozen = True
        return result

    def set(self, *args, **kwargs):
        """
        Set a field in the instance. Returns a new instance with the updated value. The original instance remains
        unmodified. Accepts key-value pairs or a single string representing the field name and a value.

        >>> from pyrsistent import PClass, field
        >>> class AClass(PClass):
        ...     x = field()
        ...
        >>> a = AClass(x=1)
        >>> a2 = a.set(x=2)
        >>> a3 = a.set('x', 3)
        >>> a
        AClass(x=1)
        >>> a2
        AClass(x=2)
        >>> a3
        AClass(x=3)
        """
        if args:
            kwargs[args[0]] = args[1]

        factory_fields = set(kwargs)

        for key in self._pclass_fields:
            if key not in kwargs:
                value = getattr(self, key, _MISSING_VALUE)
                if value is not _MISSING_VALUE:
                    kwargs[key] = value

        return self.__class__(_factory_fields=factory_fields, **kwargs)

    @classmethod
    def create(cls, kwargs, _factory_fields=None, ignore_extra=False):
        """
        Factory method. Will create a new PClass of the current type and assign the values
        specified in kwargs.

        :param ignore_extra: A boolean which when set to True will ignore any keys which appear in kwargs that are not
            in the set of fields on the PClass.
        """
        if isinstance(kwargs, cls):
            return kwargs

        if ignore_extra:
            kwargs = {k: kwargs[k] for k in cls._pclass_fields if k in kwargs}

        return cls(_factory_fields=_factory_fields, ignore_extra=ignore_extra, **kwargs)

    def serialize(self, format=None):
        """
        Serialize the current PClass using custom serializer functions for fields where
        such have been supplied.
        """
        result = {}
        for name in self._pclass_fields:
            value = getattr(self, name, _MISSING_VALUE)
            if value is not _MISSING_VALUE:
                result[name] = serialize(self._pclass_fields[name].serializer, format, value)

        return result

    def transform(self, *transformations):
        """
        Apply transformations to the current PClass. For more details on transformations see
        the documentation for PMap. Transformations on PClasses do not support key matching
        since the PClass is not a collection. Apart from that the transformations available
        for other persistent types work as expected.
        """
        return transform(self, transformations)

    def __eq__(self, other):
        if isinstance(other, self.__class__):
            for name in self._pclass_fields:
                if getattr(self, name, _MISSING_VALUE) != getattr(other, name, _MISSING_VALUE):
                    return False

            return True

        return NotImplemented

    def __ne__(self, other):
        return not self == other

    def __hash__(self):
        # May want to optimize this by caching the hash somehow
        return hash(tuple((key, getattr(self, key, _MISSING_VALUE)) for key in self._pclass_fields))

    def __setattr__(self, key, value):
        if getattr(self, '_pclass_frozen', False):
            raise AttributeError("Can't set attribute, key={0}, value={1}".format(key, value))

        super(PClass, self).__setattr__(key, value)

    def __delattr__(self, key):
        raise AttributeError("Can't delete attribute, key={0}, use remove()".format(key))

    def _to_dict(self):
        result = {}
        for key in self._pclass_fields:
            value = getattr(self, key, _MISSING_VALUE)
            if value is not _MISSING_VALUE:
                result[key] = value

        return result

    def __repr__(self):
        return "{0}({1})".format(self.__class__.__name__,
                                 ', '.join('{0}={1}'.format(k, repr(v)) for k, v in self._to_dict().items()))

    def __reduce__(self):
        # Pickling support
        data = dict((key, getattr(self, key)) for key in self._pclass_fields if hasattr(self, key))
        return _restore_pickle, (self.__class__, data,)

    def evolver(self):
        """
        Returns an evolver for this object.
        """
        return _PClassEvolver(self, self._to_dict())

    def remove(self, name):
        """
        Remove attribute given by name from the current instance. Raises AttributeError if the
        attribute doesn't exist.
        """
        evolver = self.evolver()
        del evolver[name]
        return evolver.persistent()


class _PClassEvolver(object):
    __slots__ = ('_pclass_evolver_original', '_pclass_evolver_data', '_pclass_evolver_data_is_dirty', '_factory_fields')

    def __init__(self, original, initial_dict):
        self._pclass_evolver_original = original
        self._pclass_evolver_data = initial_dict
        self._pclass_evolver_data_is_dirty = False
        self._factory_fields = set()

    def __getitem__(self, item):
        return self._pclass_evolver_data[item]

    def set(self, key, value):
        if self._pclass_evolver_data.get(key, _MISSING_VALUE) is not value:
            self._pclass_evolver_data[key] = value
            self._factory_fields.add(key)
            self._pclass_evolver_data_is_dirty = True

        return self

    def __setitem__(self, key, value):
        self.set(key, value)

    def remove(self, item):
        if item in self._pclass_evolver_data:
            del self._pclass_evolver_data[item]
            self._factory_fields.discard(item)
            self._pclass_evolver_data_is_dirty = True
            return self

        raise AttributeError(item)

    def __delitem__(self, item):
        self.remove(item)

    def persistent(self):
        if self._pclass_evolver_data_is_dirty:
            return self._pclass_evolver_original.__class__(_factory_fields=self._factory_fields,
                                                           **self._pclass_evolver_data)

        return self._pclass_evolver_original

    def __setattr__(self, key, value):
        if key not in self.__slots__:
            self.set(key, value)
        else:
            super(_PClassEvolver, self).__setattr__(key, value)

    def __getattr__(self, item):
        return self[item]
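
# A minimal usage sketch of PClass and its evolver; AClass mirrors the
# docstring example, assuming field is importable from pyrsistent:
from pyrsistent import field

class AClass(PClass):
    x = field(type=int)

a = AClass(x=1)
e = a.evolver()
e.x = 2  # attribute assignment is routed through _PClassEvolver.set
assert e.persistent() == AClass(x=2)
assert a.x == 1  # the original instance is untouched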
376
lib/spack/external/_vendoring/pyrsistent/_pdeque.py
vendored
@@ -1,376 +0,0 @@
|
||||
from collections.abc import Sequence, Hashable
from itertools import islice, chain
from numbers import Integral
from pyrsistent._plist import plist


class PDeque(object):
    """
    Persistent double ended queue (deque). Allows quick appends and pops in both ends. Implemented
    using two persistent lists.

    A maximum length can be specified to create a bounded queue.

    Fully supports the Sequence and Hashable protocols including indexing and slicing but
    if you need fast random access go for the PVector instead.

    Do not instantiate directly, instead use the factory functions :py:func:`dq` or :py:func:`pdeque` to
    create an instance.

    Some examples:

    >>> x = pdeque([1, 2, 3])
    >>> x.left
    1
    >>> x.right
    3
    >>> x[0] == x.left
    True
    >>> x[-1] == x.right
    True
    >>> x.pop()
    pdeque([1, 2])
    >>> x.pop() == x[:-1]
    True
    >>> x.popleft()
    pdeque([2, 3])
    >>> x.append(4)
    pdeque([1, 2, 3, 4])
    >>> x.appendleft(4)
    pdeque([4, 1, 2, 3])

    >>> y = pdeque([1, 2, 3], maxlen=3)
    >>> y.append(4)
    pdeque([2, 3, 4], maxlen=3)
    >>> y.appendleft(4)
    pdeque([4, 1, 2], maxlen=3)
    """
    __slots__ = ('_left_list', '_right_list', '_length', '_maxlen', '__weakref__')

    def __new__(cls, left_list, right_list, length, maxlen=None):
        instance = super(PDeque, cls).__new__(cls)
        instance._left_list = left_list
        instance._right_list = right_list
        instance._length = length

        if maxlen is not None:
            if not isinstance(maxlen, Integral):
                raise TypeError('An integer is required as maxlen')

            if maxlen < 0:
                raise ValueError("maxlen must be non-negative")

        instance._maxlen = maxlen
        return instance

    @property
    def right(self):
        """
        Rightmost element in the deque.
        """
        return PDeque._tip_from_lists(self._right_list, self._left_list)

    @property
    def left(self):
        """
        Leftmost element in the deque.
        """
        return PDeque._tip_from_lists(self._left_list, self._right_list)

    @staticmethod
    def _tip_from_lists(primary_list, secondary_list):
        if primary_list:
            return primary_list.first

        if secondary_list:
            return secondary_list[-1]

        raise IndexError('No elements in empty deque')

    def __iter__(self):
        return chain(self._left_list, self._right_list.reverse())

    def __repr__(self):
        return "pdeque({0}{1})".format(list(self),
                                       ', maxlen={0}'.format(self._maxlen) if self._maxlen is not None else '')
    __str__ = __repr__

    @property
    def maxlen(self):
        """
        Maximum length of the queue.
        """
        return self._maxlen

    def pop(self, count=1):
        """
        Return new deque with rightmost element removed. Popping the empty queue
        will return the empty queue. An optional count can be given to indicate the
        number of elements to pop. Popping with a negative index is the same as
        popleft. Executes in amortized O(k) where k is the number of elements to pop.

        >>> pdeque([1, 2]).pop()
        pdeque([1])
        >>> pdeque([1, 2]).pop(2)
        pdeque([])
        >>> pdeque([1, 2]).pop(-1)
        pdeque([2])
        """
        if count < 0:
            return self.popleft(-count)

        new_right_list, new_left_list = PDeque._pop_lists(self._right_list, self._left_list, count)
        return PDeque(new_left_list, new_right_list, max(self._length - count, 0), self._maxlen)

    def popleft(self, count=1):
        """
        Return new deque with leftmost element removed. Otherwise functionally
        equivalent to pop().

        >>> pdeque([1, 2]).popleft()
        pdeque([2])
        """
        if count < 0:
            return self.pop(-count)

        new_left_list, new_right_list = PDeque._pop_lists(self._left_list, self._right_list, count)
        return PDeque(new_left_list, new_right_list, max(self._length - count, 0), self._maxlen)

    @staticmethod
    def _pop_lists(primary_list, secondary_list, count):
        new_primary_list = primary_list
        new_secondary_list = secondary_list

        while count > 0 and (new_primary_list or new_secondary_list):
            count -= 1
            if new_primary_list.rest:
                new_primary_list = new_primary_list.rest
            elif new_primary_list:
                new_primary_list = new_secondary_list.reverse()
                new_secondary_list = plist()
            else:
                new_primary_list = new_secondary_list.reverse().rest
                new_secondary_list = plist()

        return new_primary_list, new_secondary_list

    def _is_empty(self):
        return not self._left_list and not self._right_list

    def __lt__(self, other):
        if not isinstance(other, PDeque):
            return NotImplemented

        return tuple(self) < tuple(other)

    def __eq__(self, other):
        if not isinstance(other, PDeque):
            return NotImplemented

        if tuple(self) == tuple(other):
            # Sanity check of the length value since it is redundant (there for performance)
            assert len(self) == len(other)
            return True

        return False

    def __hash__(self):
        return hash(tuple(self))

    def __len__(self):
        return self._length

    def append(self, elem):
        """
        Return new deque with elem as the rightmost element.

        >>> pdeque([1, 2]).append(3)
        pdeque([1, 2, 3])
        """
        new_left_list, new_right_list, new_length = self._append(self._left_list, self._right_list, elem)
        return PDeque(new_left_list, new_right_list, new_length, self._maxlen)

    def appendleft(self, elem):
        """
        Return new deque with elem as the leftmost element.

        >>> pdeque([1, 2]).appendleft(3)
        pdeque([3, 1, 2])
        """
        new_right_list, new_left_list, new_length = self._append(self._right_list, self._left_list, elem)
        return PDeque(new_left_list, new_right_list, new_length, self._maxlen)

    def _append(self, primary_list, secondary_list, elem):
        if self._maxlen is not None and self._length == self._maxlen:
            if self._maxlen == 0:
                return primary_list, secondary_list, 0
            new_primary_list, new_secondary_list = PDeque._pop_lists(primary_list, secondary_list, 1)
            return new_primary_list, new_secondary_list.cons(elem), self._length

        return primary_list, secondary_list.cons(elem), self._length + 1

    @staticmethod
    def _extend_list(the_list, iterable):
        count = 0
        for elem in iterable:
            the_list = the_list.cons(elem)
            count += 1

        return the_list, count

    def _extend(self, primary_list, secondary_list, iterable):
        new_primary_list, extend_count = PDeque._extend_list(primary_list, iterable)
        new_secondary_list = secondary_list
        current_len = self._length + extend_count
        if self._maxlen is not None and current_len > self._maxlen:
            pop_len = current_len - self._maxlen
            new_secondary_list, new_primary_list = PDeque._pop_lists(new_secondary_list, new_primary_list, pop_len)
            extend_count -= pop_len

        return new_primary_list, new_secondary_list, extend_count

    def extend(self, iterable):
        """
        Return new deque with all elements of iterable appended to the right.

        >>> pdeque([1, 2]).extend([3, 4])
        pdeque([1, 2, 3, 4])
        """
        new_right_list, new_left_list, extend_count = self._extend(self._right_list, self._left_list, iterable)
        return PDeque(new_left_list, new_right_list, self._length + extend_count, self._maxlen)

    def extendleft(self, iterable):
        """
        Return new deque with all elements of iterable appended to the left.

        NB! The elements will be inserted in reverse order compared to the order in the iterable.

        >>> pdeque([1, 2]).extendleft([3, 4])
        pdeque([4, 3, 1, 2])
        """
        new_left_list, new_right_list, extend_count = self._extend(self._left_list, self._right_list, iterable)
        return PDeque(new_left_list, new_right_list, self._length + extend_count, self._maxlen)

    def count(self, elem):
        """
        Return the number of elements equal to elem present in the queue

        >>> pdeque([1, 2, 1]).count(1)
        2
        """
        return self._left_list.count(elem) + self._right_list.count(elem)

    def remove(self, elem):
        """
        Return new deque with first element from left equal to elem removed. If no such element is found
        a ValueError is raised.

        >>> pdeque([2, 1, 2]).remove(2)
        pdeque([1, 2])
        """
        try:
            return PDeque(self._left_list.remove(elem), self._right_list, self._length - 1)
        except ValueError:
            # Value not found in left list, try the right list
            try:
                # This is severely inefficient with a double reverse, should perhaps implement a remove_last()?
                return PDeque(self._left_list,
                              self._right_list.reverse().remove(elem).reverse(), self._length - 1)
            except ValueError as e:
                raise ValueError('{0} not found in PDeque'.format(elem)) from e

    def reverse(self):
        """
        Return reversed deque.

        >>> pdeque([1, 2, 3]).reverse()
        pdeque([3, 2, 1])

        Also supports the standard python reverse function.

        >>> reversed(pdeque([1, 2, 3]))
        pdeque([3, 2, 1])
        """
        return PDeque(self._right_list, self._left_list, self._length)
    __reversed__ = reverse

    def rotate(self, steps):
        """
        Return deque with elements rotated steps steps.

        >>> x = pdeque([1, 2, 3])
        >>> x.rotate(1)
        pdeque([3, 1, 2])
        >>> x.rotate(-2)
        pdeque([3, 1, 2])
        """
        popped_deque = self.pop(steps)
        if steps >= 0:
            return popped_deque.extendleft(islice(self.reverse(), steps))

        return popped_deque.extend(islice(self, -steps))

    def __reduce__(self):
        # Pickling support
        return pdeque, (list(self), self._maxlen)

    def __getitem__(self, index):
        if isinstance(index, slice):
            if index.step is not None and index.step != 1:
                # Too difficult, no structural sharing possible
                return pdeque(tuple(self)[index], maxlen=self._maxlen)

            result = self
            if index.start is not None:
                result = result.popleft(index.start % self._length)
            if index.stop is not None:
                result = result.pop(self._length - (index.stop % self._length))

            return result

        if not isinstance(index, Integral):
            raise TypeError("'%s' object cannot be interpreted as an index" % type(index).__name__)

        if index >= 0:
            return self.popleft(index).left

        shifted = len(self) + index
        if shifted < 0:
            raise IndexError(
                "pdeque index {0} out of range {1}".format(index, len(self)),
            )
        return self.popleft(shifted).left

    index = Sequence.index

Sequence.register(PDeque)
Hashable.register(PDeque)


def pdeque(iterable=(), maxlen=None):
    """
    Return deque containing the elements of iterable. If maxlen is specified then
    len(iterable) - maxlen elements are discarded from the left if len(iterable) > maxlen.

    >>> pdeque([1, 2, 3])
    pdeque([1, 2, 3])
    >>> pdeque([1, 2, 3, 4], maxlen=2)
    pdeque([3, 4], maxlen=2)
    """
    t = tuple(iterable)
    if maxlen is not None:
        t = t[-maxlen:]
    length = len(t)
    pivot = int(length / 2)
    left = plist(t[:pivot])
    right = plist(t[pivot:], reverse=True)
    return PDeque(left, right, length, maxlen)


def dq(*elements):
    """
    Return deque containing all arguments.

    >>> dq(1, 2, 3)
    pdeque([1, 2, 3])
    """
    return pdeque(elements)
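Since both ends of the PDeque above are persistent lists, every operation returns a new deque and never mutates the receiver, unlike collections.deque. A short sketch of the difference, assuming pyrsistent is available from PyPI:

# Sketch only: contrasts the persistent deque with the mutating stdlib deque.
from collections import deque
from pyrsistent import pdeque

m = deque([1, 2, 3], maxlen=3)
m.append(4)                    # mutates in place: deque([2, 3, 4])

p = pdeque([1, 2, 3], maxlen=3)
q = p.append(4)                # returns a new deque; p is unchanged
assert list(p) == [1, 2, 3]
assert list(q) == [2, 3, 4]
assert q.rotate(1) == pdeque([4, 2, 3])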
313
lib/spack/external/_vendoring/pyrsistent/_plist.py
vendored
@@ -1,313 +0,0 @@
from collections.abc import Sequence, Hashable
from numbers import Integral
from functools import reduce


class _PListBuilder(object):
    """
    Helper class to allow construction of a list without
    having to reverse it in the end.
    """
    __slots__ = ('_head', '_tail')

    def __init__(self):
        self._head = _EMPTY_PLIST
        self._tail = _EMPTY_PLIST

    def _append(self, elem, constructor):
        if not self._tail:
            self._head = constructor(elem)
            self._tail = self._head
        else:
            self._tail.rest = constructor(elem)
            self._tail = self._tail.rest

        return self._head

    def append_elem(self, elem):
        return self._append(elem, lambda e: PList(e, _EMPTY_PLIST))

    def append_plist(self, pl):
        return self._append(pl, lambda l: l)

    def build(self):
        return self._head


class _PListBase(object):
    __slots__ = ('__weakref__',)

    # Selected implementations can be taken straight from the Sequence
    # class, others are less suitable. Especially those that work with
    # index lookups.
    count = Sequence.count
    index = Sequence.index

    def __reduce__(self):
        # Pickling support
        return plist, (list(self),)

    def __len__(self):
        """
        Return the length of the list, computed by traversing it.

        This is obviously O(n) but with the current implementation
        where a list is also a node the overhead of storing the length
        in every node would be quite significant.
        """
        return sum(1 for _ in self)

    def __repr__(self):
        return "plist({0})".format(list(self))
    __str__ = __repr__

    def cons(self, elem):
        """
        Return a new list with elem inserted as new head.

        >>> plist([1, 2]).cons(3)
        plist([3, 1, 2])
        """
        return PList(elem, self)

    def mcons(self, iterable):
        """
        Return a new list with all elements of iterable repeatedly cons:ed to the current list.
        NB! The elements will be inserted in the reverse order of the iterable.
        Runs in O(len(iterable)).

        >>> plist([1, 2]).mcons([3, 4])
        plist([4, 3, 1, 2])
        """
        head = self
        for elem in iterable:
            head = head.cons(elem)

        return head

    def reverse(self):
        """
        Return a reversed version of list. Runs in O(n) where n is the length of the list.

        >>> plist([1, 2, 3]).reverse()
        plist([3, 2, 1])

        Also supports the standard reversed function.

        >>> reversed(plist([1, 2, 3]))
        plist([3, 2, 1])
        """
        result = plist()
        head = self
        while head:
            result = result.cons(head.first)
            head = head.rest

        return result
    __reversed__ = reverse

    def split(self, index):
        """
        Split the list at the position specified by index. Returns a tuple containing the
        list up until index and the list after the index. Runs in O(index).

        >>> plist([1, 2, 3, 4]).split(2)
        (plist([1, 2]), plist([3, 4]))
        """
        lb = _PListBuilder()
        right_list = self
        i = 0
        while right_list and i < index:
            lb.append_elem(right_list.first)
            right_list = right_list.rest
            i += 1

        if not right_list:
            # Just a small optimization in the cases where no split occurred
            return self, _EMPTY_PLIST

        return lb.build(), right_list

    def __iter__(self):
        li = self
        while li:
            yield li.first
            li = li.rest

    def __lt__(self, other):
        if not isinstance(other, _PListBase):
            return NotImplemented

        return tuple(self) < tuple(other)

    def __eq__(self, other):
        """
        Traverses the lists, checking equality of elements.

        This is an O(n) operation, but preserves the standard semantics of list equality.
        """
        if not isinstance(other, _PListBase):
            return NotImplemented

        self_head = self
        other_head = other
        while self_head and other_head:
            if not self_head.first == other_head.first:
                return False
            self_head = self_head.rest
            other_head = other_head.rest

        return not self_head and not other_head

    def __getitem__(self, index):
        # Don't use this data structure if you plan to do a lot of indexing, it is
        # very inefficient! Use a PVector instead!

        if isinstance(index, slice):
            if index.start is not None and index.stop is None and (index.step is None or index.step == 1):
                return self._drop(index.start)

            # Take the easy way out for all other slicing cases, not much structural reuse possible anyway
            return plist(tuple(self)[index])

        if not isinstance(index, Integral):
            raise TypeError("'%s' object cannot be interpreted as an index" % type(index).__name__)

        if index < 0:
            # NB: O(n)!
            index += len(self)

        try:
            return self._drop(index).first
        except AttributeError as e:
            raise IndexError("PList index out of range") from e

    def _drop(self, count):
        if count < 0:
            raise IndexError("PList index out of range")

        head = self
        while count > 0:
            head = head.rest
            count -= 1

        return head

    def __hash__(self):
        return hash(tuple(self))

    def remove(self, elem):
        """
        Return new list with first element equal to elem removed. O(k) where k is the position
        of the element that is removed.

        Raises ValueError if no matching element is found.

        >>> plist([1, 2, 1]).remove(1)
        plist([2, 1])
        """

        builder = _PListBuilder()
        head = self
        while head:
            if head.first == elem:
                return builder.append_plist(head.rest)

            builder.append_elem(head.first)
            head = head.rest

        raise ValueError('{0} not found in PList'.format(elem))


class PList(_PListBase):
    """
    Classical Lisp style singly linked list. Adding elements to the head using cons is O(1).
    Element access is O(k) where k is the position of the element in the list. Taking the
    length of the list is O(n).

    Fully supports the Sequence and Hashable protocols including indexing and slicing but
    if you need fast random access go for the PVector instead.

    Do not instantiate directly, instead use the factory functions :py:func:`l` or :py:func:`plist` to
    create an instance.

    Some examples:

    >>> x = plist([1, 2])
    >>> y = x.cons(3)
    >>> x
    plist([1, 2])
    >>> y
    plist([3, 1, 2])
    >>> y.first
    3
    >>> y.rest == x
    True
    >>> y[:2]
    plist([3, 1])
    """
    __slots__ = ('first', 'rest')

    def __new__(cls, first, rest):
        instance = super(PList, cls).__new__(cls)
        instance.first = first
        instance.rest = rest
        return instance

    def __bool__(self):
        return True
    __nonzero__ = __bool__


Sequence.register(PList)
Hashable.register(PList)


class _EmptyPList(_PListBase):
    __slots__ = ()

    def __bool__(self):
        return False
    __nonzero__ = __bool__

    @property
    def first(self):
        raise AttributeError("Empty PList has no first")

    @property
    def rest(self):
        return self


Sequence.register(_EmptyPList)
Hashable.register(_EmptyPList)

_EMPTY_PLIST = _EmptyPList()


def plist(iterable=(), reverse=False):
    """
    Creates a new persistent list containing all elements of iterable.
    Optional parameter reverse specifies if the elements should be inserted in
    reverse order or not.

    >>> plist([1, 2, 3])
    plist([1, 2, 3])
    >>> plist([1, 2, 3], reverse=True)
    plist([3, 2, 1])
    """
    if not reverse:
        iterable = list(iterable)
        iterable.reverse()

    return reduce(lambda pl, elem: pl.cons(elem), iterable, _EMPTY_PLIST)


def l(*elements):
    """
    Creates a new persistent list containing all arguments.

    >>> l(1, 2, 3)
    plist([1, 2, 3])
    """
    return plist(elements)
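Because cons only allocates a single node in front of an existing list, every tail is shared rather than copied. A small sketch that makes the sharing visible, assuming pyrsistent from PyPI:

# Sketch only: demonstrates O(1) cons and structural sharing.
from pyrsistent import plist

x = plist([1, 2])
y = x.cons(3)            # one new node whose rest points at x
assert y.rest is x       # the tail is the original list object, not a copy

left, right = plist([1, 2, 3, 4]).split(2)
assert (list(left), list(right)) == ([1, 2], [3, 4])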
167
lib/spack/external/_vendoring/pyrsistent/_precord.py
vendored
@@ -1,167 +0,0 @@
from pyrsistent._checked_types import CheckedType, _restore_pickle, InvariantException, store_invariants
from pyrsistent._field_common import (
    set_fields, check_type, is_field_ignore_extra_complaint, PFIELD_NO_INITIAL, serialize, check_global_invariants
)
from pyrsistent._pmap import PMap, pmap


class _PRecordMeta(type):
    def __new__(mcs, name, bases, dct):
        set_fields(dct, bases, name='_precord_fields')
        store_invariants(dct, bases, '_precord_invariants', '__invariant__')

        dct['_precord_mandatory_fields'] = \
            set(name for name, field in dct['_precord_fields'].items() if field.mandatory)

        dct['_precord_initial_values'] = \
            dict((k, field.initial) for k, field in dct['_precord_fields'].items() if field.initial is not PFIELD_NO_INITIAL)


        dct['__slots__'] = ()

        return super(_PRecordMeta, mcs).__new__(mcs, name, bases, dct)


class PRecord(PMap, CheckedType, metaclass=_PRecordMeta):
    """
    A PRecord is a PMap with a fixed set of specified fields. Records are declared as python classes inheriting
    from PRecord. Because it is a PMap it has full support for all Mapping methods such as iteration and element
    access using subscript notation.

    More documentation and examples of PRecord usage are available at https://github.com/tobgu/pyrsistent
    """
    def __new__(cls, **kwargs):
        # Hack total! If these two special attributes exist that means we can create
        # ourselves. Otherwise we need to go through the Evolver to create the structures
        # for us.
        if '_precord_size' in kwargs and '_precord_buckets' in kwargs:
            return super(PRecord, cls).__new__(cls, kwargs['_precord_size'], kwargs['_precord_buckets'])

        factory_fields = kwargs.pop('_factory_fields', None)
        ignore_extra = kwargs.pop('_ignore_extra', False)

        initial_values = kwargs
        if cls._precord_initial_values:
            initial_values = dict((k, v() if callable(v) else v)
                                  for k, v in cls._precord_initial_values.items())
            initial_values.update(kwargs)

        e = _PRecordEvolver(cls, pmap(pre_size=len(cls._precord_fields)), _factory_fields=factory_fields, _ignore_extra=ignore_extra)
        for k, v in initial_values.items():
            e[k] = v

        return e.persistent()

    def set(self, *args, **kwargs):
        """
        Set a field in the record. This set function differs slightly from that in the PMap
        class. First of all it accepts key-value pairs. Second it accepts multiple key-value
        pairs to perform one, atomic, update of multiple fields.
        """

        # The PRecord set() can accept kwargs since all fields that have been declared are
        # valid python identifiers. Also allow multiple fields to be set in one operation.
        if args:
            return super(PRecord, self).set(args[0], args[1])

        return self.update(kwargs)

    def evolver(self):
        """
        Returns an evolver of this object.
        """
        return _PRecordEvolver(self.__class__, self)

    def __repr__(self):
        return "{0}({1})".format(self.__class__.__name__,
                                 ', '.join('{0}={1}'.format(k, repr(v)) for k, v in self.items()))

    @classmethod
    def create(cls, kwargs, _factory_fields=None, ignore_extra=False):
        """
        Factory method. Will create a new PRecord of the current type and assign the values
        specified in kwargs.

        :param ignore_extra: A boolean which when set to True will ignore any keys which appear in kwargs that are not
                             in the set of fields on the PRecord.
        """
        if isinstance(kwargs, cls):
            return kwargs

        if ignore_extra:
            kwargs = {k: kwargs[k] for k in cls._precord_fields if k in kwargs}

        return cls(_factory_fields=_factory_fields, _ignore_extra=ignore_extra, **kwargs)

    def __reduce__(self):
        # Pickling support
        return _restore_pickle, (self.__class__, dict(self),)

    def serialize(self, format=None):
        """
        Serialize the current PRecord using custom serializer functions for fields where
        such have been supplied.
        """
        return dict((k, serialize(self._precord_fields[k].serializer, format, v)) for k, v in self.items())


class _PRecordEvolver(PMap._Evolver):
    __slots__ = ('_destination_cls', '_invariant_error_codes', '_missing_fields', '_factory_fields', '_ignore_extra')

    def __init__(self, cls, original_pmap, _factory_fields=None, _ignore_extra=False):
        super(_PRecordEvolver, self).__init__(original_pmap)
        self._destination_cls = cls
        self._invariant_error_codes = []
        self._missing_fields = []
        self._factory_fields = _factory_fields
        self._ignore_extra = _ignore_extra

    def __setitem__(self, key, original_value):
        self.set(key, original_value)

    def set(self, key, original_value):
        field = self._destination_cls._precord_fields.get(key)
        if field:
            if self._factory_fields is None or field in self._factory_fields:
                try:
                    if is_field_ignore_extra_complaint(PRecord, field, self._ignore_extra):
                        value = field.factory(original_value, ignore_extra=self._ignore_extra)
                    else:
                        value = field.factory(original_value)
                except InvariantException as e:
                    self._invariant_error_codes += e.invariant_errors
                    self._missing_fields += e.missing_fields
                    return self
            else:
                value = original_value

            check_type(self._destination_cls, field, key, value)

            is_ok, error_code = field.invariant(value)
            if not is_ok:
                self._invariant_error_codes.append(error_code)

            return super(_PRecordEvolver, self).set(key, value)
        else:
            raise AttributeError("'{0}' is not among the specified fields for {1}".format(key, self._destination_cls.__name__))

    def persistent(self):
        cls = self._destination_cls
        is_dirty = self.is_dirty()
        pm = super(_PRecordEvolver, self).persistent()
        if is_dirty or not isinstance(pm, cls):
            result = cls(_precord_buckets=pm._buckets, _precord_size=pm._size)
        else:
            result = pm

        if cls._precord_mandatory_fields:
            self._missing_fields += tuple('{0}.{1}'.format(cls.__name__, f) for f
                                          in (cls._precord_mandatory_fields - set(result.keys())))

        if self._invariant_error_codes or self._missing_fields:
            raise InvariantException(tuple(self._invariant_error_codes), tuple(self._missing_fields),
                                     'Field invariant failed')

        check_global_invariants(result, cls._precord_invariants)

        return result
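The metaclass above collects the declared fields into _precord_fields, so a record is defined as an ordinary class, and every assignment is routed through the evolver where factories, type checks, and invariants run. A minimal declaration sketch, assuming pyrsistent from PyPI (Account is a hypothetical example class):

# Sketch only: Account is a hypothetical example record.
from pyrsistent import PRecord, field, InvariantException

class Account(PRecord):
    owner = field(type=str, mandatory=True)
    balance = field(type=int, initial=0,
                    invariant=lambda b: (b >= 0, 'balance must be non-negative'))

a = Account(owner='alice')           # initial value fills in balance=0
b = a.set(balance=100)               # new record; a is unchanged
assert (a.balance, b.balance) == (0, 100)

try:
    a.set(balance=-1)                # invariant failure raised from persistent()
except InvariantException:
    pass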
227
lib/spack/external/_vendoring/pyrsistent/_pset.py
vendored
@@ -1,227 +0,0 @@
from collections.abc import Set, Hashable
import sys
from pyrsistent._pmap import pmap


class PSet(object):
    """
    Persistent set implementation. Built on top of the persistent map. The set supports all operations
    in the Set protocol and is Hashable.

    Do not instantiate directly, instead use the factory functions :py:func:`s` or :py:func:`pset`
    to create an instance.

    Random access and insert are log32(n) where n is the size of the set.

    Some examples:

    >>> s = pset([1, 2, 3, 1])
    >>> s2 = s.add(4)
    >>> s3 = s2.remove(2)
    >>> s
    pset([1, 2, 3])
    >>> s2
    pset([1, 2, 3, 4])
    >>> s3
    pset([1, 3, 4])
    """
    __slots__ = ('_map', '__weakref__')

    def __new__(cls, m):
        self = super(PSet, cls).__new__(cls)
        self._map = m
        return self

    def __contains__(self, element):
        return element in self._map

    def __iter__(self):
        return iter(self._map)

    def __len__(self):
        return len(self._map)

    def __repr__(self):
        if not self:
            return 'p' + str(set(self))

        return 'pset([{0}])'.format(str(set(self))[1:-1])

    def __str__(self):
        return self.__repr__()

    def __hash__(self):
        return hash(self._map)

    def __reduce__(self):
        # Pickling support
        return pset, (list(self),)

    @classmethod
    def _from_iterable(cls, it, pre_size=8):
        return PSet(pmap(dict((k, True) for k in it), pre_size=pre_size))

    def add(self, element):
        """
        Return a new PSet with element added

        >>> s1 = s(1, 2)
        >>> s1.add(3)
        pset([1, 2, 3])
        """
        return self.evolver().add(element).persistent()

    def update(self, iterable):
        """
        Return a new PSet with elements in iterable added

        >>> s1 = s(1, 2)
        >>> s1.update([3, 4, 4])
        pset([1, 2, 3, 4])
        """
        e = self.evolver()
        for element in iterable:
            e.add(element)

        return e.persistent()

    def remove(self, element):
        """
        Return a new PSet with element removed. Raises KeyError if element is not present.

        >>> s1 = s(1, 2)
        >>> s1.remove(2)
        pset([1])
        """
        if element in self._map:
            return self.evolver().remove(element).persistent()

        raise KeyError("Element '%s' not present in PSet" % repr(element))

    def discard(self, element):
        """
        Return a new PSet with element removed. Returns itself if element is not present.
        """
        if element in self._map:
            return self.evolver().remove(element).persistent()

        return self

    class _Evolver(object):
        __slots__ = ('_original_pset', '_pmap_evolver')

        def __init__(self, original_pset):
            self._original_pset = original_pset
            self._pmap_evolver = original_pset._map.evolver()

        def add(self, element):
            self._pmap_evolver[element] = True
            return self

        def remove(self, element):
            del self._pmap_evolver[element]
            return self

        def is_dirty(self):
            return self._pmap_evolver.is_dirty()

        def persistent(self):
            if not self.is_dirty():
                return self._original_pset

            return PSet(self._pmap_evolver.persistent())

        def __len__(self):
            return len(self._pmap_evolver)

    def copy(self):
        return self

    def evolver(self):
        """
        Create a new evolver for this pset. For a discussion on evolvers in general see the
        documentation for the pvector evolver.

        Create the evolver and perform various mutating updates to it:

        >>> s1 = s(1, 2, 3)
        >>> e = s1.evolver()
        >>> _ = e.add(4)
        >>> len(e)
        4
        >>> _ = e.remove(1)

        The underlying pset remains the same:

        >>> s1
        pset([1, 2, 3])

        The changes are kept in the evolver. An updated pset can be created using the
        persistent() function on the evolver.

        >>> s2 = e.persistent()
        >>> s2
        pset([2, 3, 4])

        The new pset will share data with the original pset in the same way that would have
        been done if only using operations on the pset.
        """
        return PSet._Evolver(self)

    # All the operations and comparisons you would expect on a set.
    #
    # This is not very beautiful. If we avoid inheriting from PSet we can use the
    # __slots__ concepts (which requires a new style class) and hopefully save some memory.
    __le__ = Set.__le__
    __lt__ = Set.__lt__
    __gt__ = Set.__gt__
    __ge__ = Set.__ge__
    __eq__ = Set.__eq__
    __ne__ = Set.__ne__

    __and__ = Set.__and__
    __or__ = Set.__or__
    __sub__ = Set.__sub__
    __xor__ = Set.__xor__

    issubset = __le__
    issuperset = __ge__
    union = __or__
    intersection = __and__
    difference = __sub__
    symmetric_difference = __xor__

    isdisjoint = Set.isdisjoint

Set.register(PSet)
Hashable.register(PSet)

_EMPTY_PSET = PSet(pmap())


def pset(iterable=(), pre_size=8):
    """
    Creates a persistent set from iterable. Optionally takes a sizing parameter equivalent to that
    used for :py:func:`pmap`.

    >>> s1 = pset([1, 2, 3, 2])
    >>> s1
    pset([1, 2, 3])
    """
    if not iterable:
        return _EMPTY_PSET

    return PSet._from_iterable(iterable, pre_size=pre_size)


def s(*elements):
    """
    Create a persistent set.

    Takes an arbitrary number of arguments to insert into the new set.

    >>> s1 = s(1, 2, 3, 2)
    >>> s1
    pset([1, 2, 3])
    """
    return pset(elements)
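As the _from_iterable factory above shows, a PSet is just a PMap from element to True, so add and remove delegate to a pmap evolver. A short usage sketch, assuming pyrsistent from PyPI:

# Sketch only: persistent set operations always return new sets.
from pyrsistent import pset, s

s1 = s(1, 2, 3)
s2 = s1.add(4)                 # new set; s1 still has three elements
assert s1 == pset([1, 2, 3]) and s2 == pset([1, 2, 3, 4])
assert (s1 | s2) == s2         # Set protocol operators come via collections.abc
assert s1.discard(99) is s1    # discarding an absent element is a no-op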
@@ -1,83 +0,0 @@
"""
Functionality copied from the toolz package to avoid having
to add toolz as a dependency.

See https://github.com/pytoolz/toolz/.

toolz is released under BSD licence. Below is the licence text
from toolz as it appeared when copying the code.

--------------------------------------------------------------

Copyright (c) 2013 Matthew Rocklin

All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:

  a. Redistributions of source code must retain the above copyright notice,
     this list of conditions and the following disclaimer.
  b. Redistributions in binary form must reproduce the above copyright
     notice, this list of conditions and the following disclaimer in the
     documentation and/or other materials provided with the distribution.
  c. Neither the name of toolz nor the names of its contributors
     may be used to endorse or promote products derived from this software
     without specific prior written permission.


THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
DAMAGE.
"""
import operator
from functools import reduce


def get_in(keys, coll, default=None, no_default=False):
    """
    NB: This is a straight copy of the get_in implementation found in
    the toolz library (https://github.com/pytoolz/toolz/). It works
    with persistent data structures as well as the corresponding
    datastructures from the stdlib.

    Returns coll[i0][i1]...[iX] where [i0, i1, ..., iX]==keys.

    If coll[i0][i1]...[iX] cannot be found, returns ``default``, unless
    ``no_default`` is specified, then it raises KeyError or IndexError.

    ``get_in`` is a generalization of ``operator.getitem`` for nested data
    structures such as dictionaries and lists.
    >>> from pyrsistent import freeze
    >>> transaction = freeze({'name': 'Alice',
    ...                       'purchase': {'items': ['Apple', 'Orange'],
    ...                                    'costs': [0.50, 1.25]},
    ...                       'credit card': '5555-1234-1234-1234'})
    >>> get_in(['purchase', 'items', 0], transaction)
    'Apple'
    >>> get_in(['name'], transaction)
    'Alice'
    >>> get_in(['purchase', 'total'], transaction)
    >>> get_in(['purchase', 'items', 'apple'], transaction)
    >>> get_in(['purchase', 'items', 10], transaction)
    >>> get_in(['purchase', 'total'], transaction, 0)
    0
    >>> get_in(['y'], {}, no_default=True)
    Traceback (most recent call last):
        ...
    KeyError: 'y'
    """
    try:
        return reduce(operator.getitem, keys, coll)
    except (KeyError, IndexError, TypeError):
        if no_default:
            raise
        return default
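Since get_in simply reduces operator.getitem over the key path, it traverses any mix of mappings and sequences, not just frozen pyrsistent structures. For example, with the get_in defined above and plain stdlib containers:

# Usage sketch for the get_in above; cfg is a hypothetical nested config.
cfg = {'db': {'hosts': ['primary', 'replica']}}
assert get_in(['db', 'hosts', 1], cfg) == 'replica'       # dict -> dict -> list
assert get_in(['db', 'port'], cfg, default=5432) == 5432  # missing path gives default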
@@ -1,80 +0,0 @@
"""Helpers for use with type annotation.

Use the empty classes in this module when annotating the types of Pyrsistent
objects, instead of using the actual collection class.

For example,

    from pyrsistent import pvector
    from pyrsistent.typing import PVector

    myvector: PVector[str] = pvector(['a', 'b', 'c'])

"""
from __future__ import absolute_import

try:
    from typing import Container
    from typing import Hashable
    from typing import Generic
    from typing import Iterable
    from typing import Mapping
    from typing import Sequence
    from typing import Sized
    from typing import TypeVar

    __all__ = [
        'CheckedPMap',
        'CheckedPSet',
        'CheckedPVector',
        'PBag',
        'PDeque',
        'PList',
        'PMap',
        'PSet',
        'PVector',
    ]

    T = TypeVar('T')
    KT = TypeVar('KT')
    VT = TypeVar('VT')

    class CheckedPMap(Mapping[KT, VT], Hashable):
        pass

    # PSet.add and PSet.discard have different type signatures than that of Set.
    class CheckedPSet(Generic[T], Hashable):
        pass

    class CheckedPVector(Sequence[T], Hashable):
        pass

    class PBag(Container[T], Iterable[T], Sized, Hashable):
        pass

    class PDeque(Sequence[T], Hashable):
        pass

    class PList(Sequence[T], Hashable):
        pass

    class PMap(Mapping[KT, VT], Hashable):
        pass

    # PSet.add and PSet.discard have different type signatures than that of Set.
    class PSet(Generic[T], Hashable):
        pass

    class PVector(Sequence[T], Hashable):
        pass

    class PVectorEvolver(Generic[T]):
        pass

    class PMapEvolver(Generic[KT, VT]):
        pass

    class PSetEvolver(Generic[T]):
        pass
except ImportError:
    pass
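These empty classes exist purely so annotations do not have to name the concrete implementation classes; at runtime the factory functions still return the real collections. A small sketch, assuming pyrsistent from PyPI:

# Sketch only: the annotation uses pyrsistent.typing, the value is a real pmap.
from pyrsistent import pmap
from pyrsistent.typing import PMap

scores: PMap[str, int] = pmap({'alice': 3})
scores = scores.set('bob', 5)   # type checkers see PMap[str, int]
assert scores['bob'] == 5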
292
lib/spack/external/_vendoring/pyrsistent/typing.pyi
vendored
@@ -1,292 +0,0 @@
# flake8: noqa: E704
# from https://gist.github.com/WuTheFWasThat/091a17d4b5cab597dfd5d4c2d96faf09
# Stubs for pyrsistent (Python 3.6)
#
from typing import Any
from typing import Callable
from typing import Dict
from typing import Generic
from typing import Hashable
from typing import Iterator
from typing import Iterable
from typing import List
from typing import Mapping
from typing import Optional
from typing import Sequence
from typing import AbstractSet
from typing import Sized
from typing import Set
from typing import Tuple
from typing import TypeVar
from typing import Type
from typing import Union
from typing import overload

T = TypeVar('T')
KT = TypeVar('KT')
VT = TypeVar('VT')


class PMap(Mapping[KT, VT], Hashable):
    def __add__(self, other: PMap[KT, VT]) -> PMap[KT, VT]: ...
    def __getitem__(self, key: KT) -> VT: ...
    def __getattr__(self, key: str) -> VT: ...
    def __hash__(self) -> int: ...
    def __iter__(self) -> Iterator[KT]: ...
    def __len__(self) -> int: ...
    def copy(self) -> PMap[KT, VT]: ...
    def discard(self, key: KT) -> PMap[KT, VT]: ...
    def evolver(self) -> PMapEvolver[KT, VT]: ...
    def iteritems(self) -> Iterable[Tuple[KT, VT]]: ...
    def iterkeys(self) -> Iterable[KT]: ...
    def itervalues(self) -> Iterable[VT]: ...
    def remove(self, key: KT) -> PMap[KT, VT]: ...
    def set(self, key: KT, val: VT) -> PMap[KT, VT]: ...
    def transform(self, *transformations: Any) -> PMap[KT, VT]: ...
    def update(self, *args: Mapping): ...
    def update_with(self, update_fn: Callable[[VT, VT], VT], *args: Mapping) -> Any: ...


class PMapEvolver(Generic[KT, VT]):
    def __delitem__(self, key: KT) -> None: ...
    def __getitem__(self, key: KT) -> VT: ...
    def __len__(self) -> int: ...
    def __setitem__(self, key: KT, val: VT) -> None: ...
    def is_dirty(self) -> bool: ...
    def persistent(self) -> PMap[KT, VT]: ...
    def remove(self, key: KT) -> PMapEvolver[KT, VT]: ...
    def set(self, key: KT, val: VT) -> PMapEvolver[KT, VT]: ...


class PVector(Sequence[T], Hashable):
    def __add__(self, other: PVector[T]) -> PVector[T]: ...
    @overload
    def __getitem__(self, index: int) -> T: ...
    @overload
    def __getitem__(self, index: slice) -> PVector[T]: ...
    def __hash__(self) -> int: ...
    def __len__(self) -> int: ...
    def __mul__(self, other: PVector[T]) -> PVector[T]: ...
    def append(self, val: T) -> PVector[T]: ...
    def delete(self, index: int, stop: Optional[int]) -> PVector[T]: ...
    def evolver(self) -> PVectorEvolver[T]: ...
    def extend(self, obj: Iterable[T]) -> PVector[T]: ...
    def tolist(self) -> List[T]: ...
    def mset(self, *args: Iterable[Union[T, int]]) -> PVector[T]: ...
    def remove(self, value: T) -> PVector[T]: ...
    # Not compatible with MutableSequence
    def set(self, i: int, val: T) -> PVector[T]: ...
    def transform(self, *transformations: Any) -> PVector[T]: ...


class PVectorEvolver(Sequence[T], Sized):
    def __delitem__(self, i: Union[int, slice]) -> None: ...
    @overload
    def __getitem__(self, index: int) -> T: ...
    # Not actually supported
    @overload
    def __getitem__(self, index: slice) -> PVectorEvolver[T]: ...
    def __len__(self) -> int: ...
    def __setitem__(self, index: int, val: T) -> None: ...
    def append(self, val: T) -> PVectorEvolver[T]: ...
    def delete(self, value: T) -> PVectorEvolver[T]: ...
    def extend(self, obj: Iterable[T]) -> PVectorEvolver[T]: ...
    def is_dirty(self) -> bool: ...
    def persistent(self) -> PVector[T]: ...
    def set(self, i: int, val: T) -> PVectorEvolver[T]: ...


class PSet(AbstractSet[T], Hashable):
    def __contains__(self, element: object) -> bool: ...
    def __hash__(self) -> int: ...
    def __iter__(self) -> Iterator[T]: ...
    def __len__(self) -> int: ...
    def add(self, element: T) -> PSet[T]: ...
    def copy(self) -> PSet[T]: ...
    def difference(self, iterable: Iterable) -> PSet[T]: ...
    def discard(self, element: T) -> PSet[T]: ...
    def evolver(self) -> PSetEvolver[T]: ...
    def intersection(self, iterable: Iterable) -> PSet[T]: ...
    def issubset(self, iterable: Iterable) -> bool: ...
    def issuperset(self, iterable: Iterable) -> bool: ...
    def remove(self, element: T) -> PSet[T]: ...
    def symmetric_difference(self, iterable: Iterable[T]) -> PSet[T]: ...
    def union(self, iterable: Iterable[T]) -> PSet[T]: ...
    def update(self, iterable: Iterable[T]) -> PSet[T]: ...


class PSetEvolver(Generic[T], Sized):
    def __len__(self) -> int: ...
    def add(self, element: T) -> PSetEvolver[T]: ...
    def is_dirty(self) -> bool: ...
    def persistent(self) -> PSet[T]: ...
    def remove(self, element: T) -> PSetEvolver[T]: ...


class PBag(Generic[T], Sized, Hashable):
    def __add__(self, other: PBag[T]) -> PBag[T]: ...
    def __and__(self, other: PBag[T]) -> PBag[T]: ...
    def __contains__(self, elem: object) -> bool: ...
    def __hash__(self) -> int: ...
    def __iter__(self) -> Iterator[T]: ...
    def __len__(self) -> int: ...
    def __or__(self, other: PBag[T]) -> PBag[T]: ...
    def __sub__(self, other: PBag[T]) -> PBag[T]: ...
    def add(self, elem: T) -> PBag[T]: ...
    def count(self, elem: T) -> int: ...
    def remove(self, elem: T) -> PBag[T]: ...
    def update(self, iterable: Iterable[T]) -> PBag[T]: ...


class PDeque(Sequence[T], Hashable):
    @overload
    def __getitem__(self, index: int) -> T: ...
    @overload
    def __getitem__(self, index: slice) -> PDeque[T]: ...
    def __hash__(self) -> int: ...
    def __len__(self) -> int: ...
    def __lt__(self, other: PDeque[T]) -> bool: ...
    def append(self, elem: T) -> PDeque[T]: ...
    def appendleft(self, elem: T) -> PDeque[T]: ...
    def extend(self, iterable: Iterable[T]) -> PDeque[T]: ...
    def extendleft(self, iterable: Iterable[T]) -> PDeque[T]: ...
    @property
    def left(self) -> T: ...
    # The real return type is Integral according to what pyrsistent
    # checks at runtime but mypy doesn't deal in numeric.*:
    # https://github.com/python/mypy/issues/2636
    @property
    def maxlen(self) -> int: ...
    def pop(self, count: int = 1) -> PDeque[T]: ...
    def popleft(self, count: int = 1) -> PDeque[T]: ...
    def remove(self, elem: T) -> PDeque[T]: ...
    def reverse(self) -> PDeque[T]: ...
    @property
    def right(self) -> T: ...
    def rotate(self, steps: int) -> PDeque[T]: ...


class PList(Sequence[T], Hashable):
    @overload
    def __getitem__(self, index: int) -> T: ...
    @overload
    def __getitem__(self, index: slice) -> PList[T]: ...
    def __hash__(self) -> int: ...
    def __len__(self) -> int: ...
    def __lt__(self, other: PList[T]) -> bool: ...
    def __gt__(self, other: PList[T]) -> bool: ...
    def cons(self, elem: T) -> PList[T]: ...
    @property
    def first(self) -> T: ...
    def mcons(self, iterable: Iterable[T]) -> PList[T]: ...
    def remove(self, elem: T) -> PList[T]: ...
    @property
    def rest(self) -> PList[T]: ...
    def reverse(self) -> PList[T]: ...
    def split(self, index: int) -> Tuple[PList[T], PList[T]]: ...

T_PClass = TypeVar('T_PClass', bound='PClass')

class PClass(Hashable):
    def __new__(cls, **kwargs: Any): ...
    def set(self: T_PClass, *args: Any, **kwargs: Any) -> T_PClass: ...
    @classmethod
    def create(
        cls: Type[T_PClass],
        kwargs: Any,
        _factory_fields: Optional[Any] = ...,
        ignore_extra: bool = ...,
    ) -> T_PClass: ...
    def serialize(self, format: Optional[Any] = ...): ...
    def transform(self, *transformations: Any): ...
    def __eq__(self, other: object): ...
    def __ne__(self, other: object): ...
    def __hash__(self): ...
    def __reduce__(self): ...
    def evolver(self) -> PClassEvolver: ...
    def remove(self: T_PClass, name: Any) -> T_PClass: ...

class PClassEvolver:
    def __init__(self, original: Any, initial_dict: Any) -> None: ...
    def __getitem__(self, item: Any): ...
    def set(self, key: Any, value: Any): ...
    def __setitem__(self, key: Any, value: Any) -> None: ...
    def remove(self, item: Any): ...
    def __delitem__(self, item: Any) -> None: ...
    def persistent(self) -> PClass: ...
    def __getattr__(self, item: Any): ...


class CheckedPMap(PMap[KT, VT]):
    __key_type__: Type[KT]
    __value_type__: Type[VT]
    def __new__(cls, source: Mapping[KT, VT] = ..., size: int = ...) -> CheckedPMap: ...
    @classmethod
    def create(cls, source_data: Mapping[KT, VT], _factory_fields: Any = ...) -> CheckedPMap[KT, VT]: ...
    def serialize(self, format: Optional[Any] = ...) -> Dict[KT, VT]: ...


class CheckedPVector(PVector[T]):
    __type__: Type[T]
    def __new__(self, initial: Iterable[T] = ...) -> CheckedPVector: ...
    @classmethod
    def create(cls, source_data: Iterable[T], _factory_fields: Any = ...) -> CheckedPVector[T]: ...
    def serialize(self, format: Optional[Any] = ...) -> List[T]: ...


class CheckedPSet(PSet[T]):
    __type__: Type[T]
    def __new__(cls, initial: Iterable[T] = ...) -> CheckedPSet: ...
    @classmethod
    def create(cls, source_data: Iterable[T], _factory_fields: Any = ...) -> CheckedPSet[T]: ...
    def serialize(self, format: Optional[Any] = ...) -> Set[T]: ...


class InvariantException(Exception):
    invariant_errors: Tuple[Any, ...] = ...  # possibly nested tuple
    missing_fields: Tuple[str, ...] = ...
    def __init__(
        self,
        error_codes: Any = ...,
        missing_fields: Any = ...,
        *args: Any,
        **kwargs: Any
    ) -> None: ...


class CheckedTypeError(TypeError):
    source_class: Type[Any]
    expected_types: Tuple[Any, ...]
    actual_type: Type[Any]
    actual_value: Any
    def __init__(
        self,
        source_class: Any,
        expected_types: Any,
        actual_type: Any,
        actual_value: Any,
        *args: Any,
        **kwargs: Any
    ) -> None: ...


class CheckedKeyTypeError(CheckedTypeError): ...
class CheckedValueTypeError(CheckedTypeError): ...
class CheckedType: ...


class PTypeError(TypeError):
    source_class: Type[Any] = ...
    field: str = ...
    expected_types: Tuple[Any, ...] = ...
    actual_type: Type[Any] = ...
    def __init__(
        self,
        source_class: Any,
        field: Any,
        expected_types: Any,
        actual_type: Any,
        *args: Any,
        **kwargs: Any
    ) -> None: ...
@@ -1 +0,0 @@
from six import *
@@ -1 +0,0 @@
from six.moves import *
@@ -1 +0,0 @@
from six.moves.configparser import *
Some files were not shown because too many files have changed in this diff.