diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs index c72bb2a260e..4a32ed11871 100644 --- a/.git-blame-ignore-revs +++ b/.git-blame-ignore-revs @@ -1,3 +1,5 @@ # .git-blame-ignore-revs -# Formatted entire codebase with black +# Formatted entire codebase with black 23 +603569e321013a1a63a637813c94c2834d0a0023 +# Formatted entire codebase with black 22 f52f6e99dbf1131886a80112b8c79dfc414afb7c diff --git a/.gitattributes b/.gitattributes index 5299044761e..6dee4b041c5 100644 --- a/.gitattributes +++ b/.gitattributes @@ -1,3 +1,4 @@ *.py diff=python *.lp linguist-language=Prolog lib/spack/external/* linguist-vendored +*.bat text eol=crlf \ No newline at end of file diff --git a/.github/workflows/audit.yaml b/.github/workflows/audit.yaml index 275abb2d539..bd8d8b1661c 100644 --- a/.github/workflows/audit.yaml +++ b/.github/workflows/audit.yaml @@ -19,8 +19,8 @@ jobs: package-audits: runs-on: ubuntu-latest steps: - - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2 - - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 # @v2 + - uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # @v2 + - uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435 # @v2 with: python-version: ${{inputs.python_version}} - name: Install Python packages diff --git a/.github/workflows/bootstrap.yml b/.github/workflows/bootstrap.yml index 70935c1d6f1..94b4bc92164 100644 --- a/.github/workflows/bootstrap.yml +++ b/.github/workflows/bootstrap.yml @@ -24,7 +24,7 @@ jobs: make patch unzip which xz python3 python3-devel tree \ cmake bison bison-devel libstdc++-static - name: Checkout - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 + uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c with: fetch-depth: 0 - name: Setup non-root user @@ -62,7 +62,7 @@ jobs: make patch unzip xz-utils python3 python3-dev tree \ cmake bison - name: Checkout - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 + uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c with: fetch-depth: 0 - name: Setup non-root user @@ -99,7 +99,7 @@ jobs: bzip2 curl file g++ gcc gfortran git gnupg2 gzip \ make patch unzip xz-utils python3 python3-dev tree - name: Checkout - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 + uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c with: fetch-depth: 0 - name: Setup non-root user @@ -133,7 +133,7 @@ jobs: make patch unzip which xz python3 python3-devel tree \ cmake bison - name: Checkout - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 + uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c with: fetch-depth: 0 - name: Setup repo @@ -158,7 +158,7 @@ jobs: run: | brew install cmake bison@2.7 tree - name: Checkout - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 + uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c - name: Bootstrap clingo run: | source share/spack/setup-env.sh @@ -179,7 +179,7 @@ jobs: run: | brew install tree - name: Checkout - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 + uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c - name: Bootstrap clingo run: | set -ex @@ -204,7 +204,7 @@ jobs: runs-on: ubuntu-20.04 steps: - name: Checkout - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 + uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c with: fetch-depth: 0 - name: Setup repo @@ -247,7 +247,7 @@ jobs: bzip2 curl file g++ gcc patchelf gfortran 
git gzip \ make patch unzip xz-utils python3 python3-dev tree - name: Checkout - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 + uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c with: fetch-depth: 0 - name: Setup non-root user @@ -283,7 +283,7 @@ jobs: make patch unzip xz-utils python3 python3-dev tree \ gawk - name: Checkout - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 + uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c with: fetch-depth: 0 - name: Setup non-root user @@ -316,7 +316,7 @@ jobs: # Remove GnuPG since we want to bootstrap it sudo rm -rf /usr/local/bin/gpg - name: Checkout - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 + uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c - name: Bootstrap GnuPG run: | source share/spack/setup-env.sh @@ -333,7 +333,7 @@ jobs: # Remove GnuPG since we want to bootstrap it sudo rm -rf /usr/local/bin/gpg - name: Checkout - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 + uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c - name: Bootstrap GnuPG run: | source share/spack/setup-env.sh diff --git a/.github/workflows/build-containers.yml b/.github/workflows/build-containers.yml index 1030ba6428c..e7f0d73eca2 100644 --- a/.github/workflows/build-containers.yml +++ b/.github/workflows/build-containers.yml @@ -50,7 +50,7 @@ jobs: if: github.repository == 'spack/spack' steps: - name: Checkout - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2 + uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # @v2 - name: Set Container Tag Normal (Nightly) run: | @@ -80,7 +80,7 @@ jobs: fi - name: Upload Dockerfile - uses: actions/upload-artifact@83fd05a356d7e2593de66fc9913b3002723633cb + uses: actions/upload-artifact@0b7f8abb1508181956e8e162db84b466c27e18ce with: name: dockerfiles path: dockerfiles @@ -89,7 +89,7 @@ jobs: uses: docker/setup-qemu-action@e81a89b1732b9c48d79cd809d8d81d79c4647a18 # @v1 - name: Set up Docker Buildx - uses: docker/setup-buildx-action@8c0edbc76e98fa90f69d9a2c020dcb50019dc325 # @v1 + uses: docker/setup-buildx-action@f03ac48505955848960e80bbb68046aa35c7b9e7 # @v1 - name: Log in to GitHub Container Registry uses: docker/login-action@f4ef78c080cd8ba55a85445d5b36e214a81df20a # @v1 @@ -106,7 +106,7 @@ jobs: password: ${{ secrets.DOCKERHUB_TOKEN }} - name: Build & Deploy ${{ matrix.dockerfile[0] }} - uses: docker/build-push-action@c56af957549030174b10d6867f20e78cfd7debc5 # @v2 + uses: docker/build-push-action@3b5e8027fcad23fda98b2e3ac259d8d67585f671 # @v2 with: context: dockerfiles/${{ matrix.dockerfile[0] }} platforms: ${{ matrix.dockerfile[1] }} diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index ef951f341b5..c086011a065 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -35,7 +35,7 @@ jobs: core: ${{ steps.filter.outputs.core }} packages: ${{ steps.filter.outputs.packages }} steps: - - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2 + - uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # @v2 if: ${{ github.event_name == 'push' }} with: fetch-depth: 0 diff --git a/.github/workflows/setup_git.ps1 b/.github/workflows/setup_git.ps1 index 0acb9a9f460..836b7f8a2c7 100644 --- a/.github/workflows/setup_git.ps1 +++ b/.github/workflows/setup_git.ps1 @@ -1,15 +1,9 @@ -# (c) 2021 Lawrence Livermore National Laboratory - -Set-Location spack +# (c) 2022 Lawrence Livermore National Laboratory git config --global 
user.email "spack@example.com" git config --global user.name "Test User" git config --global core.longpaths true -# See https://github.com/git/git/security/advisories/GHSA-3wp6-j8xr-qw85 (CVE-2022-39253) -# This is needed to let some fixture in our unit-test suite run -git config --global protocol.file.allow always - if ($(git branch --show-current) -ne "develop") { git branch develop origin/develop diff --git a/.github/workflows/setup_git.sh b/.github/workflows/setup_git.sh index ee555ff71a9..4eb416720be 100755 --- a/.github/workflows/setup_git.sh +++ b/.github/workflows/setup_git.sh @@ -2,10 +2,6 @@ git config --global user.email "spack@example.com" git config --global user.name "Test User" -# See https://github.com/git/git/security/advisories/GHSA-3wp6-j8xr-qw85 (CVE-2022-39253) -# This is needed to let some fixture in our unit-test suite run -git config --global protocol.file.allow always - # create a local pr base branch if [[ -n $GITHUB_BASE_REF ]]; then git fetch origin "${GITHUB_BASE_REF}:${GITHUB_BASE_REF}" diff --git a/.github/workflows/unit_tests.yaml b/.github/workflows/unit_tests.yaml index ad70cd82eef..9a94868f359 100644 --- a/.github/workflows/unit_tests.yaml +++ b/.github/workflows/unit_tests.yaml @@ -47,10 +47,10 @@ jobs: on_develop: false steps: - - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2 + - uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # @v2 with: fetch-depth: 0 - - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 # @v2 + - uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435 # @v2 with: python-version: ${{ matrix.python-version }} - name: Install System packages @@ -94,10 +94,10 @@ jobs: shell: runs-on: ubuntu-latest steps: - - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2 + - uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # @v2 with: fetch-depth: 0 - - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 # @v2 + - uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435 # @v2 with: python-version: '3.11' - name: Install System packages @@ -133,7 +133,7 @@ jobs: dnf install -y \ bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \ make patch tcl unzip which xz - - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2 + - uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # @v2 - name: Setup repo and non-root user run: | git --version @@ -151,10 +151,10 @@ jobs: clingo-cffi: runs-on: ubuntu-latest steps: - - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2 + - uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # @v2 with: fetch-depth: 0 - - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 # @v2 + - uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435 # @v2 with: python-version: '3.11' - name: Install System packages @@ -185,10 +185,10 @@ jobs: matrix: python-version: ["3.10"] steps: - - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2 + - uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # @v2 with: fetch-depth: 0 - - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 # @v2 + - uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435 # @v2 with: python-version: ${{ matrix.python-version }} - name: Install Python packages diff --git a/.github/workflows/valid-style.yml b/.github/workflows/valid-style.yml index 2d704064710..1bf8faa77a5 100644 --- 
a/.github/workflows/valid-style.yml +++ b/.github/workflows/valid-style.yml @@ -18,8 +18,8 @@ jobs: validate: runs-on: ubuntu-latest steps: - - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2 - - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 # @v2 + - uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # @v2 + - uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435 # @v2 with: python-version: '3.11' cache: 'pip' @@ -35,16 +35,16 @@ jobs: style: runs-on: ubuntu-latest steps: - - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2 + - uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # @v2 with: fetch-depth: 0 - - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 # @v2 + - uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435 # @v2 with: python-version: '3.11' cache: 'pip' - name: Install Python packages run: | - python3 -m pip install --upgrade pip six setuptools types-six black mypy isort clingo flake8 + python3 -m pip install --upgrade pip six setuptools types-six black==23.1.0 mypy isort clingo flake8 - name: Setup git configuration run: | # Need this for the git tests to succeed. diff --git a/.github/workflows/windows_python.yml b/.github/workflows/windows_python.yml index 05a98c4cba9..f9f2e693857 100644 --- a/.github/workflows/windows_python.yml +++ b/.github/workflows/windows_python.yml @@ -10,15 +10,15 @@ concurrency: defaults: run: shell: - powershell Invoke-Expression -Command ".\share\spack\qa\windows_test_setup.ps1"; {0} + powershell Invoke-Expression -Command "./share/spack/qa/windows_test_setup.ps1"; {0} jobs: unit-tests: runs-on: windows-latest steps: - - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 + - uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c with: fetch-depth: 0 - - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 + - uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435 with: python-version: 3.9 - name: Install Python packages @@ -26,13 +26,11 @@ jobs: python -m pip install --upgrade pip six pywin32 setuptools codecov pytest-cov clingo - name: Create local develop run: | - .\spack\.github\workflows\setup_git.ps1 + ./.github/workflows/setup_git.ps1 - name: Unit Test run: | - echo F|xcopy .\spack\share\spack\qa\configuration\windows_config.yaml $env:USERPROFILE\.spack\windows\config.yaml - cd spack - dir spack unit-test -x --verbose --cov --cov-config=pyproject.toml --ignore=lib/spack/spack/test/cmd + ./share/spack/qa/validate_last_exit.ps1 coverage combine -a coverage xml - uses: codecov/codecov-action@d9f34f8cd5cb3b3eb79b3e4b5dae3a16df499a70 @@ -41,10 +39,10 @@ jobs: unit-tests-cmd: runs-on: windows-latest steps: - - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 + - uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c with: fetch-depth: 0 - - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 + - uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435 with: python-version: 3.9 - name: Install Python packages @@ -52,12 +50,11 @@ jobs: python -m pip install --upgrade pip six pywin32 setuptools codecov coverage pytest-cov clingo - name: Create local develop run: | - .\spack\.github\workflows\setup_git.ps1 + ./.github/workflows/setup_git.ps1 - name: Command Unit Test run: | - echo F|xcopy .\spack\share\spack\qa\configuration\windows_config.yaml $env:USERPROFILE\.spack\windows\config.yaml - cd spack spack unit-test 
-x --verbose --cov --cov-config=pyproject.toml lib/spack/spack/test/cmd + ./share/spack/qa/validate_last_exit.ps1 coverage combine -a coverage xml - uses: codecov/codecov-action@d9f34f8cd5cb3b3eb79b3e4b5dae3a16df499a70 @@ -66,10 +63,10 @@ jobs: build-abseil: runs-on: windows-latest steps: - - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 + - uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c with: fetch-depth: 0 - - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 + - uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435 with: python-version: 3.9 - name: Install Python packages @@ -78,81 +75,81 @@ jobs: - name: Build Test run: | spack compiler find - echo F|xcopy .\spack\share\spack\qa\configuration\windows_config.yaml $env:USERPROFILE\.spack\windows\config.yaml spack external find cmake spack external find ninja spack -d install abseil-cpp - make-installer: - runs-on: windows-latest - steps: - - name: Disable Windows Symlinks - run: | - git config --global core.symlinks false - shell: - powershell - - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 - with: - fetch-depth: 0 - - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 - with: - python-version: 3.9 - - name: Install Python packages - run: | - python -m pip install --upgrade pip six pywin32 setuptools - - name: Add Light and Candle to Path - run: | - $env:WIX >> $GITHUB_PATH - - name: Run Installer - run: | - .\spack\share\spack\qa\setup_spack.ps1 - spack make-installer -s spack -g SILENT pkg - echo "installer_root=$((pwd).Path)" | Out-File -FilePath $Env:GITHUB_ENV -Encoding utf8 -Append - env: - ProgressPreference: SilentlyContinue - - uses: actions/upload-artifact@83fd05a356d7e2593de66fc9913b3002723633cb - with: - name: Windows Spack Installer Bundle - path: ${{ env.installer_root }}\pkg\Spack.exe - - uses: actions/upload-artifact@83fd05a356d7e2593de66fc9913b3002723633cb - with: - name: Windows Spack Installer - path: ${{ env.installer_root}}\pkg\Spack.msi - execute-installer: - needs: make-installer - runs-on: windows-latest - defaults: - run: - shell: pwsh - steps: - - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 - with: - python-version: 3.9 - - name: Install Python packages - run: | - python -m pip install --upgrade pip six pywin32 setuptools - - name: Setup installer directory - run: | - mkdir -p spack_installer - echo "spack_installer=$((pwd).Path)\spack_installer" | Out-File -FilePath $Env:GITHUB_ENV -Encoding utf8 -Append - - uses: actions/download-artifact@v3 - with: - name: Windows Spack Installer Bundle - path: ${{ env.spack_installer }} - - name: Execute Bundled Installer - run: | - $proc = Start-Process ${{ env.spack_installer }}\spack.exe "/install /quiet" -Passthru - $handle = $proc.Handle # cache proc.Handle - $proc.WaitForExit(); - $LASTEXITCODE - env: - ProgressPreference: SilentlyContinue - - uses: actions/download-artifact@v3 - with: - name: Windows Spack Installer - path: ${{ env.spack_installer }} - - name: Execute MSI - run: | - $proc = Start-Process ${{ env.spack_installer }}\spack.msi "/quiet" -Passthru - $handle = $proc.Handle # cache proc.Handle - $proc.WaitForExit(); - $LASTEXITCODE + # TODO: johnwparent - reduce the size of the installer operations + # make-installer: + # runs-on: windows-latest + # steps: + # - name: Disable Windows Symlinks + # run: | + # git config --global core.symlinks false + # shell: + # powershell + # - uses: 
actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c + # with: + # fetch-depth: 0 + # - uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435 + # with: + # python-version: 3.9 + # - name: Install Python packages + # run: | + # python -m pip install --upgrade pip six pywin32 setuptools + # - name: Add Light and Candle to Path + # run: | + # $env:WIX >> $GITHUB_PATH + # - name: Run Installer + # run: | + # ./share/spack/qa/setup_spack_installer.ps1 + # spack make-installer -s . -g SILENT pkg + # echo "installer_root=$((pwd).Path)" | Out-File -FilePath $Env:GITHUB_ENV -Encoding utf8 -Append + # env: + # ProgressPreference: SilentlyContinue + # - uses: actions/upload-artifact@83fd05a356d7e2593de66fc9913b3002723633cb + # with: + # name: Windows Spack Installer Bundle + # path: ${{ env.installer_root }}\pkg\Spack.exe + # - uses: actions/upload-artifact@83fd05a356d7e2593de66fc9913b3002723633cb + # with: + # name: Windows Spack Installer + # path: ${{ env.installer_root}}\pkg\Spack.msi + # execute-installer: + # needs: make-installer + # runs-on: windows-latest + # defaults: + # run: + # shell: pwsh + # steps: + # - uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435 + # with: + # python-version: 3.9 + # - name: Install Python packages + # run: | + # python -m pip install --upgrade pip six pywin32 setuptools + # - name: Setup installer directory + # run: | + # mkdir -p spack_installer + # echo "spack_installer=$((pwd).Path)\spack_installer" | Out-File -FilePath $Env:GITHUB_ENV -Encoding utf8 -Append + # - uses: actions/download-artifact@v3 + # with: + # name: Windows Spack Installer Bundle + # path: ${{ env.spack_installer }} + # - name: Execute Bundled Installer + # run: | + # $proc = Start-Process ${{ env.spack_installer }}\spack.exe "/install /quiet" -Passthru + # $handle = $proc.Handle # cache proc.Handle + # $proc.WaitForExit(); + # $LASTEXITCODE + # env: + # ProgressPreference: SilentlyContinue + # - uses: actions/download-artifact@v3 + # with: + # name: Windows Spack Installer + # path: ${{ env.spack_installer }} + # - name: Execute MSI + # run: | + # $proc = Start-Process ${{ env.spack_installer }}\spack.msi "/quiet" -Passthru + # $handle = $proc.Handle # cache proc.Handle + # $proc.WaitForExit(); + # $LASTEXITCODE diff --git a/CHANGELOG.md b/CHANGELOG.md index 634c316e122..a37d422008a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,28 @@ +# v0.19.1 (2023-02-07) + +### Spack Bugfixes + +* `buildcache create`: make "file exists" less verbose (#35019) +* `spack mirror create`: don't change paths to urls (#34992) +* Improve error message for requirements (#33988) +* uninstall: fix accidental cubic complexity (#34005) +* scons: fix signature for `install_args` (#34481) +* Fix `combine_phase_logs` text encoding issues (#34657) +* Use a module-like object to propagate changes in the MRO, when setting build env (#34059) +* PackageBase should not define builder legacy attributes (#33942) +* Forward lookup of the "run_tests" attribute (#34531) +* Bugfix for timers (#33917, #33900) +* Fix path handling in prefix inspections (#35318) +* Fix libtool filter for Fujitsu compilers (#34916) +* Bug fix for duplicate rpath errors on macOS when creating build caches (#34375) +* FileCache: delete the new cache file on exception (#34623) +* Propagate exceptions from Spack python console (#34547) +* Tests: Fix a bug/typo in a `config_values.py` fixture (#33886) +* Various CI fixes (#33953, #34560, #34828) +* Docs: remove monitors and analyzers, typos (#34358,
#33926) +* bump release version for tutorial command (#33859) + + # v0.19.0 (2022-11-11) `v0.19.0` is a major feature release. diff --git a/LICENSE-MIT b/LICENSE-MIT index 05285accef8..54bef34ec5a 100644 --- a/LICENSE-MIT +++ b/LICENSE-MIT @@ -1,6 +1,6 @@ MIT License -Copyright (c) 2013-2022 LLNS, LLC and other Spack Project Developers. +Copyright (c) 2013-2023 LLNS, LLC and other Spack Project Developers. Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal diff --git a/bin/haspywin.py b/bin/haspywin.py index e19d902e283..da9e5ab7de7 100644 --- a/bin/haspywin.py +++ b/bin/haspywin.py @@ -1,4 +1,4 @@ -# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other # Spack Project Developers. See the top-level COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) @@ -10,6 +10,7 @@ def getpywin(): try: import win32con # noqa: F401 except ImportError: + print("pyWin32 not installed but is required...\nInstalling via pip:") subprocess.check_call([sys.executable, "-m", "pip", "-q", "install", "--upgrade", "pip"]) subprocess.check_call([sys.executable, "-m", "pip", "-q", "install", "pywin32"]) diff --git a/bin/sbang b/bin/sbang index 806bc3a4af8..727a3bbce6d 100755 --- a/bin/sbang +++ b/bin/sbang @@ -1,6 +1,6 @@ #!/bin/sh # -# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other # sbang project developers. See the top-level COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) diff --git a/bin/spack b/bin/spack index d0eb8d81603..fe90edf7d3d 100755 --- a/bin/spack +++ b/bin/spack @@ -1,7 +1,7 @@ #!/bin/sh # -*- python -*- # -# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other # Spack Project Developers. See the top-level COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) diff --git a/bin/spack-python b/bin/spack-python index b2deca2df48..30196d46c16 100755 --- a/bin/spack-python +++ b/bin/spack-python @@ -1,6 +1,6 @@ #!/bin/sh # -# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other # Spack Project Developers. See the top-level COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) diff --git a/bin/spack-tmpconfig b/bin/spack-tmpconfig index a3d053d3406..a477daa4d35 100755 --- a/bin/spack-tmpconfig +++ b/bin/spack-tmpconfig @@ -72,6 +72,7 @@ config: root: $TMP_DIR/install misc_cache: $$user_cache_path/cache source_cache: $$user_cache_path/source + environments_root: $TMP_DIR/envs EOF cat >"$SPACK_USER_CONFIG_PATH/bootstrap.yaml" <flags @@ -92,24 +102,24 @@ endlocal set /p _sp_subcommand=NUL ) set "EDITOR=notepad" diff --git a/bin/spack_pwsh.ps1 b/bin/spack_pwsh.ps1 index b7344217806..0f5f0725496 100644 --- a/bin/spack_pwsh.ps1 +++ b/bin/spack_pwsh.ps1 @@ -1,4 +1,4 @@ -# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other # Spack Project Developers. See the top-level COPYRIGHT file for details. 
# # SPDX-License-Identifier: (Apache-2.0 OR MIT) diff --git a/etc/spack/defaults/config.yaml b/etc/spack/defaults/config.yaml index f42a6525e2d..43f8a98dff5 100644 --- a/etc/spack/defaults/config.yaml +++ b/etc/spack/defaults/config.yaml @@ -54,6 +54,11 @@ config: # are that it precludes its use as a system package and its ability to be # pip installable. # + # In Spack environment files that chain onto existing system Spack + # installations, the $env variable can be used to download, cache, and build + # into user-writable paths relative to the currently active + # environment. + # # In any case, if the username is not already in the path, Spack will append # the value of `$user` in an attempt to avoid potential conflicts between # users in shared temporary spaces. @@ -76,6 +81,10 @@ config: source_cache: $spack/var/spack/cache + ## Directory where spack managed environments are created and stored + # environments_root: $spack/var/spack/environments + + # Cache directory for miscellaneous files, like the package index. # This can be purged with `spack clean --misc-cache` misc_cache: $user_cache_path/cache @@ -176,7 +185,7 @@ config: # when Spack needs to manage its own package metadata and all operations are # expected to complete within the default time limit. The timeout should # therefore generally be left untouched. - db_lock_timeout: 3 + db_lock_timeout: 60 # How long to wait when attempting to modify a package (e.g. to install it). diff --git a/etc/spack/defaults/windows/packages.yaml b/etc/spack/defaults/windows/packages.yaml new file mode 100644 index 00000000000..863cf7cf182 --- /dev/null +++ b/etc/spack/defaults/windows/packages.yaml @@ -0,0 +1,21 @@ +# ------------------------------------------------------------------------- +# This file controls default concretization preferences for Spack. +# +# Settings here are versioned with Spack and are intended to provide +# sensible defaults out of the box. Spack maintainers should edit this +# file to keep it current. +# +# Users can override these settings by editing the following files. +# +# Per-spack-instance settings (overrides defaults): +# $SPACK_ROOT/etc/spack/packages.yaml +# +# Per-user settings (overrides default and site settings): +# ~/.spack/packages.yaml +# ------------------------------------------------------------------------- +packages: + all: + compiler: + - msvc + providers: + mpi: [msmpi] diff --git a/lib/spack/docs/.gitignore b/lib/spack/docs/.gitignore index 1624dfeaeab..d481aa0923b 100644 --- a/lib/spack/docs/.gitignore +++ b/lib/spack/docs/.gitignore @@ -5,3 +5,4 @@ llnl*.rst _build .spack-env spack.lock +_spack_root diff --git a/lib/spack/docs/analyze.rst b/lib/spack/docs/analyze.rst deleted file mode 100644 index 197c127e356..00000000000 --- a/lib/spack/docs/analyze.rst +++ /dev/null @@ -1,162 +0,0 @@ -.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other - Spack Project Developers. See the top-level COPYRIGHT file for details. - - SPDX-License-Identifier: (Apache-2.0 OR MIT) - -.. _analyze: - -======= -Analyze -======= - - -The analyze command is a front-end to various tools that let us analyze -package installations. Each analyzer is a module for a different kind -of analysis that can be done on a package installation, including (but not -limited to) binary, log, or text analysis. Thus, the analyze command group -allows you to take an existing package install, choose an analyzer, -and extract some output for the package using it.
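Returning to the ``etc/spack/defaults/config.yaml`` hunk above: it raises the default ``db_lock_timeout`` from 3 to 60 seconds and introduces a commented-out ``environments_root`` setting. As a minimal sketch of overriding both in user scope once this patch is applied — the timeout value and the environments path here are illustrative assumptions, not part of the patch:

.. code-block:: console

   $ spack config add config:db_lock_timeout:120
   $ spack config add config:environments_root:$HOME/spack-envs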
- - ------------------ -Analyzer Metadata ------------------ - -For all analyzers, we write to an ``analyzers`` folder in ``~/.spack``, or the -value that you specify in your spack config at ``config:analyzers_dir``. -For example, here we see the results of running an analysis on zlib: - -.. code-block:: console - - $ tree ~/.spack/analyzers/ - └── linux-ubuntu20.04-skylake - └── gcc-9.3.0 - └── zlib-1.2.11-sl7m27mzkbejtkrajigj3a3m37ygv4u2 - ├── environment_variables - │   └── spack-analyzer-environment-variables.json - ├── install_files - │   └── spack-analyzer-install-files.json - └── libabigail - └── spack-analyzer-libabigail-libz.so.1.2.11.xml - - -This means that you can always find analyzer output in this folder, and it -is organized with the same logic as the package install it was run for. -If you want to customize this top level folder, simply provide the ``--path`` -argument to ``spack analyze run``. The nested organization will be maintained -within your custom root. - ------------------ -Listing Analyzers ------------------ - -If you aren't familiar with Spack's analyzers, you can quickly list those that -are available: - -.. code-block:: console - - $ spack analyze list-analyzers - install_files : install file listing read from install_manifest.json - environment_variables : environment variables parsed from spack-build-env.txt - config_args : config args loaded from spack-configure-args.txt - libabigail : Application Binary Interface (ABI) features for objects - - -In the above, the first three are fairly simple - parsing metadata files from -a package install directory to save - -------------------- -Analyzing a Package -------------------- - -The analyze command, akin to install, will accept a package spec to perform -an analysis for. The package must be installed. Let's walk through an example -with zlib. We first ask to analyze it. However, since we have more than one -install, we are asked to disambiguate: - -.. code-block:: console - - $ spack analyze run zlib - ==> Error: zlib matches multiple packages. - Matching packages: - fz2bs56 zlib@1.2.11%gcc@7.5.0 arch=linux-ubuntu18.04-skylake - sl7m27m zlib@1.2.11%gcc@9.3.0 arch=linux-ubuntu20.04-skylake - Use a more specific spec. - - -We can then specify the spec version that we want to analyze: - -.. code-block:: console - - $ spack analyze run zlib/fz2bs56 - -If you don't provide any specific analyzer names, by default all analyzers -(shown in the ``list-analyzers`` subcommand list) will be run. If an analyzer does not -have any result, it will be skipped. For example, here is a result running for -zlib: - -.. code-block:: console - - $ ls ~/.spack/analyzers/linux-ubuntu20.04-skylake/gcc-9.3.0/zlib-1.2.11-sl7m27mzkbejtkrajigj3a3m37ygv4u2/ - spack-analyzer-environment-variables.json - spack-analyzer-install-files.json - spack-analyzer-libabigail-libz.so.1.2.11.xml - -If you want to run a specific analyzer, ask for it with `--analyzer`. Here we run -spack analyze on libabigail (already installed) _using_ libabigail1 - -.. code-block:: console - - $ spack analyze run --analyzer abigail libabigail - - -.. _analyze_monitoring: - ----------------------- -Monitoring An Analysis ----------------------- - -For any kind of analysis, you can -use a `spack monitor `_ "Spackmon" -as a server to upload the same run metadata to. You can -follow the instructions in the `spack monitor documentation `_ -to first create a server along with a username and token for yourself. -You can then use this guide to interact with the server. 
- -You should first export our spack monitor token and username to the environment: - -.. code-block:: console - - $ export SPACKMON_TOKEN=50445263afd8f67e59bd79bff597836ee6c05438 - $ export SPACKMON_USER=spacky - - -By default, the host for your server is expected to be at ``http://127.0.0.1`` -with a prefix of ``ms1``, and if this is the case, you can simply add the -``--monitor`` flag to the install command: - -.. code-block:: console - - $ spack analyze run --monitor wget - -If you need to customize the host or the prefix, you can do that as well: - -.. code-block:: console - - $ spack analyze run --monitor --monitor-prefix monitor --monitor-host https://monitor-service.io wget - -If your server doesn't have authentication, you can skip it: - -.. code-block:: console - - $ spack analyze run --monitor --monitor-disable-auth wget - -Regardless of your choice, when you run analyze on an installed package (whether -it was installed with ``--monitor`` or not, you'll see the results generating as they did -before, and a message that the monitor server was pinged: - -.. code-block:: console - - $ spack analyze --monitor wget - ... - ==> Sending result for wget bin/wget to monitor. diff --git a/lib/spack/docs/basic_usage.rst b/lib/spack/docs/basic_usage.rst index db1cf519f11..adf0f273a1a 100644 --- a/lib/spack/docs/basic_usage.rst +++ b/lib/spack/docs/basic_usage.rst @@ -1,4 +1,4 @@ -.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other Spack Project Developers. See the top-level COPYRIGHT file for details. SPDX-License-Identifier: (Apache-2.0 OR MIT) diff --git a/lib/spack/docs/binary_caches.rst b/lib/spack/docs/binary_caches.rst index 7c41485df86..af8d9d35e86 100644 --- a/lib/spack/docs/binary_caches.rst +++ b/lib/spack/docs/binary_caches.rst @@ -1,4 +1,4 @@ -.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other Spack Project Developers. See the top-level COPYRIGHT file for details. SPDX-License-Identifier: (Apache-2.0 OR MIT) @@ -13,49 +13,51 @@ Some sites may encourage users to set up their own test environments before carrying out central installations, or some users may prefer to set up these environments on their own motivation. To reduce the load of recompiling otherwise identical package specs in different installations, -installed packages can be put into build cache tarballs, uploaded to +installed packages can be put into build cache tarballs, pushed to your Spack mirror and then downloaded and installed by others. +Whenever a mirror provides prebuilt packages, Spack will take these packages +into account during concretization and installation, making ``spack install`` +significantly faster. --------------------------- -Creating build cache files --------------------------- -A compressed tarball of an installed package is created. Tarballs are created -for all of its link and run dependency packages as well. Compressed tarballs are -signed with gpg and signature and tarball and put in a ``.spack`` file. Optionally, -the rpaths (and ids and deps on macOS) can be changed to paths relative to -the Spack install tree before the tarball is created. +.. note:: + + We use the terms "build cache" and "mirror" often interchangeably. Mirrors + are used during installation both for sources and prebuilt packages. Build + caches refer to mirrors that provide prebuilt packages.
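Since the note above distinguishes mirrors from build caches, here is a minimal sketch of what a configured build-cache mirror looks like on disk; the mirror name ``mymirror`` and the local path are illustrative assumptions that anticipate the example further below:

.. code-block:: console

   $ spack mirror add mymirror file:///home/spackuser/spack/spack-cache
   $ cat ~/.spack/mirrors.yaml
   mirrors:
     mymirror: file:///home/spackuser/spack/spack-cache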
+ + +---------------------- +Creating a build cache +---------------------- Build caches are created via: .. code-block:: console - $ spack buildcache create + $ spack buildcache create +This command takes the locally installed spec and its dependencies, and +creates tarballs of their install prefixes. It also generates metadata files, +signed with GPG. These tarballs and metadata files are then pushed to the +provided binary cache, which can be a local directory or a remote URL. -If you wanted to create a build cache in a local directory, you would provide -the ``-d`` argument to target that directory, again also specifying the spec. -Here is an example creating a local directory, "spack-cache" and creating -build cache files for the "ninja" spec: +Here is an example where a build cache is created in a local directory named +"spack-cache", to which we push the "ninja" spec: .. code-block:: console - $ mkdir -p ./spack-cache - $ spack buildcache create -d ./spack-cache ninja - ==> Buildcache files will be output to file:///home/spackuser/spack/spack-cache/build_cache - gpgconf: socketdir is '/run/user/1000/gnupg' - gpg: using "E6DF6A8BD43208E4D6F392F23777740B7DBD643D" as default secret key for signing + $ spack buildcache create --allow-root ./spack-cache ninja + ==> Pushing binary packages to file:///home/spackuser/spack/spack-cache/build_cache -Note that the targeted spec must already be installed. Once you have a build cache, -you can add it as a mirror, discussed next. +Note that ``ninja`` must be installed locally for this to work. -.. warning:: +We're using the ``--allow-root`` flag to tell Spack that it is OK when any of +the binaries we're pushing contain references to the local Spack install +directory. - Spack improved the format used for binary caches in v0.18. The entire v0.18 series - will be able to verify and install binary caches both in the new and in the old format. - Support for using the old format is expected to end in v0.19, so we advise users to - recreate relevant buildcaches using Spack v0.18 or higher. +Once you have a build cache, you can add it as a mirror, discussed next. --------------------------------------- Finding or installing build cache files @@ -66,10 +68,10 @@ with: .. code-block:: console - $ spack mirror add + $ spack mirror add -Note that the url can be a web url _or_ a local filesystem location. In the previous +Both web URLs and local paths on the filesystem can be specified. In the previous example, you might add the directory "spack-cache" and call it ``mymirror``: @@ -94,7 +96,7 @@ this new build cache as follows: .. code-block:: console - $ spack buildcache update-index -d spack-cache/ + $ spack buildcache update-index ./spack-cache Now you can use list: @@ -105,46 +107,38 @@ Now you can use list: -- linux-ubuntu20.04-skylake / gcc@9.3.0 ------------------------ ninja@1.10.2 - -Great! So now let's say you have a different spack installation, or perhaps just -a different environment for the same one, and you want to install a package from -that build cache. Let's first uninstall the actual library "ninja" to see if we can -re-install it from the cache. +With ``mymirror`` configured and an index available, Spack will automatically +use it during concretization and installation. That means that you can expect +``spack install ninja`` to fetch prebuilt packages from the mirror. Let's +verify by re-installing ninja: .. code-block:: console $ spack uninstall ninja - - -And now reinstall from the buildcache - -..
code-block:: console - - $ spack buildcache install ninja - ==> buildcache spec(s) matching ninja - ==> Fetching file:///home/spackuser/spack/spack-cache/build_cache/linux-ubuntu20.04-skylake/gcc-9.3.0/ninja-1.10.2/linux-ubuntu20.04-skylake-gcc-9.3.0-ninja-1.10.2-i4e5luour7jxdpc3bkiykd4imke3mkym.spack - ####################################################################################################################################### 100.0% - ==> Installing buildcache for spec ninja@1.10.2%gcc@9.3.0 arch=linux-ubuntu20.04-skylake - gpgconf: socketdir is '/run/user/1000/gnupg' - gpg: Signature made Tue 23 Mar 2021 10:16:29 PM MDT - gpg: using RSA key E6DF6A8BD43208E4D6F392F23777740B7DBD643D - gpg: Good signature from "spackuser (GPG created for Spack) " [ultimate] + $ spack install ninja + ==> Installing ninja-1.11.1-yxferyhmrjkosgta5ei6b4lqf6bxbscz + ==> Fetching file:///home/spackuser/spack/spack-cache/build_cache/linux-ubuntu20.04-skylake-gcc-9.3.0-ninja-1.10.2-yxferyhmrjkosgta5ei6b4lqf6bxbscz.spec.json.sig + gpg: Signature made Do 12 Jan 2023 16:01:04 CET + gpg: using RSA key 61B82B2B2350E171BD17A1744E3A689061D57BF6 + gpg: Good signature from "example (GPG created for Spack) " [ultimate] + ==> Fetching file:///home/spackuser/spack/spack-cache/build_cache/linux-ubuntu20.04-skylake/gcc-9.3.0/ninja-1.10.2/linux-ubuntu20.04-skylake-gcc-9.3.0-ninja-1.10.2-yxferyhmrjkosgta5ei6b4lqf6bxbscz.spack + ==> Extracting ninja-1.10.2-yxferyhmrjkosgta5ei6b4lqf6bxbscz from binary cache + ==> ninja: Successfully installed ninja-1.11.1-yxferyhmrjkosgta5ei6b4lqf6bxbscz + Search: 0.00s. Fetch: 0.17s. Install: 0.12s. Total: 0.29s + [+] /home/harmen/spack/opt/spack/linux-ubuntu20.04-skylake/gcc-9.3.0/ninja-1.11.1-yxferyhmrjkosgta5ei6b4lqf6bxbscz It worked! You've just completed a full example of creating a build cache with -a spec of interest, adding it as a mirror, updating it's index, listing the contents, +a spec of interest, adding it as a mirror, updating its index, listing the contents, and finally, installing from it. - -Note that the above command is intended to install a particular package to a -build cache you have created, and not to install a package from a build cache. -For the latter, once a mirror is added, by default when you do ``spack install`` the ``--use-cache`` -flag is set, and you will install a package from a build cache if it is available. -If you want to always use the cache, you can do: +By default Spack falls back to building from sources when the mirror is not available +or when the package is simply not already available. To force Spack to only install +prebuilt packages, you can use .. code-block:: console - $ spack install --cache-only + $ spack install --use-buildcache only For example, to combine all of the commands above to add the E4S build cache and then install from it exclusively, you would do: @@ -153,7 +147,7 @@ and then install from it exclusively, you would do: $ spack mirror add E4S https://cache.e4s.io $ spack buildcache keys --install --trust - $ spack install --cache-only + $ spack install --use-buildcache only We use ``--install`` and ``--trust`` to say that we are installing keys to our keyring, and trusting all downloaded keys. diff --git a/lib/spack/docs/bootstrapping.rst b/lib/spack/docs/bootstrapping.rst index 86ce2eb77b9..dec548e9c6c 100644 --- a/lib/spack/docs/bootstrapping.rst +++ b/lib/spack/docs/bootstrapping.rst @@ -1,4 +1,4 @@ -.. Copyright 2013-2021 Lawrence Livermore National Security, LLC and other +..
Copyright 2013-2023 Lawrence Livermore National Security, LLC and other Spack Project Developers. See the top-level COPYRIGHT file for details. SPDX-License-Identifier: (Apache-2.0 OR MIT) diff --git a/lib/spack/docs/build_settings.rst b/lib/spack/docs/build_settings.rst index 900a612c4c6..3adbbb4c762 100644 --- a/lib/spack/docs/build_settings.rst +++ b/lib/spack/docs/build_settings.rst @@ -1,4 +1,4 @@ -.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other Spack Project Developers. See the top-level COPYRIGHT file for details. SPDX-License-Identifier: (Apache-2.0 OR MIT) @@ -272,7 +272,7 @@ Selection of the target microarchitectures ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ The options under the ``targets`` attribute control which targets are considered during a solve. -Currently the options in this section are only configurable from the ``concretization.yaml`` file +Currently the options in this section are only configurable from the ``concretizer.yaml`` file and there are no corresponding command line arguments to enable them for a single solve. The ``granularity`` option can take two possible values: ``microarchitectures`` and ``generic``. diff --git a/lib/spack/docs/build_systems.rst b/lib/spack/docs/build_systems.rst index 1ce8d6746e4..f659d63e4a3 100644 --- a/lib/spack/docs/build_systems.rst +++ b/lib/spack/docs/build_systems.rst @@ -1,4 +1,4 @@ -.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other Spack Project Developers. See the top-level COPYRIGHT file for details. SPDX-License-Identifier: (Apache-2.0 OR MIT) diff --git a/lib/spack/docs/build_systems/autotoolspackage.rst b/lib/spack/docs/build_systems/autotoolspackage.rst index 88fabc0c5df..abf25f149bc 100644 --- a/lib/spack/docs/build_systems/autotoolspackage.rst +++ b/lib/spack/docs/build_systems/autotoolspackage.rst @@ -1,4 +1,4 @@ -.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other Spack Project Developers. See the top-level COPYRIGHT file for details. SPDX-License-Identifier: (Apache-2.0 OR MIT) diff --git a/lib/spack/docs/build_systems/bundlepackage.rst b/lib/spack/docs/build_systems/bundlepackage.rst index 7a826f5e178..ca2be812408 100644 --- a/lib/spack/docs/build_systems/bundlepackage.rst +++ b/lib/spack/docs/build_systems/bundlepackage.rst @@ -1,4 +1,4 @@ -.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other Spack Project Developers. See the top-level COPYRIGHT file for details. SPDX-License-Identifier: (Apache-2.0 OR MIT) diff --git a/lib/spack/docs/build_systems/cachedcmakepackage.rst b/lib/spack/docs/build_systems/cachedcmakepackage.rst index 0d68b4b4389..f1649926795 100644 --- a/lib/spack/docs/build_systems/cachedcmakepackage.rst +++ b/lib/spack/docs/build_systems/cachedcmakepackage.rst @@ -1,4 +1,4 @@ -.. Copyright 2013-2021 Lawrence Livermore National Security, LLC and other +.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other Spack Project Developers. See the top-level COPYRIGHT file for details. 
SPDX-License-Identifier: (Apache-2.0 OR MIT) diff --git a/lib/spack/docs/build_systems/cmakepackage.rst b/lib/spack/docs/build_systems/cmakepackage.rst index 7a1db842de1..fc1de918fd9 100644 --- a/lib/spack/docs/build_systems/cmakepackage.rst +++ b/lib/spack/docs/build_systems/cmakepackage.rst @@ -1,4 +1,4 @@ -.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other Spack Project Developers. See the top-level COPYRIGHT file for details. SPDX-License-Identifier: (Apache-2.0 OR MIT) diff --git a/lib/spack/docs/build_systems/cudapackage.rst b/lib/spack/docs/build_systems/cudapackage.rst index d34895b4052..f586b719523 100644 --- a/lib/spack/docs/build_systems/cudapackage.rst +++ b/lib/spack/docs/build_systems/cudapackage.rst @@ -1,4 +1,4 @@ -.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other Spack Project Developers. See the top-level COPYRIGHT file for details. SPDX-License-Identifier: (Apache-2.0 OR MIT) diff --git a/lib/spack/docs/build_systems/custompackage.rst b/lib/spack/docs/build_systems/custompackage.rst index 27b8d49702e..4979f458ba9 100644 --- a/lib/spack/docs/build_systems/custompackage.rst +++ b/lib/spack/docs/build_systems/custompackage.rst @@ -1,4 +1,4 @@ -.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other Spack Project Developers. See the top-level COPYRIGHT file for details. SPDX-License-Identifier: (Apache-2.0 OR MIT) diff --git a/lib/spack/docs/build_systems/inteloneapipackage.rst b/lib/spack/docs/build_systems/inteloneapipackage.rst index f5124643486..6cc0b33192b 100644 --- a/lib/spack/docs/build_systems/inteloneapipackage.rst +++ b/lib/spack/docs/build_systems/inteloneapipackage.rst @@ -1,4 +1,4 @@ -.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other Spack Project Developers. See the top-level COPYRIGHT file for details. SPDX-License-Identifier: (Apache-2.0 OR MIT) diff --git a/lib/spack/docs/build_systems/intelpackage.rst b/lib/spack/docs/build_systems/intelpackage.rst index 58a1ed3bbfd..f565f205ef3 100644 --- a/lib/spack/docs/build_systems/intelpackage.rst +++ b/lib/spack/docs/build_systems/intelpackage.rst @@ -1,4 +1,4 @@ -.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other Spack Project Developers. See the top-level COPYRIGHT file for details. SPDX-License-Identifier: (Apache-2.0 OR MIT) diff --git a/lib/spack/docs/build_systems/luapackage.rst b/lib/spack/docs/build_systems/luapackage.rst index fd70f90c492..71a0c3962c6 100644 --- a/lib/spack/docs/build_systems/luapackage.rst +++ b/lib/spack/docs/build_systems/luapackage.rst @@ -1,4 +1,4 @@ -.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other Spack Project Developers. See the top-level COPYRIGHT file for details. SPDX-License-Identifier: (Apache-2.0 OR MIT) diff --git a/lib/spack/docs/build_systems/makefilepackage.rst b/lib/spack/docs/build_systems/makefilepackage.rst index 5a83d612fa2..66f54a1c4bb 100644 --- a/lib/spack/docs/build_systems/makefilepackage.rst +++ b/lib/spack/docs/build_systems/makefilepackage.rst @@ -1,4 +1,4 @@ -.. 
Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other Spack Project Developers. See the top-level COPYRIGHT file for details. SPDX-License-Identifier: (Apache-2.0 OR MIT) diff --git a/lib/spack/docs/build_systems/mavenpackage.rst b/lib/spack/docs/build_systems/mavenpackage.rst index d1237ce34c3..70c57024acf 100644 --- a/lib/spack/docs/build_systems/mavenpackage.rst +++ b/lib/spack/docs/build_systems/mavenpackage.rst @@ -1,4 +1,4 @@ -.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other Spack Project Developers. See the top-level COPYRIGHT file for details. SPDX-License-Identifier: (Apache-2.0 OR MIT) diff --git a/lib/spack/docs/build_systems/mesonpackage.rst b/lib/spack/docs/build_systems/mesonpackage.rst index c32b2241bc7..e2f2e0a99eb 100644 --- a/lib/spack/docs/build_systems/mesonpackage.rst +++ b/lib/spack/docs/build_systems/mesonpackage.rst @@ -1,4 +1,4 @@ -.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other Spack Project Developers. See the top-level COPYRIGHT file for details. SPDX-License-Identifier: (Apache-2.0 OR MIT) diff --git a/lib/spack/docs/build_systems/octavepackage.rst b/lib/spack/docs/build_systems/octavepackage.rst index 32e8cb61b34..2679a1dacc4 100644 --- a/lib/spack/docs/build_systems/octavepackage.rst +++ b/lib/spack/docs/build_systems/octavepackage.rst @@ -1,4 +1,4 @@ -.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other Spack Project Developers. See the top-level COPYRIGHT file for details. SPDX-License-Identifier: (Apache-2.0 OR MIT) diff --git a/lib/spack/docs/build_systems/perlpackage.rst b/lib/spack/docs/build_systems/perlpackage.rst index 4e1f613c3b7..c29dfaad55c 100644 --- a/lib/spack/docs/build_systems/perlpackage.rst +++ b/lib/spack/docs/build_systems/perlpackage.rst @@ -1,4 +1,4 @@ -.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other Spack Project Developers. See the top-level COPYRIGHT file for details. SPDX-License-Identifier: (Apache-2.0 OR MIT) diff --git a/lib/spack/docs/build_systems/pythonpackage.rst b/lib/spack/docs/build_systems/pythonpackage.rst index ec70a6dd87a..807af44ed97 100644 --- a/lib/spack/docs/build_systems/pythonpackage.rst +++ b/lib/spack/docs/build_systems/pythonpackage.rst @@ -1,4 +1,4 @@ -.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other Spack Project Developers. See the top-level COPYRIGHT file for details. SPDX-License-Identifier: (Apache-2.0 OR MIT) @@ -366,7 +366,7 @@ If the ``pyproject.toml`` lists ``mesonpy`` as the ``build-backend``, it uses the meson build system. Meson uses the default ``pyproject.toml`` keys to list dependencies. -See https://meson-python.readthedocs.io/en/latest/usage/start.html +See https://meson-python.readthedocs.io/en/latest/tutorials/introduction.html for more information. 
""" diff --git a/lib/spack/docs/build_systems/qmakepackage.rst b/lib/spack/docs/build_systems/qmakepackage.rst index 215d59536e2..98d625ede08 100644 --- a/lib/spack/docs/build_systems/qmakepackage.rst +++ b/lib/spack/docs/build_systems/qmakepackage.rst @@ -1,4 +1,4 @@ -.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other Spack Project Developers. See the top-level COPYRIGHT file for details. SPDX-License-Identifier: (Apache-2.0 OR MIT) diff --git a/lib/spack/docs/build_systems/racketpackage.rst b/lib/spack/docs/build_systems/racketpackage.rst index 5e09ffca4a6..8e0611af413 100644 --- a/lib/spack/docs/build_systems/racketpackage.rst +++ b/lib/spack/docs/build_systems/racketpackage.rst @@ -1,4 +1,4 @@ -.. Copyright 2013-2021 Lawrence Livermore National Security, LLC and other +.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other Spack Project Developers. See the top-level COPYRIGHT file for details. SPDX-License-Identifier: (Apache-2.0 OR MIT) diff --git a/lib/spack/docs/build_systems/rocmpackage.rst b/lib/spack/docs/build_systems/rocmpackage.rst index 20d3f105cd2..636e5b81262 100644 --- a/lib/spack/docs/build_systems/rocmpackage.rst +++ b/lib/spack/docs/build_systems/rocmpackage.rst @@ -1,4 +1,4 @@ -.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other Spack Project Developers. See the top-level COPYRIGHT file for details. SPDX-License-Identifier: (Apache-2.0 OR MIT) diff --git a/lib/spack/docs/build_systems/rpackage.rst b/lib/spack/docs/build_systems/rpackage.rst index c5d3b423b18..a5a5fcee318 100644 --- a/lib/spack/docs/build_systems/rpackage.rst +++ b/lib/spack/docs/build_systems/rpackage.rst @@ -1,4 +1,4 @@ -.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other Spack Project Developers. See the top-level COPYRIGHT file for details. SPDX-License-Identifier: (Apache-2.0 OR MIT) diff --git a/lib/spack/docs/build_systems/rubypackage.rst b/lib/spack/docs/build_systems/rubypackage.rst index 5b6ec462a6a..d5c38b863b9 100644 --- a/lib/spack/docs/build_systems/rubypackage.rst +++ b/lib/spack/docs/build_systems/rubypackage.rst @@ -1,4 +1,4 @@ -.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other Spack Project Developers. See the top-level COPYRIGHT file for details. SPDX-License-Identifier: (Apache-2.0 OR MIT) diff --git a/lib/spack/docs/build_systems/sconspackage.rst b/lib/spack/docs/build_systems/sconspackage.rst index aea5dacfa72..18002586a06 100644 --- a/lib/spack/docs/build_systems/sconspackage.rst +++ b/lib/spack/docs/build_systems/sconspackage.rst @@ -1,4 +1,4 @@ -.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other Spack Project Developers. See the top-level COPYRIGHT file for details. SPDX-License-Identifier: (Apache-2.0 OR MIT) diff --git a/lib/spack/docs/build_systems/sippackage.rst b/lib/spack/docs/build_systems/sippackage.rst index 3e77968e801..11df0d672bb 100644 --- a/lib/spack/docs/build_systems/sippackage.rst +++ b/lib/spack/docs/build_systems/sippackage.rst @@ -1,4 +1,4 @@ -.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +.. 
Copyright 2013-2023 Lawrence Livermore National Security, LLC and other Spack Project Developers. See the top-level COPYRIGHT file for details. SPDX-License-Identifier: (Apache-2.0 OR MIT) diff --git a/lib/spack/docs/build_systems/sourceforgepackage.rst b/lib/spack/docs/build_systems/sourceforgepackage.rst index 70e226abcd8..944c660eff6 100644 --- a/lib/spack/docs/build_systems/sourceforgepackage.rst +++ b/lib/spack/docs/build_systems/sourceforgepackage.rst @@ -1,4 +1,4 @@ -.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other Spack Project Developers. See the top-level COPYRIGHT file for details. SPDX-License-Identifier: (Apache-2.0 OR MIT) diff --git a/lib/spack/docs/build_systems/wafpackage.rst b/lib/spack/docs/build_systems/wafpackage.rst index f91479ce43a..c9bbf8bb5a4 100644 --- a/lib/spack/docs/build_systems/wafpackage.rst +++ b/lib/spack/docs/build_systems/wafpackage.rst @@ -1,4 +1,4 @@ -.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other Spack Project Developers. See the top-level COPYRIGHT file for details. SPDX-License-Identifier: (Apache-2.0 OR MIT) @@ -58,9 +58,7 @@ Testing ``WafPackage`` also provides ``test`` and ``installtest`` methods, which are run after the ``build`` and ``install`` phases, respectively. By default, these phases do nothing, but you can override them to -run package-specific unit tests. For example, the -`py-py2cairo `_ -package uses: +run package-specific unit tests. .. code-block:: python diff --git a/lib/spack/docs/chain.rst b/lib/spack/docs/chain.rst index 75ed729e723..0fef1e6e1a7 100644 --- a/lib/spack/docs/chain.rst +++ b/lib/spack/docs/chain.rst @@ -1,4 +1,4 @@ -.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other Spack Project Developers. See the top-level COPYRIGHT file for details. SPDX-License-Identifier: (Apache-2.0 OR MIT) diff --git a/lib/spack/docs/conf.py b/lib/spack/docs/conf.py index 55848106dec..2b04fa642f1 100644 --- a/lib/spack/docs/conf.py +++ b/lib/spack/docs/conf.py @@ -1,4 +1,4 @@ -# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other # Spack Project Developers. See the top-level COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) @@ -36,7 +36,7 @@ if not os.path.exists(link_name): os.symlink(os.path.abspath("../../.."), link_name, target_is_directory=True) sys.path.insert(0, os.path.abspath("_spack_root/lib/spack/external")) -sys.path.insert(0, os.path.abspath("_spack_root/lib/spack/external/pytest-fallback")) +sys.path.insert(0, os.path.abspath("_spack_root/lib/spack/external/_vendoring")) sys.path.append(os.path.abspath("_spack_root/lib/spack/")) # Add the Spack bin directory to the path so that we can use its output in docs. 
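The ``conf.py`` hunk that follows adds ``--module-first`` and passes exclude patterns to ``sphinx_apidoc``. For reference, the ``sphinx-apidoc`` CLI treats positional arguments after the module path as fnmatch-style exclude patterns, so the call below corresponds roughly to this command-line sketch (paths assumed relative to a Spack checkout, quoting the globs so the shell does not expand them):

.. code-block:: console

   $ sphinx-apidoc --force --no-toc --output-dir=. --module-first \
       _spack_root/lib/spack/spack \
       "_spack_root/lib/spack/spack/test/*.py" \
       "_spack_root/lib/spack/spack/test/cmd/*.py"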
@@ -74,13 +74,22 @@ "--force", # Overwrite existing files "--no-toc", # Don't create a table of contents file "--output-dir=.", # Directory to place all output + "--module-first", # emit module docs before submodule docs ] -sphinx_apidoc(apidoc_args + ["_spack_root/lib/spack/spack"]) +sphinx_apidoc( + apidoc_args + + [ + "_spack_root/lib/spack/spack", + "_spack_root/lib/spack/spack/test/*.py", + "_spack_root/lib/spack/spack/test/cmd/*.py", + ] +) sphinx_apidoc(apidoc_args + ["_spack_root/lib/spack/llnl"]) # Enable todo items todo_include_todos = True + # # Disable duplicate cross-reference warnings. # @@ -155,7 +164,7 @@ def setup(sphinx): # General information about the project. project = "Spack" -copyright = "2013-2021, Lawrence Livermore National Laboratory." +copyright = "2013-2023, Lawrence Livermore National Laboratory." # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the @@ -200,12 +209,14 @@ def setup(sphinx): ("py:class", "_frozen_importlib_external.SourceFileLoader"), ("py:class", "clingo.Control"), ("py:class", "six.moves.urllib.parse.ParseResult"), + ("py:class", "TextIO"), # Spack classes that are private and we don't want to expose ("py:class", "spack.provider_index._IndexBase"), ("py:class", "spack.repo._PrependFileLoader"), ("py:class", "spack.build_systems._checks.BaseBuilder"), # Spack classes that intersphinx is unable to resolve ("py:class", "spack.version.VersionBase"), + ("py:class", "spack.spec.DependencySpec"), ] # The reST default role (used for this markup: `text`) to use for all documents. @@ -343,9 +354,7 @@ class SpackStyle(DefaultStyle): # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, documentclass [howto/manual]). -latex_documents = [ - ("index", "Spack.tex", "Spack Documentation", "Todd Gamblin", "manual"), -] +latex_documents = [("index", "Spack.tex", "Spack Documentation", "Todd Gamblin", "manual")] # The name of an image file (relative to this directory) to place at the top of # the title page. @@ -392,7 +401,7 @@ class SpackStyle(DefaultStyle): "Spack", "One line description of project.", "Miscellaneous", - ), + ) ] # Documents to append as an appendix to all manuals. @@ -408,6 +417,4 @@ class SpackStyle(DefaultStyle): # -- Extension configuration ------------------------------------------------- # sphinx.ext.intersphinx -intersphinx_mapping = { - "python": ("https://docs.python.org/3", None), -} +intersphinx_mapping = {"python": ("https://docs.python.org/3", None)} diff --git a/lib/spack/docs/config_yaml.rst b/lib/spack/docs/config_yaml.rst index f2159c64cce..b1e7a1d249b 100644 --- a/lib/spack/docs/config_yaml.rst +++ b/lib/spack/docs/config_yaml.rst @@ -1,4 +1,4 @@ -.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other Spack Project Developers. See the top-level COPYRIGHT file for details. SPDX-License-Identifier: (Apache-2.0 OR MIT) @@ -222,7 +222,7 @@ and location. (See the *Configuration settings* section of ``man ccache`` to learn more about the default settings and how to change them). Please note that we currently disable ccache's ``hash_dir`` feature to avoid an issue with the stage directory (see -https://github.com/LLNL/spack/pull/3761#issuecomment-294352232). +https://github.com/spack/spack/pull/3761#issuecomment-294352232). 
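The ccache integration described above is toggled by a single setting; a minimal ``config.yaml`` sketch (the ``ccache`` key is the real option, shown here in isolation):

.. code-block:: yaml

   config:
     # Wrap compiler invocations with ccache so recompiles of identical
     # sources reuse cached object files.
     ccache: true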
----------------------- ``shared_linking:type`` diff --git a/lib/spack/docs/configuration.rst b/lib/spack/docs/configuration.rst index b3fc392f979..563351dfaec 100644 --- a/lib/spack/docs/configuration.rst +++ b/lib/spack/docs/configuration.rst @@ -1,4 +1,4 @@ -.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other Spack Project Developers. See the top-level COPYRIGHT file for details. SPDX-License-Identifier: (Apache-2.0 OR MIT) diff --git a/lib/spack/docs/containers.rst b/lib/spack/docs/containers.rst index 9c6f8a26c90..a919db06429 100644 --- a/lib/spack/docs/containers.rst +++ b/lib/spack/docs/containers.rst @@ -1,4 +1,4 @@ -.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other Spack Project Developers. See the top-level COPYRIGHT file for details. SPDX-License-Identifier: (Apache-2.0 OR MIT) diff --git a/lib/spack/docs/contribution_guide.rst b/lib/spack/docs/contribution_guide.rst index 455a420ff21..8933f590d7c 100644 --- a/lib/spack/docs/contribution_guide.rst +++ b/lib/spack/docs/contribution_guide.rst @@ -1,4 +1,4 @@ -.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other Spack Project Developers. See the top-level COPYRIGHT file for details. SPDX-License-Identifier: (Apache-2.0 OR MIT) @@ -118,7 +118,7 @@ make another change, test that change, etc. We use `pytest `_ as our tests framework, and these types of arguments are just passed to the ``pytest`` command underneath. See `the pytest docs -`_ +`_ for more details on test selection syntax. ``spack unit-test`` has a few special options that can help you @@ -147,7 +147,7 @@ you want to know about. For example, to see just the tests in You can also combine any of these options with a ``pytest`` keyword search. See the `pytest usage docs -`_: +`_ for more details on test selection syntax. For example, to see the names of all tests that have "spec" or "concretize" somewhere in their names: diff --git a/lib/spack/docs/developer_guide.rst b/lib/spack/docs/developer_guide.rst index 6b67ef9f77e..cc8365ebeac 100644 --- a/lib/spack/docs/developer_guide.rst +++ b/lib/spack/docs/developer_guide.rst @@ -1,4 +1,4 @@ -.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other Spack Project Developers. See the top-level COPYRIGHT file for details. SPDX-License-Identifier: (Apache-2.0 OR MIT) @@ -175,14 +175,11 @@ Spec-related modules ^^^^^^^^^^^^^^^^^^^^ :mod:`spack.spec` - Contains :class:`~spack.spec.Spec` and :class:`~spack.spec.SpecParser`. - Also implements most of the logic for normalization and concretization + Contains :class:`~spack.spec.Spec`. Also implements most of the logic for concretization of specs. -:mod:`spack.parse` - Contains some base classes for implementing simple recursive descent - parsers: :class:`~spack.parse.Parser` and :class:`~spack.parse.Lexer`. - Used by :class:`~spack.spec.SpecParser`. +:mod:`spack.parser` + Contains :class:`~spack.parser.SpecParser` and functions related to parsing specs. :mod:`spack.concretize` Contains :class:`~spack.concretize.Concretizer` implementation, @@ -235,7 +232,7 @@ Spack Subcommands Unit tests ^^^^^^^^^^ -:mod:`spack.test` +``spack.test`` Implements Spack's test suite. 
Add a module and put its name in the test suite in ``__init__.py`` to add more unit tests. diff --git a/lib/spack/docs/environments.rst b/lib/spack/docs/environments.rst index 5c8b6b2fb41..e2805e4f016 100644 --- a/lib/spack/docs/environments.rst +++ b/lib/spack/docs/environments.rst @@ -1,4 +1,4 @@ -.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other Spack Project Developers. See the top-level COPYRIGHT file for details. SPDX-License-Identifier: (Apache-2.0 OR MIT) @@ -58,9 +58,9 @@ Using Environments Here we follow a typical use case of creating, concretizing, installing and loading an environment. -^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -Creating a named Environment -^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Creating a managed Environment +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ An environment is created by: @@ -72,7 +72,8 @@ Spack then creates the directory ``var/spack/environments/myenv``. .. note:: - All named environments are stored in the ``var/spack/environments`` folder. + By default, all managed environments are stored in the ``var/spack/environments`` folder. + This location can be changed by setting the ``environments_root`` variable in ``config.yaml``. In the ``var/spack/environments/myenv`` directory, Spack creates the file ``spack.yaml`` and the hidden directory ``.spack-env``. @@ -1039,7 +1040,7 @@ gets installed and is available for use in the ``env`` target. $(SPACK) -e . concretize -f env.mk: spack.lock - $(SPACK) -e . env depfile -o $@ --make-target-prefix spack + $(SPACK) -e . env depfile -o $@ --make-prefix spack env: spack/env $(info Environment installed!) @@ -1062,9 +1063,9 @@ the include is conditional. .. note:: When including generated ``Makefile``\s, it is important to use - the ``--make-target-prefix`` flag and use the non-phony target - ``<prefix>/env`` as prerequisite, instead of the phony target - ``<prefix>/all``. + the ``--make-prefix`` flag and use the non-phony target + ``<prefix>/env`` as prerequisite, instead of the phony target + ``<prefix>/all``. ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Building a subset of the environment ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -1089,4 +1090,52 @@ output (``spack install --verbose``) while its dependencies are installed silent $ make -j16 install-deps/python-3.11.0-<hash> SPACK_INSTALL_FLAGS=--show-log-on-error # Install the root spec with verbose output. - $ make -j16 install/python-3.11.0-<hash> SPACK_INSTALL_FLAGS=--verbose \ No newline at end of file + $ make -j16 install/python-3.11.0-<hash> SPACK_INSTALL_FLAGS=--verbose + +^^^^^^^^^^^^^^^^^^^^^^^^^ +Adding post-install hooks +^^^^^^^^^^^^^^^^^^^^^^^^^ + +Another advanced use-case of generated ``Makefile``\s is running a post-install +command for each package. These "hooks" could be anything from printing a +post-install message to running tests or pushing just-built binaries to a buildcache. + +This can be accomplished through the generated ``[<prefix>/]SPACK_PACKAGE_IDS`` +variable. Assuming we have an active and concrete environment, we generate the +associated ``Makefile`` with a prefix ``example``: + +.. code:: console + + $ spack env depfile -o env.mk --make-prefix example + +And we now include it in a different ``Makefile``, in which we create a target +``example/push/%`` with ``%`` referring to a package identifier. This target +depends on the particular package installation. In this target we automatically
have the target-specific ``HASH`` and ``SPEC`` variables at our disposal.
They +are respectively the spec hash (excluding leading ``/``), and a human-readable spec. +Finally, we have an entrypoint target ``push`` that will update the buildcache +index once every package is pushed. Note how this target uses the generated +``example/SPACK_PACKAGE_IDS`` variable to define its prerequisites. + +.. code:: Makefile + + SPACK ?= spack + BUILDCACHE_DIR = $(CURDIR)/tarballs + + .PHONY: all + + all: push + + include env.mk + + example/push/%: example/install/% + @mkdir -p $(dir $@) + $(info About to push $(SPEC) to a buildcache) + $(SPACK) -e . buildcache create --allow-root --only=package --directory $(BUILDCACHE_DIR) /$(HASH) + @touch $@ + + push: $(addprefix example/push/,$(example/SPACK_PACKAGE_IDS)) + $(info Updating the buildcache index) + $(SPACK) -e . buildcache update-index --directory $(BUILDCACHE_DIR) + $(info Done!) + @touch $@ diff --git a/lib/spack/docs/extensions.rst b/lib/spack/docs/extensions.rst index a1cf0d42bab..bc5b3a762f9 100644 --- a/lib/spack/docs/extensions.rst +++ b/lib/spack/docs/extensions.rst @@ -1,4 +1,4 @@ -.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other Spack Project Developers. See the top-level COPYRIGHT file for details. SPDX-License-Identifier: (Apache-2.0 OR MIT) diff --git a/lib/spack/docs/features.rst b/lib/spack/docs/features.rst index 1682616adb8..c39075fe106 100644 --- a/lib/spack/docs/features.rst +++ b/lib/spack/docs/features.rst @@ -1,4 +1,4 @@ -.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other Spack Project Developers. See the top-level COPYRIGHT file for details. SPDX-License-Identifier: (Apache-2.0 OR MIT) @@ -116,7 +116,7 @@ creates a simple python file: # FIXME: Add a list of GitHub accounts to # notify when the package is updated. - # maintainers = ["github_user1", "github_user2"] + # maintainers("github_user1", "github_user2") version("0.8.13", sha256="591a9b4ec81c1f2042a97aa60564e0cb79d041c52faa7416acb38bc95bd2c76d") diff --git a/lib/spack/docs/getting_started.rst b/lib/spack/docs/getting_started.rst index a454d83d904..b2193fe1fdc 100644 --- a/lib/spack/docs/getting_started.rst +++ b/lib/spack/docs/getting_started.rst @@ -1,4 +1,4 @@ -.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other Spack Project Developers. See the top-level COPYRIGHT file for details. SPDX-License-Identifier: (Apache-2.0 OR MIT) @@ -21,7 +21,7 @@ be present on the machine where Spack is run: :header-rows: 1 These requirements can be easily installed on most modern Linux systems; -on macOS, the Command Line Tools package is required, and a full XCode suite +on macOS, the Command Line Tools package is required, and a full XCode suite may be necessary for some packages such as Qt and apple-gl. Spack is designed to run on HPC platforms like Cray. Not all packages should be expected to work on all platforms. @@ -1506,7 +1506,7 @@ Spack On Windows Windows support for Spack is currently under development. While this work is still in an early stage, it is currently possible to set up Spack and perform a few operations on Windows. This section will guide -you through the steps needed to install Spack and start running it on a fresh Windows machine. +you through the steps needed to install Spack and start running it on a fresh Windows machine. 
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Step 1: Install prerequisites @@ -1516,7 +1516,7 @@ To use Spack on Windows, you will need the following packages: Required: * Microsoft Visual Studio -* Python +* Python * Git Optional: @@ -1547,8 +1547,8 @@ Intel Fortran """"""""""""" For Fortran-based packages on Windows, we strongly recommend Intel's oneAPI Fortran compilers. -The suite is free to download from Intel's website, located at -https://software.intel.com/content/www/us/en/develop/tools/oneapi/components/fortran-compiler.html#gs.70t5tw. +The suite is free to download from Intel's website, located at +https://software.intel.com/content/www/us/en/develop/tools/oneapi/components/fortran-compiler.html. The executable of choice for Spack will be Intel's Beta Compiler, ifx, which supports the classic compiler's (ifort's) frontend and runtime libraries by using LLVM. @@ -1700,29 +1700,15 @@ If in the previous step, you did not have CMake or Ninja installed, running the Windows Compatible Packages """"""""""""""""""""""""""" -Many Spack packages are not currently compatible with Windows, due to Unix -dependencies or incompatible build tools like autoconf. Here are several -packages known to work on Windows: - -* abseil-cpp -* bzip2 -* clingo -* cpuinfo -* cmake -* hdf5 -* glm -* nasm -* netlib-lapack (requires Intel Fortran) -* ninja -* openssl -* perl -* python -* ruby -* wrf -* zlib +Not all Spack packages currently have Windows support. Some are inherently incompatible with the +platform, and others simply have yet to be ported. To view the current set of packages with Windows +support, run ``spack list -t windows``. If there's a package you'd like +to install on Windows that is not in that list, feel free to reach out to request the port or contribute +the port yourself. .. note:: - This is by no means a comprehensive list + This is by no means a comprehensive list: some packages may have ports that were not tagged, + while others may just work out of the box on Windows and have not been tagged as such. ^^^^^^^^^^^^^^ For developers ^^^^^^^^^^^^^^ @@ -1734,3 +1720,4 @@ Instructions for creating the installer are at https://github.com/spack/spack/blob/develop/lib/spack/spack/cmd/installer/README.md Alternatively a pre-built copy of the Windows installer is available as an artifact of Spack's Windows CI +available from each run of the CI on develop or any PR. diff --git a/lib/spack/docs/index.rst b/lib/spack/docs/index.rst index b755f2f3768..69a5ee4f4f7 100644 --- a/lib/spack/docs/index.rst +++ b/lib/spack/docs/index.rst @@ -1,4 +1,4 @@ -.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other Spack Project Developers. See the top-level COPYRIGHT file for details. SPDX-License-Identifier: (Apache-2.0 OR MIT) @@ -67,7 +67,6 @@ or refer to the full manual below. build_settings environments containers - monitoring mirrors module_file_support repositories @@ -78,12 +77,6 @@ or refer to the full manual below. extensions pipelines -.. toctree:: - :maxdepth: 2 - :caption: Research - - analyze - .. toctree:: :maxdepth: 2 :caption: Contributing diff --git a/lib/spack/docs/mirrors.rst b/lib/spack/docs/mirrors.rst index 1070eab3eb8..ed15cb7d8b1 100644 --- a/lib/spack/docs/mirrors.rst +++ b/lib/spack/docs/mirrors.rst @@ -1,4 +1,4 @@ -.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other Spack Project Developers.
See the top-level COPYRIGHT file for details. SPDX-License-Identifier: (Apache-2.0 OR MIT) diff --git a/lib/spack/docs/module_file_support.rst b/lib/spack/docs/module_file_support.rst index 2396b91f46a..aa42b08059c 100644 --- a/lib/spack/docs/module_file_support.rst +++ b/lib/spack/docs/module_file_support.rst @@ -1,4 +1,4 @@ -.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other Spack Project Developers. See the top-level COPYRIGHT file for details. SPDX-License-Identifier: (Apache-2.0 OR MIT) diff --git a/lib/spack/docs/monitoring.rst b/lib/spack/docs/monitoring.rst deleted file mode 100644 index eaf57a8ad74..00000000000 --- a/lib/spack/docs/monitoring.rst +++ /dev/null @@ -1,265 +0,0 @@ -.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other - Spack Project Developers. See the top-level COPYRIGHT file for details. - - SPDX-License-Identifier: (Apache-2.0 OR MIT) - -.. _monitoring: - -========== -Monitoring -========== - -You can use a `spack monitor `_ "Spackmon" -server to store a database of your packages, builds, and associated metadata -for provenance, research, or some other kind of development. You should -follow the instructions in the `spack monitor documentation `_ -to first create a server along with a username and token for yourself. -You can then use this guide to interact with the server. - -------------------- -Analysis Monitoring -------------------- - -To read about how to monitor an analysis (meaning you want to send analysis results -to a server) see :ref:`analyze_monitoring`. - ---------------------- -Monitoring An Install ---------------------- - -Since an install is typically when you build packages, we logically want -to tell spack to monitor during this step. Let's start with an example -where we want to monitor the install of hdf5. Unless you have disabled authentication -for the server, we first want to export our spack monitor token and username to the environment: - -.. code-block:: console - - $ export SPACKMON_TOKEN=50445263afd8f67e59bd79bff597836ee6c05438 - $ export SPACKMON_USER=spacky - - -By default, the host for your server is expected to be at ``http://127.0.0.1`` -with a prefix of ``ms1``, and if this is the case, you can simply add the -``--monitor`` flag to the install command: - -.. code-block:: console - - $ spack install --monitor hdf5 - - -If you need to customize the host or the prefix, you can do that as well: - -.. code-block:: console - - $ spack install --monitor --monitor-prefix monitor --monitor-host https://monitor-service.io hdf5 - - -As a precaution, we cut out early in the spack client if you have not provided -authentication credentials. For example, if you run the command above without -exporting your username or token, you'll see: - -.. code-block:: console - - ==> Error: You are required to export SPACKMON_TOKEN and SPACKMON_USER - -This extra check is to ensure that we don't start any builds, -and then discover that you forgot to export your token. However, if -your monitoring server has authentication disabled, you can tell this to -the client to skip this step: - -.. code-block:: console - - $ spack install --monitor --monitor-disable-auth hdf5 - -If the service is not running, you'll cleanly exit early - the install will -not continue if you've asked it to monitor and there is no service. -For example, here is what you'll see if the monitoring service is not running: - -.. 
code-block:: console - - [Errno 111] Connection refused - - -If you want to continue builds (and stop monitoring) you can set the ``--monitor-keep-going`` -flag. - -.. code-block:: console - - $ spack install --monitor --monitor-keep-going hdf5 - -This could mean that if a request fails, you only have partial or no data -added to your monitoring database. This setting will not be applied to the -first request to check if the server is running, but to subsequent requests. -If you don't have a monitor server running and you want to build, simply -don't provide the ``--monitor`` flag! Finally, if you want to provide one or -more tags to your build, you can do: - -.. code-block:: console - - # Add one tag, "pizza" - $ spack install --monitor --monitor-tags pizza hdf5 - - # Add two tags, "pizza" and "pasta" - $ spack install --monitor --monitor-tags pizza,pasta hdf5 - - ----------------------------- -Monitoring with Containerize ----------------------------- - -The same argument group is available to add to a containerize command. - -^^^^^^ -Docker -^^^^^^ - -To add monitoring to a Docker container recipe generation using the defaults, -and assuming a monitor server running on localhost, you would -start with a spack.yaml in your present working directory: - -.. code-block:: yaml - - spack: - specs: - - samtools - -And then do: - -.. code-block:: console - - # preview first - spack containerize --monitor - - # and then write to a Dockerfile - spack containerize --monitor > Dockerfile - - -The install command will be edited to include commands for enabling monitoring. -However, getting secrets into the container for your monitor server is something -that should be done carefully. Specifically you should: - - - Never try to define secrets as ENV, ARG, or using ``--build-arg`` - - Do not try to get the secret into the container via a "temporary" file that you remove (it in fact will still exist in a layer) - -Instead, it's recommended to use buildkit `as explained here `_. -You'll need to again export environment variables for your spack monitor server: - -.. code-block:: console - - $ export SPACKMON_TOKEN=50445263afd8f67e59bd79bff597836ee6c05438 - $ export SPACKMON_USER=spacky - -And then use buildkit along with your build and identifying the name of the secret: - -.. code-block:: console - - $ DOCKER_BUILDKIT=1 docker build --secret id=st,env=SPACKMON_TOKEN --secret id=su,env=SPACKMON_USER -t spack/container . - -The secrets are expected to come from your environment, and then will be temporarily mounted and available -at ``/run/secrets/``. If you forget to supply them (and authentication is required) the build -will fail. If you need to build on your host (and interact with a spack monitor at localhost) you'll -need to tell Docker to use the host network: - -.. code-block:: console - - $ DOCKER_BUILDKIT=1 docker build --network="host" --secret id=st,env=SPACKMON_TOKEN --secret id=su,env=SPACKMON_USER -t spack/container . - - -^^^^^^^^^^^ -Singularity -^^^^^^^^^^^ - -To add monitoring to a Singularity container build, the spack.yaml needs to -be modified slightly to specify wanting a different format: - - -.. code-block:: yaml - - spack: - specs: - - samtools - container: - format: singularity - - -Again, generate the recipe: - - -.. 
code-block:: console - - # preview first - $ spack containerize --monitor - - # then write to a Singularity recipe - $ spack containerize --monitor > Singularity - - -Singularity doesn't have a direct way to define secrets at build time, so we have -to do a bit of a manual command to add a file, source secrets in it, and remove it. -Since Singularity doesn't have layers like Docker, deleting a file will truly -remove it from the container and history. So let's say we have this file, -``secrets.sh``: - -.. code-block:: console - - # secrets.sh - export SPACKMON_USER=spack - export SPACKMON_TOKEN=50445263afd8f67e59bd79bff597836ee6c05438 - - -We would then generate the Singularity recipe, and add a files section, -a source of that file at the start of ``%post``, and **importantly** -a removal of the final at the end of that same section. - -.. code-block:: - - Bootstrap: docker - From: spack/ubuntu-bionic:latest - Stage: build - - %files - secrets.sh /opt/secrets.sh - - %post - . /opt/secrets.sh - - # spack install commands are here - ... - - # Don't forget to remove here! - rm /opt/secrets.sh - - -You can then build the container as your normally would. - -.. code-block:: console - - $ sudo singularity build container.sif Singularity - - ------------------- -Monitoring Offline ------------------- - -In the case that you want to save monitor results to your filesystem -and then upload them later (perhaps you are in an environment where you don't -have credentials or it isn't safe to use them) you can use the ``--monitor-save-local`` -flag. - -.. code-block:: console - - $ spack install --monitor --monitor-save-local hdf5 - -This will save results in a subfolder, "monitor" in your designated spack -reports folder, which defaults to ``$HOME/.spack/reports/monitor``. When -you are ready to upload them to a spack monitor server: - - -.. code-block:: console - - $ spack monitor upload ~/.spack/reports/monitor - - -You can choose the root directory of results as shown above, or a specific -subdirectory. The command accepts other arguments to specify configuration -for the monitor. diff --git a/lib/spack/docs/package_list.rst b/lib/spack/docs/package_list.rst index 04e7f5fcb0b..dfff0704608 100644 --- a/lib/spack/docs/package_list.rst +++ b/lib/spack/docs/package_list.rst @@ -1,4 +1,4 @@ -.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other Spack Project Developers. See the top-level COPYRIGHT file for details. SPDX-License-Identifier: (Apache-2.0 OR MIT) diff --git a/lib/spack/docs/packaging_guide.rst b/lib/spack/docs/packaging_guide.rst index ea5cc347ced..9997d5ea061 100644 --- a/lib/spack/docs/packaging_guide.rst +++ b/lib/spack/docs/packaging_guide.rst @@ -1,4 +1,4 @@ -.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other Spack Project Developers. See the top-level COPYRIGHT file for details. SPDX-License-Identifier: (Apache-2.0 OR MIT) @@ -34,6 +34,15 @@ ubiquitous in the scientific software community. Second, it's a modern language and has many powerful features to help make package writing easy. +.. warning:: + + As a general rule, packages should install the software *from source*. + The only exception is for proprietary software (e.g., vendor compilers). 
+ + If a special build system needs to be added in order to support building + a package from source, then the associated code and recipe need to be added + first. + .. _installation_procedure: @@ -225,7 +234,7 @@ generates a boilerplate template for your package, and opens up the new .. code-block:: python :linenos: - # Copyright 2013-2022 Lawrence Livermore National Security, LLC and other + # Copyright 2013-2023 Lawrence Livermore National Security, LLC and other # Spack Project Developers. See the top-level COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) @@ -259,7 +268,7 @@ generates a boilerplate template for your package, and opens up the new # FIXME: Add a list of GitHub accounts to # notify when the package is updated. - # maintainers = ["github_user1", "github_user2"] + # maintainers("github_user1", "github_user2") version("6.2.1", sha256="eae9326beb4158c386e39a356818031bd28f3124cf915f8c5b1dc4c7a36b4d7c") @@ -310,14 +319,8 @@ The rest of the tasks you need to do are as follows: #. Add a comma-separated list of maintainers. - The ``maintainers`` field is a list of GitHub accounts of people - who want to be notified any time the package is modified. When a - pull request is submitted that updates the package, these people - will be requested to review the PR. This is useful for developers - who maintain a Spack package for their own software, as well as - users who rely on a piece of software and want to ensure that the - package doesn't break. It also gives users a list of people to - contact for help when someone reports a build error with the package. + Add a list of GitHub accounts of people who want to be notified + any time the package is modified. See :ref:`package_maintainers`. #. Add ``depends_on()`` calls for the package's dependencies. @@ -488,6 +491,31 @@ some examples: In general, you won't have to remember this naming convention because :ref:`cmd-spack-create` and :ref:`cmd-spack-edit` handle the details for you. +.. _package_maintainers: + +----------- +Maintainers +----------- + +Each package in Spack may have one or more maintainers, i.e. one or more +GitHub accounts of people who want to be notified any time the package is +modified. + +When a pull request is submitted that updates the package, these people will +be requested to review the PR. This is useful for developers who maintain a +Spack package for their own software, as well as users who rely on a piece of +software and want to ensure that the package doesn't break. It also gives users +a list of people to contact for help when someone reports a build error with +the package. + +To add maintainers to a package, simply declare them with the ``maintainers`` directive: + +.. code-block:: python + + maintainers("user1", "user2") + +The list of maintainers is additive: it also includes any accounts declared in base classes. + ----------------- Trusted Downloads ----------------- @@ -1364,7 +1392,7 @@ Go ^^ Go isn't a VCS, it is a programming language with a builtin command, -`go get `_, +`go get `_, that fetches packages and their dependencies automatically. The destination directory will be the standard stage source path. @@ -2089,7 +2117,7 @@ dynamic loader where to find its dependencies at runtime. You may be familiar with `LD_LIBRARY_PATH `_ on Linux or `DYLD_LIBRARY_PATH -`_ +`_ on Mac OS X. RPATH is similar to these paths, in that it tells the loader where to find libraries. Unlike them, it is embedded in the binary and not set in each user's environment.
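A quick way to see the embedded RPATH in practice is to query an installed binary's dynamic section; a minimal sketch (``zlib`` is only an example spec, and whether the entry shows up as ``RPATH`` or ``RUNPATH`` depends on the linker):

.. code-block:: console

   # Print the library search paths Spack recorded inside the binary itself.
   $ readelf -d $(spack location -i zlib)/lib/libz.so | grep -Ei 'rpath|runpath'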
@@ -2397,13 +2425,15 @@ this because uninstalling the dependency would break the package. ``build``, ``link``, and ``run`` dependencies all affect the hash of Spack packages (along with ``sha256`` sums of patches and archives used to build the -package, and a [canonical hash](https://github.com/spack/spack/pull/28156) of +package, and a `canonical hash <https://github.com/spack/spack/pull/28156>`_ of the ``package.py`` recipes). ``test`` dependencies do not affect the package hash, as they are only used to construct a test environment *after* building and installing a given package installation. Older versions of Spack did not include build dependencies in the hash, but this has been -[fixed](https://github.com/spack/spack/pull/28504) as of [Spack -``v0.18``](https://github.com/spack/spack/releases/tag/v0.18.0) +`fixed <https://github.com/spack/spack/pull/28504>`_ as of |Spack v0.18|_. + +.. |Spack v0.18| replace:: Spack ``v0.18`` +.. _Spack v0.18: https://github.com/spack/spack/releases/tag/v0.18.0 If the dependency type is not specified, Spack uses a default of ``('build', 'link')``. This is the common case for compiler languages. diff --git a/lib/spack/docs/pipelines.rst b/lib/spack/docs/pipelines.rst index 419e128fc77..699fca2d1e4 100644 --- a/lib/spack/docs/pipelines.rst +++ b/lib/spack/docs/pipelines.rst @@ -1,4 +1,4 @@ -.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other Spack Project Developers. See the top-level COPYRIGHT file for details. SPDX-License-Identifier: (Apache-2.0 OR MIT) diff --git a/lib/spack/docs/replace_conda_homebrew.rst b/lib/spack/docs/replace_conda_homebrew.rst index 3f640e35cf6..42a3561300e 100644 --- a/lib/spack/docs/replace_conda_homebrew.rst +++ b/lib/spack/docs/replace_conda_homebrew.rst @@ -1,4 +1,4 @@ -.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other Spack Project Developers. See the top-level COPYRIGHT file for details. SPDX-License-Identifier: (Apache-2.0 OR MIT) @@ -184,13 +184,48 @@ simply run the following commands: .. code-block:: console $ spack env activate myenv - $ spack concretize --force + $ spack concretize --fresh --force $ spack install -The ``--force`` flag tells Spack to overwrite its previous concretization -decisions, allowing you to choose a new version of Python. If any of the new -packages like Bash are already installed, ``spack install`` won't re-install -them, it will keep the symlinks in place. +The ``--fresh`` flag tells Spack to use the latest version of every package +where possible instead of trying to optimize for reuse of existing installed +packages. + +In addition, the ``--force`` flag tells Spack to overwrite its previous +concretization decisions, allowing you to choose a new version of Python. +If any of the new packages like Bash are already installed, ``spack install`` +won't re-install them; it will keep the symlinks in place. + +----------------------------------- +Updating & Cleaning Up Old Packages +----------------------------------- + +If you're looking to mimic the behavior of Homebrew, you may also want to +clean up out-of-date packages from your environment after an upgrade. To +upgrade your entire software stack within an environment and clean up old +package versions, simply run the following commands: + +.. 
code-block:: console + + $ spack env activate myenv + $ spack mark -i --all + $ spack concretize --fresh --force + $ spack install + $ spack gc + +Running ``spack mark -i --all`` tells Spack to mark all of the existing +packages within an environment as "implicitly" installed. This tells +Spack's garbage collection system that these packages should be cleaned up. + +Don't worry, however: this will not remove your entire environment. +Running ``spack install`` will reexamine your Spack environment after +a fresh concretization and will re-mark any packages that should remain +installed as "explicitly" installed. + +**Note:** if you use multiple Spack environments, you should re-run ``spack install`` +in each of your environments prior to running ``spack gc`` to prevent Spack +from uninstalling any shared packages that are no longer required by the +environment you just upgraded. -------------- Uninstallation diff --git a/lib/spack/docs/repositories.rst b/lib/spack/docs/repositories.rst index 3691bdfebb6..e04104c5862 100644 --- a/lib/spack/docs/repositories.rst +++ b/lib/spack/docs/repositories.rst @@ -1,4 +1,4 @@ -.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +.. Copyright 2013-2023 Lawrence Livermore National Security, LLC and other Spack Project Developers. See the top-level COPYRIGHT file for details. SPDX-License-Identifier: (Apache-2.0 OR MIT) diff --git a/lib/spack/docs/spack.yaml b/lib/spack/docs/spack.yaml index 778f0a3c753..924280bc04e 100644 --- a/lib/spack/docs/spack.yaml +++ b/lib/spack/docs/spack.yaml @@ -1,4 +1,4 @@ -# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other # Spack Project Developers. See the top-level COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) diff --git a/lib/spack/env/cc b/lib/spack/env/cc index f54cdb36ff1..55b79c5290d 100755 --- a/lib/spack/env/cc +++ b/lib/spack/env/cc @@ -1,7 +1,7 @@ #!/bin/sh -f # shellcheck disable=SC2034 # evals in this script fool shellcheck # -# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other # Spack Project Developers. See the top-level COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) diff --git a/lib/spack/external/__init__.py b/lib/spack/external/__init__.py index 89928fae59c..ccaff0cc7cc 100644 --- a/lib/spack/external/__init__.py +++ b/lib/spack/external/__init__.py @@ -1,4 +1,4 @@ -# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other +# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other # Spack Project Developers. See the top-level COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) @@ -11,25 +11,14 @@ * Homepage: https://altgraph.readthedocs.io/en/latest/index.html * Usage: dependency of macholib -* Version: 0.17.2 +* Version: 0.17.3 archspec -------- * Homepage: https://pypi.python.org/pypi/archspec * Usage: Labeling, comparison and detection of microarchitectures -* Version: 0.2.0 (commit 77640e572725ad97f18e63a04857155752ace045) - -argparse -------- - -* Homepage: https://pypi.python.org/pypi/argparse -* Usage: We include our own version to be Python 3.X compatible. -* Version: 1.4.0 -* Note: This package has been slightly modified to improve - error message formatting. 
See the following commit if the - vendored copy ever needs to be updated again: - https://github.com/spack/spack/pull/6786/commits/dfcef577b77249106ea4e4c69a6cd9e64fa6c418 +* Version: 0.2.0 (commit e44bad9c7b6defac73696f64078b2fe634719b62) astunparse ---------------- @@ -52,7 +41,7 @@ * Homepage: https://github.com/python-attrs/attrs * Usage: Needed by jsonschema. -* Version: 21.2.0 (83d3cd70f90a3f4d19ee8b508e58d1c58821c0ad) +* Version: 22.1.0 ctest_log_parser ---------------- @@ -67,21 +56,14 @@ * Homepage: https://pypi.python.org/pypi/distro * Usage: Provides a more stable linux distribution detection. -* Version: 1.6.0 (64946a1e2a9ff529047070657728600e006c99ff) -* Note: Last version supporting Python 2.7 - -functools32 ------------ -* Homepage: https://github.com/MiCHiLU/python-functools32 -* Usage: Needed by jsonschema when using Python 2.7. -* Version: 3.2.3-2 +* Version: 1.8.0 jinja2 ------ * Homepage: https://pypi.python.org/pypi/Jinja2 * Usage: A modern and designer-friendly templating language for Python. -* Version: 2.11.3 (last version supporting Python 2.7) +* Version: 3.0.3 (last version supporting Python 3.6) jsonschema ---------- @@ -96,44 +78,21 @@ * Homepage: https://macholib.readthedocs.io/en/latest/index.html# * Usage: Manipulation of Mach-o binaries for relocating macOS buildcaches on Linux -* Version: 1.15.2 +* Version: 1.16.2 markupsafe ---------- * Homepage: https://pypi.python.org/pypi/MarkupSafe * Usage: Implements a XML/HTML/XHTML Markup safe string for Python. -* Version: 1.1.1 (last version supporting Python 2.7) - -py --- - -* Homepage: https://pypi.python.org/pypi/py -* Usage: Needed by pytest. Library with cross-python path, - ini-parsing, io, code, and log facilities. -* Version: 1.4.34 (last version supporting Python 2.6) -* Note: This packages has been modified: - * https://github.com/pytest-dev/py/pull/186 was backported +* Version: 2.0.1 (last version supporting Python 3.6) pyrsistent ---------- * Homepage: http://github.com/tobgu/pyrsistent/ * Usage: Needed by `jsonschema` -* Version: 0.16.1 (last version supporting Python 2.7) -* Note: We only include the parts needed for `jsonschema`. - -pytest ------- - -* Homepage: https://pypi.python.org/pypi/pytest -* Usage: Testing framework used by Spack. -* Version: 3.2.5 (last version supporting Python 2.6) -* Note: This package has been slightly modified: - * We improve Python 2.6 compatibility. See: - https://github.com/spack/spack/pull/6801. - * We have patched pytest not to depend on setuptools. 
See: - https://github.com/spack/spack/pull/15612 +* Version: 0.18.0 ruamel.yaml ------ diff --git a/lib/spack/external/_vendoring/_pyrsistent_version.py b/lib/spack/external/_vendoring/_pyrsistent_version.py new file mode 100644 index 00000000000..5ec52a922cc --- /dev/null +++ b/lib/spack/external/_vendoring/_pyrsistent_version.py @@ -0,0 +1 @@ +__version__ = '0.18.0' diff --git a/lib/spack/external/_vendoring/_pyrsistent_version.pyi b/lib/spack/external/_vendoring/_pyrsistent_version.pyi new file mode 100644 index 00000000000..873cc62efdd --- /dev/null +++ b/lib/spack/external/_vendoring/_pyrsistent_version.pyi @@ -0,0 +1 @@ +from _pyrsistent_version import * \ No newline at end of file diff --git a/lib/spack/external/_vendoring/altgraph.pyi b/lib/spack/external/_vendoring/altgraph.pyi new file mode 100644 index 00000000000..0d67a24e016 --- /dev/null +++ b/lib/spack/external/_vendoring/altgraph.pyi @@ -0,0 +1 @@ +from altgraph import * \ No newline at end of file diff --git a/lib/spack/external/altgraph/Dot.py b/lib/spack/external/_vendoring/altgraph/Dot.py similarity index 100% rename from lib/spack/external/altgraph/Dot.py rename to lib/spack/external/_vendoring/altgraph/Dot.py diff --git a/lib/spack/external/altgraph/Graph.py b/lib/spack/external/_vendoring/altgraph/Graph.py similarity index 100% rename from lib/spack/external/altgraph/Graph.py rename to lib/spack/external/_vendoring/altgraph/Graph.py diff --git a/lib/spack/external/altgraph/GraphAlgo.py b/lib/spack/external/_vendoring/altgraph/GraphAlgo.py similarity index 100% rename from lib/spack/external/altgraph/GraphAlgo.py rename to lib/spack/external/_vendoring/altgraph/GraphAlgo.py diff --git a/lib/spack/external/altgraph/GraphStat.py b/lib/spack/external/_vendoring/altgraph/GraphStat.py similarity index 100% rename from lib/spack/external/altgraph/GraphStat.py rename to lib/spack/external/_vendoring/altgraph/GraphStat.py diff --git a/lib/spack/external/altgraph/GraphUtil.py b/lib/spack/external/_vendoring/altgraph/GraphUtil.py similarity index 100% rename from lib/spack/external/altgraph/GraphUtil.py rename to lib/spack/external/_vendoring/altgraph/GraphUtil.py diff --git a/lib/spack/external/_vendoring/altgraph/LICENSE b/lib/spack/external/_vendoring/altgraph/LICENSE new file mode 100644 index 00000000000..6013a212b84 --- /dev/null +++ b/lib/spack/external/_vendoring/altgraph/LICENSE @@ -0,0 +1,18 @@ +Copyright (c) 2004 Istvan Albert unless otherwise noted. +Copyright (c) 2006-2010 Bob Ippolito +Copyright (2) 2010-2020 Ronald Oussoren, et. al. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. 
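The one-line ``.pyi`` stubs above (``altgraph.pyi``, ``_pyrsistent_version.pyi``, and so on) all follow the same re-export pattern: each stub forwards the vendored package's public names so that type checkers looking up the original module name find the bundled copy. Schematically:

.. code-block:: python

   # lib/spack/external/_vendoring/altgraph.pyi
   # Re-export everything from the vendored package so tools that resolve
   # `altgraph` for typing purposes see the bundled implementation.
   from altgraph import *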
diff --git a/lib/spack/external/altgraph/ObjectGraph.py b/lib/spack/external/_vendoring/altgraph/ObjectGraph.py similarity index 100% rename from lib/spack/external/altgraph/ObjectGraph.py rename to lib/spack/external/_vendoring/altgraph/ObjectGraph.py diff --git a/lib/spack/external/altgraph/__init__.py b/lib/spack/external/_vendoring/altgraph/__init__.py similarity index 100% rename from lib/spack/external/altgraph/__init__.py rename to lib/spack/external/_vendoring/altgraph/__init__.py diff --git a/lib/spack/external/attr/__init__.py b/lib/spack/external/_vendoring/attr/__init__.py similarity index 87% rename from lib/spack/external/attr/__init__.py rename to lib/spack/external/_vendoring/attr/__init__.py index b1ce7fe248b..386305d6289 100644 --- a/lib/spack/external/attr/__init__.py +++ b/lib/spack/external/_vendoring/attr/__init__.py @@ -1,4 +1,5 @@ -from __future__ import absolute_import, division, print_function +# SPDX-License-Identifier: MIT + import sys @@ -22,7 +23,7 @@ from ._version_info import VersionInfo -__version__ = "21.2.0" +__version__ = "22.1.0" __version_info__ = VersionInfo._from_version_string(__version__) __title__ = "attrs" @@ -73,6 +74,6 @@ ] if sys.version_info[:2] >= (3, 6): - from ._next_gen import define, field, frozen, mutable + from ._next_gen import define, field, frozen, mutable # noqa: F401 - __all__.extend((define, field, frozen, mutable)) + __all__.extend(("define", "field", "frozen", "mutable")) diff --git a/lib/spack/external/_vendoring/attr/__init__.pyi b/lib/spack/external/_vendoring/attr/__init__.pyi new file mode 100644 index 00000000000..03cc4c82d2f --- /dev/null +++ b/lib/spack/external/_vendoring/attr/__init__.pyi @@ -0,0 +1,486 @@ +import sys + +from typing import ( + Any, + Callable, + ClassVar, + Dict, + Generic, + List, + Mapping, + Optional, + Protocol, + Sequence, + Tuple, + Type, + TypeVar, + Union, + overload, +) + +# `import X as X` is required to make these public +from . import converters as converters +from . import exceptions as exceptions +from . import filters as filters +from . import setters as setters +from . import validators as validators +from ._cmp import cmp_using as cmp_using +from ._version_info import VersionInfo + +__version__: str +__version_info__: VersionInfo +__title__: str +__description__: str +__url__: str +__uri__: str +__author__: str +__email__: str +__license__: str +__copyright__: str + +_T = TypeVar("_T") +_C = TypeVar("_C", bound=type) + +_EqOrderType = Union[bool, Callable[[Any], Any]] +_ValidatorType = Callable[[Any, Attribute[_T], _T], Any] +_ConverterType = Callable[[Any], Any] +_FilterType = Callable[[Attribute[_T], _T], bool] +_ReprType = Callable[[Any], str] +_ReprArgType = Union[bool, _ReprType] +_OnSetAttrType = Callable[[Any, Attribute[Any], Any], Any] +_OnSetAttrArgType = Union[ + _OnSetAttrType, List[_OnSetAttrType], setters._NoOpType +] +_FieldTransformer = Callable[ + [type, List[Attribute[Any]]], List[Attribute[Any]] +] +# FIXME: in reality, if multiple validators are passed they must be in a list +# or tuple, but those are invariant and so would prevent subtypes of +# _ValidatorType from working when passed in a list or tuple. +_ValidatorArgType = Union[_ValidatorType[_T], Sequence[_ValidatorType[_T]]] + +# A protocol to be able to statically accept an attrs class. 
+class AttrsInstance(Protocol): + __attrs_attrs__: ClassVar[Any] + +# _make -- + +NOTHING: object + +# NOTE: Factory lies about its return type to make this possible: +# `x: List[int] # = Factory(list)` +# Work around mypy issue #4554 in the common case by using an overload. +if sys.version_info >= (3, 8): + from typing import Literal + @overload + def Factory(factory: Callable[[], _T]) -> _T: ... + @overload + def Factory( + factory: Callable[[Any], _T], + takes_self: Literal[True], + ) -> _T: ... + @overload + def Factory( + factory: Callable[[], _T], + takes_self: Literal[False], + ) -> _T: ... + +else: + @overload + def Factory(factory: Callable[[], _T]) -> _T: ... + @overload + def Factory( + factory: Union[Callable[[Any], _T], Callable[[], _T]], + takes_self: bool = ..., + ) -> _T: ... + +# Static type inference support via __dataclass_transform__ implemented as per: +# https://github.com/microsoft/pyright/blob/1.1.135/specs/dataclass_transforms.md +# This annotation must be applied to all overloads of "define" and "attrs" +# +# NOTE: This is a typing construct and does not exist at runtime. Extensions +# wrapping attrs decorators should declare a separate __dataclass_transform__ +# signature in the extension module using the specification linked above to +# provide pyright support. +def __dataclass_transform__( + *, + eq_default: bool = True, + order_default: bool = False, + kw_only_default: bool = False, + field_descriptors: Tuple[Union[type, Callable[..., Any]], ...] = (()), +) -> Callable[[_T], _T]: ... + +class Attribute(Generic[_T]): + name: str + default: Optional[_T] + validator: Optional[_ValidatorType[_T]] + repr: _ReprArgType + cmp: _EqOrderType + eq: _EqOrderType + order: _EqOrderType + hash: Optional[bool] + init: bool + converter: Optional[_ConverterType] + metadata: Dict[Any, Any] + type: Optional[Type[_T]] + kw_only: bool + on_setattr: _OnSetAttrType + def evolve(self, **changes: Any) -> "Attribute[Any]": ... + +# NOTE: We had several choices for the annotation to use for type arg: +# 1) Type[_T] +# - Pros: Handles simple cases correctly +# - Cons: Might produce less informative errors in the case of conflicting +# TypeVars e.g. `attr.ib(default='bad', type=int)` +# 2) Callable[..., _T] +# - Pros: Better error messages than #1 for conflicting TypeVars +# - Cons: Terrible error messages for validator checks. +# e.g. attr.ib(type=int, validator=validate_str) +# -> error: Cannot infer function type argument +# 3) type (and do all of the work in the mypy plugin) +# - Pros: Simple here, and we could customize the plugin with our own errors. +# - Cons: Would need to write mypy plugin code to handle all the cases. +# We chose option #1. + +# `attr` lies about its return type to make the following possible: +# attr() -> Any +# attr(8) -> int +# attr(validator=) -> Whatever the callable expects. +# This makes this type of assignments possible: +# x: int = attr(8) +# +# This form catches explicit None or no default but with no other arguments +# returns Any. +@overload +def attrib( + default: None = ..., + validator: None = ..., + repr: _ReprArgType = ..., + cmp: Optional[_EqOrderType] = ..., + hash: Optional[bool] = ..., + init: bool = ..., + metadata: Optional[Mapping[Any, Any]] = ..., + type: None = ..., + converter: None = ..., + factory: None = ..., + kw_only: bool = ..., + eq: Optional[_EqOrderType] = ..., + order: Optional[_EqOrderType] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., +) -> Any: ... 
+ +# This form catches an explicit None or no default and infers the type from the +# other arguments. +@overload +def attrib( + default: None = ..., + validator: Optional[_ValidatorArgType[_T]] = ..., + repr: _ReprArgType = ..., + cmp: Optional[_EqOrderType] = ..., + hash: Optional[bool] = ..., + init: bool = ..., + metadata: Optional[Mapping[Any, Any]] = ..., + type: Optional[Type[_T]] = ..., + converter: Optional[_ConverterType] = ..., + factory: Optional[Callable[[], _T]] = ..., + kw_only: bool = ..., + eq: Optional[_EqOrderType] = ..., + order: Optional[_EqOrderType] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., +) -> _T: ... + +# This form catches an explicit default argument. +@overload +def attrib( + default: _T, + validator: Optional[_ValidatorArgType[_T]] = ..., + repr: _ReprArgType = ..., + cmp: Optional[_EqOrderType] = ..., + hash: Optional[bool] = ..., + init: bool = ..., + metadata: Optional[Mapping[Any, Any]] = ..., + type: Optional[Type[_T]] = ..., + converter: Optional[_ConverterType] = ..., + factory: Optional[Callable[[], _T]] = ..., + kw_only: bool = ..., + eq: Optional[_EqOrderType] = ..., + order: Optional[_EqOrderType] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., +) -> _T: ... + +# This form covers type=non-Type: e.g. forward references (str), Any +@overload +def attrib( + default: Optional[_T] = ..., + validator: Optional[_ValidatorArgType[_T]] = ..., + repr: _ReprArgType = ..., + cmp: Optional[_EqOrderType] = ..., + hash: Optional[bool] = ..., + init: bool = ..., + metadata: Optional[Mapping[Any, Any]] = ..., + type: object = ..., + converter: Optional[_ConverterType] = ..., + factory: Optional[Callable[[], _T]] = ..., + kw_only: bool = ..., + eq: Optional[_EqOrderType] = ..., + order: Optional[_EqOrderType] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., +) -> Any: ... +@overload +def field( + *, + default: None = ..., + validator: None = ..., + repr: _ReprArgType = ..., + hash: Optional[bool] = ..., + init: bool = ..., + metadata: Optional[Mapping[Any, Any]] = ..., + converter: None = ..., + factory: None = ..., + kw_only: bool = ..., + eq: Optional[bool] = ..., + order: Optional[bool] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., +) -> Any: ... + +# This form catches an explicit None or no default and infers the type from the +# other arguments. +@overload +def field( + *, + default: None = ..., + validator: Optional[_ValidatorArgType[_T]] = ..., + repr: _ReprArgType = ..., + hash: Optional[bool] = ..., + init: bool = ..., + metadata: Optional[Mapping[Any, Any]] = ..., + converter: Optional[_ConverterType] = ..., + factory: Optional[Callable[[], _T]] = ..., + kw_only: bool = ..., + eq: Optional[_EqOrderType] = ..., + order: Optional[_EqOrderType] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., +) -> _T: ... + +# This form catches an explicit default argument. +@overload +def field( + *, + default: _T, + validator: Optional[_ValidatorArgType[_T]] = ..., + repr: _ReprArgType = ..., + hash: Optional[bool] = ..., + init: bool = ..., + metadata: Optional[Mapping[Any, Any]] = ..., + converter: Optional[_ConverterType] = ..., + factory: Optional[Callable[[], _T]] = ..., + kw_only: bool = ..., + eq: Optional[_EqOrderType] = ..., + order: Optional[_EqOrderType] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., +) -> _T: ... + +# This form covers type=non-Type: e.g. 
forward references (str), Any +@overload +def field( + *, + default: Optional[_T] = ..., + validator: Optional[_ValidatorArgType[_T]] = ..., + repr: _ReprArgType = ..., + hash: Optional[bool] = ..., + init: bool = ..., + metadata: Optional[Mapping[Any, Any]] = ..., + converter: Optional[_ConverterType] = ..., + factory: Optional[Callable[[], _T]] = ..., + kw_only: bool = ..., + eq: Optional[_EqOrderType] = ..., + order: Optional[_EqOrderType] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., +) -> Any: ... +@overload +@__dataclass_transform__(order_default=True, field_descriptors=(attrib, field)) +def attrs( + maybe_cls: _C, + these: Optional[Dict[str, Any]] = ..., + repr_ns: Optional[str] = ..., + repr: bool = ..., + cmp: Optional[_EqOrderType] = ..., + hash: Optional[bool] = ..., + init: bool = ..., + slots: bool = ..., + frozen: bool = ..., + weakref_slot: bool = ..., + str: bool = ..., + auto_attribs: bool = ..., + kw_only: bool = ..., + cache_hash: bool = ..., + auto_exc: bool = ..., + eq: Optional[_EqOrderType] = ..., + order: Optional[_EqOrderType] = ..., + auto_detect: bool = ..., + collect_by_mro: bool = ..., + getstate_setstate: Optional[bool] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., + field_transformer: Optional[_FieldTransformer] = ..., + match_args: bool = ..., +) -> _C: ... +@overload +@__dataclass_transform__(order_default=True, field_descriptors=(attrib, field)) +def attrs( + maybe_cls: None = ..., + these: Optional[Dict[str, Any]] = ..., + repr_ns: Optional[str] = ..., + repr: bool = ..., + cmp: Optional[_EqOrderType] = ..., + hash: Optional[bool] = ..., + init: bool = ..., + slots: bool = ..., + frozen: bool = ..., + weakref_slot: bool = ..., + str: bool = ..., + auto_attribs: bool = ..., + kw_only: bool = ..., + cache_hash: bool = ..., + auto_exc: bool = ..., + eq: Optional[_EqOrderType] = ..., + order: Optional[_EqOrderType] = ..., + auto_detect: bool = ..., + collect_by_mro: bool = ..., + getstate_setstate: Optional[bool] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., + field_transformer: Optional[_FieldTransformer] = ..., + match_args: bool = ..., +) -> Callable[[_C], _C]: ... +@overload +@__dataclass_transform__(field_descriptors=(attrib, field)) +def define( + maybe_cls: _C, + *, + these: Optional[Dict[str, Any]] = ..., + repr: bool = ..., + hash: Optional[bool] = ..., + init: bool = ..., + slots: bool = ..., + frozen: bool = ..., + weakref_slot: bool = ..., + str: bool = ..., + auto_attribs: bool = ..., + kw_only: bool = ..., + cache_hash: bool = ..., + auto_exc: bool = ..., + eq: Optional[bool] = ..., + order: Optional[bool] = ..., + auto_detect: bool = ..., + getstate_setstate: Optional[bool] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., + field_transformer: Optional[_FieldTransformer] = ..., + match_args: bool = ..., +) -> _C: ... 
+@overload +@__dataclass_transform__(field_descriptors=(attrib, field)) +def define( + maybe_cls: None = ..., + *, + these: Optional[Dict[str, Any]] = ..., + repr: bool = ..., + hash: Optional[bool] = ..., + init: bool = ..., + slots: bool = ..., + frozen: bool = ..., + weakref_slot: bool = ..., + str: bool = ..., + auto_attribs: bool = ..., + kw_only: bool = ..., + cache_hash: bool = ..., + auto_exc: bool = ..., + eq: Optional[bool] = ..., + order: Optional[bool] = ..., + auto_detect: bool = ..., + getstate_setstate: Optional[bool] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., + field_transformer: Optional[_FieldTransformer] = ..., + match_args: bool = ..., +) -> Callable[[_C], _C]: ... + +mutable = define +frozen = define # they differ only in their defaults + +def fields(cls: Type[AttrsInstance]) -> Any: ... +def fields_dict(cls: Type[AttrsInstance]) -> Dict[str, Attribute[Any]]: ... +def validate(inst: AttrsInstance) -> None: ... +def resolve_types( + cls: _C, + globalns: Optional[Dict[str, Any]] = ..., + localns: Optional[Dict[str, Any]] = ..., + attribs: Optional[List[Attribute[Any]]] = ..., +) -> _C: ... + +# TODO: add support for returning a proper attrs class from the mypy plugin +# we use Any instead of _CountingAttr so that e.g. `make_class('Foo', +# [attr.ib()])` is valid +def make_class( + name: str, + attrs: Union[List[str], Tuple[str, ...], Dict[str, Any]], + bases: Tuple[type, ...] = ..., + repr_ns: Optional[str] = ..., + repr: bool = ..., + cmp: Optional[_EqOrderType] = ..., + hash: Optional[bool] = ..., + init: bool = ..., + slots: bool = ..., + frozen: bool = ..., + weakref_slot: bool = ..., + str: bool = ..., + auto_attribs: bool = ..., + kw_only: bool = ..., + cache_hash: bool = ..., + auto_exc: bool = ..., + eq: Optional[_EqOrderType] = ..., + order: Optional[_EqOrderType] = ..., + collect_by_mro: bool = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., + field_transformer: Optional[_FieldTransformer] = ..., +) -> type: ... + +# _funcs -- + +# TODO: add support for returning TypedDict from the mypy plugin +# FIXME: asdict/astuple do not honor their factory args. Waiting on one of +# these: +# https://github.com/python/mypy/issues/4236 +# https://github.com/python/typing/issues/253 +# XXX: remember to fix attrs.asdict/astuple too! +def asdict( + inst: AttrsInstance, + recurse: bool = ..., + filter: Optional[_FilterType[Any]] = ..., + dict_factory: Type[Mapping[Any, Any]] = ..., + retain_collection_types: bool = ..., + value_serializer: Optional[ + Callable[[type, Attribute[Any], Any], Any] + ] = ..., + tuple_keys: Optional[bool] = ..., +) -> Dict[str, Any]: ... + +# TODO: add support for returning NamedTuple from the mypy plugin +def astuple( + inst: AttrsInstance, + recurse: bool = ..., + filter: Optional[_FilterType[Any]] = ..., + tuple_factory: Type[Sequence[Any]] = ..., + retain_collection_types: bool = ..., +) -> Tuple[Any, ...]: ... +def has(cls: type) -> bool: ... +def assoc(inst: _T, **changes: Any) -> _T: ... +def evolve(inst: _T, **changes: Any) -> _T: ... + +# _config -- + +def set_run_validators(run: bool) -> None: ... +def get_run_validators() -> bool: ... 
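As a quick orientation to the stubbed runtime helpers above (`make_class`, `asdict`, `astuple`, `has`, and the validator switches), here is a minimal sketch of how they behave, assuming the vendored copy matches upstream attrs 21.3.0; the class name `Point` is illustrative and not part of the patch:

    import attr

    # make_class builds an attrs class at runtime; the stub returns `type`
    # because the mypy plugin cannot model the generated class precisely.
    Point = attr.make_class("Point", ["x", "y"])
    p = Point(1, 2)

    assert attr.has(Point)                     # Point is attrs-decorated
    assert attr.asdict(p) == {"x": 1, "y": 2}  # recurses by default
    assert attr.astuple(p) == (1, 2)

    # The global validator switches still work but, per the 21.3 docstrings,
    # are deprecated in favor of attrs.validators.set_disabled()/get_disabled().
    attr.set_run_validators(False)
    assert attr.get_run_validators() is False
    attr.set_run_validators(True)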
+ +# aliases -- + +s = attributes = attrs +ib = attr = attrib +dataclass = attrs # Technically, partial(attrs, auto_attribs=True) ;) diff --git a/lib/spack/external/attr/_cmp.py b/lib/spack/external/_vendoring/attr/_cmp.py similarity index 95% rename from lib/spack/external/attr/_cmp.py rename to lib/spack/external/_vendoring/attr/_cmp.py index b747b603f17..81b99e4c330 100644 --- a/lib/spack/external/attr/_cmp.py +++ b/lib/spack/external/_vendoring/attr/_cmp.py @@ -1,8 +1,9 @@ -from __future__ import absolute_import, division, print_function +# SPDX-License-Identifier: MIT + import functools +import types -from ._compat import new_class from ._make import _make_ne @@ -78,7 +79,9 @@ def cmp_using( num_order_functions += 1 body["__ge__"] = _make_operator("ge", ge) - type_ = new_class(class_name, (object,), {}, lambda ns: ns.update(body)) + type_ = types.new_class( + class_name, (object,), {}, lambda ns: ns.update(body) + ) # Add same type requirement. if require_same_type: diff --git a/lib/spack/external/_vendoring/attr/_cmp.pyi b/lib/spack/external/_vendoring/attr/_cmp.pyi new file mode 100644 index 00000000000..35437eff623 --- /dev/null +++ b/lib/spack/external/_vendoring/attr/_cmp.pyi @@ -0,0 +1,13 @@ +from typing import Any, Callable, Optional, Type + +_CompareWithType = Callable[[Any, Any], bool] + +def cmp_using( + eq: Optional[_CompareWithType], + lt: Optional[_CompareWithType], + le: Optional[_CompareWithType], + gt: Optional[_CompareWithType], + ge: Optional[_CompareWithType], + require_same_type: bool, + class_name: str, +) -> Type: ... diff --git a/lib/spack/external/_vendoring/attr/_compat.py b/lib/spack/external/_vendoring/attr/_compat.py new file mode 100644 index 00000000000..58264932572 --- /dev/null +++ b/lib/spack/external/_vendoring/attr/_compat.py @@ -0,0 +1,185 @@ +# SPDX-License-Identifier: MIT + + +import inspect +import platform +import sys +import threading +import types +import warnings + +from collections.abc import Mapping, Sequence # noqa + + +PYPY = platform.python_implementation() == "PyPy" +PY36 = sys.version_info[:2] >= (3, 6) +HAS_F_STRINGS = PY36 +PY310 = sys.version_info[:2] >= (3, 10) + + +if PYPY or PY36: + ordered_dict = dict +else: + from collections import OrderedDict + + ordered_dict = OrderedDict + + +def just_warn(*args, **kw): + warnings.warn( + "Running interpreter doesn't sufficiently support code object " + "introspection. Some features like bare super() or accessing " + "__class__ will not work with slotted classes.", + RuntimeWarning, + stacklevel=2, + ) + + +class _AnnotationExtractor: + """ + Extract type annotations from a callable, returning None whenever there + is none. + """ + + __slots__ = ["sig"] + + def __init__(self, callable): + try: + self.sig = inspect.signature(callable) + except (ValueError, TypeError): # inspect failed + self.sig = None + + def get_first_param_type(self): + """ + Return the type annotation of the first argument if it's not empty. + """ + if not self.sig: + return None + + params = list(self.sig.parameters.values()) + if params and params[0].annotation is not inspect.Parameter.empty: + return params[0].annotation + + return None + + def get_return_type(self): + """ + Return the return type if it's not empty. + """ + if ( + self.sig + and self.sig.return_annotation is not inspect.Signature.empty + ): + return self.sig.return_annotation + + return None + + +def make_set_closure_cell(): + """Return a function of two arguments (cell, value) which sets + the value stored in the closure cell `cell` to `value`. 
+ """ + # pypy makes this easy. (It also supports the logic below, but + # why not do the easy/fast thing?) + if PYPY: + + def set_closure_cell(cell, value): + cell.__setstate__((value,)) + + return set_closure_cell + + # Otherwise gotta do it the hard way. + + # Create a function that will set its first cellvar to `value`. + def set_first_cellvar_to(value): + x = value + return + + # This function will be eliminated as dead code, but + # not before its reference to `x` forces `x` to be + # represented as a closure cell rather than a local. + def force_x_to_be_a_cell(): # pragma: no cover + return x + + try: + # Extract the code object and make sure our assumptions about + # the closure behavior are correct. + co = set_first_cellvar_to.__code__ + if co.co_cellvars != ("x",) or co.co_freevars != (): + raise AssertionError # pragma: no cover + + # Convert this code object to a code object that sets the + # function's first _freevar_ (not cellvar) to the argument. + if sys.version_info >= (3, 8): + + def set_closure_cell(cell, value): + cell.cell_contents = value + + else: + args = [co.co_argcount] + args.append(co.co_kwonlyargcount) + args.extend( + [ + co.co_nlocals, + co.co_stacksize, + co.co_flags, + co.co_code, + co.co_consts, + co.co_names, + co.co_varnames, + co.co_filename, + co.co_name, + co.co_firstlineno, + co.co_lnotab, + # These two arguments are reversed: + co.co_cellvars, + co.co_freevars, + ] + ) + set_first_freevar_code = types.CodeType(*args) + + def set_closure_cell(cell, value): + # Create a function using the set_first_freevar_code, + # whose first closure cell is `cell`. Calling it will + # change the value of that cell. + setter = types.FunctionType( + set_first_freevar_code, {}, "setter", (), (cell,) + ) + # And call it to set the cell. + setter(value) + + # Make sure it works on this interpreter: + def make_func_with_cell(): + x = None + + def func(): + return x # pragma: no cover + + return func + + cell = make_func_with_cell().__closure__[0] + set_closure_cell(cell, 100) + if cell.cell_contents != 100: + raise AssertionError # pragma: no cover + + except Exception: + return just_warn + else: + return set_closure_cell + + +set_closure_cell = make_set_closure_cell() + +# Thread-local global to track attrs instances which are already being repr'd. +# This is needed because there is no other (thread-safe) way to pass info +# about the instances that are already being repr'd through the call stack +# in order to ensure we don't perform infinite recursion. +# +# For instance, if an instance contains a dict which contains that instance, +# we need to know that we're already repr'ing the outside instance from within +# the dict's repr() call. +# +# This lives here rather than in _make.py so that the functions in _make.py +# don't have a direct reference to the thread-local in their globals dict. +# If they have such a reference, it breaks cloudpickle. 
+repr_context = threading.local() diff --git a/lib/spack/external/attr/_config.py b/lib/spack/external/_vendoring/attr/_config.py similarity index 54% rename from lib/spack/external/attr/_config.py rename to lib/spack/external/_vendoring/attr/_config.py index 8ec920962d1..96d4200773d 100644 --- a/lib/spack/external/attr/_config.py +++ b/lib/spack/external/_vendoring/attr/_config.py @@ -1,4 +1,4 @@ -from __future__ import absolute_import, division, print_function +# SPDX-License-Identifier: MIT __all__ = ["set_run_validators", "get_run_validators"] @@ -9,6 +9,10 @@ def set_run_validators(run): """ Set whether or not validators are run. By default, they are run. + + .. deprecated:: 21.3.0 It will not be removed, but it also will not be + moved to new ``attrs`` namespace. Use `attrs.validators.set_disabled()` + instead. """ if not isinstance(run, bool): raise TypeError("'run' must be bool.") @@ -19,5 +23,9 @@ def set_run_validators(run): def get_run_validators(): """ Return whether or not validators are run. + + .. deprecated:: 21.3.0 It will not be removed, but it also will not be + moved to new ``attrs`` namespace. Use `attrs.validators.get_disabled()` + instead. """ return _run_validators diff --git a/lib/spack/external/attr/_funcs.py b/lib/spack/external/_vendoring/attr/_funcs.py similarity index 77% rename from lib/spack/external/attr/_funcs.py rename to lib/spack/external/_vendoring/attr/_funcs.py index fda508c5c4b..a982d7cb562 100644 --- a/lib/spack/external/attr/_funcs.py +++ b/lib/spack/external/_vendoring/attr/_funcs.py @@ -1,8 +1,8 @@ -from __future__ import absolute_import, division, print_function +# SPDX-License-Identifier: MIT + import copy -from ._compat import iteritems from ._make import NOTHING, _obj_setattr, fields from .exceptions import AttrsAttributeNotFoundError @@ -25,7 +25,7 @@ def asdict( ``attrs``-decorated. :param callable filter: A callable whose return code determines whether an attribute or element is included (``True``) or dropped (``False``). Is - called with the `attr.Attribute` as the first argument and the + called with the `attrs.Attribute` as the first argument and the value as the second argument. :param callable dict_factory: A callable to produce dictionaries from. For example, to produce ordered dictionaries instead of normal Python @@ -46,6 +46,8 @@ def asdict( .. versionadded:: 16.0.0 *dict_factory* .. versionadded:: 16.1.0 *retain_collection_types* .. versionadded:: 20.3.0 *value_serializer* + .. versionadded:: 21.3.0 If a dict has a collection for a key, it is + serialized as a tuple. 
""" attrs = fields(inst.__class__) rv = dict_factory() @@ -61,11 +63,11 @@ def asdict( if has(v.__class__): rv[a.name] = asdict( v, - True, - filter, - dict_factory, - retain_collection_types, - value_serializer, + recurse=True, + filter=filter, + dict_factory=dict_factory, + retain_collection_types=retain_collection_types, + value_serializer=value_serializer, ) elif isinstance(v, (tuple, list, set, frozenset)): cf = v.__class__ if retain_collection_types is True else list @@ -73,10 +75,11 @@ def asdict( [ _asdict_anything( i, - filter, - dict_factory, - retain_collection_types, - value_serializer, + is_key=False, + filter=filter, + dict_factory=dict_factory, + retain_collection_types=retain_collection_types, + value_serializer=value_serializer, ) for i in v ] @@ -87,20 +90,22 @@ def asdict( ( _asdict_anything( kk, - filter, - df, - retain_collection_types, - value_serializer, + is_key=True, + filter=filter, + dict_factory=df, + retain_collection_types=retain_collection_types, + value_serializer=value_serializer, ), _asdict_anything( vv, - filter, - df, - retain_collection_types, - value_serializer, + is_key=False, + filter=filter, + dict_factory=df, + retain_collection_types=retain_collection_types, + value_serializer=value_serializer, ), ) - for kk, vv in iteritems(v) + for kk, vv in v.items() ) else: rv[a.name] = v @@ -111,6 +116,7 @@ def asdict( def _asdict_anything( val, + is_key, filter, dict_factory, retain_collection_types, @@ -123,22 +129,29 @@ def _asdict_anything( # Attrs class. rv = asdict( val, - True, - filter, - dict_factory, - retain_collection_types, - value_serializer, + recurse=True, + filter=filter, + dict_factory=dict_factory, + retain_collection_types=retain_collection_types, + value_serializer=value_serializer, ) elif isinstance(val, (tuple, list, set, frozenset)): - cf = val.__class__ if retain_collection_types is True else list + if retain_collection_types is True: + cf = val.__class__ + elif is_key: + cf = tuple + else: + cf = list + rv = cf( [ _asdict_anything( i, - filter, - dict_factory, - retain_collection_types, - value_serializer, + is_key=False, + filter=filter, + dict_factory=dict_factory, + retain_collection_types=retain_collection_types, + value_serializer=value_serializer, ) for i in val ] @@ -148,13 +161,23 @@ def _asdict_anything( rv = df( ( _asdict_anything( - kk, filter, df, retain_collection_types, value_serializer + kk, + is_key=True, + filter=filter, + dict_factory=df, + retain_collection_types=retain_collection_types, + value_serializer=value_serializer, ), _asdict_anything( - vv, filter, df, retain_collection_types, value_serializer + vv, + is_key=False, + filter=filter, + dict_factory=df, + retain_collection_types=retain_collection_types, + value_serializer=value_serializer, ), ) - for kk, vv in iteritems(val) + for kk, vv in val.items() ) else: rv = val @@ -181,7 +204,7 @@ def astuple( ``attrs``-decorated. :param callable filter: A callable whose return code determines whether an attribute or element is included (``True``) or dropped (``False``). Is - called with the `attr.Attribute` as the first argument and the + called with the `attrs.Attribute` as the first argument and the value as the second argument. :param callable tuple_factory: A callable to produce tuples from. For example, to produce lists instead of tuples. @@ -253,7 +276,7 @@ def astuple( if has(vv.__class__) else vv, ) - for kk, vv in iteritems(v) + for kk, vv in v.items() ) ) else: @@ -291,7 +314,9 @@ def assoc(inst, **changes): class. .. 
deprecated:: 17.1.0 - Use `evolve` instead. + Use `attrs.evolve` instead if you can. + This function will not be removed due to the slightly different approach + compared to `attrs.evolve`. """ import warnings @@ -302,7 +327,7 @@ def assoc(inst, **changes): ) new = copy.copy(inst) attrs = fields(inst.__class__) - for k, v in iteritems(changes): + for k, v in changes.items(): a = getattr(attrs, k, NOTHING) if a is NOTHING: raise AttrsAttributeNotFoundError( @@ -370,18 +395,16 @@ class and you didn't pass any attribs. :raise NameError: If types cannot be resolved because of missing variables. :returns: *cls* so you can use this function also as a class decorator. - Please note that you have to apply it **after** `attr.s`. That means - the decorator has to come in the line **before** `attr.s`. + Please note that you have to apply it **after** `attrs.define`. That + means the decorator has to come in the line **before** `attrs.define`. .. versionadded:: 20.1.0 .. versionadded:: 21.1.0 *attribs* """ - try: - # Since calling get_type_hints is expensive we cache whether we've - # done it already. - cls.__attrs_types_resolved__ - except AttributeError: + # Since calling get_type_hints is expensive we cache whether we've + # done it already. + if getattr(cls, "__attrs_types_resolved__", None) != cls: import typing hints = typing.get_type_hints(cls, globalns=globalns, localns=localns) @@ -389,7 +412,9 @@ class and you didn't pass any attribs. if field.name in hints: # Since fields have been frozen we must work around it. _obj_setattr(field, "type", hints[field.name]) - cls.__attrs_types_resolved__ = True + # We store the class we resolved so that subclasses know they haven't + # been resolved. + cls.__attrs_types_resolved__ = cls # Return the class so you can use it as a decorator too. return cls diff --git a/lib/spack/external/attr/_make.py b/lib/spack/external/_vendoring/attr/_make.py similarity index 86% rename from lib/spack/external/attr/_make.py rename to lib/spack/external/_vendoring/attr/_make.py index a1912b1233f..4d1afe3fc8a 100644 --- a/lib/spack/external/attr/_make.py +++ b/lib/spack/external/_vendoring/attr/_make.py @@ -1,23 +1,21 @@ -from __future__ import absolute_import, division, print_function +# SPDX-License-Identifier: MIT import copy -import inspect import linecache import sys -import threading -import uuid -import warnings +import types +import typing from operator import itemgetter -from . import _config, setters +# We need to import _compat itself in addition to the _compat members to avoid +# having the thread-local in the globals here. +from . import _compat, _config, setters from ._compat import ( - PY2, + HAS_F_STRINGS, + PY310, PYPY, - isclass, - iteritems, - metadata_proxy, - new_class, + _AnnotationExtractor, ordered_dict, set_closure_cell, ) @@ -25,15 +23,10 @@ DefaultAlreadySetError, FrozenInstanceError, NotAnAttrsClassError, - PythonTooOldError, UnannotatedAttributeError, ) -if not PY2: - import typing - - # This is used at least twice, so cache it here. _obj_setattr = object.__setattr__ _init_converter_pat = "__attr_converter_%s" @@ -52,13 +45,15 @@ # (when slots=True) _hash_cache_field = "_attrs_cached_hash" -_empty_metadata_singleton = metadata_proxy({}) +_empty_metadata_singleton = types.MappingProxyType({}) # Unique object for unequivocal getattr() defaults.
_sentinel = object() +_ng_default_on_setattr = setters.pipe(setters.convert, setters.validate) -class _Nothing(object): + +class _Nothing: """ Sentinel class to indicate the lack of a value when ``None`` is ambiguous. @@ -71,7 +66,7 @@ class _Nothing(object): def __new__(cls): if _Nothing._singleton is None: - _Nothing._singleton = super(_Nothing, cls).__new__(cls) + _Nothing._singleton = super().__new__(cls) return _Nothing._singleton def __repr__(self): @@ -80,9 +75,6 @@ def __repr__(self): def __bool__(self): return False - def __len__(self): - return 0 # __bool__ for Python 2 - NOTHING = _Nothing() """ @@ -102,17 +94,8 @@ class _CacheHashWrapper(int): See GH #613 for more details. """ - if PY2: - # For some reason `type(None)` isn't callable in Python 2, but we don't - # actually need a constructor for None objects, we just need any - # available function that returns None. - def __reduce__(self, _none_constructor=getattr, _args=(0, "", None)): - return _none_constructor, _args - - else: - - def __reduce__(self, _none_constructor=type(None), _args=()): - return _none_constructor, _args + def __reduce__(self, _none_constructor=type(None), _args=()): + return _none_constructor, _args def attrib( @@ -143,11 +126,11 @@ def attrib( is used and no value is passed while instantiating or the attribute is excluded using ``init=False``. - If the value is an instance of `Factory`, its callable will be + If the value is an instance of `attrs.Factory`, its callable will be used to construct a new value (useful for mutable data types like lists or dicts). - If a default is not set (or set manually to `attr.NOTHING`), a value + If a default is not set (or set manually to `attrs.NOTHING`), a value *must* be supplied when instantiating; otherwise a `TypeError` will be raised. @@ -160,7 +143,7 @@ def attrib( :param validator: `callable` that is called by ``attrs``-generated ``__init__`` methods after the instance has been initialized. They - receive the initialized instance, the `Attribute`, and the + receive the initialized instance, the :func:`~attrs.Attribute`, and the passed value. The return value is *not* inspected so the validator has to throw an @@ -220,7 +203,7 @@ def attrib( components. See `extending_metadata`. :param type: The type of the attribute. In Python 3.6 or greater, the preferred method to specify the type is using a variable annotation - (see `PEP 526 `_). + (see :pep:`526`). This argument is provided for backward compatibility. Regardless of the approach used, the type will be stored on ``Attribute.type``. @@ -233,10 +216,10 @@ def attrib( parameter is ignored). :param on_setattr: Allows to overwrite the *on_setattr* setting from `attr.s`. If left `None`, the *on_setattr* value from `attr.s` is used. - Set to `attr.setters.NO_OP` to run **no** `setattr` hooks for this + Set to `attrs.setters.NO_OP` to run **no** `setattr` hooks for this attribute -- regardless of the setting in `attr.s`. :type on_setattr: `callable`, or a list of callables, or `None`, or - `attr.setters.NO_OP` + `attrs.setters.NO_OP` .. versionadded:: 15.2.0 *convert* .. versionadded:: 16.3.0 *metadata* @@ -319,24 +302,31 @@ def _compile_and_eval(script, globs, locs=None, filename=""): eval(bytecode, globs, locs) -def _make_method(name, script, filename, globs=None): +def _make_method(name, script, filename, globs): """ Create the method with the script given and return the method object. 
""" locs = {} - if globs is None: - globs = {} - - _compile_and_eval(script, globs, locs, filename) # In order of debuggers like PDB being able to step through the code, # we add a fake linecache entry. - linecache.cache[filename] = ( - len(script), - None, - script.splitlines(True), - filename, - ) + count = 1 + base_filename = filename + while True: + linecache_tuple = ( + len(script), + None, + script.splitlines(True), + filename, + ) + old_val = linecache.cache.setdefault(filename, linecache_tuple) + if old_val == linecache_tuple: + break + else: + filename = "{}-{}>".format(base_filename[:-1], count) + count += 1 + + _compile_and_eval(script, globs, locs, filename) return locs[name] @@ -511,7 +501,7 @@ def _transform_attrs( anns = _get_annotations(cls) if these is not None: - ca_list = [(name, ca) for name, ca in iteritems(these)] + ca_list = [(name, ca) for name, ca in these.items()] if not isinstance(these, ordered_dict): ca_list.sort(key=_counter_getter) @@ -571,15 +561,11 @@ def _transform_attrs( cls, {a.name for a in own_attrs} ) - attr_names = [a.name for a in base_attrs + own_attrs] - - AttrsClass = _make_attr_tuple_class(cls.__name__, attr_names) - if kw_only: own_attrs = [a.evolve(kw_only=True) for a in own_attrs] base_attrs = [a.evolve(kw_only=True) for a in base_attrs] - attrs = AttrsClass(base_attrs + own_attrs) + attrs = base_attrs + own_attrs # Mandatory vs non-mandatory attr order only matters when they are part of # the __init__ signature and when they aren't kw_only (which are moved to @@ -598,7 +584,13 @@ def _transform_attrs( if field_transformer is not None: attrs = field_transformer(cls, attrs) - return _Attributes((attrs, base_attrs, base_attr_map)) + + # Create AttrsClass *after* applying the field_transformer since it may + # add or remove attributes! + attr_names = [a.name for a in attrs] + AttrsClass = _make_attr_tuple_class(cls.__name__, attr_names) + + return _Attributes((AttrsClass(attrs), base_attrs, base_attr_map)) if PYPY: @@ -616,7 +608,6 @@ def _frozen_setattrs(self, name, value): raise FrozenInstanceError() - else: def _frozen_setattrs(self, name, value): @@ -633,7 +624,7 @@ def _frozen_delattrs(self, name): raise FrozenInstanceError() -class _ClassBuilder(object): +class _ClassBuilder: """ Iteratively build *one* class. 
""" @@ -654,7 +645,7 @@ class _ClassBuilder(object): "_on_setattr", "_slots", "_weakref_slot", - "_has_own_setattr", + "_wrote_own_setattr", "_has_custom_setattr", ) @@ -687,7 +678,7 @@ def __init__( self._cls = cls self._cls_dict = dict(cls.__dict__) if slots else {} self._attrs = attrs - self._base_names = set(a.name for a in base_attrs) + self._base_names = {a.name for a in base_attrs} self._base_attr_map = base_map self._attr_names = tuple(a.name for a in attrs) self._slots = slots @@ -701,7 +692,7 @@ def __init__( self._on_setattr = on_setattr self._has_custom_setattr = has_custom_setattr - self._has_own_setattr = False + self._wrote_own_setattr = False self._cls_dict["__attrs_attrs__"] = self._attrs @@ -709,7 +700,33 @@ def __init__( self._cls_dict["__setattr__"] = _frozen_setattrs self._cls_dict["__delattr__"] = _frozen_delattrs - self._has_own_setattr = True + self._wrote_own_setattr = True + elif on_setattr in ( + _ng_default_on_setattr, + setters.validate, + setters.convert, + ): + has_validator = has_converter = False + for a in attrs: + if a.validator is not None: + has_validator = True + if a.converter is not None: + has_converter = True + + if has_validator and has_converter: + break + if ( + ( + on_setattr == _ng_default_on_setattr + and not (has_validator or has_converter) + ) + or (on_setattr == setters.validate and not has_validator) + or (on_setattr == setters.convert and not has_converter) + ): + # If class-level on_setattr is set to convert + validate, but + # there's no field to convert or validate, pretend like there's + # no on_setattr. + self._on_setattr = None if getstate_setstate: ( @@ -759,13 +776,13 @@ def _patch_original_class(self): # If we've inherited an attrs __setattr__ and don't write our own, # reset it to object's. - if not self._has_own_setattr and getattr( + if not self._wrote_own_setattr and getattr( cls, "__attrs_own_setattr__", False ): cls.__attrs_own_setattr__ = False if not self._has_custom_setattr: - cls.__setattr__ = object.__setattr__ + cls.__setattr__ = _obj_setattr return cls @@ -775,7 +792,7 @@ def _create_slots_class(self): """ cd = { k: v - for k, v in iteritems(self._cls_dict) + for k, v in self._cls_dict.items() if k not in tuple(self._attr_names) + ("__dict__", "__weakref__") } @@ -787,13 +804,13 @@ def _create_slots_class(self): # XXX: a non-attrs class and subclass the resulting class with an attrs # XXX: class. See `test_slotted_confused` for details. For now that's # XXX: OK with us. - if not self._has_own_setattr: + if not self._wrote_own_setattr: cd["__attrs_own_setattr__"] = False if not self._has_custom_setattr: for base_cls in self._cls.__bases__: if base_cls.__dict__.get("__attrs_own_setattr__", False): - cd["__setattr__"] = object.__setattr__ + cd["__setattr__"] = _obj_setattr break # Traverse the MRO to collect existing slots @@ -826,11 +843,11 @@ def _create_slots_class(self): slot_names = [name for name in names if name not in base_names] # There are slots for attributes from current class # that are defined in parent classes. 
- # As their descriptors may be overriden by a child class, + # As their descriptors may be overridden by a child class, # we collect them here and update the class dict reused_slots = { slot: slot_descriptor - for slot, slot_descriptor in iteritems(existing_slots) + for slot, slot_descriptor in existing_slots.items() if slot in slot_names } slot_names = [name for name in slot_names if name not in reused_slots] @@ -839,15 +856,13 @@ slot_names.append(_hash_cache_field) cd["__slots__"] = tuple(slot_names) - qualname = getattr(self._cls, "__qualname__", None) - if qualname is not None: - cd["__qualname__"] = qualname + cd["__qualname__"] = self._cls.__qualname__ # Create new class based on old class and our methods. cls = type(self._cls)(self._cls.__name__, self._cls.__bases__, cd) # The following is a fix for - # https://github.com/python-attrs/attrs/issues/102. On Python 3, + # <https://github.com/python-attrs/attrs/issues/102>. On Python 3, # if a method mentions `__class__` or uses the no-arg super(), the # compiler will bake a reference to the class in the method itself # as `method.__closure__`. Since we replace the class with a @@ -879,7 +894,7 @@ def _create_slots_class(self): def add_repr(self, ns): self._cls_dict["__repr__"] = self._add_method_dunders( - _make_repr(self._attrs, ns=ns) + _make_repr(self._attrs, ns, self._cls) ) return self @@ -958,14 +973,20 @@ def add_init(self): self._cache_hash, self._base_attr_map, self._is_exc, - self._on_setattr is not None - and self._on_setattr is not setters.NO_OP, + self._on_setattr, attrs_init=False, ) ) return self + def add_match_args(self): + self._cls_dict["__match_args__"] = tuple( + field.name + for field in self._attrs + if field.init and not field.kw_only + ) + def add_attrs_init(self): self._cls_dict["__attrs_init__"] = self._add_method_dunders( _make_init( @@ -978,8 +999,7 @@ self._cache_hash, self._base_attr_map, self._is_exc, - self._on_setattr is not None - and self._on_setattr is not setters.NO_OP, + self._on_setattr, attrs_init=True, ) ) @@ -1038,7 +1058,7 @@ def __setattr__(self, name, val): self._cls_dict["__attrs_own_setattr__"] = True self._cls_dict["__setattr__"] = self._add_method_dunders(__setattr__) - self._has_own_setattr = True + self._wrote_own_setattr = True return self @@ -1068,12 +1088,6 @@ def _add_method_dunders(self, method): return method -_CMP_DEPRECATION = ( - "The usage of `cmp` is deprecated and will be removed on or after " - "2021-06-01. Please use `eq` and `order` instead." -) - - def _determine_attrs_eq_order(cmp, eq, order, default_eq): """ Validate the combination of *cmp*, *eq*, and *order*. Derive the effective @@ -1152,8 +1166,6 @@ def _determine_whether_to_implement( whose presence signal that the user has implemented it themselves. Return *default* if no reason for either for or against is found. - - auto_detect must be False on Python 2. """ if flag is True or flag is False: return flag @@ -1192,6 +1204,7 @@ def attrs( getstate_setstate=None, on_setattr=None, field_transformer=None, + match_args=True, ): r""" A class decorator that adds `dunder @@ -1240,7 +1253,7 @@ *cmp*, or *hash* overrides whatever *auto_detect* would determine. *auto_detect* requires Python 3. Setting it ``True`` on Python 2 raises - a `PythonTooOldError`. + an `attrs.exceptions.PythonTooOldError`. :param bool repr: Create a ``__repr__`` method with a human readable representation of ``attrs`` attributes.. @@ -1316,7 +1329,7 @@ :param bool weakref_slot: Make instances weak-referenceable.
This has no effect unless ``slots`` is also enabled. - :param bool auto_attribs: If ``True``, collect `PEP 526`_-annotated + :param bool auto_attribs: If ``True``, collect :pep:`526`-annotated attributes (Python 3.6 and later only) from the class body. In this case, you **must** annotate every field. If ``attrs`` @@ -1327,7 +1340,7 @@ def attrs( If you assign a value to those attributes (e.g. ``x: int = 42``), that value becomes the default value like if it were passed using - ``attr.ib(default=42)``. Passing an instance of `Factory` also + ``attr.ib(default=42)``. Passing an instance of `attrs.Factory` also works as expected in most cases (see warning below). Attributes annotated as `typing.ClassVar`, and attributes that are @@ -1342,7 +1355,6 @@ def attrs( These errors can be quite confusing and probably the most common bug report on our bug tracker. - .. _`PEP 526`: https://www.python.org/dev/peps/pep-0526/ :param bool kw_only: Make all attributes keyword-only (Python 3+) in the generated ``__init__`` (if ``init`` is ``False``, this parameter is ignored). @@ -1369,7 +1381,7 @@ def attrs( :param bool collect_by_mro: Setting this to `True` fixes the way ``attrs`` collects attributes from base classes. The default behavior is incorrect in certain cases of multiple inheritance. It should be on by - default but is kept off for backward-compatability. + default but is kept off for backward-compatibility. See issue `#428 `_ for more details. @@ -1399,7 +1411,9 @@ def attrs( the callable. If a list of callables is passed, they're automatically wrapped in an - `attr.setters.pipe`. + `attrs.setters.pipe`. + :type on_setattr: `callable`, or a list of callables, or `None`, or + `attrs.setters.NO_OP` :param Optional[callable] field_transformer: A function that is called with the original class object and all @@ -1407,6 +1421,12 @@ def attrs( this, e.g., to automatically add converters or validators to fields based on their types. See `transform-fields` for more details. + :param bool match_args: + If `True` (default), set ``__match_args__`` on the class to support + :pep:`634` (Structural Pattern Matching). It is a tuple of all + non-keyword-only ``__init__`` parameter names on Python 3.10 and later. + Ignored on older Python versions. + .. versionadded:: 16.0.0 *slots* .. versionadded:: 16.1.0 *frozen* .. versionadded:: 16.3.0 *str* @@ -1440,12 +1460,8 @@ def attrs( ``init=False`` injects ``__attrs_init__`` .. versionchanged:: 21.1.0 Support for ``__attrs_pre_init__`` .. versionchanged:: 21.1.0 *cmp* undeprecated + .. versionadded:: 21.3.0 *match_args* """ - if auto_detect and PY2: - raise PythonTooOldError( - "auto_detect only works on Python 3 and later." - ) - eq_, order_ = _determine_attrs_eq_order(cmp, eq, order, None) hash_ = hash # work around the lack of nonlocal @@ -1453,10 +1469,6 @@ def attrs( on_setattr = setters.pipe(*on_setattr) def wrap(cls): - - if getattr(cls, "__class__", None) is None: - raise TypeError("attrs only works with new-style classes.") - is_frozen = frozen or _has_frozen_base_class(cls) is_exc = auto_exc is True and issubclass(cls, BaseException) has_own_setattr = auto_detect and _has_own_attribute( @@ -1556,6 +1568,13 @@ def wrap(cls): " init must be True." ) + if ( + PY310 + and match_args + and not _has_own_attribute(cls, "__match_args__") + ): + builder.add_match_args() + return builder.build_class() # maybe_cls's type depends on the usage of the decorator. 
It's a class @@ -1573,58 +1592,24 @@ def wrap(cls): """ -if PY2: - - def _has_frozen_base_class(cls): - """ - Check whether *cls* has a frozen ancestor by looking at its - __setattr__. - """ - return ( - getattr(cls.__setattr__, "__module__", None) - == _frozen_setattrs.__module__ - and cls.__setattr__.__name__ == _frozen_setattrs.__name__ - ) - - -else: - - def _has_frozen_base_class(cls): - """ - Check whether *cls* has a frozen ancestor by looking at its - __setattr__. - """ - return cls.__setattr__ == _frozen_setattrs +def _has_frozen_base_class(cls): + """ + Check whether *cls* has a frozen ancestor by looking at its + __setattr__. + """ + return cls.__setattr__ is _frozen_setattrs def _generate_unique_filename(cls, func_name): """ Create a "filename" suitable for a function being generated. """ - unique_id = uuid.uuid4() - extra = "" - count = 1 - - while True: - unique_filename = "".format( - func_name, - cls.__module__, - getattr(cls, "__qualname__", cls.__name__), - extra, - ) - # To handle concurrency we essentially "reserve" our spot in - # the linecache with a dummy line. The caller can then - # set this value correctly. - cache_line = (1, None, (str(unique_id),), unique_filename) - if ( - linecache.cache.setdefault(unique_filename, cache_line) - == cache_line - ): - return unique_filename - - # Looks like this spot is taken. Try again. - count += 1 - extra = "-{0}".format(count) + unique_filename = "".format( + func_name, + cls.__module__, + getattr(cls, "__qualname__", cls.__name__), + ) + return unique_filename def _make_hash(cls, attrs, frozen, cache_hash): @@ -1636,6 +1621,8 @@ def _make_hash(cls, attrs, frozen, cache_hash): unique_filename = _generate_unique_filename(cls, "hash") type_hash = hash(unique_filename) + # If eq is custom generated, we need to include the functions in globs + globs = {} hash_def = "def __hash__(self" hash_func = "hash((" @@ -1643,8 +1630,7 @@ def _make_hash(cls, attrs, frozen, cache_hash): if not cache_hash: hash_def += "):" else: - if not PY2: - hash_def += ", *" + hash_def += ", *" hash_def += ( ", _cache_wrapper=" @@ -1670,7 +1656,14 @@ def append_hash_computation_lines(prefix, indent): ) for a in attrs: - method_lines.append(indent + " self.%s," % a.name) + if a.eq_key: + cmp_name = "_%s_key" % (a.name,) + globs[cmp_name] = a.eq_key + method_lines.append( + indent + " %s(self.%s)," % (cmp_name, a.name) + ) + else: + method_lines.append(indent + " self.%s," % a.name) method_lines.append(indent + " " + closing_braces) @@ -1690,7 +1683,7 @@ def append_hash_computation_lines(prefix, indent): append_hash_computation_lines("return ", tab) script = "\n".join(method_lines) - return _make_method("__hash__", script, unique_filename) + return _make_method("__hash__", script, unique_filename, globs) def _add_hash(cls, attrs): @@ -1841,66 +1834,126 @@ def _add_eq(cls, attrs=None): return cls -_already_repring = threading.local() +if HAS_F_STRINGS: + def _make_repr(attrs, ns, cls): + unique_filename = _generate_unique_filename(cls, "repr") + # Figure out which attributes to include, and which function to use to + # format them. The a.repr value can be either bool or a custom + # callable. 
+ attr_names_with_reprs = tuple( + (a.name, (repr if a.repr is True else a.repr), a.init) + for a in attrs + if a.repr is not False + ) + globs = { + name + "_repr": r + for name, r, _ in attr_names_with_reprs + if r != repr + } + globs["_compat"] = _compat + globs["AttributeError"] = AttributeError + globs["NOTHING"] = NOTHING + attribute_fragments = [] + for name, r, i in attr_names_with_reprs: + accessor = ( + "self." + name + if i + else 'getattr(self, "' + name + '", NOTHING)' + ) + fragment = ( + "%s={%s!r}" % (name, accessor) + if r == repr + else "%s={%s_repr(%s)}" % (name, name, accessor) + ) + attribute_fragments.append(fragment) + repr_fragment = ", ".join(attribute_fragments) -def _make_repr(attrs, ns): - """ - Make a repr method that includes relevant *attrs*, adding *ns* to the full - name. - """ - - # Figure out which attributes to include, and which function to use to - # format them. The a.repr value can be either bool or a custom callable. - attr_names_with_reprs = tuple( - (a.name, repr if a.repr is True else a.repr) - for a in attrs - if a.repr is not False - ) - - def __repr__(self): - """ - Automatically created by attrs. - """ - try: - working_set = _already_repring.working_set - except AttributeError: - working_set = set() - _already_repring.working_set = working_set - - if id(self) in working_set: - return "..." - real_cls = self.__class__ if ns is None: - qualname = getattr(real_cls, "__qualname__", None) - if qualname is not None: - class_name = qualname.rsplit(">.", 1)[-1] - else: - class_name = real_cls.__name__ + cls_name_fragment = ( + '{self.__class__.__qualname__.rsplit(">.", 1)[-1]}' + ) else: - class_name = ns + "." + real_cls.__name__ + cls_name_fragment = ns + ".{self.__class__.__name__}" - # Since 'self' remains on the stack (i.e.: strongly referenced) for the - # duration of this call, it's safe to depend on id(...) stability, and - # not need to track the instance and therefore worry about properties - # like weakref- or hash-ability. - working_set.add(id(self)) - try: - result = [class_name, "("] - first = True - for name, attr_repr in attr_names_with_reprs: - if first: - first = False - else: - result.append(", ") - result.extend( - (name, "=", attr_repr(getattr(self, name, NOTHING))) - ) - return "".join(result) + ")" - finally: - working_set.remove(id(self)) + lines = [ + "def __repr__(self):", + " try:", + " already_repring = _compat.repr_context.already_repring", + " except AttributeError:", + " already_repring = {id(self),}", + " _compat.repr_context.already_repring = already_repring", + " else:", + " if id(self) in already_repring:", + " return '...'", + " else:", + " already_repring.add(id(self))", + " try:", + " return f'%s(%s)'" % (cls_name_fragment, repr_fragment), + " finally:", + " already_repring.remove(id(self))", + ] - return __repr__ + return _make_method( + "__repr__", "\n".join(lines), unique_filename, globs=globs + ) + +else: + + def _make_repr(attrs, ns, _): + """ + Make a repr method that includes relevant *attrs*, adding *ns* to the + full name. + """ + + # Figure out which attributes to include, and which function to use to + # format them. The a.repr value can be either bool or a custom + # callable. + attr_names_with_reprs = tuple( + (a.name, repr if a.repr is True else a.repr) + for a in attrs + if a.repr is not False + ) + + def __repr__(self): + """ + Automatically created by attrs. 
+ """ + try: + already_repring = _compat.repr_context.already_repring + except AttributeError: + already_repring = set() + _compat.repr_context.already_repring = already_repring + + if id(self) in already_repring: + return "..." + real_cls = self.__class__ + if ns is None: + class_name = real_cls.__qualname__.rsplit(">.", 1)[-1] + else: + class_name = ns + "." + real_cls.__name__ + + # Since 'self' remains on the stack (i.e.: strongly referenced) + # for the duration of this call, it's safe to depend on id(...) + # stability, and not need to track the instance and therefore + # worry about properties like weakref- or hash-ability. + already_repring.add(id(self)) + try: + result = [class_name, "("] + first = True + for name, attr_repr in attr_names_with_reprs: + if first: + first = False + else: + result.append(", ") + result.extend( + (name, "=", attr_repr(getattr(self, name, NOTHING))) + ) + return "".join(result) + ")" + finally: + already_repring.remove(id(self)) + + return __repr__ def _add_repr(cls, ns=None, attrs=None): @@ -1910,7 +1963,7 @@ def _add_repr(cls, ns=None, attrs=None): if attrs is None: attrs = cls.__attrs_attrs__ - cls.__repr__ = _make_repr(attrs, ns) + cls.__repr__ = _make_repr(attrs, ns, cls) return cls @@ -1927,12 +1980,12 @@ def fields(cls): :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs`` class. - :rtype: tuple (with name accessors) of `attr.Attribute` + :rtype: tuple (with name accessors) of `attrs.Attribute` .. versionchanged:: 16.2.0 Returned tuple allows accessing the fields by name. """ - if not isclass(cls): + if not isinstance(cls, type): raise TypeError("Passed object must be a class.") attrs = getattr(cls, "__attrs_attrs__", None) if attrs is None: @@ -1954,20 +2007,20 @@ def fields_dict(cls): class. :rtype: an ordered dict where keys are attribute names and values are - `attr.Attribute`\\ s. This will be a `dict` if it's + `attrs.Attribute`\\ s. This will be a `dict` if it's naturally ordered like on Python 3.6+ or an :class:`~collections.OrderedDict` otherwise. .. 
versionadded:: 18.1.0 """ - if not isclass(cls): + if not isinstance(cls, type): raise TypeError("Passed object must be a class.") attrs = getattr(cls, "__attrs_attrs__", None) if attrs is None: raise NotAnAttrsClassError( "{cls!r} is not an attrs-decorated class.".format(cls=cls) ) - return ordered_dict(((a.name, a) for a in attrs)) + return ordered_dict((a.name, a) for a in attrs) def validate(inst): @@ -2008,10 +2061,14 @@ def _make_init( cache_hash, base_attr_map, is_exc, - has_global_on_setattr, + cls_on_setattr, attrs_init, ): - if frozen and has_global_on_setattr: + has_cls_on_setattr = ( + cls_on_setattr is not None and cls_on_setattr is not setters.NO_OP + ) + + if frozen and has_cls_on_setattr: raise ValueError("Frozen classes can't use on_setattr.") needs_cached_setattr = cache_hash or frozen @@ -2029,9 +2086,7 @@ def _make_init( raise ValueError("Frozen classes can't use on_setattr.") needs_cached_setattr = True - elif ( - has_global_on_setattr and a.on_setattr is not setters.NO_OP - ) or _is_slot_attr(a.name, base_attr_map): + elif has_cls_on_setattr and a.on_setattr is not setters.NO_OP: needs_cached_setattr = True unique_filename = _generate_unique_filename(cls, "init") @@ -2045,8 +2100,7 @@ def _make_init( cache_hash, base_attr_map, is_exc, - needs_cached_setattr, - has_global_on_setattr, + has_cls_on_setattr, attrs_init, ) if cls.__module__ in sys.modules: @@ -2058,7 +2112,7 @@ def _make_init( if needs_cached_setattr: # Save the lookup overhead in __init__ if we need to circumvent # setattr hooks. - globs["_cached_setattr"] = _obj_setattr + globs["_setattr"] = _obj_setattr init = _make_method( "__attrs_init__" if attrs_init else "__init__", @@ -2075,7 +2129,7 @@ def _setattr(attr_name, value_var, has_on_setattr): """ Use the cached object.setattr to set *attr_name* to *value_var*. """ - return "_setattr('%s', %s)" % (attr_name, value_var) + return "_setattr(self, '%s', %s)" % (attr_name, value_var) def _setattr_with_converter(attr_name, value_var, has_on_setattr): @@ -2083,7 +2137,7 @@ def _setattr_with_converter(attr_name, value_var, has_on_setattr): Use the cached object.setattr to set *attr_name* to *value_var*, but run its converter first. """ - return "_setattr('%s', %s(%s))" % ( + return "_setattr(self, '%s', %s(%s))" % ( attr_name, _init_converter_pat % (attr_name,), value_var, @@ -2116,63 +2170,6 @@ def _assign_with_converter(attr_name, value_var, has_on_setattr): ) -if PY2: - - def _unpack_kw_only_py2(attr_name, default=None): - """ - Unpack *attr_name* from _kw_only dict. - """ - if default is not None: - arg_default = ", %s" % default - else: - arg_default = "" - return "%s = _kw_only.pop('%s'%s)" % ( - attr_name, - attr_name, - arg_default, - ) - - def _unpack_kw_only_lines_py2(kw_only_args): - """ - Unpack all *kw_only_args* from _kw_only dict and handle errors. - - Given a list of strings "{attr_name}" and "{attr_name}={default}" - generates list of lines of code that pop attrs from _kw_only dict and - raise TypeError similar to builtin if required attr is missing or - extra key is passed. - - >>> print("\n".join(_unpack_kw_only_lines_py2(["a", "b=42"]))) - try: - a = _kw_only.pop('a') - b = _kw_only.pop('b', 42) - except KeyError as _key_error: - raise TypeError( - ... - if _kw_only: - raise TypeError( - ... 
- """ - lines = ["try:"] - lines.extend( - " " + _unpack_kw_only_py2(*arg.split("=")) - for arg in kw_only_args - ) - lines += """\ -except KeyError as _key_error: - raise TypeError( - '__init__() missing required keyword-only argument: %s' % _key_error - ) -if _kw_only: - raise TypeError( - '__init__() got an unexpected keyword argument %r' - % next(iter(_kw_only)) - ) -""".split( - "\n" - ) - return lines - - def _attrs_to_init_script( attrs, frozen, @@ -2182,8 +2179,7 @@ def _attrs_to_init_script( cache_hash, base_attr_map, is_exc, - needs_cached_setattr, - has_global_on_setattr, + has_cls_on_setattr, attrs_init, ): """ @@ -2198,14 +2194,6 @@ def _attrs_to_init_script( if pre_init: lines.append("self.__attrs_pre_init__()") - if needs_cached_setattr: - lines.append( - # Circumvent the __setattr__ descriptor to save one lookup per - # assignment. - # Note _setattr will be used again below if cache_hash is True - "_setattr = _cached_setattr.__get__(self, self.__class__)" - ) - if frozen is True: if slots is True: fmt_setter = _setattr @@ -2257,7 +2245,7 @@ def fmt_setter_with_converter( attr_name = a.name has_on_setattr = a.on_setattr is not None or ( - a.on_setattr is not setters.NO_OP and has_global_on_setattr + a.on_setattr is not setters.NO_OP and has_cls_on_setattr ) arg_name = a.name.lstrip("_") @@ -2390,21 +2378,11 @@ def fmt_setter_with_converter( if a.init is True: if a.type is not None and a.converter is None: annotations[arg_name] = a.type - elif a.converter is not None and not PY2: + elif a.converter is not None: # Try to get the type from the converter. - sig = None - try: - sig = inspect.signature(a.converter) - except (ValueError, TypeError): # inspect failed - pass - if sig: - sig_params = list(sig.parameters.values()) - if ( - sig_params - and sig_params[0].annotation - is not inspect.Parameter.empty - ): - annotations[arg_name] = sig_params[0].annotation + t = _AnnotationExtractor(a.converter).get_first_param_type() + if t: + annotations[arg_name] = t if attrs_to_validate: # we can skip this if there are no validators. names_for_globals["_config"] = _config @@ -2421,7 +2399,7 @@ def fmt_setter_with_converter( if post_init: lines.append("self.__attrs_post_init__()") - # because this is set only after __attrs_post_init is called, a crash + # because this is set only after __attrs_post_init__ is called, a crash # will result if post-init tries to access the hash code. This seemed # preferable to setting this beforehand, in which case alteration to # field values during post-init combined with post-init accessing the @@ -2430,7 +2408,7 @@ def fmt_setter_with_converter( if frozen: if slots: # if frozen and slots, then _setattr defined above - init_hash_cache = "_setattr('%s', %s)" + init_hash_cache = "_setattr(self, '%s', %s)" else: # if frozen and not slots, then _inst_dict defined above init_hash_cache = "_inst_dict['%s'] = %s" @@ -2447,15 +2425,10 @@ def fmt_setter_with_converter( args = ", ".join(args) if kw_only_args: - if PY2: - lines = _unpack_kw_only_lines_py2(kw_only_args) + lines - - args += "%s**_kw_only" % (", " if args else "",) # leading comma - else: - args += "%s*, %s" % ( - ", " if args else "", # leading comma - ", ".join(kw_only_args), # kw_only args - ) + args += "%s*, %s" % ( + ", " if args else "", # leading comma + ", ".join(kw_only_args), # kw_only args + ) return ( """\ def {init_name}(self, {args}): @@ -2470,23 +2443,30 @@ def {init_name}(self, {args}): ) -class Attribute(object): +class Attribute: """ *Read-only* representation of an attribute. 
+ The class has *all* arguments of `attr.ib` (except for ``factory`` + which is only syntactic sugar for ``default=Factory(...)`` plus the + following: + + - ``name`` (`str`): The name of the attribute. + - ``inherited`` (`bool`): Whether or not that attribute has been inherited + from a base class. + - ``eq_key`` and ``order_key`` (`typing.Callable` or `None`): The callables + that are used for comparing and ordering objects by this attribute, + respectively. These are set by passing a callable to `attr.ib`'s ``eq``, + ``order``, or ``cmp`` arguments. See also :ref:`comparison customization + `. + Instances of this class are frequently used for introspection purposes like: - `fields` returns a tuple of them. - Validators get them passed as the first argument. - - The *field transformer* hook receives a list of them. - - :attribute name: The name of the attribute. - :attribute inherited: Whether or not that attribute has been inherited from - a base class. - - Plus *all* arguments of `attr.ib` (except for ``factory`` - which is only syntactic sugar for ``default=Factory(...)``. + - The :ref:`field transformer ` hook receives a list of + them. .. versionadded:: 20.1.0 *inherited* .. versionadded:: 20.1.0 *on_setattr* @@ -2559,7 +2539,7 @@ def __init__( bound_setattr( "metadata", ( - metadata_proxy(metadata) + types.MappingProxyType(dict(metadata)) # Shallow copy if metadata else _empty_metadata_singleton ), @@ -2603,15 +2583,6 @@ def from_counting_attr(cls, name, ca, type=None): **inst_dict ) - @property - def cmp(self): - """ - Simulate the presence of a cmp attribute and warn. - """ - warnings.warn(_CMP_DEPRECATION, DeprecationWarning, stacklevel=2) - - return self.eq and self.order - # Don't use attr.evolve since fields(Attribute) doesn't work def evolve(self, **changes): """ @@ -2654,7 +2625,7 @@ def _setattrs(self, name_values_pairs): else: bound_setattr( name, - metadata_proxy(value) + types.MappingProxyType(dict(value)) if value else _empty_metadata_singleton, ) @@ -2685,7 +2656,7 @@ def _setattrs(self, name_values_pairs): ) -class _CountingAttr(object): +class _CountingAttr: """ Intermediate representation of attributes that uses a counter to preserve the order in which the attributes have been defined. @@ -2828,11 +2799,11 @@ def default(self, meth): _CountingAttr = _add_eq(_add_repr(_CountingAttr)) -class Factory(object): +class Factory: """ Stores a factory callable. - If passed as the default value to `attr.ib`, the factory is used to + If passed as the default value to `attrs.field`, the factory is used to generate a new value. :param callable factory: A callable that takes either none or exactly one @@ -2914,7 +2885,7 @@ def make_class(name, attrs, bases=(object,), **attributes_arguments): if isinstance(attrs, dict): cls_dict = attrs elif isinstance(attrs, (list, tuple)): - cls_dict = dict((a, attrib()) for a in attrs) + cls_dict = {a: attrib() for a in attrs} else: raise TypeError("attrs argument must be a dict or a list.") @@ -2930,7 +2901,7 @@ def make_class(name, attrs, bases=(object,), **attributes_arguments): if user_init is not None: body["__init__"] = user_init - type_ = new_class(name, bases, {}, lambda ns: ns.update(body)) + type_ = types.new_class(name, bases, {}, lambda ns: ns.update(body)) # For pickling to work, the __module__ variable needs to be set to the # frame where the class is created. 
Bypass this step in environments where @@ -2963,7 +2934,7 @@ @attrs(slots=True, hash=True) -class _AndValidator(object): +class _AndValidator: """ Compose many validators to a single one. """ @@ -3017,36 +2988,19 @@ def pipe_converter(val): return val - if not PY2: - if not converters: - # If the converter list is empty, pipe_converter is the identity. - A = typing.TypeVar("A") - pipe_converter.__annotations__ = {"val": A, "return": A} - else: - # Get parameter type. - sig = None - try: - sig = inspect.signature(converters[0]) - except (ValueError, TypeError): # inspect failed - pass - if sig: - params = list(sig.parameters.values()) - if ( - params - and params[0].annotation is not inspect.Parameter.empty - ): - pipe_converter.__annotations__["val"] = params[ - 0 - ].annotation - # Get return type. - sig = None - try: - sig = inspect.signature(converters[-1]) - except (ValueError, TypeError): # inspect failed - pass - if sig and sig.return_annotation is not inspect.Signature().empty: - pipe_converter.__annotations__[ - "return" - ] = sig.return_annotation + if not converters: + # If the converter list is empty, pipe_converter is the identity. + A = typing.TypeVar("A") + pipe_converter.__annotations__ = {"val": A, "return": A} + else: + # Get parameter type from first converter. + t = _AnnotationExtractor(converters[0]).get_first_param_type() + if t: + pipe_converter.__annotations__["val"] = t + + # Get return type from last converter. + rt = _AnnotationExtractor(converters[-1]).get_return_type() + if rt: + pipe_converter.__annotations__["return"] = rt return pipe_converter diff --git a/lib/spack/external/attr/_next_gen.py b/lib/spack/external/_vendoring/attr/_next_gen.py similarity index 62% rename from lib/spack/external/attr/_next_gen.py rename to lib/spack/external/_vendoring/attr/_next_gen.py index fab0af966a5..5a06a743855 100644 --- a/lib/spack/external/attr/_next_gen.py +++ b/lib/spack/external/_vendoring/attr/_next_gen.py @@ -1,14 +1,24 @@ +# SPDX-License-Identifier: MIT + """ These are Python 3.6+-only and keyword-only APIs that call `attr.s` and `attr.ib` with different default values. """ + from functools import partial -from attr.exceptions import UnannotatedAttributeError - from . import setters -from ._make import NOTHING, _frozen_setattrs, attrib, attrs +from ._funcs import asdict as _asdict +from ._funcs import astuple as _astuple +from ._make import ( + NOTHING, + _frozen_setattrs, + _ng_default_on_setattr, + attrib, + attrs, +) +from .exceptions import UnannotatedAttributeError def define( @@ -32,22 +42,45 @@ def define( getstate_setstate=None, on_setattr=None, field_transformer=None, + match_args=True, ): r""" - The only behavioral differences are the handling of the *auto_attribs* - option: + Define an ``attrs`` class. + + Differences to the classic `attr.s` that it uses underneath: + + - Automatically detect whether or not *auto_attribs* should be `True` (c.f. + *auto_attribs* parameter). + - If *frozen* is `False`, run converters and validators when setting an + attribute by default. + - *slots=True* + + .. caution:: + + Usually this has only upsides and few visible effects in everyday + programming. But it *can* lead to some surprising behaviors, so please + make sure to read :term:`slotted classes`. + - *auto_exc=True* + - *auto_detect=True* + - *order=False* + - Some options that were only relevant on Python 2 or were kept around for + backwards-compatibility have been removed.
+ + Please note that these are all defaults and you can change them as you + wish. + :param Optional[bool] auto_attribs: If set to `True` or `False`, it behaves exactly like `attr.s`. If left `None`, `attr.s` will try to guess: - 1. If any attributes are annotated and no unannotated `attr.ib`\ s + 1. If any attributes are annotated and no unannotated `attrs.field`\ s are found, it assumes *auto_attribs=True*. 2. Otherwise it assumes *auto_attribs=False* and tries to collect - `attr.ib`\ s. + `attrs.field`\ s. - and that mutable classes (``frozen=False``) validate on ``__setattr__``. + For now, please refer to `attr.s` for the rest of the parameters. .. versionadded:: 20.1.0 + .. versionchanged:: 21.3.0 Converters are also run ``on_setattr``. """ def do_it(cls, auto_attribs): @@ -72,6 +105,7 @@ def do_it(cls, auto_attribs): getstate_setstate=getstate_setstate, on_setattr=on_setattr, field_transformer=field_transformer, + match_args=match_args, ) def wrap(cls): @@ -84,9 +118,9 @@ def wrap(cls): had_on_setattr = on_setattr not in (None, setters.NO_OP) - # By default, mutable classes validate on setattr. + # By default, mutable classes convert & validate on setattr. if frozen is False and on_setattr is None: - on_setattr = setters.validate + on_setattr = _ng_default_on_setattr # However, if we subclass a frozen class, we inherit the immutability # and disable on_setattr. @@ -156,3 +190,31 @@ def field( order=order, on_setattr=on_setattr, ) + + +def asdict(inst, *, recurse=True, filter=None, value_serializer=None): + """ + Same as `attr.asdict`, except that collection types are always retained + and dict is always used as *dict_factory*. + + .. versionadded:: 21.3.0 + """ + return _asdict( + inst=inst, + recurse=recurse, + filter=filter, + value_serializer=value_serializer, + retain_collection_types=True, + ) + + +def astuple(inst, *, recurse=True, filter=None): + """ + Same as `attr.astuple`, except that collection types are always retained + and `tuple` is always used as the *tuple_factory*. + + .. versionadded:: 21.3.0 + """ + return _astuple( + inst=inst, recurse=recurse, filter=filter, retain_collection_types=True + ) diff --git a/lib/spack/external/attr/_version_info.py b/lib/spack/external/_vendoring/attr/_version_info.py similarity index 95% rename from lib/spack/external/attr/_version_info.py rename to lib/spack/external/_vendoring/attr/_version_info.py index 014e78a1b43..51a1312f975 100644 --- a/lib/spack/external/attr/_version_info.py +++ b/lib/spack/external/_vendoring/attr/_version_info.py @@ -1,4 +1,5 @@ -from __future__ import absolute_import, division, print_function +# SPDX-License-Identifier: MIT + from functools import total_ordering @@ -8,7 +9,7 @@ @total_ordering @attrs(eq=False, order=False, slots=True, frozen=True) -class VersionInfo(object): +class VersionInfo: """ A version object that can be compared to a tuple of length 1--4: diff --git a/lib/spack/external/_vendoring/attr/_version_info.pyi b/lib/spack/external/_vendoring/attr/_version_info.pyi new file mode 100644 index 00000000000..45ced086337 --- /dev/null +++ b/lib/spack/external/_vendoring/attr/_version_info.pyi @@ -0,0 +1,9 @@ +class VersionInfo: + @property + def year(self) -> int: ... + @property + def minor(self) -> int: ... + @property + def micro(self) -> int: ... + @property + def releaselevel(self) -> str: ...
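For orientation, a minimal sketch of how the next-gen API vendored above is meant to be used; it is illustrative only (the `Point` class and its attributes are invented for the example) and assumes the vendored package is importable as `attrs`:

    import attrs  # next-gen namespace wrapping attr.s / attr.ib

    @attrs.define               # slots=True; auto_attribs inferred from annotations
    class Point:
        x: int = attrs.field(converter=int)
        y: int = 0

    p = Point("1")              # converter runs in __init__: p.x == 1
    p.x = "2"                   # mutable class: converter also runs on __setattr__
    assert p.x == 2
    assert attrs.asdict(p) == {"x": 2, "y": 0}  # collection types retained

Note that `define` now installs `_ng_default_on_setattr` (convert *and* validate) instead of `setters.validate`, which is exactly the `on_setattr` change recorded in the hunk above.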
diff --git a/lib/spack/external/attr/converters.py b/lib/spack/external/_vendoring/attr/converters.py similarity index 58% rename from lib/spack/external/attr/converters.py rename to lib/spack/external/_vendoring/attr/converters.py index 2777db6d0af..a73626c26d6 100644 --- a/lib/spack/external/attr/converters.py +++ b/lib/spack/external/_vendoring/attr/converters.py @@ -1,22 +1,21 @@ +# SPDX-License-Identifier: MIT + """ Commonly useful converters. """ -from __future__ import absolute_import, division, print_function -from ._compat import PY2 +import typing + +from ._compat import _AnnotationExtractor from ._make import NOTHING, Factory, pipe -if not PY2: - import inspect - import typing - - __all__ = [ - "pipe", - "optional", "default_if_none", + "optional", + "pipe", + "to_bool", ] @@ -39,22 +38,15 @@ def optional_converter(val): return None return converter(val) - if not PY2: - sig = None - try: - sig = inspect.signature(converter) - except (ValueError, TypeError): # inspect failed - pass - if sig: - params = list(sig.parameters.values()) - if params and params[0].annotation is not inspect.Parameter.empty: - optional_converter.__annotations__["val"] = typing.Optional[ - params[0].annotation - ] - if sig.return_annotation is not inspect.Signature.empty: - optional_converter.__annotations__["return"] = typing.Optional[ - sig.return_annotation - ] + xtr = _AnnotationExtractor(converter) + + t = xtr.get_first_param_type() + if t: + optional_converter.__annotations__["val"] = typing.Optional[t] + + rt = xtr.get_return_type() + if rt: + optional_converter.__annotations__["return"] = typing.Optional[rt] return optional_converter @@ -65,14 +57,14 @@ def default_if_none(default=NOTHING, factory=None): result of *factory*. :param default: Value to be used if ``None`` is passed. Passing an instance - of `attr.Factory` is supported, however the ``takes_self`` option + of `attrs.Factory` is supported, however the ``takes_self`` option is *not*. :param callable factory: A callable that takes no parameters whose result is used if ``None`` is passed. :raises TypeError: If **neither** *default* nor *factory* is passed. :raises TypeError: If **both** *default* and *factory* are passed. - :raises ValueError: If an instance of `attr.Factory` is passed with + :raises ValueError: If an instance of `attrs.Factory` is passed with ``takes_self=True``. .. versionadded:: 18.2.0 @@ -109,3 +101,44 @@ def default_if_none_converter(val): return default return default_if_none_converter + + +def to_bool(val): + """ + Convert "boolean" strings (e.g., from env. vars.) to real booleans. + + Values mapping to :code:`True`: + + - :code:`True` + - :code:`"true"` / :code:`"t"` + - :code:`"yes"` / :code:`"y"` + - :code:`"on"` + - :code:`"1"` + - :code:`1` + + Values mapping to :code:`False`: + + - :code:`False` + - :code:`"false"` / :code:`"f"` + - :code:`"no"` / :code:`"n"` + - :code:`"off"` + - :code:`"0"` + - :code:`0` + + :raises ValueError: for any other value. + + ..
versionadded:: 21.3.0 + """ + if isinstance(val, str): + val = val.lower() + truthy = {True, "true", "t", "yes", "y", "on", "1", 1} + falsy = {False, "false", "f", "no", "n", "off", "0", 0} + try: + if val in truthy: + return True + if val in falsy: + return False + except TypeError: + # Raised when "val" is not hashable (e.g., lists) + pass + raise ValueError("Cannot convert value to bool: {}".format(val)) diff --git a/lib/spack/external/_vendoring/attr/converters.pyi b/lib/spack/external/_vendoring/attr/converters.pyi new file mode 100644 index 00000000000..0f58088a37b --- /dev/null +++ b/lib/spack/external/_vendoring/attr/converters.pyi @@ -0,0 +1,13 @@ +from typing import Callable, Optional, TypeVar, overload + +from . import _ConverterType + +_T = TypeVar("_T") + +def pipe(*validators: _ConverterType) -> _ConverterType: ... +def optional(converter: _ConverterType) -> _ConverterType: ... +@overload +def default_if_none(default: _T) -> _ConverterType: ... +@overload +def default_if_none(*, factory: Callable[[], _T]) -> _ConverterType: ... +def to_bool(val: str) -> bool: ... diff --git a/lib/spack/external/attr/exceptions.py b/lib/spack/external/_vendoring/attr/exceptions.py similarity index 96% rename from lib/spack/external/attr/exceptions.py rename to lib/spack/external/_vendoring/attr/exceptions.py index f6f9861bea9..5dc51e0a82b 100644 --- a/lib/spack/external/attr/exceptions.py +++ b/lib/spack/external/_vendoring/attr/exceptions.py @@ -1,4 +1,4 @@ -from __future__ import absolute_import, division, print_function +# SPDX-License-Identifier: MIT class FrozenError(AttributeError): diff --git a/lib/spack/external/_vendoring/attr/exceptions.pyi b/lib/spack/external/_vendoring/attr/exceptions.pyi new file mode 100644 index 00000000000..f2680118b40 --- /dev/null +++ b/lib/spack/external/_vendoring/attr/exceptions.pyi @@ -0,0 +1,17 @@ +from typing import Any + +class FrozenError(AttributeError): + msg: str = ... + +class FrozenInstanceError(FrozenError): ... +class FrozenAttributeError(FrozenError): ... +class AttrsAttributeNotFoundError(ValueError): ... +class NotAnAttrsClassError(ValueError): ... +class DefaultAlreadySetError(RuntimeError): ... +class UnannotatedAttributeError(RuntimeError): ... +class PythonTooOldError(RuntimeError): ... + +class NotCallableError(TypeError): + msg: str = ... + value: Any = ... + def __init__(self, msg: str, value: Any) -> None: ... diff --git a/lib/spack/external/attr/filters.py b/lib/spack/external/_vendoring/attr/filters.py similarity index 65% rename from lib/spack/external/attr/filters.py rename to lib/spack/external/_vendoring/attr/filters.py index dc47e8fa38c..baa25e94652 100644 --- a/lib/spack/external/attr/filters.py +++ b/lib/spack/external/_vendoring/attr/filters.py @@ -1,10 +1,9 @@ +# SPDX-License-Identifier: MIT + """ Commonly useful filters for `attr.asdict`. """ -from __future__ import absolute_import, division, print_function - -from ._compat import isclass from ._make import Attribute @@ -13,17 +12,17 @@ def _split_what(what): Returns a tuple of `frozenset`s of classes and attributes. """ return ( - frozenset(cls for cls in what if isclass(cls)), + frozenset(cls for cls in what if isinstance(cls, type)), frozenset(cls for cls in what if isinstance(cls, Attribute)), ) def include(*what): """ - Whitelist *what*. + Include *what*. - :param what: What to whitelist. - :type what: `list` of `type` or `attr.Attribute`\\ s + :param what: What to include. 
+ :type what: `list` of `type` or `attrs.Attribute`\\ s :rtype: `callable` """ @@ -37,10 +36,10 @@ def include_(attribute, value): def exclude(*what): """ - Blacklist *what*. + Exclude *what*. - :param what: What to blacklist. - :type what: `list` of classes or `attr.Attribute`\\ s. + :param what: What to exclude. + :type what: `list` of classes or `attrs.Attribute`\\ s. :rtype: `callable` """ diff --git a/lib/spack/external/_vendoring/attr/filters.pyi b/lib/spack/external/_vendoring/attr/filters.pyi new file mode 100644 index 00000000000..993866865ea --- /dev/null +++ b/lib/spack/external/_vendoring/attr/filters.pyi @@ -0,0 +1,6 @@ +from typing import Any, Union + +from . import Attribute, _FilterType + +def include(*what: Union[type, Attribute[Any]]) -> _FilterType[Any]: ... +def exclude(*what: Union[type, Attribute[Any]]) -> _FilterType[Any]: ... diff --git a/lib/spack/external/pytest-fallback/_pytest/vendored_packages/__init__.py b/lib/spack/external/_vendoring/attr/py.typed similarity index 100% rename from lib/spack/external/pytest-fallback/_pytest/vendored_packages/__init__.py rename to lib/spack/external/_vendoring/attr/py.typed diff --git a/lib/spack/external/attr/setters.py b/lib/spack/external/_vendoring/attr/setters.py similarity index 84% rename from lib/spack/external/attr/setters.py rename to lib/spack/external/_vendoring/attr/setters.py index 240014b3c1e..12ed6750df3 100644 --- a/lib/spack/external/attr/setters.py +++ b/lib/spack/external/_vendoring/attr/setters.py @@ -1,8 +1,9 @@ +# SPDX-License-Identifier: MIT + """ Commonly used hooks for on_setattr. """ -from __future__ import absolute_import, division, print_function from . import _config from .exceptions import FrozenAttributeError @@ -67,11 +68,6 @@ def convert(instance, attrib, new_value): return new_value +# Sentinel for disabling class-wide *on_setattr* hooks for certain attributes. +# autodata stopped working, so the docstring is inlined in the API docs. NO_OP = object() -""" -Sentinel for disabling class-wide *on_setattr* hooks for certain attributes. - -Does not work in `pipe` or within lists. - -.. versionadded:: 20.1.0 -""" diff --git a/lib/spack/external/_vendoring/attr/setters.pyi b/lib/spack/external/_vendoring/attr/setters.pyi new file mode 100644 index 00000000000..3f5603c2b0c --- /dev/null +++ b/lib/spack/external/_vendoring/attr/setters.pyi @@ -0,0 +1,19 @@ +from typing import Any, NewType, NoReturn, TypeVar, cast + +from . import Attribute, _OnSetAttrType + +_T = TypeVar("_T") + +def frozen( + instance: Any, attribute: Attribute[Any], new_value: Any +) -> NoReturn: ... +def pipe(*setters: _OnSetAttrType) -> _OnSetAttrType: ... +def validate(instance: Any, attribute: Attribute[_T], new_value: _T) -> _T: ... + +# convert is allowed to return Any, because they can be chained using pipe. +def convert( + instance: Any, attribute: Attribute[Any], new_value: Any +) -> Any: ... + +_NoOpType = NewType("_NoOpType", object) +NO_OP: _NoOpType diff --git a/lib/spack/external/attr/validators.py b/lib/spack/external/_vendoring/attr/validators.py similarity index 58% rename from lib/spack/external/attr/validators.py rename to lib/spack/external/_vendoring/attr/validators.py index b9a73054e9c..eece517da8e 100644 --- a/lib/spack/external/attr/validators.py +++ b/lib/spack/external/_vendoring/attr/validators.py @@ -1,30 +1,98 @@ +# SPDX-License-Identifier: MIT + """ Commonly useful validators. 
""" -from __future__ import absolute_import, division, print_function +import operator import re +from contextlib import contextmanager + +from ._config import get_run_validators, set_run_validators from ._make import _AndValidator, and_, attrib, attrs from .exceptions import NotCallableError +try: + Pattern = re.Pattern +except AttributeError: # Python <3.7 lacks a Pattern type. + Pattern = type(re.compile("")) + + __all__ = [ "and_", "deep_iterable", "deep_mapping", + "disabled", + "ge", + "get_disabled", + "gt", "in_", "instance_of", "is_callable", + "le", + "lt", "matches_re", + "max_len", + "min_len", "optional", "provides", + "set_disabled", ] +def set_disabled(disabled): + """ + Globally disable or enable running validators. + + By default, they are run. + + :param disabled: If ``True``, disable running all validators. + :type disabled: bool + + .. warning:: + + This function is not thread-safe! + + .. versionadded:: 21.3.0 + """ + set_run_validators(not disabled) + + +def get_disabled(): + """ + Return a bool indicating whether validators are currently disabled or not. + + :return: ``True`` if validators are currently disabled. + :rtype: bool + + .. versionadded:: 21.3.0 + """ + return not get_run_validators() + + +@contextmanager +def disabled(): + """ + Context manager that disables running validators within its context. + + .. warning:: + + This context manager is not thread-safe! + + .. versionadded:: 21.3.0 + """ + set_run_validators(False) + try: + yield + finally: + set_run_validators(True) + + @attrs(repr=False, slots=True, hash=True) -class _InstanceOfValidator(object): +class _InstanceOfValidator: type = attrib() def __call__(self, inst, attr, value): @@ -61,16 +129,15 @@ def instance_of(type): :type type: type or tuple of types :raises TypeError: With a human readable error message, the attribute - (of type `attr.Attribute`), the expected type, and the value it + (of type `attrs.Attribute`), the expected type, and the value it got. """ return _InstanceOfValidator(type) @attrs(repr=False, frozen=True, slots=True) -class _MatchesReValidator(object): - regex = attrib() - flags = attrib() +class _MatchesReValidator: + pattern = attrib() match_func = attrib() def __call__(self, inst, attr, value): @@ -79,18 +146,18 @@ def __call__(self, inst, attr, value): """ if not self.match_func(value): raise ValueError( - "'{name}' must match regex {regex!r}" + "'{name}' must match regex {pattern!r}" " ({value!r} doesn't)".format( - name=attr.name, regex=self.regex.pattern, value=value + name=attr.name, pattern=self.pattern.pattern, value=value ), attr, - self.regex, + self.pattern, value, ) def __repr__(self): - return "".format( - regex=self.regex + return "".format( + pattern=self.pattern ) @@ -99,48 +166,51 @@ def matches_re(regex, flags=0, func=None): A validator that raises `ValueError` if the initializer is called with a string that doesn't match *regex*. - :param str regex: a regex string to match against + :param regex: a regex string or precompiled pattern to match against :param int flags: flags that will be passed to the underlying re function (default 0) - :param callable func: which underlying `re` function to call (options - are `re.fullmatch`, `re.search`, `re.match`, default - is ``None`` which means either `re.fullmatch` or an emulation of - it on Python 2). For performance reasons, they won't be used directly - but on a pre-`re.compile`\ ed pattern. + :param callable func: which underlying `re` function to call. 
Valid options + are `re.fullmatch`, `re.search`, and `re.match`; the default ``None`` + means `re.fullmatch`. For performance reasons, the pattern is always + precompiled using `re.compile`. .. versionadded:: 19.2.0 + .. versionchanged:: 21.3.0 *regex* can be a pre-compiled pattern. """ - fullmatch = getattr(re, "fullmatch", None) - valid_funcs = (fullmatch, None, re.search, re.match) + valid_funcs = (re.fullmatch, None, re.search, re.match) if func not in valid_funcs: raise ValueError( - "'func' must be one of %s." - % ( + "'func' must be one of {}.".format( ", ".join( sorted( e and e.__name__ or "None" for e in set(valid_funcs) ) - ), + ) ) ) - pattern = re.compile(regex, flags) + if isinstance(regex, Pattern): + if flags: + raise TypeError( + "'flags' can only be used with a string pattern; " + "pass flags to re.compile() instead" + ) + pattern = regex + else: + pattern = re.compile(regex, flags) + if func is re.match: match_func = pattern.match elif func is re.search: match_func = pattern.search else: - if fullmatch: - match_func = pattern.fullmatch - else: - pattern = re.compile(r"(?:{})\Z".format(regex), flags) - match_func = pattern.match + match_func = pattern.fullmatch - return _MatchesReValidator(pattern, flags, match_func) + return _MatchesReValidator(pattern, match_func) @attrs(repr=False, slots=True, hash=True) -class _ProvidesValidator(object): +class _ProvidesValidator: interface = attrib() def __call__(self, inst, attr, value): @@ -175,14 +245,14 @@ def provides(interface): :type interface: ``zope.interface.Interface`` :raises TypeError: With a human readable error message, the attribute - (of type `attr.Attribute`), the expected interface, and the + (of type `attrs.Attribute`), the expected interface, and the value it got. """ return _ProvidesValidator(interface) @attrs(repr=False, slots=True, hash=True) -class _OptionalValidator(object): +class _OptionalValidator: validator = attrib() def __call__(self, inst, attr, value): @@ -216,7 +286,7 @@ def optional(validator): @attrs(repr=False, slots=True, hash=True) -class _InValidator(object): +class _InValidator: options = attrib() def __call__(self, inst, attr, value): @@ -229,7 +299,10 @@ def __call__(self, inst, attr, value): raise ValueError( "'{name}' must be in {options!r} (got {value!r})".format( name=attr.name, options=self.options, value=value - ) + ), + attr, + self.options, + value, ) def __repr__(self): @@ -248,16 +321,20 @@ def in_(options): :type options: list, tuple, `enum.Enum`, ... :raises ValueError: With a human readable error message, the attribute (of - type `attr.Attribute`), the expected options, and the value it + type `attrs.Attribute`), the expected options, and the value it got. .. versionadded:: 17.1.0 + .. versionchanged:: 22.1.0 + The ValueError was incomplete until now and only contained the human + readable error message. Now it contains all the information that has + been promised since 17.1.0. """ return _InValidator(options) @attrs(repr=False, slots=False, hash=True) -class _IsCallableValidator(object): +class _IsCallableValidator: def __call__(self, inst, attr, value): """ We use a callable class to be able to change the ``__repr__``. @@ -287,14 +364,14 @@ def is_callable(): .. versionadded:: 19.1.0 :raises `attr.exceptions.NotCallableError`: With a human readable error - message containing the attribute (`attr.Attribute`) name, + message containing the attribute (`attrs.Attribute`) name, and the value it got. 
""" return _IsCallableValidator() @attrs(repr=False, slots=True, hash=True) -class _DeepIterable(object): +class _DeepIterable: member_validator = attrib(validator=is_callable()) iterable_validator = attrib( default=None, validator=optional(is_callable()) @@ -329,7 +406,7 @@ def deep_iterable(member_validator, iterable_validator=None): """ A validator that performs deep validation of an iterable. - :param member_validator: Validator to apply to iterable members + :param member_validator: Validator(s) to apply to iterable members :param iterable_validator: Validator to apply to iterable itself (optional) @@ -337,11 +414,13 @@ def deep_iterable(member_validator, iterable_validator=None): :raises TypeError: if any sub-validators fail """ + if isinstance(member_validator, (list, tuple)): + member_validator = and_(*member_validator) return _DeepIterable(member_validator, iterable_validator) @attrs(repr=False, slots=True, hash=True) -class _DeepMapping(object): +class _DeepMapping: key_validator = attrib(validator=is_callable()) value_validator = attrib(validator=is_callable()) mapping_validator = attrib(default=None, validator=optional(is_callable())) @@ -377,3 +456,139 @@ def deep_mapping(key_validator, value_validator, mapping_validator=None): :raises TypeError: if any sub-validators fail """ return _DeepMapping(key_validator, value_validator, mapping_validator) + + +@attrs(repr=False, frozen=True, slots=True) +class _NumberValidator: + bound = attrib() + compare_op = attrib() + compare_func = attrib() + + def __call__(self, inst, attr, value): + """ + We use a callable class to be able to change the ``__repr__``. + """ + if not self.compare_func(value, self.bound): + raise ValueError( + "'{name}' must be {op} {bound}: {value}".format( + name=attr.name, + op=self.compare_op, + bound=self.bound, + value=value, + ) + ) + + def __repr__(self): + return "".format( + op=self.compare_op, bound=self.bound + ) + + +def lt(val): + """ + A validator that raises `ValueError` if the initializer is called + with a number larger or equal to *val*. + + :param val: Exclusive upper bound for values + + .. versionadded:: 21.3.0 + """ + return _NumberValidator(val, "<", operator.lt) + + +def le(val): + """ + A validator that raises `ValueError` if the initializer is called + with a number greater than *val*. + + :param val: Inclusive upper bound for values + + .. versionadded:: 21.3.0 + """ + return _NumberValidator(val, "<=", operator.le) + + +def ge(val): + """ + A validator that raises `ValueError` if the initializer is called + with a number smaller than *val*. + + :param val: Inclusive lower bound for values + + .. versionadded:: 21.3.0 + """ + return _NumberValidator(val, ">=", operator.ge) + + +def gt(val): + """ + A validator that raises `ValueError` if the initializer is called + with a number smaller or equal to *val*. + + :param val: Exclusive lower bound for values + + .. versionadded:: 21.3.0 + """ + return _NumberValidator(val, ">", operator.gt) + + +@attrs(repr=False, frozen=True, slots=True) +class _MaxLengthValidator: + max_length = attrib() + + def __call__(self, inst, attr, value): + """ + We use a callable class to be able to change the ``__repr__``. 
+ """ + if len(value) > self.max_length: + raise ValueError( + "Length of '{name}' must be <= {max}: {len}".format( + name=attr.name, max=self.max_length, len=len(value) + ) + ) + + def __repr__(self): + return "".format(max=self.max_length) + + +def max_len(length): + """ + A validator that raises `ValueError` if the initializer is called + with a string or iterable that is longer than *length*. + + :param int length: Maximum length of the string or iterable + + .. versionadded:: 21.3.0 + """ + return _MaxLengthValidator(length) + + +@attrs(repr=False, frozen=True, slots=True) +class _MinLengthValidator: + min_length = attrib() + + def __call__(self, inst, attr, value): + """ + We use a callable class to be able to change the ``__repr__``. + """ + if len(value) < self.min_length: + raise ValueError( + "Length of '{name}' must be => {min}: {len}".format( + name=attr.name, min=self.min_length, len=len(value) + ) + ) + + def __repr__(self): + return "".format(min=self.min_length) + + +def min_len(length): + """ + A validator that raises `ValueError` if the initializer is called + with a string or iterable that is shorter than *length*. + + :param int length: Minimum length of the string or iterable + + .. versionadded:: 22.1.0 + """ + return _MinLengthValidator(length) diff --git a/lib/spack/external/_vendoring/attr/validators.pyi b/lib/spack/external/_vendoring/attr/validators.pyi new file mode 100644 index 00000000000..54b9dba24ef --- /dev/null +++ b/lib/spack/external/_vendoring/attr/validators.pyi @@ -0,0 +1,80 @@ +from typing import ( + Any, + AnyStr, + Callable, + Container, + ContextManager, + Iterable, + List, + Mapping, + Match, + Optional, + Pattern, + Tuple, + Type, + TypeVar, + Union, + overload, +) + +from . import _ValidatorType +from . import _ValidatorArgType + +_T = TypeVar("_T") +_T1 = TypeVar("_T1") +_T2 = TypeVar("_T2") +_T3 = TypeVar("_T3") +_I = TypeVar("_I", bound=Iterable) +_K = TypeVar("_K") +_V = TypeVar("_V") +_M = TypeVar("_M", bound=Mapping) + +def set_disabled(run: bool) -> None: ... +def get_disabled() -> bool: ... +def disabled() -> ContextManager[None]: ... + +# To be more precise on instance_of use some overloads. +# If there are more than 3 items in the tuple then we fall back to Any +@overload +def instance_of(type: Type[_T]) -> _ValidatorType[_T]: ... +@overload +def instance_of(type: Tuple[Type[_T]]) -> _ValidatorType[_T]: ... +@overload +def instance_of( + type: Tuple[Type[_T1], Type[_T2]] +) -> _ValidatorType[Union[_T1, _T2]]: ... +@overload +def instance_of( + type: Tuple[Type[_T1], Type[_T2], Type[_T3]] +) -> _ValidatorType[Union[_T1, _T2, _T3]]: ... +@overload +def instance_of(type: Tuple[type, ...]) -> _ValidatorType[Any]: ... +def provides(interface: Any) -> _ValidatorType[Any]: ... +def optional( + validator: Union[_ValidatorType[_T], List[_ValidatorType[_T]]] +) -> _ValidatorType[Optional[_T]]: ... +def in_(options: Container[_T]) -> _ValidatorType[_T]: ... +def and_(*validators: _ValidatorType[_T]) -> _ValidatorType[_T]: ... +def matches_re( + regex: Union[Pattern[AnyStr], AnyStr], + flags: int = ..., + func: Optional[ + Callable[[AnyStr, AnyStr, int], Optional[Match[AnyStr]]] + ] = ..., +) -> _ValidatorType[AnyStr]: ... +def deep_iterable( + member_validator: _ValidatorArgType[_T], + iterable_validator: Optional[_ValidatorType[_I]] = ..., +) -> _ValidatorType[_I]: ... 
+def deep_mapping( + key_validator: _ValidatorType[_K], + value_validator: _ValidatorType[_V], + mapping_validator: Optional[_ValidatorType[_M]] = ..., +) -> _ValidatorType[_M]: ... +def is_callable() -> _ValidatorType[_T]: ... +def lt(val: _T) -> _ValidatorType[_T]: ... +def le(val: _T) -> _ValidatorType[_T]: ... +def ge(val: _T) -> _ValidatorType[_T]: ... +def gt(val: _T) -> _ValidatorType[_T]: ... +def max_len(length: int) -> _ValidatorType[_T]: ... +def min_len(length: int) -> _ValidatorType[_T]: ... diff --git a/lib/spack/external/attr/LICENSE b/lib/spack/external/_vendoring/attrs/LICENSE similarity index 94% rename from lib/spack/external/attr/LICENSE rename to lib/spack/external/_vendoring/attrs/LICENSE index 7ae3df93097..2bd6453d255 100644 --- a/lib/spack/external/attr/LICENSE +++ b/lib/spack/external/_vendoring/attrs/LICENSE @@ -1,6 +1,6 @@ The MIT License (MIT) -Copyright (c) 2015 Hynek Schlawack +Copyright (c) 2015 Hynek Schlawack and the attrs contributors Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal diff --git a/lib/spack/external/_vendoring/attrs/__init__.py b/lib/spack/external/_vendoring/attrs/__init__.py new file mode 100644 index 00000000000..a704b8b56bc --- /dev/null +++ b/lib/spack/external/_vendoring/attrs/__init__.py @@ -0,0 +1,70 @@ +# SPDX-License-Identifier: MIT + +from attr import ( + NOTHING, + Attribute, + Factory, + __author__, + __copyright__, + __description__, + __doc__, + __email__, + __license__, + __title__, + __url__, + __version__, + __version_info__, + assoc, + cmp_using, + define, + evolve, + field, + fields, + fields_dict, + frozen, + has, + make_class, + mutable, + resolve_types, + validate, +) +from attr._next_gen import asdict, astuple + +from . import converters, exceptions, filters, setters, validators + + +__all__ = [ + "__author__", + "__copyright__", + "__description__", + "__doc__", + "__email__", + "__license__", + "__title__", + "__url__", + "__version__", + "__version_info__", + "asdict", + "assoc", + "astuple", + "Attribute", + "cmp_using", + "converters", + "define", + "evolve", + "exceptions", + "Factory", + "field", + "fields_dict", + "fields", + "filters", + "frozen", + "has", + "make_class", + "mutable", + "NOTHING", + "resolve_types", + "setters", + "validate", + "validators", +] diff --git a/lib/spack/external/_vendoring/attrs/__init__.pyi b/lib/spack/external/_vendoring/attrs/__init__.pyi new file mode 100644 index 00000000000..fc44de46a08 --- /dev/null +++ b/lib/spack/external/_vendoring/attrs/__init__.pyi @@ -0,0 +1,66 @@ +from typing import ( + Any, + Callable, + Dict, + Mapping, + Optional, + Sequence, + Tuple, + Type, +) + +# Because we need to type our own stuff, we have to make everything from +# attr explicitly public too. 
+from attr import __author__ as __author__ +from attr import __copyright__ as __copyright__ +from attr import __description__ as __description__ +from attr import __email__ as __email__ +from attr import __license__ as __license__ +from attr import __title__ as __title__ +from attr import __url__ as __url__ +from attr import __version__ as __version__ +from attr import __version_info__ as __version_info__ +from attr import _FilterType +from attr import assoc as assoc +from attr import Attribute as Attribute +from attr import cmp_using as cmp_using +from attr import converters as converters +from attr import define as define +from attr import evolve as evolve +from attr import exceptions as exceptions +from attr import Factory as Factory +from attr import field as field +from attr import fields as fields +from attr import fields_dict as fields_dict +from attr import filters as filters +from attr import frozen as frozen +from attr import has as has +from attr import make_class as make_class +from attr import mutable as mutable +from attr import NOTHING as NOTHING +from attr import resolve_types as resolve_types +from attr import setters as setters +from attr import validate as validate +from attr import validators as validators + +# TODO: see definition of attr.asdict/astuple +def asdict( + inst: Any, + recurse: bool = ..., + filter: Optional[_FilterType[Any]] = ..., + dict_factory: Type[Mapping[Any, Any]] = ..., + retain_collection_types: bool = ..., + value_serializer: Optional[ + Callable[[type, Attribute[Any], Any], Any] + ] = ..., + tuple_keys: bool = ..., +) -> Dict[str, Any]: ... + +# TODO: add support for returning NamedTuple from the mypy plugin +def astuple( + inst: Any, + recurse: bool = ..., + filter: Optional[_FilterType[Any]] = ..., + tuple_factory: Type[Sequence[Any]] = ..., + retain_collection_types: bool = ..., +) -> Tuple[Any, ...]: ... 
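As a quick illustration of the validator and converter additions vendored above (bounds and length validators, `to_bool`, and pre-compiled patterns for `matches_re`), here is a hypothetical usage sketch; the `Config` class and its attributes are invented for the example:

    import re
    import attr
    from attr import validators as v
    from attr.converters import to_bool

    @attr.s
    class Config:
        port = attr.ib(validator=[v.gt(0), v.le(65535)])  # exclusive / inclusive bounds
        name = attr.ib(validator=v.max_len(32))
        # Pre-compiled pattern: allowed since 21.3.0, but flags must then be 0.
        host = attr.ib(validator=v.matches_re(re.compile(r"[\w.-]+")))
        debug = attr.ib(converter=to_bool, default="off")

    c = Config(port=8080, name="spack", host="example.com")
    assert c.debug is False      # "off" maps to False

    try:
        Config(port=0, name="spack", host="example.com")
    except ValueError as e:
        print(e)                 # 'port' must be > 0: 0

A list passed as `validator=` is wrapped with `and_` by `attr.ib` itself, mirroring what `deep_iterable` now does for its *member_validator* argument.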
diff --git a/lib/spack/external/_vendoring/attrs/converters.py b/lib/spack/external/_vendoring/attrs/converters.py new file mode 100644 index 00000000000..edfa8d3c16a --- /dev/null +++ b/lib/spack/external/_vendoring/attrs/converters.py @@ -0,0 +1,3 @@ +# SPDX-License-Identifier: MIT + +from attr.converters import * # noqa diff --git a/lib/spack/external/_vendoring/attrs/exceptions.py b/lib/spack/external/_vendoring/attrs/exceptions.py new file mode 100644 index 00000000000..bd9efed202a --- /dev/null +++ b/lib/spack/external/_vendoring/attrs/exceptions.py @@ -0,0 +1,3 @@ +# SPDX-License-Identifier: MIT + +from attr.exceptions import * # noqa diff --git a/lib/spack/external/_vendoring/attrs/filters.py b/lib/spack/external/_vendoring/attrs/filters.py new file mode 100644 index 00000000000..52959005b08 --- /dev/null +++ b/lib/spack/external/_vendoring/attrs/filters.py @@ -0,0 +1,3 @@ +# SPDX-License-Identifier: MIT + +from attr.filters import * # noqa diff --git a/lib/spack/external/_vendoring/attrs/py.typed b/lib/spack/external/_vendoring/attrs/py.typed new file mode 100644 index 00000000000..e69de29bb2d diff --git a/lib/spack/external/_vendoring/attrs/setters.py b/lib/spack/external/_vendoring/attrs/setters.py new file mode 100644 index 00000000000..9b50770804e --- /dev/null +++ b/lib/spack/external/_vendoring/attrs/setters.py @@ -0,0 +1,3 @@ +# SPDX-License-Identifier: MIT + +from attr.setters import * # noqa diff --git a/lib/spack/external/_vendoring/attrs/validators.py b/lib/spack/external/_vendoring/attrs/validators.py new file mode 100644 index 00000000000..ab2c9b30247 --- /dev/null +++ b/lib/spack/external/_vendoring/attrs/validators.py @@ -0,0 +1,3 @@ +# SPDX-License-Identifier: MIT + +from attr.validators import * # noqa diff --git a/lib/spack/external/_vendoring/distro/LICENSE b/lib/spack/external/_vendoring/distro/LICENSE new file mode 100644 index 00000000000..e06d2081865 --- /dev/null +++ b/lib/spack/external/_vendoring/distro/LICENSE @@ -0,0 +1,202 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright {yyyy} {name of copyright owner} + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+ diff --git a/lib/spack/external/_vendoring/distro/__init__.py b/lib/spack/external/_vendoring/distro/__init__.py new file mode 100644 index 00000000000..7686fe85a7c --- /dev/null +++ b/lib/spack/external/_vendoring/distro/__init__.py @@ -0,0 +1,54 @@ +from .distro import ( + NORMALIZED_DISTRO_ID, + NORMALIZED_LSB_ID, + NORMALIZED_OS_ID, + LinuxDistribution, + __version__, + build_number, + codename, + distro_release_attr, + distro_release_info, + id, + info, + like, + linux_distribution, + lsb_release_attr, + lsb_release_info, + major_version, + minor_version, + name, + os_release_attr, + os_release_info, + uname_attr, + uname_info, + version, + version_parts, +) + +__all__ = [ + "NORMALIZED_DISTRO_ID", + "NORMALIZED_LSB_ID", + "NORMALIZED_OS_ID", + "LinuxDistribution", + "build_number", + "codename", + "distro_release_attr", + "distro_release_info", + "id", + "info", + "like", + "linux_distribution", + "lsb_release_attr", + "lsb_release_info", + "major_version", + "minor_version", + "name", + "os_release_attr", + "os_release_info", + "uname_attr", + "uname_info", + "version", + "version_parts", +] + +__version__ = __version__ diff --git a/lib/spack/external/_vendoring/distro/__main__.py b/lib/spack/external/_vendoring/distro/__main__.py new file mode 100644 index 00000000000..0c01d5b08b6 --- /dev/null +++ b/lib/spack/external/_vendoring/distro/__main__.py @@ -0,0 +1,4 @@ +from .distro import main + +if __name__ == "__main__": + main() diff --git a/lib/spack/external/distro.py b/lib/spack/external/_vendoring/distro/distro.py similarity index 79% rename from lib/spack/external/distro.py rename to lib/spack/external/_vendoring/distro/distro.py index 7892741347d..89e18680472 100644 --- a/lib/spack/external/distro.py +++ b/lib/spack/external/_vendoring/distro/distro.py @@ -1,3 +1,4 @@ +#!/usr/bin/env python # Copyright 2015,2016,2017 Nir Cohen # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -36,40 +37,39 @@ import subprocess import sys import warnings +from typing import ( + Any, + Callable, + Dict, + Iterable, + Optional, + Sequence, + TextIO, + Tuple, + Type, +) -__version__ = "1.6.0" +try: + from typing import TypedDict +except ImportError: + # Python 3.7 + TypedDict = dict -# Use `if False` to avoid an ImportError on Python 2. After dropping Python 2 -# support, can use typing.TYPE_CHECKING instead. See: -# https://docs.python.org/3/library/typing.html#typing.TYPE_CHECKING -if False: # pragma: nocover - from typing import ( - Any, - Callable, - Dict, - Iterable, - Optional, - Sequence, - TextIO, - Tuple, - Type, - TypedDict, - Union, - ) +__version__ = "1.8.0" - VersionDict = TypedDict( - "VersionDict", {"major": str, "minor": str, "build_number": str} - ) - InfoDict = TypedDict( - "InfoDict", - { - "id": str, - "version": str, - "version_parts": VersionDict, - "like": str, - "codename": str, - }, - ) + +class VersionDict(TypedDict): + major: str + minor: str + build_number: str + + +class InfoDict(TypedDict): + id: str + version: str + version_parts: VersionDict + like: str + codename: str _UNIXCONFDIR = os.environ.get("UNIXCONFDIR", "/etc") @@ -85,6 +85,7 @@ #: * Value: Normalized value. 
NORMALIZED_OS_ID = { "ol": "oracle", # Oracle Linux + "opensuse-leap": "opensuse", # Newer versions of OpenSuSE report as opensuse-leap } #: Translation table for normalizing the "Distributor ID" attribute returned by @@ -121,6 +122,26 @@ # Pattern for base file name of distro release file _DISTRO_RELEASE_BASENAME_PATTERN = re.compile(r"(\w+)[-_](release|version)$") +# Base file names to be looked up if _UNIXCONFDIR is not readable. +_DISTRO_RELEASE_BASENAMES = [ + "SuSE-release", + "arch-release", + "base-release", + "centos-release", + "fedora-release", + "gentoo-release", + "mageia-release", + "mandrake-release", + "mandriva-release", + "mandrivalinux-release", + "manjaro-release", + "oracle-release", + "redhat-release", + "rocky-release", + "sl-release", + "slackware-version", +] + # Base file names to be ignored when searching for distro release file _DISTRO_RELEASE_IGNORE_BASENAMES = ( "debian_version", @@ -133,8 +154,7 @@ ) -def linux_distribution(full_distribution_name=True): - # type: (bool) -> Tuple[str, str, str] +def linux_distribution(full_distribution_name: bool = True) -> Tuple[str, str, str]: """ .. deprecated:: 1.6.0 @@ -151,7 +171,8 @@ def linux_distribution(full_distribution_name=True): * ``version``: The result of :func:`distro.version`. - * ``codename``: The result of :func:`distro.codename`. + * ``codename``: The extra item (usually in parentheses) after the + os-release version number, or the result of :func:`distro.codename`. The interface of this function is compatible with the original :py:func:`platform.linux_distribution` function, supporting a subset of @@ -176,8 +197,7 @@ def linux_distribution(full_distribution_name=True): return _distro.linux_distribution(full_distribution_name) -def id(): - # type: () -> str +def id() -> str: """ Return the distro ID of the current distribution, as a machine-readable string. @@ -198,8 +218,9 @@ def id(): "fedora" Fedora "sles" SUSE Linux Enterprise Server "opensuse" openSUSE - "amazon" Amazon Linux + "amzn" Amazon Linux "arch" Arch Linux + "buildroot" Buildroot "cloudlinux" CloudLinux OS "exherbo" Exherbo Linux "gentoo" Gentoo Linux @@ -219,6 +240,9 @@ def id(): "netbsd" NetBSD "freebsd" FreeBSD "midnightbsd" MidnightBSD + "rocky" Rocky Linux + "aix" AIX + "guix" Guix System ============== ========================================= If you have a need to get distros for reliable IDs added into this set, @@ -256,8 +280,7 @@ def id(): return _distro.id() -def name(pretty=False): - # type: (bool) -> str +def name(pretty: bool = False) -> str: """ Return the name of the current OS distribution, as a human-readable string. @@ -296,8 +319,7 @@ def name(pretty=False): return _distro.name(pretty) -def version(pretty=False, best=False): - # type: (bool, bool) -> str +def version(pretty: bool = False, best: bool = False) -> str: """ Return the version of the current OS distribution, as a human-readable string. @@ -313,6 +335,10 @@ def version(pretty=False, best=False): sources in a fixed priority order does not always yield the most precise version (e.g. for Debian 8.2, or CentOS 7.1). + Some other distributions may not provide this kind of information. In these + cases, an empty string would be returned. This behavior can be observed + with rolling-release distributions (e.g. Arch Linux).
+ The *best* parameter can be used to control the approach for the returned version: @@ -341,8 +367,7 @@ def version(pretty=False, best=False): return _distro.version(pretty, best) -def version_parts(best=False): - # type: (bool) -> Tuple[str, str, str] +def version_parts(best: bool = False) -> Tuple[str, str, str]: """ Return the version of the current OS distribution as a tuple ``(major, minor, build_number)`` with items as follows: @@ -359,8 +384,7 @@ def version_parts(best=False): return _distro.version_parts(best) -def major_version(best=False): - # type: (bool) -> str +def major_version(best: bool = False) -> str: """ Return the major version of the current OS distribution, as a string, if provided. @@ -373,8 +397,7 @@ def major_version(best=False): return _distro.major_version(best) -def minor_version(best=False): - # type: (bool) -> str +def minor_version(best: bool = False) -> str: """ Return the minor version of the current OS distribution, as a string, if provided. @@ -387,8 +410,7 @@ def minor_version(best=False): return _distro.minor_version(best) -def build_number(best=False): - # type: (bool) -> str +def build_number(best: bool = False) -> str: """ Return the build number of the current OS distribution, as a string, if provided. @@ -401,8 +423,7 @@ def build_number(best=False): return _distro.build_number(best) -def like(): - # type: () -> str +def like() -> str: """ Return a space-separated list of distro IDs of distributions that are closely related to the current OS distribution in regards to packaging @@ -419,8 +440,7 @@ def like(): return _distro.like() -def codename(): - # type: () -> str +def codename() -> str: """ Return the codename for the release of the current OS distribution, as a string. @@ -444,8 +464,7 @@ def codename(): return _distro.codename() -def info(pretty=False, best=False): - # type: (bool, bool) -> InfoDict +def info(pretty: bool = False, best: bool = False) -> InfoDict: """ Return certain machine-readable information items about the current OS distribution in a dictionary, as shown in the following example: @@ -489,8 +508,7 @@ def info(pretty=False, best=False): return _distro.info(pretty, best) -def os_release_info(): - # type: () -> Dict[str, str] +def os_release_info() -> Dict[str, str]: """ Return a dictionary containing key-value pairs for the information items from the os-release file data source of the current OS distribution. @@ -500,8 +518,7 @@ def os_release_info(): return _distro.os_release_info() -def lsb_release_info(): - # type: () -> Dict[str, str] +def lsb_release_info() -> Dict[str, str]: """ Return a dictionary containing key-value pairs for the information items from the lsb_release command data source of the current OS distribution. @@ -512,8 +529,7 @@ def lsb_release_info(): return _distro.lsb_release_info() -def distro_release_info(): - # type: () -> Dict[str, str] +def distro_release_info() -> Dict[str, str]: """ Return a dictionary containing key-value pairs for the information items from the distro release file data source of the current OS distribution. @@ -523,8 +539,7 @@ def distro_release_info(): return _distro.distro_release_info() -def uname_info(): - # type: () -> Dict[str, str] +def uname_info() -> Dict[str, str]: """ Return a dictionary containing key-value pairs for the information items from the uname command data source of the current OS distribution.
@@ -532,8 +547,7 @@ def uname_info(): return _distro.uname_info() -def os_release_attr(attribute): - # type: (str) -> str +def os_release_attr(attribute: str) -> str: """ Return a single named information item from the os-release file data source of the current OS distribution. @@ -552,8 +566,7 @@ def os_release_attr(attribute): return _distro.os_release_attr(attribute) -def lsb_release_attr(attribute): - # type: (str) -> str +def lsb_release_attr(attribute: str) -> str: """ Return a single named information item from the lsb_release command output data source of the current OS distribution. @@ -573,8 +586,7 @@ def lsb_release_attr(attribute): return _distro.lsb_release_attr(attribute) -def distro_release_attr(attribute): - # type: (str) -> str +def distro_release_attr(attribute: str) -> str: """ Return a single named information item from the distro release file data source of the current OS distribution. @@ -593,8 +605,7 @@ def distro_release_attr(attribute): return _distro.distro_release_attr(attribute) -def uname_attr(attribute): - # type: (str) -> str +def uname_attr(attribute: str) -> str: """ Return a single named information item from the uname command data source of the current OS distribution. @@ -615,25 +626,23 @@ from functools import cached_property except ImportError: # Python < 3.8 - class cached_property(object): # type: ignore + class cached_property: # type: ignore """A version of @property which caches the value. On access, it calls the underlying function and sets the value in `__dict__` so future accesses will not re-call the property. """ - def __init__(self, f): - # type: (Callable[[Any], Any]) -> None + def __init__(self, f: Callable[[Any], Any]) -> None: self._fname = f.__name__ self._f = f - def __get__(self, obj, owner): - # type: (Any, Type[Any]) -> Any - assert obj is not None, "call {} on an instance".format(self._fname) + def __get__(self, obj: Any, owner: Type[Any]) -> Any: + assert obj is not None, f"call {self._fname} on an instance" ret = obj.__dict__[self._fname] = self._f(obj) return ret -class LinuxDistribution(object): +class LinuxDistribution: """ Provides information about an OS distribution. @@ -653,13 +662,13 @@ class LinuxDistribution(object): def __init__( self, - include_lsb=True, - os_release_file="", - distro_release_file="", - include_uname=True, - root_dir=None, - ): - # type: (bool, str, str, bool, Optional[str]) -> None + include_lsb: Optional[bool] = None, + os_release_file: str = "", + distro_release_file: str = "", + include_uname: Optional[bool] = None, + root_dir: Optional[str] = None, + include_oslevel: Optional[bool] = None, + ) -> None: """ The initialization method of this class gathers information from the available data sources, and stores that in private instance attributes. @@ -699,7 +708,13 @@ def __init__( be empty. * ``root_dir`` (string): The absolute path to the root directory to use - to find distro-related information files. + to find distro-related information files. Note that ``include_*`` + parameters must not be enabled in combination with ``root_dir``. + + * ``include_oslevel`` (bool): Controls whether (AIX) oslevel command + output is included as a data source. If the oslevel command is not + available in the program execution path the data source will be + empty. Public instance attributes: @@ -718,14 +733,21 @@ def __init__( parameter. This controls whether the uname information will be loaded. + * ``include_oslevel`` (bool): The result of the ``include_oslevel`` + parameter.
This controls whether (AIX) oslevel information will be + loaded. + + * ``root_dir`` (string): The result of the ``root_dir`` parameter. + The absolute path to the root directory to use to find distro-related + information files. + Raises: - * :py:exc:`IOError`: Some I/O issue with an os-release file or distro - release file. + * :py:exc:`ValueError`: Initialization parameters combination is not + supported. - * :py:exc:`subprocess.CalledProcessError`: The lsb_release command had - some issue (other than not being available in the program execution - path). + * :py:exc:`OSError`: Some I/O issue with an os-release file or distro + release file. * :py:exc:`UnicodeError`: A data source has unexpected characters or uses an unexpected encoding. @@ -754,11 +776,24 @@ def __init__( self.os_release_file = usr_lib_os_release_file self.distro_release_file = distro_release_file or "" # updated later - self.include_lsb = include_lsb - self.include_uname = include_uname - def __repr__(self): - # type: () -> str + is_root_dir_defined = root_dir is not None + if is_root_dir_defined and (include_lsb or include_uname or include_oslevel): + raise ValueError( + "Including subprocess data sources from specific root_dir is disallowed" + " to prevent false information" + ) + self.include_lsb = ( + include_lsb if include_lsb is not None else not is_root_dir_defined + ) + self.include_uname = ( + include_uname if include_uname is not None else not is_root_dir_defined + ) + self.include_oslevel = ( + include_oslevel if include_oslevel is not None else not is_root_dir_defined + ) + + def __repr__(self) -> str: """Return repr of all info""" return ( "LinuxDistribution(" @@ -766,14 +801,18 @@ def __repr__(self): "distro_release_file={self.distro_release_file!r}, " "include_lsb={self.include_lsb!r}, " "include_uname={self.include_uname!r}, " + "include_oslevel={self.include_oslevel!r}, " + "root_dir={self.root_dir!r}, " "_os_release_info={self._os_release_info!r}, " "_lsb_release_info={self._lsb_release_info!r}, " "_distro_release_info={self._distro_release_info!r}, " - "_uname_info={self._uname_info!r})".format(self=self) + "_uname_info={self._uname_info!r}, " + "_oslevel_info={self._oslevel_info!r})".format(self=self) ) - def linux_distribution(self, full_distribution_name=True): - # type: (bool) -> Tuple[str, str, str] + def linux_distribution( + self, full_distribution_name: bool = True + ) -> Tuple[str, str, str]: """ Return information about the OS distribution that is compatible with Python's :func:`platform.linux_distribution`, supporting a subset @@ -784,18 +823,16 @@ def linux_distribution(self, full_distribution_name=True): return ( self.name() if full_distribution_name else self.id(), self.version(), - self.codename(), + self._os_release_info.get("release_codename") or self.codename(), ) - def id(self): - # type: () -> str + def id(self) -> str: """Return the distro ID of the OS distribution, as a string. For details, see :func:`distro.id`. """ - def normalize(distro_id, table): - # type: (str, Dict[str, str]) -> str + def normalize(distro_id: str, table: Dict[str, str]) -> str: distro_id = distro_id.lower().replace(" ", "_") return table.get(distro_id, distro_id) @@ -817,8 +854,7 @@ def normalize(distro_id, table): return "" - def name(self, pretty=False): - # type: (bool) -> str + def name(self, pretty: bool = False) -> str: """ Return the name of the OS distribution, as a string. 
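The ``root_dir`` guard introduced above pairs with the new ``Optional`` defaults for the ``include_*`` parameters. A sketch of the resulting behaviour, with a hypothetical path, assuming the class is reached via the public ``distro`` package:

from distro import LinuxDistribution

# With root_dir given, the subprocess-backed sources (lsb_release, uname,
# oslevel) all default to disabled, so this constructs cleanly:
dist = LinuxDistribution(root_dir="/mnt/guest-image")

# Explicitly enabling any of them alongside root_dir raises ValueError, to
# avoid mixing host command output with on-disk files from another root:
try:
    LinuxDistribution(root_dir="/mnt/guest-image", include_lsb=True)
except ValueError as exc:
    print(exc)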
@@ -838,11 +874,10 @@ def name(self, pretty=False): name = self.distro_release_attr("name") or self.uname_attr("name") version = self.version(pretty=True) if version: - name = name + " " + version + name = f"{name} {version}" return name or "" - def version(self, pretty=False, best=False): - # type: (bool, bool) -> str + def version(self, pretty: bool = False, best: bool = False) -> str: """ Return the version of the OS distribution, as a string. @@ -860,6 +895,12 @@ def version(self, pretty=False, best=False): ).get("version_id", ""), self.uname_attr("release"), ] + if self.uname_attr("id").startswith("aix"): + # On AIX platforms, prefer oslevel command output. + versions.insert(0, self.oslevel_info()) + elif self.id() == "debian" or "debian" in self.like().split(): + # On Debian-like, add debian_version file content to candidates list. + versions.append(self._debian_version) version = "" if best: # This algorithm uses the last version in priority order that has @@ -875,11 +916,10 @@ def version(self, pretty=False, best=False): version = v break if pretty and version and self.codename(): - version = "{0} ({1})".format(version, self.codename()) + version = f"{version} ({self.codename()})" return version - def version_parts(self, best=False): - # type: (bool) -> Tuple[str, str, str] + def version_parts(self, best: bool = False) -> Tuple[str, str, str]: """ Return the version of the OS distribution, as a tuple of version numbers. @@ -895,8 +935,7 @@ def version_parts(self, best=False): return major, minor or "", build_number or "" return "", "", "" - def major_version(self, best=False): - # type: (bool) -> str + def major_version(self, best: bool = False) -> str: """ Return the major version number of the current distribution. @@ -904,8 +943,7 @@ def major_version(self, best=False): """ return self.version_parts(best)[0] - def minor_version(self, best=False): - # type: (bool) -> str + def minor_version(self, best: bool = False) -> str: """ Return the minor version number of the current distribution. @@ -913,8 +951,7 @@ def minor_version(self, best=False): """ return self.version_parts(best)[1] - def build_number(self, best=False): - # type: (bool) -> str + def build_number(self, best: bool = False) -> str: """ Return the build number of the current distribution. @@ -922,8 +959,7 @@ def build_number(self, best=False): """ return self.version_parts(best)[2] - def like(self): - # type: () -> str + def like(self) -> str: """ Return the IDs of distributions that are like the OS distribution. @@ -931,8 +967,7 @@ def like(self): """ return self.os_release_attr("id_like") or "" - def codename(self): - # type: () -> str + def codename(self) -> str: """ Return the codename of the OS distribution. @@ -949,8 +984,7 @@ def codename(self): or "" ) - def info(self, pretty=False, best=False): - # type: (bool, bool) -> InfoDict + def info(self, pretty: bool = False, best: bool = False) -> InfoDict: """ Return certain machine-readable information about the OS distribution. @@ -969,8 +1003,7 @@ def info(self, pretty=False, best=False): codename=self.codename(), ) - def os_release_info(self): - # type: () -> Dict[str, str] + def os_release_info(self) -> Dict[str, str]: """ Return a dictionary containing key-value pairs for the information items from the os-release file data source of the OS distribution. 
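The selection applied to the candidate list assembled in ``version()`` above (oslevel output prepended on AIX, ``debian_version`` appended on Debian-likes) can be restated in isolation. The following is a simplified model of that loop, not the method itself:

def pick_version(candidates, best=False):
    # best=True keeps the candidate with the most dots (highest precision),
    # scanning in priority order; otherwise the first non-empty entry wins.
    version = ""
    if best:
        for v in candidates:
            if v.count(".") > version.count(".") or version == "":
                version = v
    else:
        for v in candidates:
            if v:
                version = v
                break
    return version

print(pick_version(["", "7.9", "7.9.2009"], best=True))  # -> 7.9.2009
print(pick_version(["", "7.9", "7.9.2009"]))             # -> 7.9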
@@ -979,8 +1012,7 @@ def os_release_info(self): """ return self._os_release_info - def lsb_release_info(self): - # type: () -> Dict[str, str] + def lsb_release_info(self) -> Dict[str, str]: """ Return a dictionary containing key-value pairs for the information items from the lsb_release command data source of the OS @@ -990,8 +1022,7 @@ def lsb_release_info(self): """ return self._lsb_release_info - def distro_release_info(self): - # type: () -> Dict[str, str] + def distro_release_info(self) -> Dict[str, str]: """ Return a dictionary containing key-value pairs for the information items from the distro release file data source of the OS @@ -1001,8 +1032,7 @@ def distro_release_info(self): """ return self._distro_release_info - def uname_info(self): - # type: () -> Dict[str, str] + def uname_info(self) -> Dict[str, str]: """ Return a dictionary containing key-value pairs for the information items from the uname command data source of the OS distribution. @@ -1011,8 +1041,13 @@ def uname_info(self): """ return self._uname_info - def os_release_attr(self, attribute): - # type: (str) -> str + def oslevel_info(self) -> str: + """ + Return AIX' oslevel command output. + """ + return self._oslevel_info + + def os_release_attr(self, attribute: str) -> str: """ Return a single named information item from the os-release file data source of the OS distribution. @@ -1021,8 +1056,7 @@ def os_release_attr(self, attribute): """ return self._os_release_info.get(attribute, "") - def lsb_release_attr(self, attribute): - # type: (str) -> str + def lsb_release_attr(self, attribute: str) -> str: """ Return a single named information item from the lsb_release command output data source of the OS distribution. @@ -1031,8 +1065,7 @@ def lsb_release_attr(self, attribute): """ return self._lsb_release_info.get(attribute, "") - def distro_release_attr(self, attribute): - # type: (str) -> str + def distro_release_attr(self, attribute: str) -> str: """ Return a single named information item from the distro release file data source of the OS distribution. @@ -1041,8 +1074,7 @@ def distro_release_attr(self, attribute): """ return self._distro_release_info.get(attribute, "") - def uname_attr(self, attribute): - # type: (str) -> str + def uname_attr(self, attribute: str) -> str: """ Return a single named information item from the uname command output data source of the OS distribution. @@ -1052,8 +1084,7 @@ def uname_attr(self, attribute): return self._uname_info.get(attribute, "") @cached_property - def _os_release_info(self): - # type: () -> Dict[str, str] + def _os_release_info(self) -> Dict[str, str]: """ Get the information items from the specified os-release file. @@ -1061,13 +1092,12 @@ def _os_release_info(self): A dictionary containing all information items. """ if os.path.isfile(self.os_release_file): - with open(self.os_release_file) as release_file: + with open(self.os_release_file, encoding="utf-8") as release_file: return self._parse_os_release_content(release_file) return {} @staticmethod - def _parse_os_release_content(lines): - # type: (TextIO) -> Dict[str, str] + def _parse_os_release_content(lines: TextIO) -> Dict[str, str]: """ Parse the lines of an os-release file. @@ -1084,16 +1114,6 @@ def _parse_os_release_content(lines): lexer = shlex.shlex(lines, posix=True) lexer.whitespace_split = True - # The shlex module defines its `wordchars` variable using literals, - # making it dependent on the encoding of the Python source file. 
- # In Python 2.6 and 2.7, the shlex source file is encoded in - # 'iso-8859-1', and the `wordchars` variable is defined as a byte - # string. This causes a UnicodeDecodeError to be raised when the - # parsed content is a unicode object. The following fix resolves that - # (... but it should be fixed in shlex...): - if sys.version_info[0] == 2 and isinstance(lexer.wordchars, bytes): - lexer.wordchars = lexer.wordchars.decode("iso-8859-1") - tokens = list(lexer) for token in tokens: # At this point, all shell-like parsing has been done (i.e. @@ -1102,12 +1122,17 @@ def _parse_os_release_content(lines): # stripped, etc.), so the tokens are now either: # * variable assignments: var=value # * commands or their arguments (not allowed in os-release) + # Ignore any tokens that are not variable assignments if "=" in token: k, v = token.split("=", 1) props[k.lower()] = v - else: - # Ignore any tokens that are not variable assignments - pass + + if "version" in props: + # extract release codename (if any) from version attribute + match = re.search(r"\((\D+)\)|,\s*(\D+)", props["version"]) + if match: + release_codename = match.group(1) or match.group(2) + props["codename"] = props["release_codename"] = release_codename if "version_codename" in props: # os-release added a version_codename field. Use that in @@ -1118,22 +1143,11 @@ def _parse_os_release_content(lines): elif "ubuntu_codename" in props: # Same as above but a non-standard field name used on older Ubuntus props["codename"] = props["ubuntu_codename"] - elif "version" in props: - # If there is no version_codename, parse it from the version - match = re.search(r"(\(\D+\))|,(\s+)?\D+", props["version"]) - if match: - codename = match.group() - codename = codename.strip("()") - codename = codename.strip(",") - codename = codename.strip() - # codename appears within paranthese. - props["codename"] = codename return props @cached_property - def _lsb_release_info(self): - # type: () -> Dict[str, str] + def _lsb_release_info(self) -> Dict[str, str]: """ Get the information items from the lsb_release command output. @@ -1142,19 +1156,17 @@ def _lsb_release_info(self): """ if not self.include_lsb: return {} - with open(os.devnull, "wb") as devnull: - try: - cmd = ("lsb_release", "-a") - stdout = subprocess.check_output(cmd, stderr=devnull) - # Command not found or lsb_release returned error - except (OSError, subprocess.CalledProcessError): - return {} + try: + cmd = ("lsb_release", "-a") + stdout = subprocess.check_output(cmd, stderr=subprocess.DEVNULL) + # Command not found or lsb_release returned error + except (OSError, subprocess.CalledProcessError): + return {} content = self._to_str(stdout).splitlines() return self._parse_lsb_release_content(content) @staticmethod - def _parse_lsb_release_content(lines): - # type: (Iterable[str]) -> Dict[str, str] + def _parse_lsb_release_content(lines: Iterable[str]) -> Dict[str, str]: """ Parse the output of the lsb_release command. 
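The release-codename extraction added above can be exercised on its own. The pattern is copied verbatim from the hunk; it accepts a parenthesized codename or one following a comma. The sample VERSION strings are hypothetical:

import re

codename_re = re.compile(r"\((\D+)\)|,\s*(\D+)")

for version in ("12 (bookworm)", "16.04 LTS, Xenial Xerus"):
    match = codename_re.search(version)
    if match:
        print(match.group(1) or match.group(2))
# -> bookworm
# -> Xenial Xerus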
@@ -1178,20 +1190,41 @@ def _parse_lsb_release_content(lines): return props @cached_property - def _uname_info(self): - # type: () -> Dict[str, str] - with open(os.devnull, "wb") as devnull: - try: - cmd = ("uname", "-rs") - stdout = subprocess.check_output(cmd, stderr=devnull) - except OSError: - return {} + def _uname_info(self) -> Dict[str, str]: + if not self.include_uname: + return {} + try: + cmd = ("uname", "-rs") + stdout = subprocess.check_output(cmd, stderr=subprocess.DEVNULL) + except OSError: + return {} content = self._to_str(stdout).splitlines() return self._parse_uname_content(content) + @cached_property + def _oslevel_info(self) -> str: + if not self.include_oslevel: + return "" + try: + stdout = subprocess.check_output("oslevel", stderr=subprocess.DEVNULL) + except (OSError, subprocess.CalledProcessError): + return "" + return self._to_str(stdout).strip() + + @cached_property + def _debian_version(self) -> str: + try: + with open( + os.path.join(self.etc_dir, "debian_version"), encoding="ascii" + ) as fp: + return fp.readline().rstrip() + except FileNotFoundError: + return "" + @staticmethod - def _parse_uname_content(lines): - # type: (Sequence[str]) -> Dict[str, str] + def _parse_uname_content(lines: Sequence[str]) -> Dict[str, str]: + if not lines: + return {} props = {} match = re.search(r"^([^\s]+)\s+([\d\.]+)", lines[0].strip()) if match: @@ -1208,23 +1241,12 @@ def _parse_uname_content(lines): return props @staticmethod - def _to_str(text): - # type: (Union[bytes, str]) -> str + def _to_str(bytestring: bytes) -> str: encoding = sys.getfilesystemencoding() - encoding = "utf-8" if encoding == "ascii" else encoding - - if sys.version_info[0] >= 3: - if isinstance(text, bytes): - return text.decode(encoding) - else: - if isinstance(text, unicode): # noqa - return text.encode(encoding) - - return text + return bytestring.decode(encoding) @cached_property - def _distro_release_info(self): - # type: () -> Dict[str, str] + def _distro_release_info(self) -> Dict[str, str]: """ Get the information items from the specified distro release file. @@ -1241,14 +1263,14 @@ def _distro_release_info(self): # file), because we want to use what was specified as best as # possible. match = _DISTRO_RELEASE_BASENAME_PATTERN.match(basename) - if "name" in distro_info and "cloudlinux" in distro_info["name"].lower(): - distro_info["id"] = "cloudlinux" - elif match: - distro_info["id"] = match.group(1) - return distro_info else: try: - basenames = os.listdir(self.etc_dir) + basenames = [ + basename + for basename in os.listdir(self.etc_dir) + if basename not in _DISTRO_RELEASE_IGNORE_BASENAMES + and os.path.isfile(os.path.join(self.etc_dir, basename)) + ] # We sort for repeatability in cases where there are multiple # distro specific files; e.g. CentOS, Oracle, Enterprise all # containing `redhat-release` on top of their own. @@ -1258,41 +1280,31 @@ def _distro_release_info(self): # sure about the *-release files. Check common entries of # /etc for information. If they turn out to not be there the # error is handled in `_parse_distro_release_file()`. 
- basenames = [ - "SuSE-release", - "arch-release", - "base-release", - "centos-release", - "fedora-release", - "gentoo-release", - "mageia-release", - "mandrake-release", - "mandriva-release", - "mandrivalinux-release", - "manjaro-release", - "oracle-release", - "redhat-release", - "sl-release", - "slackware-version", - ] + basenames = _DISTRO_RELEASE_BASENAMES for basename in basenames: - if basename in _DISTRO_RELEASE_IGNORE_BASENAMES: - continue match = _DISTRO_RELEASE_BASENAME_PATTERN.match(basename) - if match: - filepath = os.path.join(self.etc_dir, basename) - distro_info = self._parse_distro_release_file(filepath) - if "name" in distro_info: - # The name is always present if the pattern matches - self.distro_release_file = filepath - distro_info["id"] = match.group(1) - if "cloudlinux" in distro_info["name"].lower(): - distro_info["id"] = "cloudlinux" - return distro_info - return {} + if match is None: + continue + filepath = os.path.join(self.etc_dir, basename) + distro_info = self._parse_distro_release_file(filepath) + # The name is always present if the pattern matches. + if "name" not in distro_info: + continue + self.distro_release_file = filepath + break + else: # the loop didn't "break": no candidate. + return {} - def _parse_distro_release_file(self, filepath): - # type: (str) -> Dict[str, str] + if match is not None: + distro_info["id"] = match.group(1) + + # CloudLinux < 7: manually enrich info with proper id. + if "cloudlinux" in distro_info.get("name", "").lower(): + distro_info["id"] = "cloudlinux" + + return distro_info + + def _parse_distro_release_file(self, filepath: str) -> Dict[str, str]: """ Parse a distro release file. @@ -1304,19 +1316,18 @@ def _parse_distro_release_file(self, filepath): A dictionary containing all information items. """ try: - with open(filepath) as fp: + with open(filepath, encoding="utf-8") as fp: # Only parse the first line. For instance, on SLES there # are multiple lines. We don't want them... return self._parse_distro_release_content(fp.readline()) - except (OSError, IOError): + except OSError: # Ignore not being able to read a specific, seemingly version # related file. # See https://github.com/python-distro/distro/issues/162 return {} @staticmethod - def _parse_distro_release_content(line): - # type: (str) -> Dict[str, str] + def _parse_distro_release_content(line: str) -> Dict[str, str]: """ Parse a line from a distro release file. 
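How the rewritten discovery loop derives the distro id from a matching basename can be sketched as follows. The pattern definition lives outside this hunk; it is assumed here to be ``(\w+)[-_](release|version)$``, and the ignore-list membership noted in the last comment is likewise an assumption:

import re

# Assumed definition of the module-level pattern referenced above:
_DISTRO_RELEASE_BASENAME_PATTERN = re.compile(r"(\w+)[-_](release|version)$")

for basename in ("centos-release", "slackware-version", "lsb-release"):
    match = _DISTRO_RELEASE_BASENAME_PATTERN.match(basename)
    print(basename, "->", match.group(1) if match else None)
# centos-release -> centos
# slackware-version -> slackware
# lsb-release -> lsb  (assumed to be excluded via _DISTRO_RELEASE_IGNORE_BASENAMES)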
@@ -1344,8 +1355,7 @@ def _parse_distro_release_content(line): _distro = LinuxDistribution() -def main(): - # type: () -> None +def main() -> None: logger = logging.getLogger(__name__) logger.setLevel(logging.DEBUG) logger.addHandler(logging.StreamHandler(sys.stdout)) @@ -1367,7 +1377,10 @@ def main(): if args.root_dir: dist = LinuxDistribution( - include_lsb=False, include_uname=False, root_dir=args.root_dir + include_lsb=False, + include_uname=False, + include_oslevel=False, + root_dir=args.root_dir, ) else: dist = _distro diff --git a/lib/spack/external/_vendoring/distro/py.typed b/lib/spack/external/_vendoring/distro/py.typed new file mode 100644 index 00000000000..e69de29bb2d diff --git a/lib/spack/external/jinja2/LICENSE.rst b/lib/spack/external/_vendoring/jinja2/LICENSE.rst similarity index 100% rename from lib/spack/external/jinja2/LICENSE.rst rename to lib/spack/external/_vendoring/jinja2/LICENSE.rst diff --git a/lib/spack/external/_vendoring/jinja2/__init__.py b/lib/spack/external/_vendoring/jinja2/__init__.py new file mode 100644 index 00000000000..9dcd901a51f --- /dev/null +++ b/lib/spack/external/_vendoring/jinja2/__init__.py @@ -0,0 +1,45 @@ +"""Jinja is a template engine written in pure Python. It provides a +non-XML syntax that supports inline expressions and an optional +sandboxed environment. +""" +from .bccache import BytecodeCache as BytecodeCache +from .bccache import FileSystemBytecodeCache as FileSystemBytecodeCache +from .bccache import MemcachedBytecodeCache as MemcachedBytecodeCache +from .environment import Environment as Environment +from .environment import Template as Template +from .exceptions import TemplateAssertionError as TemplateAssertionError +from .exceptions import TemplateError as TemplateError +from .exceptions import TemplateNotFound as TemplateNotFound +from .exceptions import TemplateRuntimeError as TemplateRuntimeError +from .exceptions import TemplatesNotFound as TemplatesNotFound +from .exceptions import TemplateSyntaxError as TemplateSyntaxError +from .exceptions import UndefinedError as UndefinedError +from .filters import contextfilter +from .filters import environmentfilter +from .filters import evalcontextfilter +from .loaders import BaseLoader as BaseLoader +from .loaders import ChoiceLoader as ChoiceLoader +from .loaders import DictLoader as DictLoader +from .loaders import FileSystemLoader as FileSystemLoader +from .loaders import FunctionLoader as FunctionLoader +from .loaders import ModuleLoader as ModuleLoader +from .loaders import PackageLoader as PackageLoader +from .loaders import PrefixLoader as PrefixLoader +from .runtime import ChainableUndefined as ChainableUndefined +from .runtime import DebugUndefined as DebugUndefined +from .runtime import make_logging_undefined as make_logging_undefined +from .runtime import StrictUndefined as StrictUndefined +from .runtime import Undefined as Undefined +from .utils import clear_caches as clear_caches +from .utils import contextfunction +from .utils import environmentfunction +from .utils import escape +from .utils import evalcontextfunction +from .utils import is_undefined as is_undefined +from .utils import Markup +from .utils import pass_context as pass_context +from .utils import pass_environment as pass_environment +from .utils import pass_eval_context as pass_eval_context +from .utils import select_autoescape as select_autoescape + +__version__ = "3.0.3" diff --git a/lib/spack/external/jinja2/_identifier.py b/lib/spack/external/_vendoring/jinja2/_identifier.py similarity index 100% 
rename from lib/spack/external/jinja2/_identifier.py rename to lib/spack/external/_vendoring/jinja2/_identifier.py diff --git a/lib/spack/external/_vendoring/jinja2/async_utils.py b/lib/spack/external/_vendoring/jinja2/async_utils.py new file mode 100644 index 00000000000..35e6cb10902 --- /dev/null +++ b/lib/spack/external/_vendoring/jinja2/async_utils.py @@ -0,0 +1,75 @@ +import inspect +import typing as t +from functools import wraps + +from .utils import _PassArg +from .utils import pass_eval_context + +V = t.TypeVar("V") + + +def async_variant(normal_func): # type: ignore + def decorator(async_func): # type: ignore + pass_arg = _PassArg.from_obj(normal_func) + need_eval_context = pass_arg is None + + if pass_arg is _PassArg.environment: + + def is_async(args: t.Any) -> bool: + return t.cast(bool, args[0].is_async) + + else: + + def is_async(args: t.Any) -> bool: + return t.cast(bool, args[0].environment.is_async) + + @wraps(normal_func) + def wrapper(*args, **kwargs): # type: ignore + b = is_async(args) + + if need_eval_context: + args = args[1:] + + if b: + return async_func(*args, **kwargs) + + return normal_func(*args, **kwargs) + + if need_eval_context: + wrapper = pass_eval_context(wrapper) + + wrapper.jinja_async_variant = True + return wrapper + + return decorator + + +_common_primitives = {int, float, bool, str, list, dict, tuple, type(None)} + + +async def auto_await(value: t.Union[t.Awaitable["V"], "V"]) -> "V": + # Avoid a costly call to isawaitable + if type(value) in _common_primitives: + return t.cast("V", value) + + if inspect.isawaitable(value): + return await t.cast("t.Awaitable[V]", value) + + return t.cast("V", value) + + +async def auto_aiter( + iterable: "t.Union[t.AsyncIterable[V], t.Iterable[V]]", +) -> "t.AsyncIterator[V]": + if hasattr(iterable, "__aiter__"): + async for item in t.cast("t.AsyncIterable[V]", iterable): + yield item + else: + for item in t.cast("t.Iterable[V]", iterable): + yield item + + +async def auto_to_list( + value: "t.Union[t.AsyncIterable[V], t.Iterable[V]]", +) -> t.List["V"]: + return [x async for x in auto_aiter(value)] diff --git a/lib/spack/external/jinja2/bccache.py b/lib/spack/external/_vendoring/jinja2/bccache.py similarity index 76% rename from lib/spack/external/jinja2/bccache.py rename to lib/spack/external/_vendoring/jinja2/bccache.py index 9c0661030f7..3bb61b7c34c 100644 --- a/lib/spack/external/jinja2/bccache.py +++ b/lib/spack/external/_vendoring/jinja2/bccache.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- """The optional bytecode cache system. This is useful if you have very complex template situations and the compilation of all those templates slows down your application too much. @@ -8,22 +7,30 @@ """ import errno import fnmatch +import marshal import os +import pickle import stat import sys import tempfile +import typing as t from hashlib import sha1 -from os import listdir -from os import path +from io import BytesIO +from types import CodeType -from ._compat import BytesIO -from ._compat import marshal_dump -from ._compat import marshal_load -from ._compat import pickle -from ._compat import text_type -from .utils import open_if_exists +if t.TYPE_CHECKING: + import typing_extensions as te + from .environment import Environment -bc_version = 4 + class _MemcachedClient(te.Protocol): + def get(self, key: str) -> bytes: + ... + + def set(self, key: str, value: bytes, timeout: t.Optional[int] = None) -> None: + ... + + +bc_version = 5 # Magic bytes to identify Jinja bytecode cache files. 
Contains the # Python major and minor version to avoid loading incompatible bytecode # if a project upgrades its Python version. @@ -34,7 +41,7 @@ ) -class Bucket(object): +class Bucket: """Buckets are used to store the bytecode for one template. It's created and initialized by the bytecode cache and passed to the loading functions. @@ -43,17 +50,17 @@ class Bucket(object): cache subclasses don't have to care about cache invalidation. """ - def __init__(self, environment, key, checksum): + def __init__(self, environment: "Environment", key: str, checksum: str) -> None: self.environment = environment self.key = key self.checksum = checksum self.reset() - def reset(self): + def reset(self) -> None: """Resets the bucket (unloads the bytecode).""" - self.code = None + self.code: t.Optional[CodeType] = None - def load_bytecode(self, f): + def load_bytecode(self, f: t.BinaryIO) -> None: """Loads bytecode from a file or file like object.""" # make sure the magic header is correct magic = f.read(len(bc_magic)) @@ -67,31 +74,31 @@ def load_bytecode(self, f): return # if marshal_load fails then we need to reload try: - self.code = marshal_load(f) + self.code = marshal.load(f) except (EOFError, ValueError, TypeError): self.reset() return - def write_bytecode(self, f): + def write_bytecode(self, f: t.BinaryIO) -> None: """Dump the bytecode into the file or file like object passed.""" if self.code is None: raise TypeError("can't write empty bucket") f.write(bc_magic) pickle.dump(self.checksum, f, 2) - marshal_dump(self.code, f) + marshal.dump(self.code, f) - def bytecode_from_string(self, string): - """Load bytecode from a string.""" + def bytecode_from_string(self, string: bytes) -> None: + """Load bytecode from bytes.""" self.load_bytecode(BytesIO(string)) - def bytecode_to_string(self): - """Return the bytecode as string.""" + def bytecode_to_string(self) -> bytes: + """Return the bytecode as bytes.""" out = BytesIO() self.write_bytecode(out) return out.getvalue() -class BytecodeCache(object): +class BytecodeCache: """To implement your own bytecode cache you have to subclass this class and override :meth:`load_bytecode` and :meth:`dump_bytecode`. Both of these methods are passed a :class:`~jinja2.bccache.Bucket`. @@ -120,41 +127,48 @@ def dump_bytecode(self, bucket): Jinja. """ - def load_bytecode(self, bucket): + def load_bytecode(self, bucket: Bucket) -> None: """Subclasses have to override this method to load bytecode into a bucket. If they are not able to find code in the cache for the bucket, it must not do anything. """ raise NotImplementedError() - def dump_bytecode(self, bucket): + def dump_bytecode(self, bucket: Bucket) -> None: """Subclasses have to override this method to write the bytecode from a bucket back to the cache. If it unable to do so it must not fail silently but raise an exception. """ raise NotImplementedError() - def clear(self): + def clear(self) -> None: """Clears the cache. This method is not used by Jinja but should be implemented to allow applications to clear the bytecode cache used by a particular environment. 
""" - def get_cache_key(self, name, filename=None): + def get_cache_key( + self, name: str, filename: t.Optional[t.Union[str]] = None + ) -> str: """Returns the unique hash key for this template name.""" hash = sha1(name.encode("utf-8")) + if filename is not None: - filename = "|" + filename - if isinstance(filename, text_type): - filename = filename.encode("utf-8") - hash.update(filename) + hash.update(f"|{filename}".encode()) + return hash.hexdigest() - def get_source_checksum(self, source): + def get_source_checksum(self, source: str) -> str: """Returns a checksum for the source.""" return sha1(source.encode("utf-8")).hexdigest() - def get_bucket(self, environment, name, filename, source): + def get_bucket( + self, + environment: "Environment", + name: str, + filename: t.Optional[str], + source: str, + ) -> Bucket: """Return a cache bucket for the given template. All arguments are mandatory but filename may be `None`. """ @@ -164,7 +178,7 @@ def get_bucket(self, environment, name, filename, source): self.load_bytecode(bucket) return bucket - def set_bucket(self, bucket): + def set_bucket(self, bucket: Bucket) -> None: """Put the bucket into the cache.""" self.dump_bytecode(bucket) @@ -187,14 +201,16 @@ class FileSystemBytecodeCache(BytecodeCache): This bytecode cache supports clearing of the cache using the clear method. """ - def __init__(self, directory=None, pattern="__jinja2_%s.cache"): + def __init__( + self, directory: t.Optional[str] = None, pattern: str = "__jinja2_%s.cache" + ) -> None: if directory is None: directory = self._get_default_cache_dir() self.directory = directory self.pattern = pattern - def _get_default_cache_dir(self): - def _unsafe_dir(): + def _get_default_cache_dir(self) -> str: + def _unsafe_dir() -> "te.NoReturn": raise RuntimeError( "Cannot determine safe temp directory. You " "need to explicitly provide one." @@ -209,7 +225,7 @@ def _unsafe_dir(): if not hasattr(os, "getuid"): _unsafe_dir() - dirname = "_jinja2-cache-%d" % os.getuid() + dirname = f"_jinja2-cache-{os.getuid()}" actual_dir = os.path.join(tmpdir, dirname) try: @@ -240,34 +256,30 @@ def _unsafe_dir(): return actual_dir - def _get_cache_filename(self, bucket): - return path.join(self.directory, self.pattern % bucket.key) + def _get_cache_filename(self, bucket: Bucket) -> str: + return os.path.join(self.directory, self.pattern % (bucket.key,)) - def load_bytecode(self, bucket): - f = open_if_exists(self._get_cache_filename(bucket), "rb") - if f is not None: - try: + def load_bytecode(self, bucket: Bucket) -> None: + filename = self._get_cache_filename(bucket) + + if os.path.exists(filename): + with open(filename, "rb") as f: bucket.load_bytecode(f) - finally: - f.close() - def dump_bytecode(self, bucket): - f = open(self._get_cache_filename(bucket), "wb") - try: + def dump_bytecode(self, bucket: Bucket) -> None: + with open(self._get_cache_filename(bucket), "wb") as f: bucket.write_bytecode(f) - finally: - f.close() - def clear(self): + def clear(self) -> None: # imported lazily here because google app-engine doesn't support # write access on the file system and the function does not exist # normally. 
from os import remove - files = fnmatch.filter(listdir(self.directory), self.pattern % "*") + files = fnmatch.filter(os.listdir(self.directory), self.pattern % ("*",)) for filename in files: try: - remove(path.join(self.directory, filename)) + remove(os.path.join(self.directory, filename)) except OSError: pass @@ -284,7 +296,7 @@ class MemcachedBytecodeCache(BytecodeCache): - `python-memcached `_ (Unfortunately the django cache interface is not compatible because it - does not support storing binary data, only unicode. You can however pass + does not support storing binary data, only text. You can however pass the underlying cache client to the bytecode cache which is available as `django.core.cache.cache._client`.) @@ -319,32 +331,34 @@ class MemcachedBytecodeCache(BytecodeCache): def __init__( self, - client, - prefix="jinja2/bytecode/", - timeout=None, - ignore_memcache_errors=True, + client: "_MemcachedClient", + prefix: str = "jinja2/bytecode/", + timeout: t.Optional[int] = None, + ignore_memcache_errors: bool = True, ): self.client = client self.prefix = prefix self.timeout = timeout self.ignore_memcache_errors = ignore_memcache_errors - def load_bytecode(self, bucket): + def load_bytecode(self, bucket: Bucket) -> None: try: code = self.client.get(self.prefix + bucket.key) except Exception: if not self.ignore_memcache_errors: raise - code = None - if code is not None: + else: bucket.bytecode_from_string(code) - def dump_bytecode(self, bucket): - args = (self.prefix + bucket.key, bucket.bytecode_to_string()) - if self.timeout is not None: - args += (self.timeout,) + def dump_bytecode(self, bucket: Bucket) -> None: + key = self.prefix + bucket.key + value = bucket.bytecode_to_string() + try: - self.client.set(*args) + if self.timeout is not None: + self.client.set(key, value, self.timeout) + else: + self.client.set(key, value) except Exception: if not self.ignore_memcache_errors: raise diff --git a/lib/spack/external/jinja2/compiler.py b/lib/spack/external/_vendoring/jinja2/compiler.py similarity index 59% rename from lib/spack/external/jinja2/compiler.py rename to lib/spack/external/_vendoring/jinja2/compiler.py index 63297b42c30..52fd5b83e20 100644 --- a/lib/spack/external/jinja2/compiler.py +++ b/lib/spack/external/_vendoring/jinja2/compiler.py @@ -1,7 +1,8 @@ -# -*- coding: utf-8 -*- """Compiles nodes from the parser into Python code.""" -from collections import namedtuple +import typing as t +from contextlib import contextmanager from functools import update_wrapper +from io import StringIO from itertools import chain from keyword import iskeyword as is_python_keyword @@ -9,13 +10,6 @@ from markupsafe import Markup from . 
import nodes -from ._compat import imap -from ._compat import iteritems -from ._compat import izip -from ._compat import NativeStringIO -from ._compat import range_type -from ._compat import string_types -from ._compat import text_type from .exceptions import TemplateAssertionError from .idtracking import Symbols from .idtracking import VAR_LOAD_ALIAS @@ -24,9 +18,16 @@ from .idtracking import VAR_LOAD_UNDEFINED from .nodes import EvalContext from .optimizer import Optimizer +from .utils import _PassArg from .utils import concat from .visitor import NodeVisitor +if t.TYPE_CHECKING: + import typing_extensions as te + from .environment import Environment + +F = t.TypeVar("F", bound=t.Callable[..., t.Any]) + operators = { "eq": "==", "ne": "!=", @@ -38,79 +39,109 @@ "notin": "not in", } -# what method to iterate over items do we want to use for dict iteration -# in generated code? on 2.x let's go with iteritems, on 3.x with items -if hasattr(dict, "iteritems"): - dict_item_iter = "iteritems" -else: - dict_item_iter = "items" -code_features = ["division"] - -# does this python version support generator stops? (PEP 0479) -try: - exec("from __future__ import generator_stop") - code_features.append("generator_stop") -except SyntaxError: - pass - -# does this python version support yield from? -try: - exec("def f(): yield from x()") -except SyntaxError: - supports_yield_from = False -else: - supports_yield_from = True - - -def optimizeconst(f): - def new_func(self, node, frame, **kwargs): +def optimizeconst(f: F) -> F: + def new_func( + self: "CodeGenerator", node: nodes.Expr, frame: "Frame", **kwargs: t.Any + ) -> t.Any: # Only optimize if the frame is not volatile - if self.optimized and not frame.eval_ctx.volatile: + if self.optimizer is not None and not frame.eval_ctx.volatile: new_node = self.optimizer.visit(node, frame.eval_ctx) + if new_node != node: return self.visit(new_node, frame) + return f(self, node, frame, **kwargs) - return update_wrapper(new_func, f) + return update_wrapper(t.cast(F, new_func), f) + + +def _make_binop(op: str) -> t.Callable[["CodeGenerator", nodes.BinExpr, "Frame"], None]: + @optimizeconst + def visitor(self: "CodeGenerator", node: nodes.BinExpr, frame: Frame) -> None: + if ( + self.environment.sandboxed + and op in self.environment.intercepted_binops # type: ignore + ): + self.write(f"environment.call_binop(context, {op!r}, ") + self.visit(node.left, frame) + self.write(", ") + self.visit(node.right, frame) + else: + self.write("(") + self.visit(node.left, frame) + self.write(f" {op} ") + self.visit(node.right, frame) + + self.write(")") + + return visitor + + +def _make_unop( + op: str, +) -> t.Callable[["CodeGenerator", nodes.UnaryExpr, "Frame"], None]: + @optimizeconst + def visitor(self: "CodeGenerator", node: nodes.UnaryExpr, frame: Frame) -> None: + if ( + self.environment.sandboxed + and op in self.environment.intercepted_unops # type: ignore + ): + self.write(f"environment.call_unop(context, {op!r}, ") + self.visit(node.node, frame) + else: + self.write("(" + op) + self.visit(node.node, frame) + + self.write(")") + + return visitor def generate( - node, environment, name, filename, stream=None, defer_init=False, optimized=True -): + node: nodes.Template, + environment: "Environment", + name: t.Optional[str], + filename: t.Optional[str], + stream: t.Optional[t.TextIO] = None, + defer_init: bool = False, + optimized: bool = True, +) -> t.Optional[str]: """Generate the python source for a node tree.""" if not isinstance(node, nodes.Template): raise 
TypeError("Can't compile non template nodes") + generator = environment.code_generator_class( environment, name, filename, stream, defer_init, optimized ) generator.visit(node) + if stream is None: - return generator.stream.getvalue() + return generator.stream.getvalue() # type: ignore + + return None -def has_safe_repr(value): +def has_safe_repr(value: t.Any) -> bool: """Does the node have a safe representation?""" if value is None or value is NotImplemented or value is Ellipsis: return True - if type(value) in (bool, int, float, complex, range_type, Markup) + string_types: - return True - if type(value) in (tuple, list, set, frozenset): - for item in value: - if not has_safe_repr(item): - return False - return True - elif type(value) is dict: - for key, value in iteritems(value): - if not has_safe_repr(key): - return False - if not has_safe_repr(value): - return False + + if type(value) in {bool, int, float, complex, range, str, Markup}: return True + + if type(value) in {tuple, list, set, frozenset}: + return all(has_safe_repr(v) for v in value) + + if type(value) is dict: + return all(has_safe_repr(k) and has_safe_repr(v) for k, v in value.items()) + return False -def find_undeclared(nodes, names): +def find_undeclared( + nodes: t.Iterable[nodes.Node], names: t.Iterable[str] +) -> t.Set[str]: """Check if the names passed are accessed undeclared. The return value is a set of all the undeclared names from the sequence of names found. """ @@ -123,20 +154,49 @@ def find_undeclared(nodes, names): return visitor.undeclared -class MacroRef(object): - def __init__(self, node): +class MacroRef: + def __init__(self, node: t.Union[nodes.Macro, nodes.CallBlock]) -> None: self.node = node self.accesses_caller = False self.accesses_kwargs = False self.accesses_varargs = False -class Frame(object): +class Frame: """Holds compile time information for us.""" - def __init__(self, eval_ctx, parent=None, level=None): + def __init__( + self, + eval_ctx: EvalContext, + parent: t.Optional["Frame"] = None, + level: t.Optional[int] = None, + ) -> None: self.eval_ctx = eval_ctx - self.symbols = Symbols(parent and parent.symbols or None, level=level) + + # the parent of this frame + self.parent = parent + + if parent is None: + self.symbols = Symbols(level=level) + + # in some dynamic inheritance situations the compiler needs to add + # write tests around output statements. + self.require_output_check = False + + # inside some tags we are using a buffer rather than yield statements. + # this for example affects {% filter %} or {% macro %}. If a frame + # is buffered this variable points to the name of the list used as + # buffer. + self.buffer: t.Optional[str] = None + + # the name of the block we're in, otherwise None. + self.block: t.Optional[str] = None + + else: + self.symbols = Symbols(parent.symbols, level=level) + self.require_output_check = parent.require_output_check + self.buffer = parent.buffer + self.block = parent.block # a toplevel frame is the root + soft frames such as if conditions. self.toplevel = False @@ -146,47 +206,40 @@ def __init__(self, eval_ctx, parent=None, level=None): # situations. self.rootlevel = False - # in some dynamic inheritance situations the compiler needs to add - # write tests around output statements. - self.require_output_check = parent and parent.require_output_check + # variables set inside of loops and blocks should not affect outer frames, + # but they still needs to be kept track of as part of the active context. 
+ self.loop_frame = False + self.block_frame = False - # inside some tags we are using a buffer rather than yield statements. - # this for example affects {% filter %} or {% macro %}. If a frame - # is buffered this variable points to the name of the list used as - # buffer. - self.buffer = None + # track whether the frame is being used in an if-statement or conditional + # expression as it determines which errors should be raised during runtime + # or compile time. + self.soft_frame = False - # the name of the block we're in, otherwise None. - self.block = parent and parent.block or None - - # the parent of this frame - self.parent = parent - - if parent is not None: - self.buffer = parent.buffer - - def copy(self): + def copy(self) -> "Frame": """Create a copy of the current one.""" - rv = object.__new__(self.__class__) + rv = t.cast(Frame, object.__new__(self.__class__)) rv.__dict__.update(self.__dict__) rv.symbols = self.symbols.copy() return rv - def inner(self, isolated=False): + def inner(self, isolated: bool = False) -> "Frame": """Return an inner frame.""" if isolated: return Frame(self.eval_ctx, level=self.symbols.level + 1) return Frame(self.eval_ctx, self) - def soft(self): + def soft(self) -> "Frame": """Return a soft frame. A soft frame may not be modified as standalone thing as it shares the resources with the frame it was created of, but it's not a rootlevel frame any longer. - This is only used to implement if-statements. + This is only used to implement if-statements and conditional + expressions. """ rv = self.copy() rv.rootlevel = False + rv.soft_frame = True return rv __copy__ = copy @@ -199,19 +252,19 @@ class VisitorExit(RuntimeError): class DependencyFinderVisitor(NodeVisitor): """A visitor that collects filter and test calls.""" - def __init__(self): - self.filters = set() - self.tests = set() + def __init__(self) -> None: + self.filters: t.Set[str] = set() + self.tests: t.Set[str] = set() - def visit_Filter(self, node): + def visit_Filter(self, node: nodes.Filter) -> None: self.generic_visit(node) self.filters.add(node.name) - def visit_Test(self, node): + def visit_Test(self, node: nodes.Test) -> None: self.generic_visit(node) self.tests.add(node.name) - def visit_Block(self, node): + def visit_Block(self, node: nodes.Block) -> None: """Stop visiting at blocks.""" @@ -221,11 +274,11 @@ class UndeclaredNameVisitor(NodeVisitor): not stop at closure frames. 
""" - def __init__(self, names): + def __init__(self, names: t.Iterable[str]) -> None: self.names = set(names) - self.undeclared = set() + self.undeclared: t.Set[str] = set() - def visit_Name(self, node): + def visit_Name(self, node: nodes.Name) -> None: if node.ctx == "load" and node.name in self.names: self.undeclared.add(node.name) if self.undeclared == self.names: @@ -233,7 +286,7 @@ def visit_Name(self, node): else: self.names.discard(node.name) - def visit_Block(self, node): + def visit_Block(self, node: nodes.Block) -> None: """Stop visiting a blocks.""" @@ -246,26 +299,33 @@ class CompilerExit(Exception): class CodeGenerator(NodeVisitor): def __init__( - self, environment, name, filename, stream=None, defer_init=False, optimized=True - ): + self, + environment: "Environment", + name: t.Optional[str], + filename: t.Optional[str], + stream: t.Optional[t.TextIO] = None, + defer_init: bool = False, + optimized: bool = True, + ) -> None: if stream is None: - stream = NativeStringIO() + stream = StringIO() self.environment = environment self.name = name self.filename = filename self.stream = stream self.created_block_context = False self.defer_init = defer_init - self.optimized = optimized + self.optimizer: t.Optional[Optimizer] = None + if optimized: self.optimizer = Optimizer(environment) # aliases for imports - self.import_aliases = {} + self.import_aliases: t.Dict[str, str] = {} # a registry for all blocks. Because blocks are moved out # into the global python scope they are registered here - self.blocks = {} + self.blocks: t.Dict[str, nodes.Block] = {} # the number of extends statements so far self.extends_so_far = 0 @@ -279,12 +339,12 @@ def __init__( self.code_lineno = 1 # registry of all filters and tests (global, not block local) - self.tests = {} - self.filters = {} + self.tests: t.Dict[str, str] = {} + self.filters: t.Dict[str, str] = {} # the debug information - self.debug_info = [] - self._write_debug_info = None + self.debug_info: t.List[t.Tuple[int, int]] = [] + self._write_debug_info: t.Optional[int] = None # the number of new lines before the next write() self._new_lines = 0 @@ -303,75 +363,83 @@ def __init__( self._indentation = 0 # Tracks toplevel assignments - self._assign_stack = [] + self._assign_stack: t.List[t.Set[str]] = [] # Tracks parameter definition blocks - self._param_def_block = [] + self._param_def_block: t.List[t.Set[str]] = [] # Tracks the current context. 
self._context_reference_stack = ["context"] + @property + def optimized(self) -> bool: + return self.optimizer is not None + # -- Various compilation helpers - def fail(self, msg, lineno): + def fail(self, msg: str, lineno: int) -> "te.NoReturn": """Fail with a :exc:`TemplateAssertionError`.""" raise TemplateAssertionError(msg, lineno, self.name, self.filename) - def temporary_identifier(self): + def temporary_identifier(self) -> str: """Get a new unique identifier.""" self._last_identifier += 1 - return "t_%d" % self._last_identifier + return f"t_{self._last_identifier}" - def buffer(self, frame): + def buffer(self, frame: Frame) -> None: """Enable buffering for the frame from that point onwards.""" frame.buffer = self.temporary_identifier() - self.writeline("%s = []" % frame.buffer) + self.writeline(f"{frame.buffer} = []") - def return_buffer_contents(self, frame, force_unescaped=False): + def return_buffer_contents( + self, frame: Frame, force_unescaped: bool = False + ) -> None: """Return the buffer contents of the frame.""" if not force_unescaped: if frame.eval_ctx.volatile: self.writeline("if context.eval_ctx.autoescape:") self.indent() - self.writeline("return Markup(concat(%s))" % frame.buffer) + self.writeline(f"return Markup(concat({frame.buffer}))") self.outdent() self.writeline("else:") self.indent() - self.writeline("return concat(%s)" % frame.buffer) + self.writeline(f"return concat({frame.buffer})") self.outdent() return elif frame.eval_ctx.autoescape: - self.writeline("return Markup(concat(%s))" % frame.buffer) + self.writeline(f"return Markup(concat({frame.buffer}))") return - self.writeline("return concat(%s)" % frame.buffer) + self.writeline(f"return concat({frame.buffer})") - def indent(self): + def indent(self) -> None: """Indent by one.""" self._indentation += 1 - def outdent(self, step=1): + def outdent(self, step: int = 1) -> None: """Outdent by step.""" self._indentation -= step - def start_write(self, frame, node=None): + def start_write(self, frame: Frame, node: t.Optional[nodes.Node] = None) -> None: """Yield or write into the frame buffer.""" if frame.buffer is None: self.writeline("yield ", node) else: - self.writeline("%s.append(" % frame.buffer, node) + self.writeline(f"{frame.buffer}.append(", node) - def end_write(self, frame): + def end_write(self, frame: Frame) -> None: """End the writing process started by `start_write`.""" if frame.buffer is not None: self.write(")") - def simple_write(self, s, frame, node=None): + def simple_write( + self, s: str, frame: Frame, node: t.Optional[nodes.Node] = None + ) -> None: """Simple shortcut for start_write + write + end_write.""" self.start_write(frame, node) self.write(s) self.end_write(frame) - def blockvisit(self, nodes, frame): + def blockvisit(self, nodes: t.Iterable[nodes.Node], frame: Frame) -> None: """Visit a list of nodes as block in a frame. If the current frame is no buffer a dummy ``if 0: yield None`` is written automatically. 
""" @@ -382,7 +450,7 @@ def blockvisit(self, nodes, frame): except CompilerExit: pass - def write(self, x): + def write(self, x: str) -> None: """Write a string into the output stream.""" if self._new_lines: if not self._first_write: @@ -396,19 +464,26 @@ def write(self, x): self._new_lines = 0 self.stream.write(x) - def writeline(self, x, node=None, extra=0): + def writeline( + self, x: str, node: t.Optional[nodes.Node] = None, extra: int = 0 + ) -> None: """Combination of newline and write.""" self.newline(node, extra) self.write(x) - def newline(self, node=None, extra=0): + def newline(self, node: t.Optional[nodes.Node] = None, extra: int = 0) -> None: """Add one or more newlines before the next write.""" self._new_lines = max(self._new_lines, 1 + extra) if node is not None and node.lineno != self._last_line: self._write_debug_info = node.lineno self._last_line = node.lineno - def signature(self, node, frame, extra_kwargs=None): + def signature( + self, + node: t.Union[nodes.Call, nodes.Filter, nodes.Test], + frame: Frame, + extra_kwargs: t.Optional[t.Mapping[str, t.Any]] = None, + ) -> None: """Writes a function call to the stream for the current node. A leading comma is added automatically. The extra keyword arguments may not include python keywords otherwise a syntax @@ -417,11 +492,10 @@ def signature(self, node, frame, extra_kwargs=None): """ # if any of the given keyword arguments is a python keyword # we have to make sure that no invalid call is created. - kwarg_workaround = False - for kwarg in chain((x.key for x in node.kwargs), extra_kwargs or ()): - if is_python_keyword(kwarg): - kwarg_workaround = True - break + kwarg_workaround = any( + is_python_keyword(t.cast(str, k)) + for k in chain((x.key for x in node.kwargs), extra_kwargs or ()) + ) for arg in node.args: self.write(", ") @@ -432,8 +506,8 @@ def signature(self, node, frame, extra_kwargs=None): self.write(", ") self.visit(kwarg, frame) if extra_kwargs is not None: - for key, value in iteritems(extra_kwargs): - self.write(", %s=%s" % (key, value)) + for key, value in extra_kwargs.items(): + self.write(f", {key}={value}") if node.dyn_args: self.write(", *") self.visit(node.dyn_args, frame) @@ -444,12 +518,12 @@ def signature(self, node, frame, extra_kwargs=None): else: self.write(", **{") for kwarg in node.kwargs: - self.write("%r: " % kwarg.key) + self.write(f"{kwarg.key!r}: ") self.visit(kwarg.value, frame) self.write(", ") if extra_kwargs is not None: - for key, value in iteritems(extra_kwargs): - self.write("%r: %s, " % (key, value)) + for key, value in extra_kwargs.items(): + self.write(f"{key!r}: {value}, ") if node.dyn_kwargs is not None: self.write("}, **") self.visit(node.dyn_kwargs, frame) @@ -461,50 +535,82 @@ def signature(self, node, frame, extra_kwargs=None): self.write(", **") self.visit(node.dyn_kwargs, frame) - def pull_dependencies(self, nodes): - """Pull all the dependencies.""" + def pull_dependencies(self, nodes: t.Iterable[nodes.Node]) -> None: + """Find all filter and test names used in the template and + assign them to variables in the compiled namespace. Checking + that the names are registered with the environment is done when + compiling the Filter and Test nodes. If the node is in an If or + CondExpr node, the check is done at runtime instead. + + .. versionchanged:: 3.0 + Filters and tests in If and CondExpr nodes are checked at + runtime instead of compile time. 
+ """ visitor = DependencyFinderVisitor() + for node in nodes: visitor.visit(node) - for dependency in "filters", "tests": - mapping = getattr(self, dependency) - for name in getattr(visitor, dependency): - if name not in mapping: - mapping[name] = self.temporary_identifier() - self.writeline( - "%s = environment.%s[%r]" % (mapping[name], dependency, name) - ) - def enter_frame(self, frame): + for id_map, names, dependency in (self.filters, visitor.filters, "filters"), ( + self.tests, + visitor.tests, + "tests", + ): + for name in sorted(names): + if name not in id_map: + id_map[name] = self.temporary_identifier() + + # add check during runtime that dependencies used inside of executed + # blocks are defined, as this step may be skipped during compile time + self.writeline("try:") + self.indent() + self.writeline(f"{id_map[name]} = environment.{dependency}[{name!r}]") + self.outdent() + self.writeline("except KeyError:") + self.indent() + self.writeline("@internalcode") + self.writeline(f"def {id_map[name]}(*unused):") + self.indent() + self.writeline( + f'raise TemplateRuntimeError("No {dependency[:-1]}' + f' named {name!r} found.")' + ) + self.outdent() + self.outdent() + + def enter_frame(self, frame: Frame) -> None: undefs = [] - for target, (action, param) in iteritems(frame.symbols.loads): + for target, (action, param) in frame.symbols.loads.items(): if action == VAR_LOAD_PARAMETER: pass elif action == VAR_LOAD_RESOLVE: - self.writeline("%s = %s(%r)" % (target, self.get_resolve_func(), param)) + self.writeline(f"{target} = {self.get_resolve_func()}({param!r})") elif action == VAR_LOAD_ALIAS: - self.writeline("%s = %s" % (target, param)) + self.writeline(f"{target} = {param}") elif action == VAR_LOAD_UNDEFINED: undefs.append(target) else: raise NotImplementedError("unknown load instruction") if undefs: - self.writeline("%s = missing" % " = ".join(undefs)) + self.writeline(f"{' = '.join(undefs)} = missing") - def leave_frame(self, frame, with_python_scope=False): + def leave_frame(self, frame: Frame, with_python_scope: bool = False) -> None: if not with_python_scope: undefs = [] - for target, _ in iteritems(frame.symbols.loads): + for target in frame.symbols.loads: undefs.append(target) if undefs: - self.writeline("%s = missing" % " = ".join(undefs)) + self.writeline(f"{' = '.join(undefs)} = missing") - def func(self, name): - if self.environment.is_async: - return "async def %s" % name - return "def %s" % name + def choose_async(self, async_value: str = "async ", sync_value: str = "") -> str: + return async_value if self.environment.is_async else sync_value - def macro_body(self, node, frame): + def func(self, name: str) -> str: + return f"{self.choose_async()}def {name}" + + def macro_body( + self, node: t.Union[nodes.Macro, nodes.CallBlock], frame: Frame + ) -> t.Tuple[Frame, MacroRef]: """Dump the function def of a macro or call block.""" frame = frame.inner() frame.symbols.analyze_node(node) @@ -513,6 +619,7 @@ def macro_body(self, node, frame): explicit_caller = None skip_special_params = set() args = [] + for idx, arg in enumerate(node.args): if arg.name == "caller": explicit_caller = idx @@ -552,7 +659,7 @@ def macro_body(self, node, frame): # macros are delayed, they never require output checks frame.require_output_check = False frame.symbols.analyze_node(node) - self.writeline("%s(%s):" % (self.func("macro"), ", ".join(args)), node) + self.writeline(f"{self.func('macro')}({', '.join(args)}):", node) self.indent() self.buffer(frame) @@ -561,17 +668,17 @@ def macro_body(self, 
node, frame): self.push_parameter_definitions(frame) for idx, arg in enumerate(node.args): ref = frame.symbols.ref(arg.name) - self.writeline("if %s is missing:" % ref) + self.writeline(f"if {ref} is missing:") self.indent() try: default = node.defaults[idx - len(node.args)] except IndexError: self.writeline( - "%s = undefined(%r, name=%r)" - % (ref, "parameter %r was not provided" % arg.name, arg.name) + f'{ref} = undefined("parameter {arg.name!r} was not provided",' + f" name={arg.name!r})" ) else: - self.writeline("%s = " % ref) + self.writeline(f"{ref} = ") self.visit(default, frame) self.mark_parameter_stored(ref) self.outdent() @@ -584,38 +691,33 @@ def macro_body(self, node, frame): return frame, macro_ref - def macro_def(self, macro_ref, frame): + def macro_def(self, macro_ref: MacroRef, frame: Frame) -> None: """Dump the macro definition for the def created by macro_body.""" arg_tuple = ", ".join(repr(x.name) for x in macro_ref.node.args) name = getattr(macro_ref.node, "name", None) if len(macro_ref.node.args) == 1: arg_tuple += "," self.write( - "Macro(environment, macro, %r, (%s), %r, %r, %r, " - "context.eval_ctx.autoescape)" - % ( - name, - arg_tuple, - macro_ref.accesses_kwargs, - macro_ref.accesses_varargs, - macro_ref.accesses_caller, - ) + f"Macro(environment, macro, {name!r}, ({arg_tuple})," + f" {macro_ref.accesses_kwargs!r}, {macro_ref.accesses_varargs!r}," + f" {macro_ref.accesses_caller!r}, context.eval_ctx.autoescape)" ) - def position(self, node): + def position(self, node: nodes.Node) -> str: """Return a human readable position for the node.""" - rv = "line %d" % node.lineno + rv = f"line {node.lineno}" if self.name is not None: - rv += " in " + repr(self.name) + rv = f"{rv} in {self.name!r}" return rv - def dump_local_context(self, frame): - return "{%s}" % ", ".join( - "%r: %s" % (name, target) - for name, target in iteritems(frame.symbols.dump_stores()) + def dump_local_context(self, frame: Frame) -> str: + items_kv = ", ".join( + f"{name!r}: {target}" + for name, target in frame.symbols.dump_stores().items() ) + return f"{{{items_kv}}}" - def write_commons(self): + def write_commons(self) -> None: """Writes a common preamble that is used by root and block functions. Primarily this sets up common local helpers and enforces a generator through a dead branch. @@ -627,7 +729,7 @@ def write_commons(self): self.writeline("cond_expr_undefined = Undefined") self.writeline("if 0: yield None") - def push_parameter_definitions(self, frame): + def push_parameter_definitions(self, frame: Frame) -> None: """Pushes all parameter targets from the given frame into a local stack that permits tracking of yet to be assigned parameters. In particular this enables the optimization from `visit_Name` to skip @@ -636,97 +738,109 @@ def push_parameter_definitions(self, frame): """ self._param_def_block.append(frame.symbols.dump_param_targets()) - def pop_parameter_definitions(self): + def pop_parameter_definitions(self) -> None: """Pops the current parameter definitions set.""" self._param_def_block.pop() - def mark_parameter_stored(self, target): + def mark_parameter_stored(self, target: str) -> None: """Marks a parameter in the current parameter definitions as stored. This will skip the enforced undefined checks. 
""" if self._param_def_block: self._param_def_block[-1].discard(target) - def push_context_reference(self, target): + def push_context_reference(self, target: str) -> None: self._context_reference_stack.append(target) - def pop_context_reference(self): + def pop_context_reference(self) -> None: self._context_reference_stack.pop() - def get_context_ref(self): + def get_context_ref(self) -> str: return self._context_reference_stack[-1] - def get_resolve_func(self): + def get_resolve_func(self) -> str: target = self._context_reference_stack[-1] if target == "context": return "resolve" - return "%s.resolve" % target + return f"{target}.resolve" - def derive_context(self, frame): - return "%s.derived(%s)" % ( - self.get_context_ref(), - self.dump_local_context(frame), - ) + def derive_context(self, frame: Frame) -> str: + return f"{self.get_context_ref()}.derived({self.dump_local_context(frame)})" - def parameter_is_undeclared(self, target): + def parameter_is_undeclared(self, target: str) -> bool: """Checks if a given target is an undeclared parameter.""" if not self._param_def_block: return False return target in self._param_def_block[-1] - def push_assign_tracking(self): + def push_assign_tracking(self) -> None: """Pushes a new layer for assignment tracking.""" self._assign_stack.append(set()) - def pop_assign_tracking(self, frame): + def pop_assign_tracking(self, frame: Frame) -> None: """Pops the topmost level for assignment tracking and updates the context variables if necessary. """ vars = self._assign_stack.pop() - if not frame.toplevel or not vars: + if ( + not frame.block_frame + and not frame.loop_frame + and not frame.toplevel + or not vars + ): return public_names = [x for x in vars if x[:1] != "_"] if len(vars) == 1: name = next(iter(vars)) ref = frame.symbols.ref(name) - self.writeline("context.vars[%r] = %s" % (name, ref)) + if frame.loop_frame: + self.writeline(f"_loop_vars[{name!r}] = {ref}") + return + if frame.block_frame: + self.writeline(f"_block_vars[{name!r}] = {ref}") + return + self.writeline(f"context.vars[{name!r}] = {ref}") else: - self.writeline("context.vars.update({") + if frame.loop_frame: + self.writeline("_loop_vars.update({") + elif frame.block_frame: + self.writeline("_block_vars.update({") + else: + self.writeline("context.vars.update({") for idx, name in enumerate(vars): if idx: self.write(", ") ref = frame.symbols.ref(name) - self.write("%r: %s" % (name, ref)) + self.write(f"{name!r}: {ref}") self.write("})") - if public_names: + if not frame.block_frame and not frame.loop_frame and public_names: if len(public_names) == 1: - self.writeline("context.exported_vars.add(%r)" % public_names[0]) + self.writeline(f"context.exported_vars.add({public_names[0]!r})") else: - self.writeline( - "context.exported_vars.update((%s))" - % ", ".join(imap(repr, public_names)) - ) + names_str = ", ".join(map(repr, public_names)) + self.writeline(f"context.exported_vars.update(({names_str}))") # -- Statement Visitors - def visit_Template(self, node, frame=None): + def visit_Template( + self, node: nodes.Template, frame: t.Optional[Frame] = None + ) -> None: assert frame is None, "no root frame allowed" eval_ctx = EvalContext(self.environment, self.name) - from .runtime import exported - - self.writeline("from __future__ import %s" % ", ".join(code_features)) - self.writeline("from jinja2.runtime import " + ", ".join(exported)) + from .runtime import exported, async_exported if self.environment.is_async: - self.writeline( - "from jinja2.asyncsupport import auto_await, " - 
"auto_aiter, AsyncLoopContext" - ) + exported_names = sorted(exported + async_exported) + else: + exported_names = sorted(exported) + + self.writeline("from __future__ import generator_stop") # Python < 3.7 + self.writeline("from jinja2.runtime import " + ", ".join(exported_names)) # if we want a deferred initialization we cannot move the # environment into a local name - envenv = not self.defer_init and ", environment=environment" or "" + envenv = "" if self.defer_init else ", environment=environment" # do we have an extends tag at all? If not, we can save some # overhead by just not processing any inheritance code. @@ -735,7 +849,7 @@ def visit_Template(self, node, frame=None): # find all blocks for block in node.find_all(nodes.Block): if block.name in self.blocks: - self.fail("block %r defined twice" % block.name, block.lineno) + self.fail(f"block {block.name!r} defined twice", block.lineno) self.blocks[block.name] = block # find all imports and import them @@ -745,16 +859,16 @@ def visit_Template(self, node, frame=None): self.import_aliases[imp] = alias = self.temporary_identifier() if "." in imp: module, obj = imp.rsplit(".", 1) - self.writeline("from %s import %s as %s" % (module, obj, alias)) + self.writeline(f"from {module} import {obj} as {alias}") else: - self.writeline("import %s as %s" % (imp, alias)) + self.writeline(f"import {imp} as {alias}") # add the load name - self.writeline("name = %r" % self.name) + self.writeline(f"name = {self.name!r}") # generate the root render function. self.writeline( - "%s(context, missing=missing%s):" % (self.func("root"), envenv), extra=1 + f"{self.func('root')}(context, missing=missing{envenv}):", extra=1 ) self.indent() self.write_commons() @@ -763,7 +877,7 @@ def visit_Template(self, node, frame=None): frame = Frame(eval_ctx) if "self" in find_undeclared(node.body, ("self",)): ref = frame.symbols.declare_parameter("self") - self.writeline("%s = TemplateReference(context)" % ref) + self.writeline(f"{ref} = TemplateReference(context)") frame.symbols.analyze_node(node) frame.toplevel = frame.rootlevel = True frame.require_output_check = have_extends and not self.has_known_extends @@ -781,13 +895,11 @@ def visit_Template(self, node, frame=None): self.indent() self.writeline("if parent_template is not None:") self.indent() - if supports_yield_from and not self.environment.is_async: + if not self.environment.is_async: self.writeline("yield from parent_template.root_render_func(context)") else: self.writeline( - "%sfor event in parent_template." - "root_render_func(context):" - % (self.environment.is_async and "async " or "") + "async for event in parent_template.root_render_func(context):" ) self.indent() self.writeline("yield event") @@ -795,10 +907,9 @@ def visit_Template(self, node, frame=None): self.outdent(1 + (not self.has_known_extends)) # at this point we now have the blocks collected and can visit them too. - for name, block in iteritems(self.blocks): + for name, block in self.blocks.items(): self.writeline( - "%s(context, missing=missing%s):" - % (self.func("block_" + name), envenv), + f"{self.func('block_' + name)}(context, missing=missing{envenv}):", block, 1, ) @@ -808,32 +919,29 @@ def visit_Template(self, node, frame=None): # toplevel template. This would cause a variety of # interesting issues with identifier tracking. 
block_frame = Frame(eval_ctx) + block_frame.block_frame = True undeclared = find_undeclared(block.body, ("self", "super")) if "self" in undeclared: ref = block_frame.symbols.declare_parameter("self") - self.writeline("%s = TemplateReference(context)" % ref) + self.writeline(f"{ref} = TemplateReference(context)") if "super" in undeclared: ref = block_frame.symbols.declare_parameter("super") - self.writeline("%s = context.super(%r, block_%s)" % (ref, name, name)) + self.writeline(f"{ref} = context.super({name!r}, block_{name})") block_frame.symbols.analyze_node(block) block_frame.block = name + self.writeline("_block_vars = {}") self.enter_frame(block_frame) self.pull_dependencies(block.body) self.blockvisit(block.body, block_frame) self.leave_frame(block_frame, with_python_scope=True) self.outdent() - self.writeline( - "blocks = {%s}" % ", ".join("%r: block_%s" % (x, x) for x in self.blocks), - extra=1, - ) + blocks_kv_str = ", ".join(f"{x!r}: block_{x}" for x in self.blocks) + self.writeline(f"blocks = {{{blocks_kv_str}}}", extra=1) + debug_kv_str = "&".join(f"{k}={v}" for k, v in self.debug_info) + self.writeline(f"debug_info = {debug_kv_str!r}") - # add a function that returns the debug info - self.writeline( - "debug_info = %r" % "&".join("%s=%s" % x for x in self.debug_info) - ) - - def visit_Block(self, node, frame): + def visit_Block(self, node: nodes.Block, frame: Frame) -> None: """Call a block and register it for the template.""" level = 0 if frame.toplevel: @@ -851,18 +959,23 @@ def visit_Block(self, node, frame): else: context = self.get_context_ref() - if ( - supports_yield_from - and not self.environment.is_async - and frame.buffer is None - ): + if node.required: + self.writeline(f"if len(context.blocks[{node.name!r}]) <= 1:", node) + self.indent() self.writeline( - "yield from context.blocks[%r][0](%s)" % (node.name, context), node + f'raise TemplateRuntimeError("Required block {node.name!r} not found")', + node, + ) + self.outdent() + + if not self.environment.is_async and frame.buffer is None: + self.writeline( + f"yield from context.blocks[{node.name!r}][0]({context})", node ) else: - loop = self.environment.is_async and "async for" or "for" self.writeline( - "%s event in context.blocks[%r][0](%s):" % (loop, node.name, context), + f"{self.choose_async()}for event in" + f" context.blocks[{node.name!r}][0]({context}):", node, ) self.indent() @@ -871,7 +984,7 @@ def visit_Block(self, node, frame): self.outdent(level) - def visit_Extends(self, node, frame): + def visit_Extends(self, node: nodes.Extends, frame: Frame) -> None: """Calls the extender.""" if not frame.toplevel: self.fail("cannot use extend from a non top-level scope", node.lineno) @@ -888,7 +1001,7 @@ def visit_Extends(self, node, frame): if not self.has_known_extends: self.writeline("if parent_template is not None:") self.indent() - self.writeline("raise TemplateRuntimeError(%r)" % "extended multiple times") + self.writeline('raise TemplateRuntimeError("extended multiple times")') # if we have a known extends already we don't need that code here # as we know that the template execution will end here. 
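[note] The `node.required` handling in the visit_Block hunk above implements Jinja's required blocks: the generated `if len(context.blocks[...]) <= 1` guard raises at render time when a block marked `required` is never overridden. A minimal sketch of the observable behavior, assuming an in-memory loader (the template names are illustrative):

    from jinja2 import DictLoader, Environment
    from jinja2.exceptions import TemplateRuntimeError

    env = Environment(
        loader=DictLoader(
            {
                "base.html": "{% block body required %}{% endblock %}",
                "child.html": "{% extends 'base.html' %}",
            }
        )
    )

    try:
        # child.html never overrides the required block, so only the base
        # definition is in context.blocks["body"] and the guard fires.
        env.get_template("child.html").render()
    except TemplateRuntimeError as exc:
        print(exc)  # Required block 'body' not found
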
@@ -899,10 +1012,8 @@ def visit_Extends(self, node, frame): self.writeline("parent_template = environment.get_template(", node) self.visit(node.template, frame) - self.write(", %r)" % self.name) - self.writeline( - "for name, parent_block in parent_template.blocks.%s():" % dict_item_iter - ) + self.write(f", {self.name!r})") + self.writeline("for name, parent_block in parent_template.blocks.items():") self.indent() self.writeline("context.blocks.setdefault(name, []).append(parent_block)") self.outdent() @@ -916,7 +1027,7 @@ def visit_Extends(self, node, frame): # and now we have one more self.extends_so_far += 1 - def visit_Include(self, node, frame): + def visit_Include(self, node: nodes.Include, frame: Frame) -> None: """Handles includes.""" if node.ignore_missing: self.writeline("try:") @@ -924,16 +1035,16 @@ def visit_Include(self, node, frame): func_name = "get_or_select_template" if isinstance(node.template, nodes.Const): - if isinstance(node.template.value, string_types): + if isinstance(node.template.value, str): func_name = "get_template" elif isinstance(node.template.value, (tuple, list)): func_name = "select_template" elif isinstance(node.template, (nodes.Tuple, nodes.List)): func_name = "select_template" - self.writeline("template = environment.%s(" % func_name, node) + self.writeline(f"template = environment.{func_name}(", node) self.visit(node.template, frame) - self.write(", %r)" % self.name) + self.write(f", {self.name!r})") if node.ignore_missing: self.outdent() self.writeline("except TemplateNotFound:") @@ -945,26 +1056,19 @@ def visit_Include(self, node, frame): skip_event_yield = False if node.with_context: - loop = self.environment.is_async and "async for" or "for" self.writeline( - "%s event in template.root_render_func(" - "template.new_context(context.get_all(), True, " - "%s)):" % (loop, self.dump_local_context(frame)) + f"{self.choose_async()}for event in template.root_render_func(" + "template.new_context(context.get_all(), True," + f" {self.dump_local_context(frame)})):" ) elif self.environment.is_async: self.writeline( - "for event in (await " - "template._get_default_module_async())" + "for event in (await template._get_default_module_async())" "._body_stream:" ) else: - if supports_yield_from: - self.writeline("yield from template._get_default_module()._body_stream") - skip_event_yield = True - else: - self.writeline( - "for event in template._get_default_module()._body_stream:" - ) + self.writeline("yield from template._get_default_module()._body_stream") + skip_event_yield = True if not skip_event_yield: self.indent() @@ -974,53 +1078,37 @@ def visit_Include(self, node, frame): if node.ignore_missing: self.outdent() - def visit_Import(self, node, frame): - """Visit regular imports.""" - self.writeline("%s = " % frame.symbols.ref(node.target), node) - if frame.toplevel: - self.write("context.vars[%r] = " % node.target) - if self.environment.is_async: - self.write("await ") - self.write("environment.get_template(") + def _import_common( + self, node: t.Union[nodes.Import, nodes.FromImport], frame: Frame + ) -> None: + self.write(f"{self.choose_async('await ')}environment.get_template(") self.visit(node.template, frame) - self.write(", %r)." 
% self.name) - if node.with_context: - self.write( - "make_module%s(context.get_all(), True, %s)" - % ( - self.environment.is_async and "_async" or "", - self.dump_local_context(frame), - ) - ) - elif self.environment.is_async: - self.write("_get_default_module_async()") - else: - self.write("_get_default_module()") - if frame.toplevel and not node.target.startswith("_"): - self.writeline("context.exported_vars.discard(%r)" % node.target) + self.write(f", {self.name!r}).") - def visit_FromImport(self, node, frame): + if node.with_context: + f_name = f"make_module{self.choose_async('_async')}" + self.write( + f"{f_name}(context.get_all(), True, {self.dump_local_context(frame)})" + ) + else: + self.write(f"_get_default_module{self.choose_async('_async')}(context)") + + def visit_Import(self, node: nodes.Import, frame: Frame) -> None: + """Visit regular imports.""" + self.writeline(f"{frame.symbols.ref(node.target)} = ", node) + if frame.toplevel: + self.write(f"context.vars[{node.target!r}] = ") + + self._import_common(node, frame) + + if frame.toplevel and not node.target.startswith("_"): + self.writeline(f"context.exported_vars.discard({node.target!r})") + + def visit_FromImport(self, node: nodes.FromImport, frame: Frame) -> None: """Visit named imports.""" self.newline(node) - self.write( - "included_template = %senvironment.get_template(" - % (self.environment.is_async and "await " or "") - ) - self.visit(node.template, frame) - self.write(", %r)." % self.name) - if node.with_context: - self.write( - "make_module%s(context.get_all(), True, %s)" - % ( - self.environment.is_async and "_async" or "", - self.dump_local_context(frame), - ) - ) - elif self.environment.is_async: - self.write("_get_default_module_async()") - else: - self.write("_get_default_module()") - + self.write("included_template = ") + self._import_common(node, frame) var_names = [] discarded_names = [] for name in node.names: @@ -1029,22 +1117,18 @@ def visit_FromImport(self, node, frame): else: alias = name self.writeline( - "%s = getattr(included_template, " - "%r, missing)" % (frame.symbols.ref(alias), name) + f"{frame.symbols.ref(alias)} =" + f" getattr(included_template, {name!r}, missing)" ) - self.writeline("if %s is missing:" % frame.symbols.ref(alias)) + self.writeline(f"if {frame.symbols.ref(alias)} is missing:") self.indent() + message = ( + "the template {included_template.__name__!r}" + f" (imported on {self.position(node)})" + f" does not export the requested name {name!r}" + ) self.writeline( - "%s = undefined(%r %% " - "included_template.__name__, " - "name=%r)" - % ( - frame.symbols.ref(alias), - "the template %%r (imported on %s) does " - "not export the requested name %s" - % (self.position(node), repr(name)), - name, - ) + f"{frame.symbols.ref(alias)} = undefined(f{message!r}, name={name!r})" ) self.outdent() if frame.toplevel: @@ -1055,35 +1139,35 @@ def visit_FromImport(self, node, frame): if var_names: if len(var_names) == 1: name = var_names[0] - self.writeline( - "context.vars[%r] = %s" % (name, frame.symbols.ref(name)) - ) + self.writeline(f"context.vars[{name!r}] = {frame.symbols.ref(name)}") else: - self.writeline( - "context.vars.update({%s})" - % ", ".join( - "%r: %s" % (name, frame.symbols.ref(name)) for name in var_names - ) + names_kv = ", ".join( + f"{name!r}: {frame.symbols.ref(name)}" for name in var_names ) + self.writeline(f"context.vars.update({{{names_kv}}})") if discarded_names: if len(discarded_names) == 1: - self.writeline("context.exported_vars.discard(%r)" % discarded_names[0]) 
+ self.writeline(f"context.exported_vars.discard({discarded_names[0]!r})") else: + names_str = ", ".join(map(repr, discarded_names)) self.writeline( - "context.exported_vars.difference_" - "update((%s))" % ", ".join(imap(repr, discarded_names)) + f"context.exported_vars.difference_update(({names_str}))" ) - def visit_For(self, node, frame): + def visit_For(self, node: nodes.For, frame: Frame) -> None: loop_frame = frame.inner() + loop_frame.loop_frame = True test_frame = frame.inner() else_frame = frame.inner() # try to figure out if we have an extended loop. An extended loop # is necessary if the loop is in recursive mode if the special loop - # variable is accessed in the body. - extended_loop = node.recursive or "loop" in find_undeclared( - node.iter_child_nodes(only=("body",)), ("loop",) + # variable is accessed in the body if the body is a scoped block. + extended_loop = ( + node.recursive + or "loop" + in find_undeclared(node.iter_child_nodes(only=("body",)), ("loop",)) + or any(block.scoped for block in node.find_all(nodes.Block)) ) loop_ref = None @@ -1097,13 +1181,13 @@ def visit_For(self, node, frame): if node.test: loop_filter_func = self.temporary_identifier() test_frame.symbols.analyze_node(node, for_branch="test") - self.writeline("%s(fiter):" % self.func(loop_filter_func), node.test) + self.writeline(f"{self.func(loop_filter_func)}(fiter):", node.test) self.indent() self.enter_frame(test_frame) - self.writeline(self.environment.is_async and "async for " or "for ") + self.writeline(self.choose_async("async for ", "for ")) self.visit(node.target, loop_frame) self.write(" in ") - self.write(self.environment.is_async and "auto_aiter(fiter)" or "fiter") + self.write(self.choose_async("auto_aiter(fiter)", "fiter")) self.write(":") self.indent() self.writeline("if ", node.test) @@ -1120,7 +1204,7 @@ def visit_For(self, node, frame): # variable is a special one we have to enforce aliasing for it. 
if node.recursive: self.writeline( - "%s(reciter, loop_render_func, depth=0):" % self.func("loop"), node + f"{self.func('loop')}(reciter, loop_render_func, depth=0):", node ) self.indent() self.buffer(loop_frame) @@ -1131,7 +1215,7 @@ def visit_For(self, node, frame): # make sure the loop variable is a special one and raise a template # assertion error if a loop tries to write to loop if extended_loop: - self.writeline("%s = missing" % loop_ref) + self.writeline(f"{loop_ref} = missing") for name in node.find_all(nodes.Name): if name.ctx == "store" and name.name == "loop": @@ -1142,20 +1226,17 @@ def visit_For(self, node, frame): if node.else_: iteration_indicator = self.temporary_identifier() - self.writeline("%s = 1" % iteration_indicator) + self.writeline(f"{iteration_indicator} = 1") - self.writeline(self.environment.is_async and "async for " or "for ", node) + self.writeline(self.choose_async("async for ", "for "), node) self.visit(node.target, loop_frame) if extended_loop: - if self.environment.is_async: - self.write(", %s in AsyncLoopContext(" % loop_ref) - else: - self.write(", %s in LoopContext(" % loop_ref) + self.write(f", {loop_ref} in {self.choose_async('Async')}LoopContext(") else: self.write(" in ") if node.test: - self.write("%s(" % loop_filter_func) + self.write(f"{loop_filter_func}(") if node.recursive: self.write("reciter") else: @@ -1170,21 +1251,22 @@ def visit_For(self, node, frame): if node.recursive: self.write(", undefined, loop_render_func, depth):") else: - self.write(extended_loop and ", undefined):" or ":") + self.write(", undefined):" if extended_loop else ":") self.indent() self.enter_frame(loop_frame) + self.writeline("_loop_vars = {}") self.blockvisit(node.body, loop_frame) if node.else_: - self.writeline("%s = 0" % iteration_indicator) + self.writeline(f"{iteration_indicator} = 0") self.outdent() self.leave_frame( loop_frame, with_python_scope=node.recursive and not node.else_ ) if node.else_: - self.writeline("if %s:" % iteration_indicator) + self.writeline(f"if {iteration_indicator}:") self.indent() self.enter_frame(else_frame) self.blockvisit(node.else_, else_frame) @@ -1197,9 +1279,7 @@ def visit_For(self, node, frame): self.return_buffer_contents(loop_frame) self.outdent() self.start_write(frame, node) - if self.environment.is_async: - self.write("await ") - self.write("loop(") + self.write(f"{self.choose_async('await ')}loop(") if self.environment.is_async: self.write("auto_aiter(") self.visit(node.iter, frame) @@ -1208,7 +1288,12 @@ def visit_For(self, node, frame): self.write(", loop)") self.end_write(frame) - def visit_If(self, node, frame): + # at the end of the iteration, clear any assignments made in the + # loop from the top level + if self._assign_stack: + self._assign_stack[-1].difference_update(loop_frame.symbols.stores) + + def visit_If(self, node: nodes.If, frame: Frame) -> None: if_frame = frame.soft() self.writeline("if ", node) self.visit(node.test, if_frame) @@ -1229,17 +1314,17 @@ def visit_If(self, node, frame): self.blockvisit(node.else_, if_frame) self.outdent() - def visit_Macro(self, node, frame): + def visit_Macro(self, node: nodes.Macro, frame: Frame) -> None: macro_frame, macro_ref = self.macro_body(node, frame) self.newline() if frame.toplevel: if not node.name.startswith("_"): - self.write("context.exported_vars.add(%r)" % node.name) - self.writeline("context.vars[%r] = " % node.name) - self.write("%s = " % frame.symbols.ref(node.name)) + self.write(f"context.exported_vars.add({node.name!r})") + 
self.writeline(f"context.vars[{node.name!r}] = ") + self.write(f"{frame.symbols.ref(node.name)} = ") self.macro_def(macro_ref, macro_frame) - def visit_CallBlock(self, node, frame): + def visit_CallBlock(self, node: nodes.CallBlock, frame: Frame) -> None: call_frame, macro_ref = self.macro_body(node, frame) self.writeline("caller = ") self.macro_def(macro_ref, call_frame) @@ -1247,7 +1332,7 @@ def visit_CallBlock(self, node, frame): self.visit_Call(node.call, frame, forward_caller=True) self.end_write(frame) - def visit_FilterBlock(self, node, frame): + def visit_FilterBlock(self, node: nodes.FilterBlock, frame: Frame) -> None: filter_frame = frame.inner() filter_frame.symbols.analyze_node(node) self.enter_frame(filter_frame) @@ -1258,11 +1343,11 @@ def visit_FilterBlock(self, node, frame): self.end_write(frame) self.leave_frame(filter_frame) - def visit_With(self, node, frame): + def visit_With(self, node: nodes.With, frame: Frame) -> None: with_frame = frame.inner() with_frame.symbols.analyze_node(node) self.enter_frame(with_frame) - for target, expr in izip(node.targets, node.values): + for target, expr in zip(node.targets, node.values): self.newline() self.visit(target, with_frame) self.write(" = ") @@ -1270,18 +1355,25 @@ def visit_With(self, node, frame): self.blockvisit(node.body, with_frame) self.leave_frame(with_frame) - def visit_ExprStmt(self, node, frame): + def visit_ExprStmt(self, node: nodes.ExprStmt, frame: Frame) -> None: self.newline(node) self.visit(node.node, frame) - _FinalizeInfo = namedtuple("_FinalizeInfo", ("const", "src")) - #: The default finalize function if the environment isn't configured - #: with one. Or if the environment has one, this is called on that - #: function's output for constants. - _default_finalize = text_type - _finalize = None + class _FinalizeInfo(t.NamedTuple): + const: t.Optional[t.Callable[..., str]] + src: t.Optional[str] - def _make_finalize(self): + @staticmethod + def _default_finalize(value: t.Any) -> t.Any: + """The default finalize function if the environment isn't + configured with one. Or, if the environment has one, this is + called on that function's output for constants. + """ + return str(value) + + _finalize: t.Optional[_FinalizeInfo] = None + + def _make_finalize(self) -> _FinalizeInfo: """Build the finalize function to be used on constants and at runtime. Cached so it's only created once for all output nodes. 
@@ -1297,39 +1389,48 @@ def _make_finalize(self): if self._finalize is not None: return self._finalize + finalize: t.Optional[t.Callable[..., t.Any]] finalize = default = self._default_finalize src = None if self.environment.finalize: src = "environment.finalize(" env_finalize = self.environment.finalize + pass_arg = { + _PassArg.context: "context", + _PassArg.eval_context: "context.eval_ctx", + _PassArg.environment: "environment", + }.get( + _PassArg.from_obj(env_finalize) # type: ignore + ) + finalize = None - def finalize(value): - return default(env_finalize(value)) + if pass_arg is None: - if getattr(env_finalize, "contextfunction", False) is True: - src += "context, " - finalize = None # noqa: F811 - elif getattr(env_finalize, "evalcontextfunction", False) is True: - src += "context.eval_ctx, " - finalize = None - elif getattr(env_finalize, "environmentfunction", False) is True: - src += "environment, " + def finalize(value: t.Any) -> t.Any: + return default(env_finalize(value)) - def finalize(value): - return default(env_finalize(self.environment, value)) + else: + src = f"{src}{pass_arg}, " + + if pass_arg == "environment": + + def finalize(value: t.Any) -> t.Any: + return default(env_finalize(self.environment, value)) self._finalize = self._FinalizeInfo(finalize, src) return self._finalize - def _output_const_repr(self, group): + def _output_const_repr(self, group: t.Iterable[t.Any]) -> str: """Given a group of constant values converted from ``Output`` child nodes, produce a string to write to the template module source. """ return repr(concat(group)) - def _output_child_to_const(self, node, frame, finalize): + def _output_child_to_const( + self, node: nodes.Expr, frame: Frame, finalize: _FinalizeInfo + ) -> str: """Try to optimize a child of an ``Output`` node by trying to convert it to constant, finalized data at compile time. @@ -1344,25 +1445,29 @@ def _output_child_to_const(self, node, frame, finalize): # Template data doesn't go through finalize. if isinstance(node, nodes.TemplateData): - return text_type(const) + return str(const) - return finalize.const(const) + return finalize.const(const) # type: ignore - def _output_child_pre(self, node, frame, finalize): + def _output_child_pre( + self, node: nodes.Expr, frame: Frame, finalize: _FinalizeInfo + ) -> None: """Output extra source code before visiting a child of an ``Output`` node. """ if frame.eval_ctx.volatile: - self.write("(escape if context.eval_ctx.autoescape else to_string)(") + self.write("(escape if context.eval_ctx.autoescape else str)(") elif frame.eval_ctx.autoescape: self.write("escape(") else: - self.write("to_string(") + self.write("str(") if finalize.src is not None: self.write(finalize.src) - def _output_child_post(self, node, frame, finalize): + def _output_child_post( + self, node: nodes.Expr, frame: Frame, finalize: _FinalizeInfo + ) -> None: """Output extra source code after visiting a child of an ``Output`` node. """ @@ -1371,7 +1476,7 @@ def _output_child_post(self, node, frame, finalize): if finalize.src is not None: self.write(")") - def visit_Output(self, node, frame): + def visit_Output(self, node: nodes.Output, frame: Frame) -> None: # If an extends is active, don't render outside a block. if frame.require_output_check: # A top-level extends is known to exist at compile time. @@ -1382,7 +1487,7 @@ def visit_Output(self, node, frame): self.indent() finalize = self._make_finalize() - body = [] + body: t.List[t.Union[t.List[t.Any], nodes.Expr]] = [] # Evaluate constants at compile time if possible. 
Each item in # body will be either a list of static data or a node to be @@ -1414,9 +1519,9 @@ def visit_Output(self, node, frame): if frame.buffer is not None: if len(body) == 1: - self.writeline("%s.append(" % frame.buffer) + self.writeline(f"{frame.buffer}.append(") else: - self.writeline("%s.extend((" % frame.buffer) + self.writeline(f"{frame.buffer}.extend((") self.indent() @@ -1450,7 +1555,7 @@ def visit_Output(self, node, frame): if frame.require_output_check: self.outdent() - def visit_Assign(self, node, frame): + def visit_Assign(self, node: nodes.Assign, frame: Frame) -> None: self.push_assign_tracking() self.newline(node) self.visit(node.target, frame) @@ -1458,7 +1563,7 @@ def visit_Assign(self, node, frame): self.visit(node.node, frame) self.pop_assign_tracking(frame) - def visit_AssignBlock(self, node, frame): + def visit_AssignBlock(self, node: nodes.AssignBlock, frame: Frame) -> None: self.push_assign_tracking() block_frame = frame.inner() # This is a special case. Since a set block always captures we @@ -1475,15 +1580,17 @@ def visit_AssignBlock(self, node, frame): if node.filter is not None: self.visit_Filter(node.filter, block_frame) else: - self.write("concat(%s)" % block_frame.buffer) + self.write(f"concat({block_frame.buffer})") self.write(")") self.pop_assign_tracking(frame) self.leave_frame(block_frame) # -- Expression Visitors - def visit_Name(self, node, frame): - if node.ctx == "store" and frame.toplevel: + def visit_Name(self, node: nodes.Name, frame: Frame) -> None: + if node.ctx == "store" and ( + frame.toplevel or frame.loop_frame or frame.block_frame + ): if self._assign_stack: self._assign_stack[-1].add(node.name) ref = frame.symbols.ref(node.name) @@ -1499,52 +1606,51 @@ def visit_Name(self, node, frame): and not self.parameter_is_undeclared(ref) ): self.write( - "(undefined(name=%r) if %s is missing else %s)" - % (node.name, ref, ref) + f"(undefined(name={node.name!r}) if {ref} is missing else {ref})" ) return self.write(ref) - def visit_NSRef(self, node, frame): + def visit_NSRef(self, node: nodes.NSRef, frame: Frame) -> None: # NSRefs can only be used to store values; since they use the normal # `foo.bar` notation they will be parsed as a normal attribute access # when used anywhere but in a `set` context ref = frame.symbols.ref(node.name) - self.writeline("if not isinstance(%s, Namespace):" % ref) + self.writeline(f"if not isinstance({ref}, Namespace):") self.indent() self.writeline( - "raise TemplateRuntimeError(%r)" - % "cannot assign attribute on non-namespace object" + "raise TemplateRuntimeError" + '("cannot assign attribute on non-namespace object")' ) self.outdent() - self.writeline("%s[%r]" % (ref, node.attr)) + self.writeline(f"{ref}[{node.attr!r}]") - def visit_Const(self, node, frame): + def visit_Const(self, node: nodes.Const, frame: Frame) -> None: val = node.as_const(frame.eval_ctx) if isinstance(val, float): self.write(str(val)) else: self.write(repr(val)) - def visit_TemplateData(self, node, frame): + def visit_TemplateData(self, node: nodes.TemplateData, frame: Frame) -> None: try: self.write(repr(node.as_const(frame.eval_ctx))) except nodes.Impossible: self.write( - "(Markup if context.eval_ctx.autoescape else identity)(%r)" % node.data + f"(Markup if context.eval_ctx.autoescape else identity)({node.data!r})" ) - def visit_Tuple(self, node, frame): + def visit_Tuple(self, node: nodes.Tuple, frame: Frame) -> None: self.write("(") idx = -1 for idx, item in enumerate(node.items): if idx: self.write(", ") self.visit(item, frame) - 
self.write(idx == 0 and ",)" or ")") + self.write(",)" if idx == 0 else ")") - def visit_List(self, node, frame): + def visit_List(self, node: nodes.List, frame: Frame) -> None: self.write("[") for idx, item in enumerate(node.items): if idx: @@ -1552,7 +1658,7 @@ def visit_List(self, node, frame): self.visit(item, frame) self.write("]") - def visit_Dict(self, node, frame): + def visit_Dict(self, node: nodes.Dict, frame: Frame) -> None: self.write("{") for idx, item in enumerate(node.items): if idx: @@ -1562,96 +1668,59 @@ def visit_Dict(self, node, frame): self.visit(item.value, frame) self.write("}") - def binop(operator, interceptable=True): # noqa: B902 - @optimizeconst - def visitor(self, node, frame): - if ( - self.environment.sandboxed - and operator in self.environment.intercepted_binops - ): - self.write("environment.call_binop(context, %r, " % operator) - self.visit(node.left, frame) - self.write(", ") - self.visit(node.right, frame) - else: - self.write("(") - self.visit(node.left, frame) - self.write(" %s " % operator) - self.visit(node.right, frame) - self.write(")") - - return visitor - - def uaop(operator, interceptable=True): # noqa: B902 - @optimizeconst - def visitor(self, node, frame): - if ( - self.environment.sandboxed - and operator in self.environment.intercepted_unops - ): - self.write("environment.call_unop(context, %r, " % operator) - self.visit(node.node, frame) - else: - self.write("(" + operator) - self.visit(node.node, frame) - self.write(")") - - return visitor - - visit_Add = binop("+") - visit_Sub = binop("-") - visit_Mul = binop("*") - visit_Div = binop("/") - visit_FloorDiv = binop("//") - visit_Pow = binop("**") - visit_Mod = binop("%") - visit_And = binop("and", interceptable=False) - visit_Or = binop("or", interceptable=False) - visit_Pos = uaop("+") - visit_Neg = uaop("-") - visit_Not = uaop("not ", interceptable=False) - del binop, uaop + visit_Add = _make_binop("+") + visit_Sub = _make_binop("-") + visit_Mul = _make_binop("*") + visit_Div = _make_binop("/") + visit_FloorDiv = _make_binop("//") + visit_Pow = _make_binop("**") + visit_Mod = _make_binop("%") + visit_And = _make_binop("and") + visit_Or = _make_binop("or") + visit_Pos = _make_unop("+") + visit_Neg = _make_unop("-") + visit_Not = _make_unop("not ") @optimizeconst - def visit_Concat(self, node, frame): + def visit_Concat(self, node: nodes.Concat, frame: Frame) -> None: if frame.eval_ctx.volatile: - func_name = "(context.eval_ctx.volatile and markup_join or unicode_join)" + func_name = "(markup_join if context.eval_ctx.volatile else str_join)" elif frame.eval_ctx.autoescape: func_name = "markup_join" else: - func_name = "unicode_join" - self.write("%s((" % func_name) + func_name = "str_join" + self.write(f"{func_name}((") for arg in node.nodes: self.visit(arg, frame) self.write(", ") self.write("))") @optimizeconst - def visit_Compare(self, node, frame): + def visit_Compare(self, node: nodes.Compare, frame: Frame) -> None: self.write("(") self.visit(node.expr, frame) for op in node.ops: self.visit(op, frame) self.write(")") - def visit_Operand(self, node, frame): - self.write(" %s " % operators[node.op]) + def visit_Operand(self, node: nodes.Operand, frame: Frame) -> None: + self.write(f" {operators[node.op]} ") self.visit(node.expr, frame) @optimizeconst - def visit_Getattr(self, node, frame): + def visit_Getattr(self, node: nodes.Getattr, frame: Frame) -> None: if self.environment.is_async: self.write("(await auto_await(") self.write("environment.getattr(") self.visit(node.node, frame) - 
self.write(", %r)" % node.attr) + self.write(f", {node.attr!r})") if self.environment.is_async: self.write("))") @optimizeconst - def visit_Getitem(self, node, frame): + def visit_Getitem(self, node: nodes.Getitem, frame: Frame) -> None: # slices bypass the environment getitem method. if isinstance(node.arg, nodes.Slice): self.visit(node.node, frame) @@ -1671,7 +1740,7 @@ def visit_Getitem(self, node, frame): if self.environment.is_async: self.write("))") - def visit_Slice(self, node, frame): + def visit_Slice(self, node: nodes.Slice, frame: Frame) -> None: if node.start is not None: self.visit(node.start, frame) self.write(":") @@ -1681,60 +1750,83 @@ def visit_Slice(self, node, frame): self.write(":") self.visit(node.step, frame) - @optimizeconst - def visit_Filter(self, node, frame): + @contextmanager + def _filter_test_common( + self, node: t.Union[nodes.Filter, nodes.Test], frame: Frame, is_filter: bool + ) -> t.Iterator[None]: if self.environment.is_async: self.write("await auto_await(") - self.write(self.filters[node.name] + "(") - func = self.environment.filters.get(node.name) - if func is None: - self.fail("no filter named %r" % node.name, node.lineno) - if getattr(func, "contextfilter", False) is True: - self.write("context, ") - elif getattr(func, "evalcontextfilter", False) is True: - self.write("context.eval_ctx, ") - elif getattr(func, "environmentfilter", False) is True: - self.write("environment, ") - # if the filter node is None we are inside a filter block - # and want to write to the current buffer - if node.node is not None: - self.visit(node.node, frame) - elif frame.eval_ctx.volatile: - self.write( - "(context.eval_ctx.autoescape and" - " Markup(concat(%s)) or concat(%s))" % (frame.buffer, frame.buffer) - ) - elif frame.eval_ctx.autoescape: - self.write("Markup(concat(%s))" % frame.buffer) + if is_filter: + self.write(f"{self.filters[node.name]}(") + func = self.environment.filters.get(node.name) else: - self.write("concat(%s)" % frame.buffer) + self.write(f"{self.tests[node.name]}(") + func = self.environment.tests.get(node.name) + + # When inside an If or CondExpr frame, allow the filter to be + # undefined at compile time and only raise an error if it's + # actually called at runtime. See pull_dependencies. + if func is None and not frame.soft_frame: + type_name = "filter" if is_filter else "test" + self.fail(f"No {type_name} named {node.name!r}.", node.lineno) + + pass_arg = { + _PassArg.context: "context", + _PassArg.eval_context: "context.eval_ctx", + _PassArg.environment: "environment", + }.get( + _PassArg.from_obj(func) # type: ignore + ) + + if pass_arg is not None: + self.write(f"{pass_arg}, ") + + # Back to the visitor function to handle visiting the target of + # the filter or test. 
+ yield + self.signature(node, frame) self.write(")") + if self.environment.is_async: self.write(")") @optimizeconst - def visit_Test(self, node, frame): - self.write(self.tests[node.name] + "(") - if node.name not in self.environment.tests: - self.fail("no test named %r" % node.name, node.lineno) - self.visit(node.node, frame) - self.signature(node, frame) - self.write(")") + def visit_Filter(self, node: nodes.Filter, frame: Frame) -> None: + with self._filter_test_common(node, frame, True): + # if the filter node is None we are inside a filter block + # and want to write to the current buffer + if node.node is not None: + self.visit(node.node, frame) + elif frame.eval_ctx.volatile: + self.write( + f"(Markup(concat({frame.buffer}))" + f" if context.eval_ctx.autoescape else concat({frame.buffer}))" + ) + elif frame.eval_ctx.autoescape: + self.write(f"Markup(concat({frame.buffer}))") + else: + self.write(f"concat({frame.buffer})") @optimizeconst - def visit_CondExpr(self, node, frame): - def write_expr2(): + def visit_Test(self, node: nodes.Test, frame: Frame) -> None: + with self._filter_test_common(node, frame, False): + self.visit(node.node, frame) + + @optimizeconst + def visit_CondExpr(self, node: nodes.CondExpr, frame: Frame) -> None: + frame = frame.soft() + + def write_expr2() -> None: if node.expr2 is not None: - return self.visit(node.expr2, frame) + self.visit(node.expr2, frame) + return + self.write( - "cond_expr_undefined(%r)" - % ( - "the inline if-" - "expression on %s evaluated to false and " - "no else section was defined." % self.position(node) - ) + f'cond_expr_undefined("the inline if-expression on' + f" {self.position(node)} evaluated to false and no else" + f' section was defined.")' ) self.write("(") @@ -1746,7 +1838,9 @@ def write_expr2(): self.write(")") @optimizeconst - def visit_Call(self, node, frame, forward_caller=False): + def visit_Call( + self, node: nodes.Call, frame: Frame, forward_caller: bool = False + ) -> None: if self.environment.is_async: self.write("await auto_await(") if self.environment.sandboxed: @@ -1754,63 +1848,79 @@ def visit_Call(self, node, frame, forward_caller=False): else: self.write("context.call(") self.visit(node.node, frame) - extra_kwargs = forward_caller and {"caller": "caller"} or None + extra_kwargs = {"caller": "caller"} if forward_caller else None + loop_kwargs = {"_loop_vars": "_loop_vars"} if frame.loop_frame else {} + block_kwargs = {"_block_vars": "_block_vars"} if frame.block_frame else {} + if extra_kwargs: + extra_kwargs.update(loop_kwargs, **block_kwargs) + elif loop_kwargs or block_kwargs: + extra_kwargs = dict(loop_kwargs, **block_kwargs) self.signature(node, frame, extra_kwargs) self.write(")") if self.environment.is_async: self.write(")") - def visit_Keyword(self, node, frame): + def visit_Keyword(self, node: nodes.Keyword, frame: Frame) -> None: self.write(node.key + "=") self.visit(node.value, frame) # -- Unused nodes for extensions - def visit_MarkSafe(self, node, frame): + def visit_MarkSafe(self, node: nodes.MarkSafe, frame: Frame) -> None: self.write("Markup(") self.visit(node.expr, frame) self.write(")") - def visit_MarkSafeIfAutoescape(self, node, frame): - self.write("(context.eval_ctx.autoescape and Markup or identity)(") + def visit_MarkSafeIfAutoescape( + self, node: nodes.MarkSafeIfAutoescape, frame: Frame + ) -> None: + self.write("(Markup if context.eval_ctx.autoescape else identity)(") self.visit(node.expr, frame) self.write(")") - def visit_EnvironmentAttribute(self, node, frame): + def 
visit_EnvironmentAttribute( + self, node: nodes.EnvironmentAttribute, frame: Frame + ) -> None: self.write("environment." + node.name) - def visit_ExtensionAttribute(self, node, frame): - self.write("environment.extensions[%r].%s" % (node.identifier, node.name)) + def visit_ExtensionAttribute( + self, node: nodes.ExtensionAttribute, frame: Frame + ) -> None: + self.write(f"environment.extensions[{node.identifier!r}].{node.name}") - def visit_ImportedName(self, node, frame): + def visit_ImportedName(self, node: nodes.ImportedName, frame: Frame) -> None: self.write(self.import_aliases[node.importname]) - def visit_InternalName(self, node, frame): + def visit_InternalName(self, node: nodes.InternalName, frame: Frame) -> None: self.write(node.name) - def visit_ContextReference(self, node, frame): + def visit_ContextReference( + self, node: nodes.ContextReference, frame: Frame + ) -> None: self.write("context") - def visit_DerivedContextReference(self, node, frame): + def visit_DerivedContextReference( + self, node: nodes.DerivedContextReference, frame: Frame + ) -> None: self.write(self.derive_context(frame)) - def visit_Continue(self, node, frame): + def visit_Continue(self, node: nodes.Continue, frame: Frame) -> None: self.writeline("continue", node) - def visit_Break(self, node, frame): + def visit_Break(self, node: nodes.Break, frame: Frame) -> None: self.writeline("break", node) - def visit_Scope(self, node, frame): + def visit_Scope(self, node: nodes.Scope, frame: Frame) -> None: scope_frame = frame.inner() scope_frame.symbols.analyze_node(node) self.enter_frame(scope_frame) self.blockvisit(node.body, scope_frame) self.leave_frame(scope_frame) - def visit_OverlayScope(self, node, frame): + def visit_OverlayScope(self, node: nodes.OverlayScope, frame: Frame) -> None: ctx = self.temporary_identifier() - self.writeline("%s = %s" % (ctx, self.derive_context(frame))) - self.writeline("%s.vars = " % ctx) + self.writeline(f"{ctx} = {self.derive_context(frame)}") + self.writeline(f"{ctx}.vars = ") self.visit(node.context, frame) self.push_context_reference(ctx) @@ -1821,9 +1931,11 @@ def visit_OverlayScope(self, node, frame): self.leave_frame(scope_frame) self.pop_context_reference() - def visit_EvalContextModifier(self, node, frame): + def visit_EvalContextModifier( + self, node: nodes.EvalContextModifier, frame: Frame + ) -> None: for keyword in node.options: - self.writeline("context.eval_ctx.%s = " % keyword.key) + self.writeline(f"context.eval_ctx.{keyword.key} = ") self.visit(keyword.value, frame) try: val = keyword.value.as_const(frame.eval_ctx) @@ -1832,12 +1944,14 @@ def visit_EvalContextModifier(self, node, frame): else: setattr(frame.eval_ctx, keyword.key, val) - def visit_ScopedEvalContextModifier(self, node, frame): + def visit_ScopedEvalContextModifier( + self, node: nodes.ScopedEvalContextModifier, frame: Frame + ) -> None: old_ctx_name = self.temporary_identifier() saved_ctx = frame.eval_ctx.save() - self.writeline("%s = context.eval_ctx.save()" % old_ctx_name) + self.writeline(f"{old_ctx_name} = context.eval_ctx.save()") self.visit_EvalContextModifier(node, frame) for child in node.body: self.visit(child, frame) frame.eval_ctx.revert(saved_ctx) - self.writeline("context.eval_ctx.revert(%s)" % old_ctx_name) + self.writeline(f"context.eval_ctx.revert({old_ctx_name})") diff --git a/lib/spack/external/jinja2/constants.py b/lib/spack/external/_vendoring/jinja2/constants.py similarity index 96% rename from lib/spack/external/jinja2/constants.py rename to 
lib/spack/external/_vendoring/jinja2/constants.py index bf7f2ca7217..41a1c23b0a7 100644 --- a/lib/spack/external/jinja2/constants.py +++ b/lib/spack/external/_vendoring/jinja2/constants.py @@ -1,6 +1,5 @@ -# -*- coding: utf-8 -*- #: list of lorem ipsum words used by the lipsum() helper function -LOREM_IPSUM_WORDS = u"""\ +LOREM_IPSUM_WORDS = """\ a ac accumsan ad adipiscing aenean aliquam aliquet amet ante aptent arcu at auctor augue bibendum blandit class commodo condimentum congue consectetuer consequat conubia convallis cras cubilia cum curabitur curae cursus dapibus diff --git a/lib/spack/external/jinja2/debug.py b/lib/spack/external/_vendoring/jinja2/debug.py similarity index 66% rename from lib/spack/external/jinja2/debug.py rename to lib/spack/external/_vendoring/jinja2/debug.py index 5d8aec31d05..805866bd6f9 100644 --- a/lib/spack/external/jinja2/debug.py +++ b/lib/spack/external/_vendoring/jinja2/debug.py @@ -1,38 +1,38 @@ +import platform import sys +import typing as t from types import CodeType +from types import TracebackType -from . import TemplateSyntaxError -from ._compat import PYPY +from .exceptions import TemplateSyntaxError from .utils import internal_code from .utils import missing +if t.TYPE_CHECKING: + from .runtime import Context -def rewrite_traceback_stack(source=None): + +def rewrite_traceback_stack(source: t.Optional[str] = None) -> BaseException: """Rewrite the current exception to replace any tracebacks from within compiled template code with tracebacks that look like they came from the template source. This must be called within an ``except`` block. - :param exc_info: A :meth:`sys.exc_info` tuple. If not provided, - the current ``exc_info`` is used. :param source: For ``TemplateSyntaxError``, the original source if known. - :return: A :meth:`sys.exc_info` tuple that can be re-raised. + :return: The original exception with the rewritten traceback. """ - exc_type, exc_value, tb = sys.exc_info() + _, exc_value, tb = sys.exc_info() + exc_value = t.cast(BaseException, exc_value) + tb = t.cast(TracebackType, tb) if isinstance(exc_value, TemplateSyntaxError) and not exc_value.translated: exc_value.translated = True exc_value.source = source - - try: - # Remove the old traceback on Python 3, otherwise the frames - # from the compiler still show up. - exc_value.with_traceback(None) - except AttributeError: - pass - + # Remove the old traceback, otherwise the frames from the + # compiler still show up. + exc_value.with_traceback(None) # Outside of runtime, so the frame isn't executing template # code, but it still needs to point at the template. tb = fake_traceback( @@ -70,10 +70,12 @@ def rewrite_traceback_stack(source=None): for tb in reversed(stack): tb_next = tb_set_next(tb, tb_next) - return exc_type, exc_value, tb_next + return exc_value.with_traceback(tb_next) -def fake_traceback(exc_value, tb, filename, lineno): +def fake_traceback( # type: ignore + exc_value: BaseException, tb: t.Optional[TracebackType], filename: str, lineno: int +) -> TracebackType: """Produce a new traceback object that looks like it came from the template source instead of the compiled code. The filename, line number, and location name will point to the template, and the local @@ -100,79 +102,60 @@ def fake_traceback(exc_value, tb, filename, lineno): "__jinja_exception__": exc_value, } # Raise an exception at the correct line number. 
- code = compile("\n" * (lineno - 1) + "raise __jinja_exception__", filename, "exec") + code: CodeType = compile( + "\n" * (lineno - 1) + "raise __jinja_exception__", filename, "exec" + ) # Build a new code object that points to the template file and # replaces the location with a block name. - try: - location = "template" + location = "template" - if tb is not None: - function = tb.tb_frame.f_code.co_name + if tb is not None: + function = tb.tb_frame.f_code.co_name - if function == "root": - location = "top-level template code" - elif function.startswith("block_"): - location = 'block "%s"' % function[6:] + if function == "root": + location = "top-level template code" + elif function.startswith("block_"): + location = f"block {function[6:]!r}" - # Collect arguments for the new code object. CodeType only - # accepts positional arguments, and arguments were inserted in - # new Python versions. - code_args = [] - - for attr in ( - "argcount", - "posonlyargcount", # Python 3.8 - "kwonlyargcount", # Python 3 - "nlocals", - "stacksize", - "flags", - "code", # codestring - "consts", # constants - "names", - "varnames", - ("filename", filename), - ("name", location), - "firstlineno", - "lnotab", - "freevars", - "cellvars", - ): - if isinstance(attr, tuple): - # Replace with given value. - code_args.append(attr[1]) - continue - - try: - # Copy original value if it exists. - code_args.append(getattr(code, "co_" + attr)) - except AttributeError: - # Some arguments were added later. - continue - - code = CodeType(*code_args) - except Exception: - # Some environments such as Google App Engine don't support - # modifying code objects. - pass + if sys.version_info >= (3, 8): + code = code.replace(co_name=location) + else: + code = CodeType( + code.co_argcount, + code.co_kwonlyargcount, + code.co_nlocals, + code.co_stacksize, + code.co_flags, + code.co_code, + code.co_consts, + code.co_names, + code.co_varnames, + code.co_filename, + location, + code.co_firstlineno, + code.co_lnotab, + code.co_freevars, + code.co_cellvars, + ) # Execute the new code, which is guaranteed to raise, and return # the new traceback without this frame. try: exec(code, globals, locals) except BaseException: - return sys.exc_info()[2].tb_next + return sys.exc_info()[2].tb_next # type: ignore -def get_template_locals(real_locals): +def get_template_locals(real_locals: t.Mapping[str, t.Any]) -> t.Dict[str, t.Any]: """Based on the runtime locals, get the context that would be available at that point in the template. """ # Start with the current template context. - ctx = real_locals.get("context") + ctx: "t.Optional[Context]" = real_locals.get("context") - if ctx: - data = ctx.get_all().copy() + if ctx is not None: + data: t.Dict[str, t.Any] = ctx.get_all().copy() else: data = {} @@ -180,7 +163,7 @@ def get_template_locals(real_locals): # rather than pushing a context. Local variables follow the scheme # l_depth_name. Find the highest-depth local that has a value for # each name. 
- local_overrides = {} + local_overrides: t.Dict[str, t.Tuple[int, t.Any]] = {} for name, value in real_locals.items(): if not name.startswith("l_") or value is missing: @@ -188,8 +171,8 @@ def get_template_locals(real_locals): continue try: - _, depth, name = name.split("_", 2) - depth = int(depth) + _, depth_str, name = name.split("_", 2) + depth = int(depth_str) except ValueError: continue @@ -210,31 +193,37 @@ def get_template_locals(real_locals): if sys.version_info >= (3, 7): # tb_next is directly assignable as of Python 3.7 - def tb_set_next(tb, tb_next): + def tb_set_next( + tb: TracebackType, tb_next: t.Optional[TracebackType] + ) -> TracebackType: tb.tb_next = tb_next return tb -elif PYPY: +elif platform.python_implementation() == "PyPy": # PyPy might have special support, and won't work with ctypes. try: - import tputil + import tputil # type: ignore except ImportError: # Without tproxy support, use the original traceback. - def tb_set_next(tb, tb_next): + def tb_set_next( + tb: TracebackType, tb_next: t.Optional[TracebackType] + ) -> TracebackType: return tb else: # With tproxy support, create a proxy around the traceback that # returns the new tb_next. - def tb_set_next(tb, tb_next): - def controller(op): + def tb_set_next( + tb: TracebackType, tb_next: t.Optional[TracebackType] + ) -> TracebackType: + def controller(op): # type: ignore if op.opname == "__getattribute__" and op.args[0] == "tb_next": return tb_next return op.delegate() - return tputil.make_proxy(controller, obj=tb) + return tputil.make_proxy(controller, obj=tb) # type: ignore else: @@ -250,7 +239,9 @@ class _CTraceback(ctypes.Structure): ("tb_next", ctypes.py_object), ] - def tb_set_next(tb, tb_next): + def tb_set_next( + tb: TracebackType, tb_next: t.Optional[TracebackType] + ) -> TracebackType: c_tb = _CTraceback.from_address(id(tb)) # Clear out the old tb_next. 
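[note] The debug.py changes above drop the defensive positional-`CodeType` plumbing in favor of `CodeType.replace` on Python 3.8+, which is how `fake_traceback` points a synthetic frame at the template file and block name. A condensed sketch of that pattern (the function name is illustrative):

    import sys
    from types import CodeType

    def fake_raise_code(filename: str, lineno: int, location: str) -> CodeType:
        # Pad with newlines so the synthetic raise lands on the template's line.
        code = compile(
            "\n" * (lineno - 1) + "raise __jinja_exception__", filename, "exec"
        )
        if sys.version_info >= (3, 8):
            # CodeType.replace (3.8+) swaps individual fields; before 3.8 the
            # hunk above spells out every positional CodeType argument instead.
            code = code.replace(co_name=location)
        return code

    print(fake_raise_code("demo.html", 3, "top-level template code").co_name)
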
diff --git a/lib/spack/external/jinja2/defaults.py b/lib/spack/external/_vendoring/jinja2/defaults.py similarity index 74% rename from lib/spack/external/jinja2/defaults.py rename to lib/spack/external/_vendoring/jinja2/defaults.py index 8e0e7d77107..638cad3d2d8 100644 --- a/lib/spack/external/jinja2/defaults.py +++ b/lib/spack/external/_vendoring/jinja2/defaults.py @@ -1,5 +1,5 @@ -# -*- coding: utf-8 -*- -from ._compat import range_type +import typing as t + from .filters import FILTERS as DEFAULT_FILTERS # noqa: F401 from .tests import TESTS as DEFAULT_TESTS # noqa: F401 from .utils import Cycler @@ -7,6 +7,9 @@ from .utils import Joiner from .utils import Namespace +if t.TYPE_CHECKING: + import typing_extensions as te + # defaults for the parser / lexer BLOCK_START_STRING = "{%" BLOCK_END_STRING = "%}" @@ -14,17 +17,17 @@ VARIABLE_END_STRING = "}}" COMMENT_START_STRING = "{#" COMMENT_END_STRING = "#}" -LINE_STATEMENT_PREFIX = None -LINE_COMMENT_PREFIX = None +LINE_STATEMENT_PREFIX: t.Optional[str] = None +LINE_COMMENT_PREFIX: t.Optional[str] = None TRIM_BLOCKS = False LSTRIP_BLOCKS = False -NEWLINE_SEQUENCE = "\n" +NEWLINE_SEQUENCE: "te.Literal['\\n', '\\r\\n', '\\r']" = "\n" KEEP_TRAILING_NEWLINE = False # default filters, tests and namespace DEFAULT_NAMESPACE = { - "range": range_type, + "range": range, "dict": dict, "lipsum": generate_lorem_ipsum, "cycler": Cycler, @@ -33,10 +36,11 @@ } # default policies -DEFAULT_POLICIES = { +DEFAULT_POLICIES: t.Dict[str, t.Any] = { "compiler.ascii_str": True, "urlize.rel": "noopener", "urlize.target": None, + "urlize.extra_schemes": None, "truncate.leeway": 5, "json.dumps_function": None, "json.dumps_kwargs": {"sort_keys": True}, diff --git a/lib/spack/external/jinja2/environment.py b/lib/spack/external/_vendoring/jinja2/environment.py similarity index 56% rename from lib/spack/external/jinja2/environment.py rename to lib/spack/external/_vendoring/jinja2/environment.py index 8430390eeab..a231d9cd576 100644 --- a/lib/spack/external/jinja2/environment.py +++ b/lib/spack/external/_vendoring/jinja2/environment.py @@ -1,25 +1,20 @@ -# -*- coding: utf-8 -*- """Classes for managing templates and their runtime and compile time options. """ import os import sys +import typing +import typing as t import weakref +from collections import ChainMap +from functools import lru_cache from functools import partial from functools import reduce +from types import CodeType from markupsafe import Markup from . 
import nodes -from ._compat import encode_filename -from ._compat import implements_iterator -from ._compat import implements_to_string -from ._compat import iteritems -from ._compat import PY2 -from ._compat import PYPY -from ._compat import reraise -from ._compat import string_types -from ._compat import text_type from .compiler import CodeGenerator from .compiler import generate from .defaults import BLOCK_END_STRING @@ -44,25 +39,33 @@ from .exceptions import TemplateSyntaxError from .exceptions import UndefinedError from .lexer import get_lexer +from .lexer import Lexer from .lexer import TokenStream from .nodes import EvalContext from .parser import Parser from .runtime import Context from .runtime import new_context from .runtime import Undefined +from .utils import _PassArg from .utils import concat from .utils import consume -from .utils import have_async_gen from .utils import import_string from .utils import internalcode from .utils import LRUCache from .utils import missing +if t.TYPE_CHECKING: + import typing_extensions as te + from .bccache import BytecodeCache + from .ext import Extension + from .loaders import BaseLoader + +_env_bound = t.TypeVar("_env_bound", bound="Environment") + + # for direct template usage we have up to ten living environments -_spontaneous_environments = LRUCache(10) - - -def get_spontaneous_environment(cls, *args): +@lru_cache(maxsize=10) +def get_spontaneous_environment(cls: t.Type[_env_bound], *args: t.Any) -> _env_bound: """Return a new spontaneous environment. A spontaneous environment is used for templates created directly rather than through an existing environment. @@ -70,75 +73,74 @@ def get_spontaneous_environment(cls, *args): :param cls: Environment class to create. :param args: Positional arguments passed to environment. """ - key = (cls, args) - - try: - return _spontaneous_environments[key] - except KeyError: - _spontaneous_environments[key] = env = cls(*args) - env.shared = True - return env + env = cls(*args) + env.shared = True + return env -def create_cache(size): +def create_cache( + size: int, +) -> t.Optional[t.MutableMapping[t.Tuple[weakref.ref, str], "Template"]]: """Return the cache class for the given size.""" if size == 0: return None + if size < 0: return {} - return LRUCache(size) + + return LRUCache(size) # type: ignore -def copy_cache(cache): +def copy_cache( + cache: t.Optional[t.MutableMapping], +) -> t.Optional[t.MutableMapping[t.Tuple[weakref.ref, str], "Template"]]: """Create an empty copy of the given cache.""" if cache is None: return None - elif type(cache) is dict: + + if type(cache) is dict: return {} - return LRUCache(cache.capacity) + + return LRUCache(cache.capacity) # type: ignore -def load_extensions(environment, extensions): +def load_extensions( + environment: "Environment", + extensions: t.Sequence[t.Union[str, t.Type["Extension"]]], +) -> t.Dict[str, "Extension"]: """Load the extensions from the list and bind it to the environment. - Returns a dict of instantiated environments. + Returns a dict of instantiated extensions. 
""" result = {} + for extension in extensions: - if isinstance(extension, string_types): - extension = import_string(extension) + if isinstance(extension, str): + extension = t.cast(t.Type["Extension"], import_string(extension)) + result[extension.identifier] = extension(environment) + return result -def fail_for_missing_callable(string, name): - msg = string % name - if isinstance(name, Undefined): - try: - name._fail_with_undefined_error() - except Exception as e: - msg = "%s (%s; did you forget to quote the callable name?)" % (msg, e) - raise TemplateRuntimeError(msg) - - -def _environment_sanity_check(environment): +def _environment_config_check(environment: "Environment") -> "Environment": """Perform a sanity check on the environment.""" assert issubclass( environment.undefined, Undefined - ), "undefined must be a subclass of undefined because filters depend on it." + ), "'undefined' must be a subclass of 'jinja2.Undefined'." assert ( environment.block_start_string != environment.variable_start_string != environment.comment_start_string - ), "block, variable and comment start strings must be different" - assert environment.newline_sequence in ( + ), "block, variable and comment start strings must be different." + assert environment.newline_sequence in { "\r", "\r\n", "\n", - ), "newline_sequence set to unknown line ending string." + }, "'newline_sequence' must be one of '\\n', '\\r\\n', or '\\r'." return environment -class Environment(object): +class Environment: r"""The core component of Jinja is the `Environment`. It contains important shared variables like configuration, filters, tests, globals and others. Instances of this class may be modified if @@ -256,9 +258,8 @@ class Environment(object): See :ref:`bytecode-cache` for more information. `enable_async` - If set to true this enables async template execution which allows - you to take advantage of newer Python features. This requires - Python 3.6 or later. + If set to true this enables async template execution which + allows using async functions and generators. """ #: if this environment is sandboxed. Modifying this variable won't make @@ -271,7 +272,7 @@ class Environment(object): overlayed = False #: the environment this environment is linked to if it is an overlay - linked_to = None + linked_to: t.Optional["Environment"] = None #: shared environments have this set to `True`. A shared environment #: must not be modified @@ -279,36 +280,38 @@ class Environment(object): #: the class that is used for code generation. See #: :class:`~jinja2.compiler.CodeGenerator` for more information. - code_generator_class = CodeGenerator + code_generator_class: t.Type["CodeGenerator"] = CodeGenerator - #: the context class thatis used for templates. See + #: the context class that is used for templates. See #: :class:`~jinja2.runtime.Context` for more information. 
- context_class = Context + context_class: t.Type[Context] = Context + + template_class: t.Type["Template"] def __init__( self, - block_start_string=BLOCK_START_STRING, - block_end_string=BLOCK_END_STRING, - variable_start_string=VARIABLE_START_STRING, - variable_end_string=VARIABLE_END_STRING, - comment_start_string=COMMENT_START_STRING, - comment_end_string=COMMENT_END_STRING, - line_statement_prefix=LINE_STATEMENT_PREFIX, - line_comment_prefix=LINE_COMMENT_PREFIX, - trim_blocks=TRIM_BLOCKS, - lstrip_blocks=LSTRIP_BLOCKS, - newline_sequence=NEWLINE_SEQUENCE, - keep_trailing_newline=KEEP_TRAILING_NEWLINE, - extensions=(), - optimized=True, - undefined=Undefined, - finalize=None, - autoescape=False, - loader=None, - cache_size=400, - auto_reload=True, - bytecode_cache=None, - enable_async=False, + block_start_string: str = BLOCK_START_STRING, + block_end_string: str = BLOCK_END_STRING, + variable_start_string: str = VARIABLE_START_STRING, + variable_end_string: str = VARIABLE_END_STRING, + comment_start_string: str = COMMENT_START_STRING, + comment_end_string: str = COMMENT_END_STRING, + line_statement_prefix: t.Optional[str] = LINE_STATEMENT_PREFIX, + line_comment_prefix: t.Optional[str] = LINE_COMMENT_PREFIX, + trim_blocks: bool = TRIM_BLOCKS, + lstrip_blocks: bool = LSTRIP_BLOCKS, + newline_sequence: "te.Literal['\\n', '\\r\\n', '\\r']" = NEWLINE_SEQUENCE, + keep_trailing_newline: bool = KEEP_TRAILING_NEWLINE, + extensions: t.Sequence[t.Union[str, t.Type["Extension"]]] = (), + optimized: bool = True, + undefined: t.Type[Undefined] = Undefined, + finalize: t.Optional[t.Callable[..., t.Any]] = None, + autoescape: t.Union[bool, t.Callable[[t.Optional[str]], bool]] = False, + loader: t.Optional["BaseLoader"] = None, + cache_size: int = 400, + auto_reload: bool = True, + bytecode_cache: t.Optional["BytecodeCache"] = None, + enable_async: bool = False, ): # !!Important notice!! # The constructor accepts quite a few arguments that should be @@ -336,7 +339,7 @@ def __init__( self.keep_trailing_newline = keep_trailing_newline # runtime information - self.undefined = undefined + self.undefined: t.Type[Undefined] = undefined self.optimized = optimized self.finalize = finalize self.autoescape = autoescape @@ -358,52 +361,47 @@ def __init__( # load extensions self.extensions = load_extensions(self, extensions) - self.enable_async = enable_async - self.is_async = self.enable_async and have_async_gen - if self.is_async: - # runs patch_all() to enable async support - from . import asyncsupport # noqa: F401 + self.is_async = enable_async + _environment_config_check(self) - _environment_sanity_check(self) - - def add_extension(self, extension): + def add_extension(self, extension: t.Union[str, t.Type["Extension"]]) -> None: """Adds an extension after the environment was created. .. versionadded:: 2.5 """ self.extensions.update(load_extensions(self, [extension])) - def extend(self, **attributes): + def extend(self, **attributes: t.Any) -> None: """Add the items to the instance of the environment if they do not exist yet. This is used by :ref:`extensions ` to register callbacks and configuration values without breaking inheritance. 
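
Also worth noting in this hunk: with Python 2 support gone, `enable_async` no longer needs the `have_async_gen` probe or the `asyncsupport` monkey-patch import, so `self.is_async` is simply the flag. On the calling side nothing changes; async rendering still looks roughly like this (a sketch against the public API):

    import asyncio
    from jinja2 import Environment

    env = Environment(enable_async=True)
    template = env.from_string("Hello {{ name }}!")

    async def main() -> None:
        # render_async is only available when enable_async=True.
        print(await template.render_async(name="world"))

    asyncio.run(main())
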
""" - for key, value in iteritems(attributes): + for key, value in attributes.items(): if not hasattr(self, key): setattr(self, key, value) def overlay( self, - block_start_string=missing, - block_end_string=missing, - variable_start_string=missing, - variable_end_string=missing, - comment_start_string=missing, - comment_end_string=missing, - line_statement_prefix=missing, - line_comment_prefix=missing, - trim_blocks=missing, - lstrip_blocks=missing, - extensions=missing, - optimized=missing, - undefined=missing, - finalize=missing, - autoescape=missing, - loader=missing, - cache_size=missing, - auto_reload=missing, - bytecode_cache=missing, - ): + block_start_string: str = missing, + block_end_string: str = missing, + variable_start_string: str = missing, + variable_end_string: str = missing, + comment_start_string: str = missing, + comment_end_string: str = missing, + line_statement_prefix: t.Optional[str] = missing, + line_comment_prefix: t.Optional[str] = missing, + trim_blocks: bool = missing, + lstrip_blocks: bool = missing, + extensions: t.Sequence[t.Union[str, t.Type["Extension"]]] = missing, + optimized: bool = missing, + undefined: t.Type[Undefined] = missing, + finalize: t.Optional[t.Callable[..., t.Any]] = missing, + autoescape: t.Union[bool, t.Callable[[t.Optional[str]], bool]] = missing, + loader: t.Optional["BaseLoader"] = missing, + cache_size: int = missing, + auto_reload: bool = missing, + bytecode_cache: t.Optional["BytecodeCache"] = missing, + ) -> "Environment": """Create a new overlay environment that shares all the data with the current environment except for cache and the overridden attributes. Extensions cannot be removed for an overlayed environment. An overlayed @@ -423,7 +421,7 @@ def overlay( rv.overlayed = True rv.linked_to = self - for key, value in iteritems(args): + for key, value in args.items(): if value is not missing: setattr(rv, key, value) @@ -433,25 +431,30 @@ def overlay( rv.cache = copy_cache(self.cache) rv.extensions = {} - for key, value in iteritems(self.extensions): + for key, value in self.extensions.items(): rv.extensions[key] = value.bind(rv) if extensions is not missing: rv.extensions.update(load_extensions(rv, extensions)) - return _environment_sanity_check(rv) + return _environment_config_check(rv) - lexer = property(get_lexer, doc="The lexer for this environment.") + @property + def lexer(self) -> Lexer: + """The lexer for this environment.""" + return get_lexer(self) - def iter_extensions(self): + def iter_extensions(self) -> t.Iterator["Extension"]: """Iterates over the extensions by priority.""" return iter(sorted(self.extensions.values(), key=lambda x: x.priority)) - def getitem(self, obj, argument): + def getitem( + self, obj: t.Any, argument: t.Union[str, t.Any] + ) -> t.Union[t.Any, Undefined]: """Get an item or attribute of an object but prefer the item.""" try: return obj[argument] except (AttributeError, TypeError, LookupError): - if isinstance(argument, string_types): + if isinstance(argument, str): try: attr = str(argument) except Exception: @@ -463,9 +466,9 @@ def getitem(self, obj, argument): pass return self.undefined(obj=obj, name=argument) - def getattr(self, obj, attribute): + def getattr(self, obj: t.Any, attribute: str) -> t.Any: """Get an item or attribute of an object but prefer the attribute. - Unlike :meth:`getitem` the attribute *must* be a bytestring. + Unlike :meth:`getitem` the attribute *must* be a string. 
""" try: return getattr(obj, attribute) @@ -476,51 +479,113 @@ def getattr(self, obj, attribute): except (TypeError, LookupError, AttributeError): return self.undefined(obj=obj, name=attribute) - def call_filter( - self, name, value, args=None, kwargs=None, context=None, eval_ctx=None - ): - """Invokes a filter on a value the same way the compiler does it. + def _filter_test_common( + self, + name: t.Union[str, Undefined], + value: t.Any, + args: t.Optional[t.Sequence[t.Any]], + kwargs: t.Optional[t.Mapping[str, t.Any]], + context: t.Optional[Context], + eval_ctx: t.Optional[EvalContext], + is_filter: bool, + ) -> t.Any: + if is_filter: + env_map = self.filters + type_name = "filter" + else: + env_map = self.tests + type_name = "test" - Note that on Python 3 this might return a coroutine in case the - filter is running from an environment in async mode and the filter - supports async execution. It's your responsibility to await this - if needed. + func = env_map.get(name) # type: ignore - .. versionadded:: 2.7 - """ - func = self.filters.get(name) if func is None: - fail_for_missing_callable("no filter named %r", name) - args = [value] + list(args or ()) - if getattr(func, "contextfilter", False) is True: + msg = f"No {type_name} named {name!r}." + + if isinstance(name, Undefined): + try: + name._fail_with_undefined_error() + except Exception as e: + msg = f"{msg} ({e}; did you forget to quote the callable name?)" + + raise TemplateRuntimeError(msg) + + args = [value, *(args if args is not None else ())] + kwargs = kwargs if kwargs is not None else {} + pass_arg = _PassArg.from_obj(func) + + if pass_arg is _PassArg.context: if context is None: raise TemplateRuntimeError( - "Attempted to invoke context filter without context" + f"Attempted to invoke a context {type_name} without context." ) + args.insert(0, context) - elif getattr(func, "evalcontextfilter", False) is True: + elif pass_arg is _PassArg.eval_context: if eval_ctx is None: if context is not None: eval_ctx = context.eval_ctx else: eval_ctx = EvalContext(self) - args.insert(0, eval_ctx) - elif getattr(func, "environmentfilter", False) is True: - args.insert(0, self) - return func(*args, **(kwargs or {})) - def call_test(self, name, value, args=None, kwargs=None): - """Invokes a test on a value the same way the compiler does it. + args.insert(0, eval_ctx) + elif pass_arg is _PassArg.environment: + args.insert(0, self) + + return func(*args, **kwargs) + + def call_filter( + self, + name: str, + value: t.Any, + args: t.Optional[t.Sequence[t.Any]] = None, + kwargs: t.Optional[t.Mapping[str, t.Any]] = None, + context: t.Optional[Context] = None, + eval_ctx: t.Optional[EvalContext] = None, + ) -> t.Any: + """Invoke a filter on a value the same way the compiler does. + + This might return a coroutine if the filter is running from an + environment in async mode and the filter supports async + execution. It's your responsibility to await this if needed. .. versionadded:: 2.7 """ - func = self.tests.get(name) - if func is None: - fail_for_missing_callable("no test named %r", name) - return func(value, *(args or ()), **(kwargs or {})) + return self._filter_test_common( + name, value, args, kwargs, context, eval_ctx, True + ) + + def call_test( + self, + name: str, + value: t.Any, + args: t.Optional[t.Sequence[t.Any]] = None, + kwargs: t.Optional[t.Mapping[str, t.Any]] = None, + context: t.Optional[Context] = None, + eval_ctx: t.Optional[EvalContext] = None, + ) -> t.Any: + """Invoke a test on a value the same way the compiler does. 
+ + This might return a coroutine if the test is running from an + environment in async mode and the test supports async execution. + It's your responsibility to await this if needed. + + .. versionchanged:: 3.0 + Tests support ``@pass_context``, etc. decorators. Added + the ``context`` and ``eval_ctx`` parameters. + + .. versionadded:: 2.7 + """ + return self._filter_test_common( + name, value, args, kwargs, context, eval_ctx, False + ) @internalcode - def parse(self, source, name=None, filename=None): + def parse( + self, + source: str, + name: t.Optional[str] = None, + filename: t.Optional[str] = None, + ) -> nodes.Template: """Parse the sourcecode and return the abstract syntax tree. This tree of nodes is used by the compiler to convert the template into executable source- or bytecode. This is useful for debugging or to @@ -534,11 +599,18 @@ def parse(self, source, name=None, filename=None): except TemplateSyntaxError: self.handle_exception(source=source) - def _parse(self, source, name, filename): + def _parse( + self, source: str, name: t.Optional[str], filename: t.Optional[str] + ) -> nodes.Template: """Internal parsing function used by `parse` and `compile`.""" - return Parser(self, source, name, encode_filename(filename)).parse() + return Parser(self, source, name, filename).parse() - def lex(self, source, name=None, filename=None): + def lex( + self, + source: str, + name: t.Optional[str] = None, + filename: t.Optional[str] = None, + ) -> t.Iterator[t.Tuple[int, str, str]]: """Lex the given sourcecode and return a generator that yields tokens as tuples in the form ``(lineno, token_type, value)``. This can be useful for :ref:`extension development ` @@ -548,13 +620,18 @@ def lex(self, source, name=None, filename=None): of the extensions to be applied you have to filter source through the :meth:`preprocess` method. """ - source = text_type(source) + source = str(source) try: return self.lexer.tokeniter(source, name, filename) except TemplateSyntaxError: self.handle_exception(source=source) - def preprocess(self, source, name=None, filename=None): + def preprocess( + self, + source: str, + name: t.Optional[str] = None, + filename: t.Optional[str] = None, + ) -> str: """Preprocesses the source with all extensions. This is automatically called for all parsing and compiling methods but *not* for :meth:`lex` because there you usually only want the actual source tokenized. @@ -562,28 +639,43 @@ def preprocess(self, source, name=None, filename=None): return reduce( lambda s, e: e.preprocess(s, name, filename), self.iter_extensions(), - text_type(source), + str(source), ) - def _tokenize(self, source, name, filename=None, state=None): + def _tokenize( + self, + source: str, + name: t.Optional[str], + filename: t.Optional[str] = None, + state: t.Optional[str] = None, + ) -> TokenStream: """Called by the parser to do the preprocessing and filtering for all the extensions. Returns a :class:`~jinja2.lexer.TokenStream`. 
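
`parse`, `lex`, and `preprocess` gain annotations but keep their behavior; in particular `lex` still yields `(lineno, token_type, value)` tuples, which is exactly what the new `t.Iterator[t.Tuple[int, str, str]]` return type spells out. For example (token names may vary slightly between versions):

    from jinja2 import Environment

    env = Environment()
    for lineno, token_type, value in env.lex("Hello {{ name }}!"):
        print(lineno, token_type, repr(value))
    # Roughly: 1 data 'Hello ', 1 variable_begin '{{',
    #          1 name 'name', 1 variable_end '}}', 1 data '!'
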
""" source = self.preprocess(source, name, filename) stream = self.lexer.tokenize(source, name, filename, state) + for ext in self.iter_extensions(): - stream = ext.filter_stream(stream) + stream = ext.filter_stream(stream) # type: ignore + if not isinstance(stream, TokenStream): - stream = TokenStream(stream, name, filename) + stream = TokenStream(stream, name, filename) # type: ignore + return stream - def _generate(self, source, name, filename, defer_init=False): + def _generate( + self, + source: nodes.Template, + name: t.Optional[str], + filename: t.Optional[str], + defer_init: bool = False, + ) -> str: """Internal hook that can be overridden to hook a different generate method in. .. versionadded:: 2.5 """ - return generate( + return generate( # type: ignore source, self, name, @@ -592,16 +684,45 @@ def _generate(self, source, name, filename, defer_init=False): optimized=self.optimized, ) - def _compile(self, source, filename): + def _compile(self, source: str, filename: str) -> CodeType: """Internal hook that can be overridden to hook a different compile method in. .. versionadded:: 2.5 """ - return compile(source, filename, "exec") + return compile(source, filename, "exec") # type: ignore + + @typing.overload + def compile( # type: ignore + self, + source: t.Union[str, nodes.Template], + name: t.Optional[str] = None, + filename: t.Optional[str] = None, + raw: "te.Literal[False]" = False, + defer_init: bool = False, + ) -> CodeType: + ... + + @typing.overload + def compile( + self, + source: t.Union[str, nodes.Template], + name: t.Optional[str] = None, + filename: t.Optional[str] = None, + raw: "te.Literal[True]" = ..., + defer_init: bool = False, + ) -> str: + ... @internalcode - def compile(self, source, name=None, filename=None, raw=False, defer_init=False): + def compile( + self, + source: t.Union[str, nodes.Template], + name: t.Optional[str] = None, + filename: t.Optional[str] = None, + raw: bool = False, + defer_init: bool = False, + ) -> t.Union[str, CodeType]: """Compile a node or template source code. The `name` parameter is the load name of the template after it was joined using :meth:`join_path` if necessary, not the filename on the file system. @@ -623,7 +744,7 @@ def compile(self, source, name=None, filename=None, raw=False, defer_init=False) """ source_hint = None try: - if isinstance(source, string_types): + if isinstance(source, str): source_hint = source source = self._parse(source, name, filename) source = self._generate(source, name, filename, defer_init=defer_init) @@ -631,13 +752,13 @@ def compile(self, source, name=None, filename=None, raw=False, defer_init=False) return source if filename is None: filename = "