Compare commits

..

1 Commits

Author SHA1 Message Date
Gregory Becker
d6ce6d30b8 msvc: remove conjoined msvc/ifx detection
Signed-off-by: Gregory Becker <becker33@llnl.gov>
2025-03-27 13:37:25 -07:00
20387 changed files with 478797 additions and 509896 deletions

View File

@@ -28,7 +28,7 @@ max-line-length = 99
# - F821: undefined name `name` # - F821: undefined name `name`
# #
per-file-ignores = per-file-ignores =
var/spack/*/package.py:F403,F405,F821 var/spack/repos/*/package.py:F403,F405,F821
*-ci-package.py:F403,F405,F821 *-ci-package.py:F403,F405,F821
# exclude things we usually do not want linting for. # exclude things we usually do not want linting for.

3
.gitattributes vendored
View File

@@ -1,3 +1,4 @@
*.py diff=python *.py diff=python
*.lp linguist-language=Prolog
lib/spack/external/* linguist-vendored lib/spack/external/* linguist-vendored
*.bat text eol=crlf *.bat text eol=crlf

View File

@@ -59,6 +59,7 @@ jobs:
- name: Package audits (without coverage) - name: Package audits (without coverage)
if: ${{ runner.os == 'Windows' }} if: ${{ runner.os == 'Windows' }}
run: | run: |
. share/spack/setup-env.sh
spack -d audit packages spack -d audit packages
./share/spack/qa/validate_last_exit.ps1 ./share/spack/qa/validate_last_exit.ps1
spack -d audit configs spack -d audit configs

View File

@@ -26,7 +26,7 @@ jobs:
dnf install -y \ dnf install -y \
bzip2 curl file gcc-c++ gcc gcc-gfortran git gzip \ bzip2 curl file gcc-c++ gcc gcc-gfortran git gzip \
make patch unzip which xz python3 python3-devel tree \ make patch unzip which xz python3 python3-devel tree \
cmake bison bison-devel libstdc++-static gawk cmake bison bison-devel libstdc++-static
- name: Setup OpenSUSE - name: Setup OpenSUSE
if: ${{ matrix.image == 'opensuse/leap:latest' }} if: ${{ matrix.image == 'opensuse/leap:latest' }}
run: | run: |

View File

@@ -42,17 +42,17 @@ jobs:
# built-in repository or documentation # built-in repository or documentation
filters: | filters: |
bootstrap: bootstrap:
- 'var/spack/repos/spack_repo/builtin/packages/clingo-bootstrap/**' - 'var/spack/repos/builtin/packages/clingo-bootstrap/**'
- 'var/spack/repos/spack_repo/builtin/packages/clingo/**' - 'var/spack/repos/builtin/packages/clingo/**'
- 'var/spack/repos/spack_repo/builtin/packages/python/**' - 'var/spack/repos/builtin/packages/python/**'
- 'var/spack/repos/spack_repo/builtin/packages/re2c/**' - 'var/spack/repos/builtin/packages/re2c/**'
- 'var/spack/repos/spack_repo/builtin/packages/gnupg/**' - 'var/spack/repos/builtin/packages/gnupg/**'
- 'var/spack/repos/spack_repo/builtin/packages/libassuan/**' - 'var/spack/repos/builtin/packages/libassuan/**'
- 'var/spack/repos/spack_repo/builtin/packages/libgcrypt/**' - 'var/spack/repos/builtin/packages/libgcrypt/**'
- 'var/spack/repos/spack_repo/builtin/packages/libgpg-error/**' - 'var/spack/repos/builtin/packages/libgpg-error/**'
- 'var/spack/repos/spack_repo/builtin/packages/libksba/**' - 'var/spack/repos/builtin/packages/libksba/**'
- 'var/spack/repos/spack_repo/builtin/packages/npth/**' - 'var/spack/repos/builtin/packages/npth/**'
- 'var/spack/repos/spack_repo/builtin/packages/pinentry/**' - 'var/spack/repos/builtin/packages/pinentry/**'
- 'lib/spack/**' - 'lib/spack/**'
- 'share/spack/**' - 'share/spack/**'
- '.github/workflows/bootstrap.yml' - '.github/workflows/bootstrap.yml'

View File

@@ -6,7 +6,6 @@ on:
jobs: jobs:
# Check we don't make the situation with circular imports worse # Check we don't make the situation with circular imports worse
import-check: import-check:
continue-on-error: true
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- uses: julia-actions/setup-julia@v2 - uses: julia-actions/setup-julia@v2

View File

@@ -25,16 +25,14 @@ jobs:
with: with:
python-version: '3.13' python-version: '3.13'
cache: 'pip' cache: 'pip'
cache-dependency-path: '.github/workflows/requirements/style/requirements.txt'
- name: Install Python Packages - name: Install Python Packages
run: | run: |
pip install --upgrade pip setuptools
pip install -r .github/workflows/requirements/style/requirements.txt pip install -r .github/workflows/requirements/style/requirements.txt
- name: vermin (Spack's Core) - name: vermin (Spack's Core)
run: | run: vermin --backport importlib --backport argparse --violations --backport typing -t=3.6- -vvv lib/spack/spack/ lib/spack/llnl/ bin/
vermin --backport importlib --backport argparse --violations --backport typing -t=3.6- -vvv lib/spack/spack/ lib/spack/llnl/ bin/
- name: vermin (Repositories) - name: vermin (Repositories)
run: | run: vermin --backport importlib --backport argparse --violations --backport typing -t=3.6- -vvv var/spack/repos
vermin --backport importlib --backport argparse --violations --backport typing -t=3.6- -vvv var/spack/repos var/spack/test_repos
# Run style checks on the files that have been changed # Run style checks on the files that have been changed
style: style:
@@ -42,20 +40,23 @@ jobs:
steps: steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with: with:
fetch-depth: 2 fetch-depth: 0
- uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
with: with:
python-version: '3.13' python-version: '3.13'
cache: 'pip' cache: 'pip'
cache-dependency-path: '.github/workflows/requirements/style/requirements.txt'
- name: Install Python packages - name: Install Python packages
run: | run: |
pip install --upgrade pip setuptools
pip install -r .github/workflows/requirements/style/requirements.txt pip install -r .github/workflows/requirements/style/requirements.txt
- name: Setup git configuration
run: |
# Need this for the git tests to succeed.
git --version
. .github/workflows/bin/setup_git.sh
- name: Run style tests - name: Run style tests
run: | run: |
bin/spack style --base HEAD^1 share/spack/qa/run-style-tests
bin/spack license verify
pylint -j $(nproc) --disable=all --enable=unspecified-encoding --ignore-paths=lib/spack/external lib
audit: audit:
uses: ./.github/workflows/audit.yaml uses: ./.github/workflows/audit.yaml
@@ -65,11 +66,7 @@ jobs:
python_version: '3.13' python_version: '3.13'
verify-checksums: verify-checksums:
# do not run if the commit message or PR description contains [skip-verify-checksums] if: ${{ inputs.with_packages == 'true' }}
if: >-
${{ inputs.with_packages == 'true' &&
!contains(github.event.pull_request.body, '[skip-verify-checksums]') &&
!contains(github.event.head_commit.message, '[skip-verify-checksums]') }}
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
@@ -106,3 +103,21 @@ jobs:
spack -d bootstrap now --dev spack -d bootstrap now --dev
spack -d style -t black spack -d style -t black
spack unit-test -V spack unit-test -V
# Further style checks from pylint
pylint:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
fetch-depth: 0
- uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
with:
python-version: '3.13'
cache: 'pip'
- name: Install Python packages
run: |
pip install --upgrade pip setuptools pylint
- name: Pylint (Spack Core)
run: |
pylint -j 4 --disable=all --enable=unspecified-encoding --ignore-paths=lib/spack/external lib

View File

@@ -1,8 +1,7 @@
black==25.1.0 black==25.1.0
clingo==5.8.0 clingo==5.7.1
flake8==7.2.0 flake8==7.1.2
isort==6.0.1 isort==6.0.1
mypy==1.15.0 mypy==1.15.0
types-six==1.17.0.20250403 types-six==1.17.0.20250304
vermin==1.6.0 vermin==1.6.0
pylint==3.3.7

View File

@@ -1,37 +0,0 @@
name: sync with spack/spack-packages
on:
push:
branches:
- develop
jobs:
sync:
if: github.repository == 'spack/spack'
runs-on: ubuntu-latest
steps:
- name: Checkout spack/spack
run: git clone https://github.com/spack/spack.git
- name: Checkout spack/spack-packages
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
ssh-key: ${{ secrets.SYNC_PACKAGES_KEY }}
path: spack-packages
repository: spack/spack-packages
- name: Install git-filter-repo
run: |
curl -LfsO https://raw.githubusercontent.com/newren/git-filter-repo/refs/tags/v2.47.0/git-filter-repo
echo "67447413e273fc76809289111748870b6f6072f08b17efe94863a92d810b7d94 git-filter-repo" | sha256sum -c -
chmod +x git-filter-repo
sudo mv git-filter-repo /usr/local/bin/
- name: Sync spack/spack-packages with spack/spack
run: |
cd spack-packages
git-filter-repo --quiet --source ../spack \
--path var/spack/repos/ --path-rename var/spack/repos/:python/ \
--path share/spack/gitlab/cloud_pipelines/ --path-rename share/spack/gitlab/cloud_pipelines/:.ci/gitlab/ \
--refs develop
- name: Push
run: |
cd spack-packages
git push git@github.com:spack/spack-packages.git develop:develop --force

View File

@@ -19,6 +19,9 @@ jobs:
on_develop: on_develop:
- ${{ github.ref == 'refs/heads/develop' }} - ${{ github.ref == 'refs/heads/develop' }}
include: include:
- python-version: '3.6'
os: ubuntu-20.04
on_develop: ${{ github.ref == 'refs/heads/develop' }}
- python-version: '3.7' - python-version: '3.7'
os: ubuntu-22.04 os: ubuntu-22.04
on_develop: ${{ github.ref == 'refs/heads/develop' }} on_develop: ${{ github.ref == 'refs/heads/develop' }}

View File

@@ -46,42 +46,18 @@ See the
[Feature Overview](https://spack.readthedocs.io/en/latest/features.html) [Feature Overview](https://spack.readthedocs.io/en/latest/features.html)
for examples and highlights. for examples and highlights.
Installation To install spack and your first package, make sure you have Python & Git.
----------------
To install spack, first make sure you have Python & Git.
Then: Then:
```bash $ git clone -c feature.manyFiles=true --depth=2 https://github.com/spack/spack.git
git clone -c feature.manyFiles=true --depth=2 https://github.com/spack/spack.git $ cd spack/bin
``` $ ./spack install zlib
<details>
<summary>What are <code>manyFiles=true</code> and <code>--depth=2</code>?</summary>
<br>
> [!TIP]
> `-c feature.manyFiles=true` improves git's performance on repositories with 1,000+ files. > `-c feature.manyFiles=true` improves git's performance on repositories with 1,000+ files.
> >
> `--depth=2` prunes the git history to reduce the size of the Spack installation. > `--depth=2` prunes the git history to reduce the size of the Spack installation.
</details>
```bash
# For bash/zsh/sh
. spack/share/spack/setup-env.sh
# For tcsh/csh
source spack/share/spack/setup-env.csh
# For fish
. spack/share/spack/setup-env.fish
```
```bash
# Now you're ready to install a package!
spack install zlib-ng
```
Documentation Documentation
---------------- ----------------

View File

@@ -90,9 +90,10 @@ config:
misc_cache: $user_cache_path/cache misc_cache: $user_cache_path/cache
# Abort downloads after this many seconds if not data is received. # Timeout in seconds used for downloading sources etc. This only applies
# Setting this to 0 will disable the timeout. # to the connection phase and can be increased for slow connections or
connect_timeout: 30 # servers. 0 means no timeout.
connect_timeout: 10
# If this is false, tools like curl that use SSL will not verify # If this is false, tools like curl that use SSL will not verify

View File

@@ -25,8 +25,6 @@ packages:
glu: [apple-glu] glu: [apple-glu]
unwind: [apple-libunwind] unwind: [apple-libunwind]
uuid: [apple-libuuid] uuid: [apple-libuuid]
apple-clang:
buildable: false
apple-gl: apple-gl:
buildable: false buildable: false
externals: externals:

View File

@@ -1,4 +0,0 @@
include:
- path: "${platform}"
optional: true
- path: base

View File

@@ -72,8 +72,6 @@ packages:
permissions: permissions:
read: world read: world
write: user write: user
cce:
buildable: false
cray-fftw: cray-fftw:
buildable: false buildable: false
cray-libsci: cray-libsci:
@@ -88,23 +86,13 @@ packages:
buildable: false buildable: false
essl: essl:
buildable: false buildable: false
fj:
buildable: false
fujitsu-mpi: fujitsu-mpi:
buildable: false buildable: false
fujitsu-ssl2: fujitsu-ssl2:
buildable: false buildable: false
glibc:
buildable: false
hpcx-mpi: hpcx-mpi:
buildable: false buildable: false
iconv:
prefer: [libiconv]
mpt: mpt:
buildable: false buildable: false
musl:
buildable: false
spectrum-mpi: spectrum-mpi:
buildable: false buildable: false
xl:
buildable: false

View File

@@ -11,4 +11,4 @@
# ~/.spack/repos.yaml # ~/.spack/repos.yaml
# ------------------------------------------------------------------------- # -------------------------------------------------------------------------
repos: repos:
- $spack/var/spack/repos/spack_repo/builtin - $spack/var/spack/repos/builtin

View File

@@ -20,8 +20,3 @@ packages:
cxx: [msvc] cxx: [msvc]
mpi: [msmpi] mpi: [msmpi]
gl: [wgl] gl: [wgl]
mpi:
require:
- one_of: [msmpi]
msvc:
buildable: false

View File

@@ -276,7 +276,7 @@ remove dependent packages *before* removing their dependencies or use the
Garbage collection Garbage collection
^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^
When Spack builds software from sources, it often installs tools that are needed When Spack builds software from sources, if often installs tools that are needed
just to build or test other software. These are not necessary at runtime. just to build or test other software. These are not necessary at runtime.
To support cases where removing these tools can be a benefit Spack provides To support cases where removing these tools can be a benefit Spack provides
the ``spack gc`` ("garbage collector") command, which will uninstall all unneeded packages: the ``spack gc`` ("garbage collector") command, which will uninstall all unneeded packages:
@@ -1291,61 +1291,55 @@ based on site policies.
Variants Variants
^^^^^^^^ ^^^^^^^^
Variants are named options associated with a particular package and are Variants are named options associated with a particular package. They are
typically used to enable or disable certain features at build time. They optional, as each package must provide default values for each variant it
are optional, as each package must provide default values for each variant makes available. Variants can be specified using
it makes available. a flexible parameter syntax ``name=<value>``. For example,
``spack install mercury debug=True`` will install mercury built with debug
The names of variants available for a particular package depend on flags. The names of particular variants available for a package depend on
what was provided by the package author. ``spack info <package>`` will what was provided by the package author. ``spack info <package>`` will
provide information on what build variants are available. provide information on what build variants are available.
There are different types of variants: For compatibility with earlier versions, variants which happen to be
boolean in nature can be specified by a syntax that represents turning
options on and off. For example, in the previous spec we could have
supplied ``mercury +debug`` with the same effect of enabling the debug
compile time option for the libelf package.
1. Boolean variants. Typically used to enable or disable a feature at Depending on the package a variant may have any default value. For
compile time. For example, a package might have a ``debug`` variant that ``mercury`` here, ``debug`` is ``False`` by default, and we turned it on
can be explicitly enabled with ``+debug`` and disabled with ``~debug``. with ``debug=True`` or ``+debug``. If a variant is ``True`` by default
2. Single-valued variants. Often used to set defaults. For example, a package you can turn it off by either adding ``-name`` or ``~name`` to the spec.
might have a ``compression`` variant that determines the default
compression algorithm, which users could set to ``compression=gzip`` or
``compression=zstd``.
3. Multi-valued variants. A package might have a ``fabrics`` variant that
determines which network fabrics to support. Users could set this to
``fabrics=verbs,ofi`` to enable both InfiniBand verbs and OpenFabrics
interfaces. The values are separated by commas.
The meaning of ``fabrics=verbs,ofi`` is to enable *at least* the specified There are two syntaxes here because, depending on context, ``~`` and
fabrics, but other fabrics may be enabled as well. If the intent is to ``-`` may mean different things. In most shells, the following will
enable *only* the specified fabrics, then the ``fabrics:=verbs,ofi`` result in the shell performing home directory substitution:
syntax should be used with the ``:=`` operator.
.. note:: .. code-block:: sh
In certain shells, the the ``~`` character is expanded to the home mpileaks ~debug # shell may try to substitute this!
directory. To avoid these issues, avoid whitespace between the package mpileaks~debug # use this instead
name and the variant:
.. code-block:: sh If there is a user called ``debug``, the ``~`` will be incorrectly
expanded. In this situation, you would want to write ``libelf
-debug``. However, ``-`` can be ambiguous when included after a
package name without spaces:
mpileaks ~debug # shell may try to substitute this! .. code-block:: sh
mpileaks~debug # use this instead
Alternatively, you can use the ``-`` character to disable a variant, mpileaks-debug # wrong!
but be aware that this requires a space between the package name and mpileaks -debug # right
the variant:
.. code-block:: sh Spack allows the ``-`` character to be part of package names, so the
above will be interpreted as a request for the ``mpileaks-debug``
package, not a request for ``mpileaks`` built without ``debug``
options. In this scenario, you should write ``mpileaks~debug`` to
avoid ambiguity.
mpileaks-debug # wrong: refers to a package named "mpileaks-debug" When spack normalizes specs, it prints them out with no spaces boolean
mpileaks -debug # right: refers to a package named mpileaks with debug disabled variants using the backwards compatibility syntax and uses only ``~``
for disabled boolean variants. The ``-`` and spaces on the command
As a last resort, ``debug=False`` can also be used to disable a boolean variant. line are provided for convenience and legibility.
"""""""""""""""""""""""""""""""""""
Variant propagation to dependencies
"""""""""""""""""""""""""""""""""""
Spack allows variants to propagate their value to the package's Spack allows variants to propagate their value to the package's
dependency by using ``++``, ``--``, and ``~~`` for boolean variants. dependency by using ``++``, ``--``, and ``~~`` for boolean variants.
@@ -1415,29 +1409,27 @@ that executables will run without the need to set ``LD_LIBRARY_PATH``.
.. code-block:: yaml .. code-block:: yaml
packages: compilers:
gcc: - compiler:
externals: spec: gcc@4.9.3
- spec: gcc@4.9.3 paths:
prefix: /opt/gcc cc: /opt/gcc/bin/gcc
extra_attributes: c++: /opt/gcc/bin/g++
compilers: f77: /opt/gcc/bin/gfortran
c: /opt/gcc/bin/gcc fc: /opt/gcc/bin/gfortran
cxx: /opt/gcc/bin/g++ environment:
fortran: /opt/gcc/bin/gfortran unset:
environment: - BAD_VARIABLE
unset: set:
- BAD_VARIABLE GOOD_VARIABLE_NUM: 1
set: GOOD_VARIABLE_STR: good
GOOD_VARIABLE_NUM: 1 prepend_path:
GOOD_VARIABLE_STR: good PATH: /path/to/binutils
prepend_path: append_path:
PATH: /path/to/binutils LD_LIBRARY_PATH: /opt/gcc/lib
append_path: extra_rpaths:
LD_LIBRARY_PATH: /opt/gcc/lib - /path/to/some/compiler/runtime/directory
extra_rpaths: - /path/to/some/other/compiler/runtime/directory
- /path/to/some/compiler/runtime/directory
- /path/to/some/other/compiler/runtime/directory
^^^^^^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^^^^^^
@@ -1916,7 +1908,7 @@ diagnostics. Issues, if found, are reported to stdout:
PKG-DIRECTIVES: 1 issue found PKG-DIRECTIVES: 1 issue found
1. lammps: wrong variant in "conflicts" directive 1. lammps: wrong variant in "conflicts" directive
the variant 'adios' does not exist the variant 'adios' does not exist
in /home/spack/spack/var/spack/repos/spack_repo/builtin/packages/lammps/package.py in /home/spack/spack/var/spack/repos/builtin/packages/lammps/package.py
------------ ------------

View File

@@ -45,14 +45,10 @@ provided binary cache, which can be a local directory or a remote URL.
Here is an example where a build cache is created in a local directory named Here is an example where a build cache is created in a local directory named
"spack-cache", to which we push the "ninja" spec: "spack-cache", to which we push the "ninja" spec:
ninja-1.12.1-vmvycib6vmiofkdqgrblo7zsvp7odwut
.. code-block:: console .. code-block:: console
$ spack buildcache push ./spack-cache ninja $ spack buildcache push ./spack-cache ninja
==> Selected 30 specs to push to file:///home/spackuser/spack/spack-cache ==> Pushing binary packages to file:///home/spackuser/spack/spack-cache/build_cache
...
==> [30/30] Pushed ninja@1.12.1/ngldn2k
Note that ``ninja`` must be installed locally for this to work. Note that ``ninja`` must be installed locally for this to work.
@@ -89,7 +85,7 @@ You can see that the mirror is added with ``spack mirror list`` as follows:
spack-public https://spack-llnl-mirror.s3-us-west-2.amazonaws.com/ spack-public https://spack-llnl-mirror.s3-us-west-2.amazonaws.com/
At this point, you've created a buildcache, but Spack hasn't indexed it, so if At this point, you've create a buildcache, but spack hasn't indexed it, so if
you run ``spack buildcache list`` you won't see any results. You need to index you run ``spack buildcache list`` you won't see any results. You need to index
this new build cache as follows: this new build cache as follows:
@@ -102,10 +98,9 @@ Now you can use list:
.. code-block:: console .. code-block:: console
$ spack buildcache list $ spack buildcache list
==> 24 cached builds. ==> 1 cached build.
-- linux-ubuntu22.04-sapphirerapids / gcc@12.3.0 ---------------- -- linux-ubuntu20.04-skylake / gcc@9.3.0 ------------------------
[ ... ] ninja@1.10.2
ninja@1.12.1
With ``mymirror`` configured and an index available, Spack will automatically With ``mymirror`` configured and an index available, Spack will automatically
use it during concretization and installation. That means that you can expect use it during concretization and installation. That means that you can expect
@@ -116,17 +111,17 @@ verify by re-installing ninja:
$ spack uninstall ninja $ spack uninstall ninja
$ spack install ninja $ spack install ninja
[ ... ] ==> Installing ninja-1.11.1-yxferyhmrjkosgta5ei6b4lqf6bxbscz
==> Installing ninja-1.12.1-ngldn2kpvb6lqc44oqhhow7fzg7xu7lh [24/24] ==> Fetching file:///home/spackuser/spack/spack-cache/build_cache/linux-ubuntu20.04-skylake-gcc-9.3.0-ninja-1.10.2-yxferyhmrjkosgta5ei6b4lqf6bxbscz.spec.json.sig
gpg: Signature made Thu 06 Mar 2025 10:03:38 AM MST gpg: Signature made Do 12 Jan 2023 16:01:04 CET
gpg: using RSA key 75BC0528114909C076E2607418010FFAD73C9B07 gpg: using RSA key 61B82B2B2350E171BD17A1744E3A689061D57BF6
gpg: Good signature from "example (GPG created for Spack) <example@example.com>" [ultimate] gpg: Good signature from "example (GPG created for Spack) <example@example.com>" [ultimate]
==> Fetching file:///home/spackuser/spack/spack-cache/blobs/sha256/f0/f08eb62661ad159d2d258890127fc6053f5302a2f490c1c7f7bd677721010ee0 ==> Fetching file:///home/spackuser/spack/spack-cache/build_cache/linux-ubuntu20.04-skylake/gcc-9.3.0/ninja-1.10.2/linux-ubuntu20.04-skylake-gcc-9.3.0-ninja-1.10.2-yxferyhmrjkosgta5ei6b4lqf6bxbscz.spack
==> Fetching file:///home/spackuser/spack/spack-cache/blobs/sha256/c7/c79ac6e40dfdd01ac499b020e52e57aa91151febaea3ad183f90c0f78b64a31a ==> Extracting ninja-1.10.2-yxferyhmrjkosgta5ei6b4lqf6bxbscz from binary cache
==> Extracting ninja-1.12.1-ngldn2kpvb6lqc44oqhhow7fzg7xu7lh from binary cache ==> ninja: Successfully installed ninja-1.11.1-yxferyhmrjkosgta5ei6b4lqf6bxbscz
==> ninja: Successfully installed ninja-1.12.1-ngldn2kpvb6lqc44oqhhow7fzg7xu7lh Search: 0.00s. Fetch: 0.17s. Install: 0.12s. Total: 0.29s
Search: 0.00s. Fetch: 0.11s. Install: 0.11s. Extract: 0.10s. Relocate: 0.00s. Total: 0.22s [+] /home/harmen/spack/opt/spack/linux-ubuntu20.04-skylake/gcc-9.3.0/ninja-1.11.1-yxferyhmrjkosgta5ei6b4lqf6bxbscz
[+] /home/spackuser/spack/opt/spack/linux-ubuntu22.04-sapphirerapids/gcc-12.3.0/ninja-1.12.1-ngldn2kpvb6lqc44oqhhow7fzg7xu7lh
It worked! You've just completed a full example of creating a build cache with It worked! You've just completed a full example of creating a build cache with
a spec of interest, adding it as a mirror, updating its index, listing the contents, a spec of interest, adding it as a mirror, updating its index, listing the contents,
@@ -318,7 +313,7 @@ other system dependencies. However, they are still compatible with tools like
``skopeo``, ``podman``, and ``docker`` for pulling and pushing. ``skopeo``, ``podman``, and ``docker`` for pulling and pushing.
.. note:: .. note::
The Docker ``overlayfs2`` storage driver is limited to 128 layers, above which a The docker ``overlayfs2`` storage driver is limited to 128 layers, above which a
``max depth exceeded`` error may be produced when pulling the image. There ``max depth exceeded`` error may be produced when pulling the image. There
are `alternative drivers <https://docs.docker.com/storage/storagedriver/>`_. are `alternative drivers <https://docs.docker.com/storage/storagedriver/>`_.
@@ -349,18 +344,19 @@ which lets you get started quickly. See the following resources for more informa
^^^^^^^^^^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^^^^^^^^^^
Create tarball of installed Spack package and all dependencies. Create tarball of installed Spack package and all dependencies.
Tarballs and specfiles are compressed and checksummed, manifests are signed if gpg2 is available. Tarballs are checksummed and signed if gpg2 is available.
Commands like ``spack buildcache install`` will search Spack mirrors to get the list of build caches. Places them in a directory ``build_cache`` that can be copied to a mirror.
Commands like ``spack buildcache install`` will search Spack mirrors for build_cache to get the list of build caches.
============== ======================================================================================================================== ============== ========================================================================================================================
Arguments Description Arguments Description
============== ======================================================================================================================== ============== ========================================================================================================================
``<specs>`` list of partial specs or hashes with a leading ``/`` to match from installed packages and used for creating build caches ``<specs>`` list of partial specs or hashes with a leading ``/`` to match from installed packages and used for creating build caches
``-d <path>`` directory in which ``v3`` and ``blobs`` directories are created, defaults to ``.`` ``-d <path>`` directory in which ``build_cache`` directory is created, defaults to ``.``
``-f`` overwrite compressed tarball and spec metadata files if they already exist ``-f`` overwrite ``.spack`` file in ``build_cache`` directory if it exists
``-k <key>`` the key to sign package with. In the case where multiple keys exist, the package will be unsigned unless ``-k`` is used. ``-k <key>`` the key to sign package with. In the case where multiple keys exist, the package will be unsigned unless ``-k`` is used.
``-r`` make paths in binaries relative before creating tarball ``-r`` make paths in binaries relative before creating tarball
``-y`` answer yes to all questions about creating unsigned build caches ``-y`` answer yes to all create unsigned ``build_cache`` questions
============== ======================================================================================================================== ============== ========================================================================================================================
^^^^^^^^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -401,165 +397,6 @@ List public keys available on Spack mirror.
========= ============================================== ========= ==============================================
Arguments Description Arguments Description
========= ============================================== ========= ==============================================
``-it`` trust the keys downloaded with prompt for each ``-i`` trust the keys downloaded with prompt for each
``-y`` answer yes to all trust all keys downloaded ``-y`` answer yes to all trust all keys downloaded
========= ============================================== ========= ==============================================
.. _build_cache_layout:
------------------
Build Cache Layout
------------------
This section describes the structure and content of URL-style build caches, as
distinguished from OCI-style build caches.
The entry point for a binary package is a manifest json file that points to at
least two other files stored as content-addressed blobs. These files include a spec
metadata file, as well as the installation directory of the package stored as
a compressed archive file. Binary package manifest files are named to indicate
the package name and version, as well as the hash of the concrete spec. For
example::
gcc-runtime-12.3.0-qyu2lvgt3nxh7izxycugdbgf5gsdpkjt.spec.manifest.json
would contain the manifest for a binary package of ``gcc-runtime@12.3.0``.
The id of the built package is defined to be the DAG hash of the concrete spec,
and exists in the name of the file as well. The id distinguishes a particular
binary package from all other binary packages with the same package name and
version. Below is an example binary package manifest file. Such a file would
live in the versioned spec manifests directory of a binary mirror, for example
``v3/manifests/spec/``::
{
"version": 3,
"data": [
{
"contentLength": 10731083,
"mediaType": "application/vnd.spack.install.v2.tar+gzip",
"compression": "gzip",
"checksumAlgorithm": "sha256",
"checksum": "0f24aa6b5dd7150067349865217acd3f6a383083f9eca111d2d2fed726c88210"
},
{
"contentLength": 1000,
"mediaType": "application/vnd.spack.spec.v5+json",
"compression": "gzip",
"checksumAlgorithm": "sha256",
"checksum": "fba751c4796536737c9acbb718dad7429be1fa485f5585d450ab8b25d12ae041"
}
]
}
The manifest points to both the compressed tar file as well as the compressed
spec metadata file, and contains the checksum of each. This checksum
is also used as the address of the associated file, and hence, must be
known in order to locate the tarball or spec file within the mirror. Once the
tarball or spec metadata file is downloaded, the checksum should be computed locally
and compared to the checksum in the manifest to ensure the contents have not changed
since the binary package was pushed. Spack stores all data files (including compressed
tar files, spec metadata, indices, public keys, etc) within a ``blobs/<hash-algorithm>/``
directory, using the first two characters of the checksum as a sub-directory
to reduce the number files in a single folder. Here is a depiction of the
organization of binary mirror contents::
mirror_directory/
v3/
layout.json
manifests/
spec/
gcc-runtime/
gcc-runtime-12.3.0-s2nqujezsce4x6uhtvxscu7jhewqzztx.spec.manifest.json
gmake/
gmake-4.4.1-lpr4j77rcgkg5536tmiuzwzlcjsiomph.spec.manifest.json
compiler-wrapper/
compiler-wrapper-1.0-s7ieuyievp57vwhthczhaq2ogowf3ohe.spec.manifest.json
index/
index.manifest.json
key/
75BC0528114909C076E2607418010FFAD73C9B07.key.manifest.json
keys.manifest.json
blobs/
sha256/
0f/
0f24aa6b5dd7150067349865217acd3f6a383083f9eca111d2d2fed726c88210
fb/
fba751c4796536737c9acbb718dad7429be1fa485f5585d450ab8b25d12ae041
2a/
2a21836d206ccf0df780ab0be63fdf76d24501375306a35daa6683c409b7922f
...
Files within the ``manifests`` directory are organized into subdirectories by
the type of entity they represent. Binary package manifests live in the ``spec/``
directory, binary cache index manifests live in the ``index/`` directory, and
manifests for public keys and their indices live in the ``key/`` subdirectory.
Regardless of the type of entity they represent, all manifest files are named
with an extension ``.manifest.json``.
Every manifest contains a ``data`` array, each element of which refers to an
associated file stored a content-addressed blob. Considering the example spec
manifest shown above, the compressed installation archive can be found by
picking out the data blob with the appropriate ``mediaType``, which in this
case would be ``application/vnd.spack.install.v1.tar+gzip``. The associated
file is found by looking in the blobs directory under ``blobs/sha256/fb/`` for
the file named with the complete checksum value.
As mentioned above, every entity in a binary mirror (aka build cache) is stored
as a content-addressed blob pointed to by a manifest. While an example spec
manifest (i.e. a manifest for a binary package) is shown above, here is what
the manifest of a build cache index looks like::
{
"version": 3,
"data": [
{
"contentLength": 6411,
"mediaType": "application/vnd.spack.db.v8+json",
"compression": "none",
"checksumAlgorithm": "sha256",
"checksum": "225a3e9da24d201fdf9d8247d66217f5b3f4d0fc160db1498afd998bfd115234"
}
]
}
Some things to note about this manifest are that it points to a blob that is not
compressed (``compression: "none"``), and that the ``mediaType`` is one we have
not seen yet, ``application/vnd.spack.db.v8+json``. The decision not to compress
build cache indices stems from the fact that spack does not yet sign build cache
index manifests. Once that changes, you may start to see these indices stored as
compressed blobs.
For completeness, here are examples of manifests for the other two types of entities
you might find in a spack build cache. First a public key manifest::
{
"version": 3,
"data": [
{
"contentLength": 2472,
"mediaType": "application/pgp-keys",
"compression": "none",
"checksumAlgorithm": "sha256",
"checksum": "9fc18374aebc84deb2f27898da77d4d4410e5fb44c60c6238cb57fb36147e5c7"
}
]
}
Note the ``mediaType`` of ``application/pgp-keys``. Finally, a public key index manifest::
{
"version": 3,
"data": [
{
"contentLength": 56,
"mediaType": "application/vnd.spack.keyindex.v1+json",
"compression": "none",
"checksumAlgorithm": "sha256",
"checksum": "29b3a0eb6064fd588543bc43ac7d42d708a69058dafe4be0859e3200091a9a1c"
}
]
}
Again note the ``mediaType`` of ``application/vnd.spack.keyindex.v1+json``. Also note
that both the above manifest examples refer to uncompressed blobs, this is for the same
reason spack does not yet compress build cache index blobs.

View File

@@ -14,7 +14,7 @@ is an entire command dedicated to the management of every aspect of bootstrappin
.. command-output:: spack bootstrap --help .. command-output:: spack bootstrap --help
Spack is configured to bootstrap its dependencies lazily by default; i.e., the first time they are needed and Spack is configured to bootstrap its dependencies lazily by default; i.e. the first time they are needed and
can't be found. You can readily check if any prerequisite for using Spack is missing by running: can't be found. You can readily check if any prerequisite for using Spack is missing by running:
.. code-block:: console .. code-block:: console
@@ -36,8 +36,8 @@ can't be found. You can readily check if any prerequisite for using Spack is mis
In the case of the output shown above Spack detected that both ``clingo`` and ``gnupg`` In the case of the output shown above Spack detected that both ``clingo`` and ``gnupg``
are missing and it's giving detailed information on why they are needed and whether are missing and it's giving detailed information on why they are needed and whether
they can be bootstrapped. The return code of this command summarizes the results; if any they can be bootstrapped. The return code of this command summarizes the results, if any
dependencies are missing, the return code is ``1``, otherwise ``0``. Running a command that dependencies are missing the return code is ``1``, otherwise ``0``. Running a command that
concretizes a spec, like: concretizes a spec, like:
.. code-block:: console .. code-block:: console

View File

@@ -66,7 +66,7 @@ on these ideas for each distinct build system that Spack supports:
build_systems/rocmpackage build_systems/rocmpackage
build_systems/sourceforgepackage build_systems/sourceforgepackage
For reference, the :py:mod:`Build System API docs <spack_repo.builtin.build_systems>` For reference, the :py:mod:`Build System API docs <spack.build_systems>`
provide a list of build systems and methods/attributes that can be provide a list of build systems and methods/attributes that can be
overridden. If you are curious about the implementation of a particular overridden. If you are curious about the implementation of a particular
build system, you can view the source code by running: build system, you can view the source code by running:
@@ -83,14 +83,14 @@ packages. You can quickly find examples by running:
.. code-block:: console .. code-block:: console
$ cd var/spack/repos/spack_repo/builtin/packages $ cd var/spack/repos/builtin/packages
$ grep -l QMakePackage */package.py $ grep -l QMakePackage */package.py
You can then view these packages with ``spack edit``. You can then view these packages with ``spack edit``.
This guide is intended to supplement the This guide is intended to supplement the
:py:mod:`Build System API docs <spack_repo.builtin.build_systems>` with examples of :py:mod:`Build System API docs <spack.build_systems>` with examples of
how to override commonly used methods. It also provides rules of thumb how to override commonly used methods. It also provides rules of thumb
and suggestions for package developers who are unfamiliar with a and suggestions for package developers who are unfamiliar with a
particular build system. particular build system.

View File

@@ -27,10 +27,10 @@ it could use the ``require`` directive as follows:
Spack has a number of built-in bundle packages, such as: Spack has a number of built-in bundle packages, such as:
* `AmdAocl <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/amd_aocl/package.py>`_ * `AmdAocl <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/amd-aocl/package.py>`_
* `EcpProxyApps <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/ecp_proxy_apps/package.py>`_ * `EcpProxyApps <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/ecp-proxy-apps/package.py>`_
* `Libc <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/libc/package.py>`_ * `Libc <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/libc/package.py>`_
* `Xsdk <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/xsdk/package.py>`_ * `Xsdk <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/xsdk/package.py>`_
where ``Xsdk`` also inherits from ``CudaPackage`` and ``RocmPackage`` and where ``Xsdk`` also inherits from ``CudaPackage`` and ``RocmPackage`` and
``Libc`` is a virtual bundle package for the C standard library. ``Libc`` is a virtual bundle package for the C standard library.

View File

@@ -129,8 +129,8 @@ Adding flags to cmake
To add additional flags to the ``cmake`` call, simply override the To add additional flags to the ``cmake`` call, simply override the
``cmake_args`` function. The following example defines values for the flags ``cmake_args`` function. The following example defines values for the flags
``WHATEVER``, ``ENABLE_BROKEN_FEATURE``, ``DETECT_HDF5``, and ``THREADS`` with ``WHATEVER``, ``ENABLE_BROKEN_FEATURE``, ``DETECT_HDF5``, and ``THREADS`` with
and without the :meth:`~spack_repo.builtin.build_systems.cmake.CMakeBuilder.define` and and without the :meth:`~spack.build_systems.cmake.CMakeBuilder.define` and
:meth:`~spack_repo.builtin.build_systems.cmake.CMakeBuilder.define_from_variant` helper functions: :meth:`~spack.build_systems.cmake.CMakeBuilder.define_from_variant` helper functions:
.. code-block:: python .. code-block:: python
@@ -199,7 +199,7 @@ a variant to control this:
However, not every CMake package accepts all four of these options. However, not every CMake package accepts all four of these options.
Grep the ``CMakeLists.txt`` file to see if the default values are Grep the ``CMakeLists.txt`` file to see if the default values are
missing or replaced. For example, the missing or replaced. For example, the
`dealii <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/dealii/package.py>`_ `dealii <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/dealii/package.py>`_
package overrides the default variant with: package overrides the default variant with:
.. code-block:: python .. code-block:: python

View File

@@ -20,8 +20,8 @@ start is to look at the definitions of other build systems. This guide
focuses mostly on how Spack's build systems work. focuses mostly on how Spack's build systems work.
In this guide, we will be using the In this guide, we will be using the
`perl <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/perl/package.py>`_ and `perl <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/perl/package.py>`_ and
`cmake <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/cmake/package.py>`_ `cmake <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/cmake/package.py>`_
packages as examples. ``perl``'s build system is a hand-written packages as examples. ``perl``'s build system is a hand-written
``Configure`` shell script, while ``cmake`` bootstraps itself during ``Configure`` shell script, while ``cmake`` bootstraps itself during
installation. Both of these packages require custom build systems. installation. Both of these packages require custom build systems.

View File

@@ -91,14 +91,14 @@ there are any other variables you need to set, you can do this in the
.. code-block:: python .. code-block:: python
def setup_build_environment(self, env: EnvironmentModifications) -> None: def setup_build_environment(self, env):
env.set("PREFIX", prefix) env.set("PREFIX", prefix)
env.set("BLASLIB", spec["blas"].libs.ld_flags) env.set("BLASLIB", spec["blas"].libs.ld_flags)
`cbench <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/cbench/package.py>`_ `cbench <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/cbench/package.py>`_
is a good example of a simple package that does this, while is a good example of a simple package that does this, while
`esmf <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/esmf/package.py>`_ `esmf <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/esmf/package.py>`_
is a good example of a more complex package. is a good example of a more complex package.
"""""""""""""""""""""" """"""""""""""""""""""
@@ -129,7 +129,7 @@ If you do need access to the spec, you can create a property like so:
] ]
`cloverleaf <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/cloverleaf/package.py>`_ `cloverleaf <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/cloverleaf/package.py>`_
is a good example of a package that uses this strategy. is a good example of a package that uses this strategy.
""""""""""""" """""""""""""
@@ -152,7 +152,7 @@ and a ``filter`` method to help with this. For example:
makefile.filter(r"^\s*FC\s*=.*", f"FC = {spack_fc}") makefile.filter(r"^\s*FC\s*=.*", f"FC = {spack_fc}")
`stream <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/stream/package.py>`_ `stream <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/stream/package.py>`_
is a good example of a package that involves editing a Makefile to set is a good example of a package that involves editing a Makefile to set
the appropriate variables. the appropriate variables.
@@ -192,7 +192,7 @@ well for storing variables:
inc.write(f"{key} = {config[key]}\n") inc.write(f"{key} = {config[key]}\n")
`elk <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/elk/package.py>`_ `elk <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/elk/package.py>`_
is a good example of a package that uses a dictionary to store is a good example of a package that uses a dictionary to store
configuration variables. configuration variables.
@@ -213,7 +213,7 @@ them in a list:
inc.write(f"{var}\n") inc.write(f"{var}\n")
`hpl <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/hpl/package.py>`_ `hpl <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/hpl/package.py>`_
is a good example of a package that uses a list to store is a good example of a package that uses a list to store
configuration variables. configuration variables.

View File

@@ -39,7 +39,7 @@ for "CRAN <package-name>" and you should quickly find what you want.
If it isn't on CRAN, try Bioconductor, another common R repository. If it isn't on CRAN, try Bioconductor, another common R repository.
For the purposes of this tutorial, we will be walking through For the purposes of this tutorial, we will be walking through
`r-caret <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/r_caret/package.py>`_ `r-caret <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/r-caret/package.py>`_
as an example. If you search for "CRAN caret", you will quickly find what as an example. If you search for "CRAN caret", you will quickly find what
you are looking for at https://cran.r-project.org/package=caret. you are looking for at https://cran.r-project.org/package=caret.
https://cran.r-project.org is the main CRAN website. However, CRAN also https://cran.r-project.org is the main CRAN website. However, CRAN also
@@ -337,7 +337,7 @@ Non-R dependencies
^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^
Some packages depend on non-R libraries for linking. Check out the Some packages depend on non-R libraries for linking. Check out the
`r-stringi <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/r_stringi/package.py>`_ `r-stringi <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/r-stringi/package.py>`_
package for an example: https://cloud.r-project.org/package=stringi. package for an example: https://cloud.r-project.org/package=stringi.
If you search for the text "SystemRequirements", you will see: If you search for the text "SystemRequirements", you will see:
@@ -352,7 +352,7 @@ Passing arguments to the installation
Some R packages provide additional flags that can be passed to Some R packages provide additional flags that can be passed to
``R CMD INSTALL``, often to locate non-R dependencies. ``R CMD INSTALL``, often to locate non-R dependencies.
`r-rmpi <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/r_rmpi/package.py>`_ `r-rmpi <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/r-rmpi/package.py>`_
is an example of this, and flags for linking to an MPI library. To pass is an example of this, and flags for linking to an MPI library. To pass
these to the installation command, you can override ``configure_args`` these to the installation command, you can override ``configure_args``
like so: like so:

View File

@@ -104,10 +104,10 @@ Finding available options
The first place to start when looking for a list of valid options to The first place to start when looking for a list of valid options to
build a package is ``scons --help``. Some packages like build a package is ``scons --help``. Some packages like
`kahip <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/kahip/package.py>`_ `kahip <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/kahip/package.py>`_
don't bother overwriting the default SCons help message, so this isn't don't bother overwriting the default SCons help message, so this isn't
very useful, but other packages like very useful, but other packages like
`serf <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/serf/package.py>`_ `serf <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/serf/package.py>`_
print a list of valid command-line variables: print a list of valid command-line variables:
.. code-block:: console .. code-block:: console
@@ -177,7 +177,7 @@ print a list of valid command-line variables:
More advanced packages like More advanced packages like
`cantera <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/cantera/package.py>`_ `cantera <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/cantera/package.py>`_
use ``scons --help`` to print a list of subcommands: use ``scons --help`` to print a list of subcommands:
.. code-block:: console .. code-block:: console

View File

@@ -35,8 +35,8 @@
if not os.path.exists(link_name): if not os.path.exists(link_name):
os.symlink(os.path.abspath("../../.."), link_name, target_is_directory=True) os.symlink(os.path.abspath("../../.."), link_name, target_is_directory=True)
sys.path.insert(0, os.path.abspath("_spack_root/lib/spack/external")) sys.path.insert(0, os.path.abspath("_spack_root/lib/spack/external"))
sys.path.insert(0, os.path.abspath("_spack_root/lib/spack/external/_vendoring"))
sys.path.append(os.path.abspath("_spack_root/lib/spack/")) sys.path.append(os.path.abspath("_spack_root/lib/spack/"))
sys.path.append(os.path.abspath("_spack_root/var/spack/repos/"))
# Add the Spack bin directory to the path so that we can use its output in docs. # Add the Spack bin directory to the path so that we can use its output in docs.
os.environ["SPACK_ROOT"] = os.path.abspath("_spack_root") os.environ["SPACK_ROOT"] = os.path.abspath("_spack_root")
@@ -76,20 +76,11 @@
apidoc_args apidoc_args
+ [ + [
"_spack_root/lib/spack/spack", "_spack_root/lib/spack/spack",
"_spack_root/lib/spack/spack/package.py", # sphinx struggles with os.chdir re-export.
"_spack_root/lib/spack/spack/test/*.py", "_spack_root/lib/spack/spack/test/*.py",
"_spack_root/lib/spack/spack/test/cmd/*.py", "_spack_root/lib/spack/spack/test/cmd/*.py",
] ]
) )
sphinx_apidoc(apidoc_args + ["_spack_root/lib/spack/llnl"]) sphinx_apidoc(apidoc_args + ["_spack_root/lib/spack/llnl"])
sphinx_apidoc(
apidoc_args
+ [
"--implicit-namespaces",
"_spack_root/var/spack/repos/spack_repo",
"_spack_root/var/spack/repos/spack_repo/builtin/packages",
]
)
# Enable todo items # Enable todo items
todo_include_todos = True todo_include_todos = True
@@ -218,7 +209,7 @@ def setup(sphinx):
# Spack classes that are private and we don't want to expose # Spack classes that are private and we don't want to expose
("py:class", "spack.provider_index._IndexBase"), ("py:class", "spack.provider_index._IndexBase"),
("py:class", "spack.repo._PrependFileLoader"), ("py:class", "spack.repo._PrependFileLoader"),
("py:class", "spack_repo.builtin.build_systems._checks.BuilderWithDefaults"), ("py:class", "spack.build_systems._checks.BuilderWithDefaults"),
# Spack classes that intersphinx is unable to resolve # Spack classes that intersphinx is unable to resolve
("py:class", "spack.version.StandardVersion"), ("py:class", "spack.version.StandardVersion"),
("py:class", "spack.spec.DependencySpec"), ("py:class", "spack.spec.DependencySpec"),
@@ -228,20 +219,14 @@ def setup(sphinx):
("py:class", "spack.install_test.Pb"), ("py:class", "spack.install_test.Pb"),
("py:class", "spack.filesystem_view.SimpleFilesystemView"), ("py:class", "spack.filesystem_view.SimpleFilesystemView"),
("py:class", "spack.traverse.EdgeAndDepth"), ("py:class", "spack.traverse.EdgeAndDepth"),
("py:class", "_vendoring.archspec.cpu.microarchitecture.Microarchitecture"), ("py:class", "archspec.cpu.microarchitecture.Microarchitecture"),
("py:class", "spack.compiler.CompilerCache"), ("py:class", "spack.compiler.CompilerCache"),
# TypeVar that is not handled correctly # TypeVar that is not handled correctly
("py:class", "llnl.util.lang.T"), ("py:class", "llnl.util.lang.T"),
("py:class", "llnl.util.lang.KT"), ("py:class", "llnl.util.lang.KT"),
("py:class", "llnl.util.lang.VT"), ("py:class", "llnl.util.lang.VT"),
("py:class", "llnl.util.lang.K"),
("py:class", "llnl.util.lang.V"),
("py:class", "llnl.util.lang.ClassPropertyType"),
("py:obj", "llnl.util.lang.KT"), ("py:obj", "llnl.util.lang.KT"),
("py:obj", "llnl.util.lang.VT"), ("py:obj", "llnl.util.lang.VT"),
("py:obj", "llnl.util.lang.ClassPropertyType"),
("py:obj", "llnl.util.lang.K"),
("py:obj", "llnl.util.lang.V"),
] ]
# The reST default role (used for this markup: `text`) to use for all documents. # The reST default role (used for this markup: `text`) to use for all documents.

View File

@@ -148,16 +148,15 @@ this can expose you to attacks. Use at your own risk.
``ssl_certs`` ``ssl_certs``
-------------------- --------------------
Path to custom certificates for SSL verification. The value can be a Path to custom certificats for SSL verification. The value can be a
filesystem path, or an environment variable that expands to an absolute file path. filesytem path, or an environment variable that expands to an absolute file path.
The default value is set to the environment variable ``SSL_CERT_FILE`` The default value is set to the environment variable ``SSL_CERT_FILE``
to use the same syntax used by many other applications that automatically to use the same syntax used by many other applications that automatically
detect custom certificates. detect custom certificates.
When ``url_fetch_method:curl`` the ``config:ssl_certs`` should resolve to When ``url_fetch_method:curl`` the ``config:ssl_certs`` should resolve to
a single file. Spack will then set the environment variable ``CURL_CA_BUNDLE`` a single file. Spack will then set the environment variable ``CURL_CA_BUNDLE``
in the subprocess calling ``curl``. If additional ``curl`` arguments are required, in the subprocess calling ``curl``.
they can be set in the config, e.g. ``url_fetch_method:'curl -k -q'``. If ``url_fetch_method:urllib`` then files and directories are supported i.e.
If ``url_fetch_method:urllib`` then files and directories are supported i.e.
``config:ssl_certs:$SSL_CERT_FILE`` or ``config:ssl_certs:$SSL_CERT_DIR`` ``config:ssl_certs:$SSL_CERT_FILE`` or ``config:ssl_certs:$SSL_CERT_DIR``
will work. will work.
In all cases the expanded path must be absolute for Spack to use the certificates. In all cases the expanded path must be absolute for Spack to use the certificates.

View File

@@ -46,12 +46,6 @@ Each Spack configuration file is nested under a top-level section
corresponding to its name. So, ``config.yaml`` starts with ``config:``, corresponding to its name. So, ``config.yaml`` starts with ``config:``,
``mirrors.yaml`` starts with ``mirrors:``, etc. ``mirrors.yaml`` starts with ``mirrors:``, etc.
.. tip::
Validation and autocompletion of Spack config files can be enabled in
your editor with the YAML language server. See `spack/schemas
<https://github.com/spack/schemas>`_ for more information.
.. _configuration-scopes: .. _configuration-scopes:
-------------------- --------------------

View File

@@ -11,7 +11,7 @@ Container Images
Spack :ref:`environments` can easily be turned into container images. This page Spack :ref:`environments` can easily be turned into container images. This page
outlines two ways in which this can be done: outlines two ways in which this can be done:
1. By installing the environment on the host system and copying the installations 1. By installing the environment on the host system, and copying the installations
into the container image. This approach does not require any tools like Docker into the container image. This approach does not require any tools like Docker
or Singularity to be installed. or Singularity to be installed.
2. By generating a Docker or Singularity recipe that can be used to build the 2. By generating a Docker or Singularity recipe that can be used to build the
@@ -56,8 +56,8 @@ environment roots and its runtime dependencies.
.. note:: .. note::
When using registries like GHCR and Docker Hub, the ``--oci-password`` flag specifies not When using registries like GHCR and Docker Hub, the ``--oci-password`` flag is not
the password for your account, but rather a personal access token you need to generate separately. the password for your account, but a personal access token you need to generate separately.
The specified ``--base-image`` should have a libc that is compatible with the host system. The specified ``--base-image`` should have a libc that is compatible with the host system.
For example if your host system is Ubuntu 20.04, you can use ``ubuntu:20.04``, ``ubuntu:22.04`` For example if your host system is Ubuntu 20.04, you can use ``ubuntu:20.04``, ``ubuntu:22.04``

View File

@@ -226,9 +226,9 @@ If all is well, you'll see something like this:
Modified files: Modified files:
var/spack/repos/spack_repo/builtin/packages/hdf5/package.py var/spack/repos/builtin/packages/hdf5/package.py
var/spack/repos/spack_repo/builtin/packages/hdf/package.py var/spack/repos/builtin/packages/hdf/package.py
var/spack/repos/spack_repo/builtin/packages/netcdf/package.py var/spack/repos/builtin/packages/netcdf/package.py
======================================================= =======================================================
Flake8 checks were clean. Flake8 checks were clean.
@@ -236,9 +236,9 @@ However, if you aren't compliant with PEP 8, flake8 will complain:
.. code-block:: console .. code-block:: console
var/spack/repos/spack_repo/builtin/packages/netcdf/package.py:26: [F401] 'os' imported but unused var/spack/repos/builtin/packages/netcdf/package.py:26: [F401] 'os' imported but unused
var/spack/repos/spack_repo/builtin/packages/netcdf/package.py:61: [E303] too many blank lines (2) var/spack/repos/builtin/packages/netcdf/package.py:61: [E303] too many blank lines (2)
var/spack/repos/spack_repo/builtin/packages/netcdf/package.py:106: [E501] line too long (92 > 79 characters) var/spack/repos/builtin/packages/netcdf/package.py:106: [E501] line too long (92 > 79 characters)
Flake8 found errors. Flake8 found errors.
Most of the error messages are straightforward, but if you don't understand what Most of the error messages are straightforward, but if you don't understand what
@@ -280,7 +280,7 @@ All of these can be installed with Spack, e.g.
.. warning:: .. warning::
Sphinx has `several required dependencies <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/py-sphinx/package.py>`_. Sphinx has `several required dependencies <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/py-sphinx/package.py>`_.
If you're using a ``python`` from Spack and you installed If you're using a ``python`` from Spack and you installed
``py-sphinx`` and friends, you need to make them available to your ``py-sphinx`` and friends, you need to make them available to your
``python``. The easiest way to do this is to run: ``python``. The easiest way to do this is to run:

View File

@@ -154,7 +154,9 @@ Package-related modules
:mod:`spack.util.naming` :mod:`spack.util.naming`
Contains functions for mapping between Spack package names, Contains functions for mapping between Spack package names,
Python module names, and Python class names. Python module names, and Python class names. Functions like
:func:`~spack.util.naming.mod_to_class` handle mapping package
module names to class names.
:mod:`spack.directives` :mod:`spack.directives`
*Directives* are functions that can be called inside a package definition *Directives* are functions that can be called inside a package definition

View File

@@ -1,34 +0,0 @@
.. Copyright Spack Project Developers. See COPYRIGHT file for details.
SPDX-License-Identifier: (Apache-2.0 OR MIT)
.. _env-vars-yaml:
=============================================
Environment Variable Settings (env_vars.yaml)
=============================================
Spack allows you to include shell environment variable modifications
for a spack environment by including an ``env_vars.yaml``. Environment
varaibles can be modified by setting, unsetting, appending, and prepending
variables in the shell environment.
The changes to the shell environment will take effect when the spack
environment is activated.
for example,
.. code-block:: yaml
env_vars:
set:
ENVAR_TO_SET_IN_ENV_LOAD: "FOO"
unset:
ENVAR_TO_UNSET_IN_ENV_LOAD:
prepend_path:
PATH_LIST: "path/to/prepend"
append_path:
PATH_LIST: "path/to/append"
remove_path:
PATH_LIST: "path/to/remove"

View File

@@ -539,9 +539,7 @@ from the command line.
You can also include an environment directly in the ``spack.yaml`` file. It You can also include an environment directly in the ``spack.yaml`` file. It
involves adding the ``include_concrete`` heading in the yaml followed by the involves adding the ``include_concrete`` heading in the yaml followed by the
absolute path to the independent environments. Note, that you may use Spack absolute path to the independent environments.
config variables such as ``$spack`` or environment variables as long as the
expression expands to an absolute path.
.. code-block:: yaml .. code-block:: yaml
@@ -551,7 +549,7 @@ expression expands to an absolute path.
unify: true unify: true
include_concrete: include_concrete:
- /absolute/path/to/environment1 - /absolute/path/to/environment1
- $spack/../path/to/environment2 - /absolute/path/to/environment2
Once the ``spack.yaml`` has been updated you must concretize the environment to Once the ``spack.yaml`` has been updated you must concretize the environment to
@@ -669,11 +667,11 @@ a ``packages.yaml`` file) could contain:
# ... # ...
packages: packages:
all: all:
providers: compiler: [intel]
mpi: [openmpi]
# ... # ...
This configuration sets the default mpi provider to be openmpi. This configuration sets the default compiler for all packages to
``intel``.
^^^^^^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^^^^^^
Included configurations Included configurations
@@ -688,8 +686,7 @@ the environment.
spack: spack:
include: include:
- environment/relative/path/to/config.yaml - environment/relative/path/to/config.yaml
- path: https://github.com/path/to/raw/config/compilers.yaml - https://github.com/path/to/raw/config/packages.yaml
sha256: 26e871804a92cd07bb3d611b31b4156ae93d35b6a6d6e0ef3a67871fcb1d258b
- /absolute/path/to/packages.yaml - /absolute/path/to/packages.yaml
- path: /path/to/$os/$target/environment - path: /path/to/$os/$target/environment
optional: true optional: true
@@ -703,11 +700,11 @@ with the ``optional`` clause and conditional with the ``when`` clause. (See
Files are listed using paths to individual files or directories containing them. Files are listed using paths to individual files or directories containing them.
Path entries may be absolute or relative to the environment or specified as Path entries may be absolute or relative to the environment or specified as
URLs. URLs to individual files must link to the **raw** form of the file's URLs. URLs to individual files need link to the **raw** form of the file's
contents (e.g., `GitHub contents (e.g., `GitHub
<https://docs.github.com/en/repositories/working-with-files/using-files/viewing-and-understanding-files#viewing-or-copying-the-raw-file-content>`_ <https://docs.github.com/en/repositories/working-with-files/using-files/viewing-and-understanding-files#viewing-or-copying-the-raw-file-content>`_
or `GitLab or `GitLab
<https://docs.gitlab.com/ee/api/repository_files.html#get-raw-file-from-repository>`_) **and** include a valid sha256 for the file. <https://docs.gitlab.com/ee/api/repository_files.html#get-raw-file-from-repository>`_).
Only the ``file``, ``ftp``, ``http`` and ``https`` protocols (or schemes) are Only the ``file``, ``ftp``, ``http`` and ``https`` protocols (or schemes) are
supported. Spack-specific, environment and user path variables can be used. supported. Spack-specific, environment and user path variables can be used.
(See :ref:`config-file-variables` for more information.) (See :ref:`config-file-variables` for more information.)
@@ -1002,28 +999,6 @@ For example, the following environment has three root packages:
This allows for a much-needed reduction in redundancy between packages This allows for a much-needed reduction in redundancy between packages
and constraints. and constraints.
-------------------------------
Modifying Environment Variables
-------------------------------
Spack Environments can modify the active shell's environment variables when activated. The environment can be
configured to set, unset, prepend, or append using ``env_vars`` configuration in the ``spack.yaml`` or through config scopes
file:
.. code-block:: yaml
spack:
env_vars:
set:
ENVAR_TO_SET_IN_ENV_LOAD: "FOO"
unset:
ENVAR_TO_UNSET_IN_ENV_LOAD:
prepend_path:
PATH_LIST: "path/to/prepend"
append_path:
PATH_LIST: "path/to/append"
remove_path:
PATH_LIST: "path/to/remove"
----------------- -----------------
Environment Views Environment Views

View File

@@ -0,0 +1,161 @@
# Spack environment (spack.yaml) driving a GitLab CI build pipeline.
# Reconstructed with proper block indentation (the flat form is invalid YAML:
# mapping keys and sequence items cannot share one indentation level).
spack:
  # Named spec lists that can be referenced as $name in the specs matrix below.
  definitions:
  - compiler-pkgs:
    - 'llvm+clang@6.0.1 os=centos7'
    - 'gcc@6.5.0 os=centos7'
    - 'llvm+clang@6.0.1 os=ubuntu18.04'
    - 'gcc@6.5.0 os=ubuntu18.04'
  - pkgs:
    - readline@7.0
    # - xsdk@0.4.0
  - compilers:
    - '%gcc@5.5.0'
    - '%gcc@6.5.0'
    - '%gcc@7.3.0'
    - '%clang@6.0.0'
    - '%clang@6.0.1'
  - oses:
    - os=ubuntu18.04
    - os=centos7

  # Cartesian product of packages x compilers x operating systems, minus the
  # excluded combinations (compilers not available on that OS).
  specs:
  - matrix:
    - [$pkgs]
    - [$compilers]
    - [$oses]
    exclude:
    - '%gcc@7.3.0 os=centos7'
    - '%gcc@5.5.0 os=ubuntu18.04'

  mirrors:
    cloud_gitlab: https://mirror.spack.io

  compilers:
  # The .gitlab-ci.yml for this project picks a Docker container which does
  # not have any compilers pre-built and ready to use, so we need to fake the
  # existence of those here.
  - compiler:
      operating_system: centos7
      modules: []
      paths:
        cc: /not/used
        cxx: /not/used
        f77: /not/used
        fc: /not/used
      spec: gcc@5.5.0
      target: x86_64
  - compiler:
      operating_system: centos7
      modules: []
      paths:
        cc: /not/used
        cxx: /not/used
        f77: /not/used
        fc: /not/used
      spec: gcc@6.5.0
      target: x86_64
  - compiler:
      operating_system: centos7
      modules: []
      paths:
        cc: /not/used
        cxx: /not/used
        f77: /not/used
        fc: /not/used
      spec: clang@6.0.0
      target: x86_64
  - compiler:
      operating_system: centos7
      modules: []
      paths:
        cc: /not/used
        cxx: /not/used
        f77: /not/used
        fc: /not/used
      spec: clang@6.0.1
      target: x86_64
  - compiler:
      operating_system: ubuntu18.04
      modules: []
      paths:
        cc: /not/used
        cxx: /not/used
        f77: /not/used
        fc: /not/used
      spec: clang@6.0.0
      target: x86_64
  - compiler:
      operating_system: ubuntu18.04
      modules: []
      paths:
        cc: /not/used
        cxx: /not/used
        f77: /not/used
        fc: /not/used
      spec: clang@6.0.1
      target: x86_64
  - compiler:
      operating_system: ubuntu18.04
      modules: []
      paths:
        cc: /not/used
        cxx: /not/used
        f77: /not/used
        fc: /not/used
      spec: gcc@6.5.0
      target: x86_64
  - compiler:
      operating_system: ubuntu18.04
      modules: []
      paths:
        cc: /not/used
        cxx: /not/used
        f77: /not/used
        fc: /not/used
      spec: gcc@7.3.0
      target: x86_64

  gitlab-ci:
    # Build the compiler-pkgs list first, so the generated jobs can bootstrap
    # compilers for the rest of the stack.
    bootstrap:
    - name: compiler-pkgs
      compiler-agnostic: true
    mappings:
    - # spack-cloud-ubuntu
      match:
      # these are specs, if *any* match the spec under consideration, this
      # 'mapping' will be used to generate the CI job
      - os=ubuntu18.04
      runner-attributes:
        # 'tags' and 'image' go directly onto the job, 'variables' will
        # be added to what we already necessarily create for the job as
        # a part of the CI workflow
        tags:
        - spack-k8s
        image:
          name: scottwittenburg/spack_builder_ubuntu_18.04
          entrypoint: [""]
    - # spack-cloud-centos
      match:
      # these are specs, if *any* match the spec under consideration, this
      # 'mapping' will be used to generate the CI job
      - 'os=centos7'
      runner-attributes:
        tags:
        - spack-k8s
        image:
          name: scottwittenburg/spack_builder_centos_7
          entrypoint: [""]

  # Report build results to this CDash instance.
  cdash:
    build-group: Release Testing
    url: http://cdash
    project: Spack Testing
    site: Spack Docker-Compose Workflow

  # Remaining sections intentionally empty for this CI environment.
  repos: []
  upstreams: {}
  modules:
    enable: []
  packages: {}
  config: {}

View File

@@ -131,7 +131,7 @@ creates a simple python file:
It doesn't take much python coding to get from there to a working It doesn't take much python coding to get from there to a working
package: package:
.. literalinclude:: _spack_root/var/spack/repos/spack_repo/builtin/packages/libelf/package.py .. literalinclude:: _spack_root/var/spack/repos/builtin/packages/libelf/package.py
:lines: 5- :lines: 5-
Spack also provides wrapper functions around common commands like Spack also provides wrapper functions around common commands like

View File

@@ -20,7 +20,7 @@ be present on the machine where Spack is run:
:header-rows: 1 :header-rows: 1
These requirements can be easily installed on most modern Linux systems; These requirements can be easily installed on most modern Linux systems;
on macOS, the Command Line Tools package is required, and a full Xcode suite on macOS, the Command Line Tools package is required, and a full XCode suite
may be necessary for some packages such as Qt and apple-gl. Spack is designed may be necessary for some packages such as Qt and apple-gl. Spack is designed
to run on HPC platforms like Cray. Not all packages should be expected to run on HPC platforms like Cray. Not all packages should be expected
to work on all platforms. to work on all platforms.

View File

@@ -23,6 +23,7 @@ components for use by dependent packages:
packages: packages:
all: all:
compiler: [rocmcc@=5.3.0]
variants: amdgpu_target=gfx90a variants: amdgpu_target=gfx90a
hip: hip:
buildable: false buildable: false
@@ -69,15 +70,16 @@ This is in combination with the following compiler definition:
.. code-block:: yaml .. code-block:: yaml
packages: compilers:
llvm-amdgpu: - compiler:
externals: spec: rocmcc@=5.3.0
- spec: llvm-amdgpu@=5.3.0 paths:
prefix: /opt/rocm-5.3.0 cc: /opt/rocm-5.3.0/bin/amdclang
compilers: cxx: /opt/rocm-5.3.0/bin/amdclang++
c: /opt/rocm-5.3.0/bin/amdclang f77: null
cxx: /opt/rocm-5.3.0/bin/amdclang++ fc: /opt/rocm-5.3.0/bin/amdflang
fortran: null operating_system: rhel8
target: x86_64
This includes the following considerations: This includes the following considerations:

View File

@@ -43,20 +43,6 @@ or specified as URLs. Only the ``file``, ``ftp``, ``http`` and ``https`` protoco
schemes) are supported. Spack-specific, environment and user path variables schemes) are supported. Spack-specific, environment and user path variables
can be used. (See :ref:`config-file-variables` for more information.) can be used. (See :ref:`config-file-variables` for more information.)
A ``sha256`` is required for remote file URLs and must be specified as follows:
.. code-block:: yaml
include:
- path: https://github.com/path/to/raw/config/compilers.yaml
sha256: 26e871804a92cd07bb3d611b31b4156ae93d35b6a6d6e0ef3a67871fcb1d258b
Additionally, remote file URLs must link to the **raw** form of the file's
contents (e.g., `GitHub
<https://docs.github.com/en/repositories/working-with-files/using-files/viewing-and-understanding-files#viewing-or-copying-the-raw-file-content>`_
or `GitLab
<https://docs.gitlab.com/ee/api/repository_files.html#get-raw-file-from-repository>`_).
.. warning:: .. warning::
Recursive includes are not currently processed in a breadth-first manner Recursive includes are not currently processed in a breadth-first manner

View File

@@ -75,7 +75,6 @@ or refer to the full manual below.
packages_yaml packages_yaml
build_settings build_settings
environments environments
env_vars_yaml
containers containers
mirrors mirrors
module_file_support module_file_support
@@ -103,7 +102,6 @@ or refer to the full manual below.
:caption: API Docs :caption: API Docs
Spack API Docs <spack> Spack API Docs <spack>
Spack Builtin Repo <spack_repo>
LLNL API Docs <llnl> LLNL API Docs <llnl>
================== ==================

View File

@@ -8,7 +8,7 @@
Modules (modules.yaml) Modules (modules.yaml)
====================== ======================
The use of module systems to manage user environments in a controlled way The use of module systems to manage user environment in a controlled way
is a common practice at HPC centers that is sometimes embraced also by is a common practice at HPC centers that is sometimes embraced also by
individual programmers on their development machines. To support this individual programmers on their development machines. To support this
common practice Spack integrates with `Environment Modules common practice Spack integrates with `Environment Modules
@@ -128,7 +128,7 @@ depend on the spec:
.. code-block:: python .. code-block:: python
def setup_run_environment(self, env: EnvironmentModifications) -> None: def setup_run_environment(self, env):
if self.spec.satisfies("+foo"): if self.spec.satisfies("+foo"):
env.set("FOO", "bar") env.set("FOO", "bar")
@@ -142,7 +142,7 @@ For example, a simplified version of the ``python`` package could look like this
.. code-block:: python .. code-block:: python
def setup_dependent_run_environment(self, env: EnvironmentModifications, dependent_spec: Spec) -> None: def setup_dependent_run_environment(self, env, dependent_spec):
if dependent_spec.package.extends(self.spec): if dependent_spec.package.extends(self.spec):
env.prepend_path("PYTHONPATH", dependent_spec.prefix.lib.python) env.prepend_path("PYTHONPATH", dependent_spec.prefix.lib.python)
@@ -490,7 +490,7 @@ that are already in the Lmod hierarchy.
.. note:: .. note::
Tcl and Lua modules also allow for explicit conflicts between module files. Tcl and Lua modules also allow for explicit conflicts between modulefiles.
.. code-block:: yaml .. code-block:: yaml
@@ -513,7 +513,7 @@ that are already in the Lmod hierarchy.
:meth:`~spack.spec.Spec.format` method. :meth:`~spack.spec.Spec.format` method.
For Lmod and Environment Modules versions prior 4.2, it is important to For Lmod and Environment Modules versions prior 4.2, it is important to
express the conflict on both module files conflicting with each other. express the conflict on both modulefiles conflicting with each other.
.. note:: .. note::
@@ -550,7 +550,7 @@ that are already in the Lmod hierarchy.
.. warning:: .. warning::
Consistency of Core packages Consistency of Core packages
The user is responsible for maintaining consistency among core packages, as ``core_specs`` The user is responsible for maintining consistency among core packages, as ``core_specs``
bypasses the hierarchy that allows Lmod to safely switch between coherent software stacks. bypasses the hierarchy that allows Lmod to safely switch between coherent software stacks.
.. warning:: .. warning::

View File

@@ -557,13 +557,14 @@ preferences.
FAQ: :ref:`Why does Spack pick particular versions and variants? <faq-concretizer-precedence>` FAQ: :ref:`Why does Spack pick particular versions and variants? <faq-concretizer-precedence>`
The ``target`` and ``providers`` preferences Most package preferences (``compilers``, ``target`` and ``providers``)
can only be set globally under the ``all`` section of ``packages.yaml``: can only be set globally under the ``all`` section of ``packages.yaml``:
.. code-block:: yaml .. code-block:: yaml
packages: packages:
all: all:
compiler: [gcc@12.2.0, clang@12:, oneapi@2023:]
target: [x86_64_v3] target: [x86_64_v3]
providers: providers:
mpi: [mvapich2, mpich, openmpi] mpi: [mvapich2, mpich, openmpi]

View File

@@ -69,7 +69,7 @@ An example for ``CMake`` is, for instance:
The predefined steps for each build system are called "phases". The predefined steps for each build system are called "phases".
In general, the name and order in which the phases will be executed can be In general, the name and order in which the phases will be executed can be
obtained by either reading the API docs at :py:mod:`~.spack_repo.builtin.build_systems`, or obtained by either reading the API docs at :py:mod:`~.spack.build_systems`, or
using the ``spack info`` command: using the ``spack info`` command:
.. code-block:: console .. code-block:: console
@@ -158,7 +158,7 @@ builder class explicitly. Using the same example as above, this reads:
url_fmt = "https://github.com/uclouvain/openjpeg/archive/version.{0}.tar.gz" url_fmt = "https://github.com/uclouvain/openjpeg/archive/version.{0}.tar.gz"
return url_fmt.format(version) return url_fmt.format(version)
class CMakeBuilder(spack_repo.builtin.build_systems.cmake.CMakeBuilder): class CMakeBuilder(spack.build_systems.cmake.CMakeBuilder):
def cmake_args(self): def cmake_args(self):
args = [ args = [
self.define_from_variant("BUILD_CODEC", "codec"), self.define_from_variant("BUILD_CODEC", "codec"),
@@ -179,7 +179,7 @@ Spack can be found at :ref:`package_class_structure`.
.. code-block:: python .. code-block:: python
class Foo(CMakePackage): class Foo(CmakePackage):
def cmake_args(self): def cmake_args(self):
... ...
@@ -256,7 +256,7 @@ for details):
# #
# See the Spack documentation for more information on packaging. # See the Spack documentation for more information on packaging.
# ---------------------------------------------------------------------------- # ----------------------------------------------------------------------------
import spack_repo.builtin.build_systems.autotools import spack.build_systems.autotools
from spack.package import * from spack.package import *
@@ -369,9 +369,9 @@ If you have a collection of software expected to work well together with
no source code of its own, you can create a :ref:`BundlePackage <bundlepackage>`. no source code of its own, you can create a :ref:`BundlePackage <bundlepackage>`.
Examples where bundle packages can be useful include defining suites of Examples where bundle packages can be useful include defining suites of
applications (e.g., `EcpProxyApps applications (e.g., `EcpProxyApps
<https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/ecp_proxy_apps/package.py>`_), commonly used libraries <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/ecp-proxy-apps/package.py>`_), commonly used libraries
(e.g., `AmdAocl <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/amd_aocl/package.py>`_), (e.g., `AmdAocl <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/amd-aocl/package.py>`_),
and software development kits (e.g., `EcpDataVisSdk <https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/ecp_data_vis_sdk/package.py>`_). and software development kits (e.g., `EcpDataVisSdk <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/ecp-data-vis-sdk/package.py>`_).
These versioned packages primarily consist of dependencies on the associated These versioned packages primarily consist of dependencies on the associated
software packages. They can include :ref:`variants <variants>` to ensure software packages. They can include :ref:`variants <variants>` to ensure
@@ -443,7 +443,7 @@ lives in:
.. code-block:: console .. code-block:: console
$ spack location -p gmp $ spack location -p gmp
${SPACK_ROOT}/var/spack/repos/spack_repo/builtin/packages/gmp/package.py ${SPACK_ROOT}/var/spack/repos/builtin/packages/gmp/package.py
but ``spack edit`` provides a much simpler shortcut and saves you the but ``spack edit`` provides a much simpler shortcut and saves you the
trouble of typing the full path. trouble of typing the full path.
@@ -457,19 +457,19 @@ live in Spack's directory structure. In general, :ref:`cmd-spack-create`
handles creating package files for you, so you can skip most of the handles creating package files for you, so you can skip most of the
details here. details here.
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
``var/spack/repos/spack_repo/builtin/packages`` ``var/spack/repos/builtin/packages``
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
A Spack installation directory is structured like a standard UNIX A Spack installation directory is structured like a standard UNIX
install prefix (``bin``, ``lib``, ``include``, ``var``, ``opt``, install prefix (``bin``, ``lib``, ``include``, ``var``, ``opt``,
etc.). Most of the code for Spack lives in ``$SPACK_ROOT/lib/spack``. etc.). Most of the code for Spack lives in ``$SPACK_ROOT/lib/spack``.
Packages themselves live in ``$SPACK_ROOT/var/spack/repos/spack_repo/builtin/packages``. Packages themselves live in ``$SPACK_ROOT/var/spack/repos/builtin/packages``.
If you ``cd`` to that directory, you will see directories for each If you ``cd`` to that directory, you will see directories for each
package: package:
.. command-output:: cd $SPACK_ROOT/var/spack/repos/spack_repo/builtin/packages && ls .. command-output:: cd $SPACK_ROOT/var/spack/repos/builtin/packages && ls
:shell: :shell:
:ellipsis: 10 :ellipsis: 10
@@ -479,7 +479,7 @@ package lives in:
.. code-block:: none .. code-block:: none
$SPACK_ROOT/var/spack/repos/spack_repo/builtin/packages/libelf/package.py $SPACK_ROOT/var/spack/repos/builtin/packages/libelf/package.py
Alongside the ``package.py`` file, a package may contain extra Alongside the ``package.py`` file, a package may contain extra
directories or files (like patches) that it needs to build. directories or files (like patches) that it needs to build.
@@ -492,12 +492,12 @@ Packages are named after the directory containing ``package.py``. So,
``libelf``'s ``package.py`` lives in a directory called ``libelf``. ``libelf``'s ``package.py`` lives in a directory called ``libelf``.
The ``package.py`` file defines a class called ``Libelf``, which The ``package.py`` file defines a class called ``Libelf``, which
extends Spack's ``Package`` class. For example, here is extends Spack's ``Package`` class. For example, here is
``$SPACK_ROOT/var/spack/repos/spack_repo/builtin/packages/libelf/package.py``: ``$SPACK_ROOT/var/spack/repos/builtin/packages/libelf/package.py``:
.. code-block:: python .. code-block:: python
:linenos: :linenos:
from spack.package import * from spack import *
class Libelf(Package): class Libelf(Package):
""" ... description ... """ """ ... description ... """
@@ -520,7 +520,7 @@ these:
$ spack install libelf@0.8.13 $ spack install libelf@0.8.13
Spack sees the package name in the spec and looks for Spack sees the package name in the spec and looks for
``libelf/package.py`` in ``var/spack/repos/spack_repo/builtin/packages``. ``libelf/package.py`` in ``var/spack/repos/builtin/packages``.
Likewise, if you run ``spack install py-numpy``, Spack looks for Likewise, if you run ``spack install py-numpy``, Spack looks for
``py-numpy/package.py``. ``py-numpy/package.py``.
@@ -686,7 +686,7 @@ https://www.open-mpi.org/software/ompi/v2.1/downloads/openmpi-2.1.1.tar.bz2
In order to handle this, you can define a ``url_for_version()`` function In order to handle this, you can define a ``url_for_version()`` function
like so: like so:
.. literalinclude:: _spack_root/var/spack/repos/spack_repo/builtin/packages/openmpi/package.py .. literalinclude:: _spack_root/var/spack/repos/builtin/packages/openmpi/package.py
:pyobject: Openmpi.url_for_version :pyobject: Openmpi.url_for_version
With the use of this ``url_for_version()``, Spack knows to download OpenMPI ``2.1.1`` With the use of this ``url_for_version()``, Spack knows to download OpenMPI ``2.1.1``
@@ -787,7 +787,7 @@ of GNU. For that, Spack goes a step further and defines a mixin class that
takes care of all of the plumbing and requires packagers to just define a proper takes care of all of the plumbing and requires packagers to just define a proper
``gnu_mirror_path`` attribute: ``gnu_mirror_path`` attribute:
.. literalinclude:: _spack_root/var/spack/repos/spack_repo/builtin/packages/autoconf/package.py .. literalinclude:: _spack_root/var/spack/repos/builtin/packages/autoconf/package.py
:lines: 9-18 :lines: 9-18
^^^^^^^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^^^^^^^
@@ -1089,7 +1089,7 @@ You've already seen the ``homepage`` and ``url`` package attributes:
.. code-block:: python .. code-block:: python
:linenos: :linenos:
from spack.package import * from spack import *
class Mpich(Package): class Mpich(Package):
@@ -1212,7 +1212,7 @@ class-level tarball URL and VCS. For example:
version("master", branch="master") version("master", branch="master")
version("12.12.1", md5="ecd4606fa332212433c98bf950a69cc7") version("12.12.1", md5="ecd4606fa332212433c98bf950a69cc7")
version("12.10.1", md5="667333dbd7c0f031d47d7c5511fd0810") version("12.10.1", md5="667333dbd7c0f031d47d7c5511fd0810")
version("12.8.1", md5="9f37f683ee2b427b5540db8a20ed6b15") version("12.8.1", "9f37f683ee2b427b5540db8a20ed6b15")
If a package contains both a ``url`` and ``git`` class-level attribute, If a package contains both a ``url`` and ``git`` class-level attribute,
Spack decides which to use based on the arguments to the ``version()`` Spack decides which to use based on the arguments to the ``version()``
@@ -1343,7 +1343,7 @@ Submodules
version("1.0.1", tag="v1.0.1", submodules=True) version("1.0.1", tag="v1.0.1", submodules=True)
If a package needs more fine-grained control over submodules, define If a package has needs more fine-grained control over submodules, define
``submodules`` to be a callable function that takes the package instance as ``submodules`` to be a callable function that takes the package instance as
its only argument. The function should return a list of submodules to be fetched. its only argument. The function should return a list of submodules to be fetched.
@@ -1995,7 +1995,7 @@ structure like this:
.. code-block:: none .. code-block:: none
$SPACK_ROOT/var/spack/repos/spack_repo/builtin/packages/ $SPACK_ROOT/var/spack/repos/builtin/packages/
mvapich2/ mvapich2/
package.py package.py
ad_lustre_rwcontig_open_source.patch ad_lustre_rwcontig_open_source.patch
@@ -2133,7 +2133,7 @@ handles ``RPATH``:
.. _pyside-patch: .. _pyside-patch:
.. literalinclude:: _spack_root/var/spack/repos/spack_repo/builtin/packages/py_pyside/package.py .. literalinclude:: _spack_root/var/spack/repos/builtin/packages/py-pyside/package.py
:pyobject: PyPyside.patch :pyobject: PyPyside.patch
:linenos: :linenos:
@@ -2201,7 +2201,7 @@ using the ``spack resource show`` command::
$ spack resource show 3877ab54 $ spack resource show 3877ab54
3877ab548f88597ab2327a2230ee048d2d07ace1062efe81fc92e91b7f39cd00 3877ab548f88597ab2327a2230ee048d2d07ace1062efe81fc92e91b7f39cd00
path: /home/spackuser/src/spack/var/spack/repos/spack_repo/builtin/packages/m4/gnulib-pgi.patch path: /home/spackuser/src/spack/var/spack/repos/builtin/packages/m4/gnulib-pgi.patch
applies to: builtin.m4 applies to: builtin.m4
``spack resource show`` looks up downloadable resources from package ``spack resource show`` looks up downloadable resources from package
@@ -2219,7 +2219,7 @@ wonder where the extra boost patches are coming from::
^boost@1.68.0%apple-clang@9.0.0+atomic+chrono~clanglibcpp cxxstd=default +date_time~debug+exception+filesystem+graph~icu+iostreams+locale+log+math~mpi+multithreaded~numpy patches=2ab6c72d03dec6a4ae20220a9dfd5c8c572c5294252155b85c6874d97c323199,b37164268f34f7133cbc9a4066ae98fda08adf51e1172223f6a969909216870f ~pic+program_options~python+random+regex+serialization+shared+signals~singlethreaded+system~taggedlayout+test+thread+timer~versionedlayout+wave arch=darwin-highsierra-x86_64 ^boost@1.68.0%apple-clang@9.0.0+atomic+chrono~clanglibcpp cxxstd=default +date_time~debug+exception+filesystem+graph~icu+iostreams+locale+log+math~mpi+multithreaded~numpy patches=2ab6c72d03dec6a4ae20220a9dfd5c8c572c5294252155b85c6874d97c323199,b37164268f34f7133cbc9a4066ae98fda08adf51e1172223f6a969909216870f ~pic+program_options~python+random+regex+serialization+shared+signals~singlethreaded+system~taggedlayout+test+thread+timer~versionedlayout+wave arch=darwin-highsierra-x86_64
$ spack resource show b37164268 $ spack resource show b37164268
b37164268f34f7133cbc9a4066ae98fda08adf51e1172223f6a969909216870f b37164268f34f7133cbc9a4066ae98fda08adf51e1172223f6a969909216870f
path: /home/spackuser/src/spack/var/spack/repos/spack_repo/builtin/packages/dealii/boost_1.68.0.patch path: /home/spackuser/src/spack/var/spack/repos/builtin/packages/dealii/boost_1.68.0.patch
applies to: builtin.boost applies to: builtin.boost
patched by: builtin.dealii patched by: builtin.dealii
@@ -2253,15 +2253,22 @@ RPATHs in Spack are handled in one of three ways:
set in standard variables like ``CC``, ``CXX``, ``F77``, and ``FC``, set in standard variables like ``CC``, ``CXX``, ``F77``, and ``FC``,
so most build systems (autotools and many gmake systems) pick them so most build systems (autotools and many gmake systems) pick them
up and use them. up and use them.
#. CMake has its own RPATH handling, and distinguishes between build and #. CMake also respects Spack's compiler wrappers, but many CMake
install RPATHs. By default, during the build it registers RPATHs to builds have logic to overwrite RPATHs when binaries are
all libraries it links to, so that just-built executables can be run installed. Spack provides the ``std_cmake_args`` variable, which
during the build itself. Upon installation, these RPATHs are cleared, includes parameters necessary for CMake build use the right
unless the user defines the install RPATHs. When inheriting from installation RPATH. It can be used like this when ``cmake`` is
``CMakePackage``, Spack handles this automatically, and sets invoked:
``CMAKE_INSTALL_RPATH_USE_LINK_PATH`` and ``CMAKE_INSTALL_RPATH``,
so that libraries of dependencies and the package's own libraries .. code-block:: python
can be found at runtime.
class MyPackage(Package):
...
def install(self, spec, prefix):
cmake("..", *std_cmake_args)
make()
make("install")
#. If you need to modify the build to add your own RPATHs, you can #. If you need to modify the build to add your own RPATHs, you can
use the ``self.rpath`` property of your package, which will use the ``self.rpath`` property of your package, which will
return a list of all the RPATHs that Spack will use when it return a list of all the RPATHs that Spack will use when it
@@ -2308,19 +2315,31 @@ looks like this:
parallel = False parallel = False
You can also disable parallel builds only for specific make Similarly, you can disable parallel builds only for specific make
invocation: commands, as ``libdwarf`` does:
.. code-block:: python .. code-block:: python
:emphasize-lines: 5 :emphasize-lines: 9, 12
:linenos: :linenos:
class Libelf(Package): class Libelf(Package):
... ...
def install(self, spec, prefix): def install(self, spec, prefix):
configure("--prefix=" + prefix,
"--enable-shared",
"--disable-dependency-tracking",
"--disable-debug")
make()
# The mkdir commands in libelf's install can fail in parallel
make("install", parallel=False) make("install", parallel=False)
The first make will run in parallel here, but the second will not. If
you set ``parallel`` to ``False`` at the package level, then each call
to ``make()`` will be sequential by default, but packagers can call
``make(parallel=True)`` to override it.
Note that the ``--jobs`` option works out of the box for all standard Note that the ``--jobs`` option works out of the box for all standard
build systems. If you are using a non-standard build system instead, you build systems. If you are using a non-standard build system instead, you
can use the variable ``make_jobs`` to extract the number of jobs specified can use the variable ``make_jobs`` to extract the number of jobs specified
@@ -2495,7 +2514,7 @@ necessary when there are breaking changes in the dependency that the
package cannot handle. In Spack we often add forward compatibility package cannot handle. In Spack we often add forward compatibility
bounds only at the time a new, breaking version of a dependency is bounds only at the time a new, breaking version of a dependency is
released. As with backward compatibility, it is typical to see a list released. As with backward compatibility, it is typical to see a list
of forward compatibility bounds in a package file as separate lines: of forward compatibility bounds in a package file as seperate lines:
.. code-block:: python .. code-block:: python
@@ -2911,7 +2930,7 @@ this, Spack provides four different methods that can be overridden in a package:
The Qt package, for instance, uses this call: The Qt package, for instance, uses this call:
.. literalinclude:: _spack_root/var/spack/repos/spack_repo/builtin/packages/qt/package.py .. literalinclude:: _spack_root/var/spack/repos/builtin/packages/qt/package.py
:pyobject: Qt.setup_dependent_build_environment :pyobject: Qt.setup_dependent_build_environment
:linenos: :linenos:
@@ -2939,7 +2958,7 @@ variables to be used by the dependent. This is done by implementing
:meth:`setup_dependent_package <spack.package_base.PackageBase.setup_dependent_package>`. An :meth:`setup_dependent_package <spack.package_base.PackageBase.setup_dependent_package>`. An
example of this can be found in the ``Python`` package: example of this can be found in the ``Python`` package:
.. literalinclude:: _spack_root/var/spack/repos/spack_repo/builtin/packages/python/package.py .. literalinclude:: _spack_root/var/spack/repos/builtin/packages/python/package.py
:pyobject: Python.setup_dependent_package :pyobject: Python.setup_dependent_package
:linenos: :linenos:
@@ -3371,7 +3390,7 @@ the above attribute implementations:
"/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6/baz/lib/libFooBaz.so" "/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6/baz/lib/libFooBaz.so"
]) ])
# baz library directories in the baz subdirectory of the foo prefix # baz library directories in the baz subdirectory of the foo porefix
>>> spec["baz"].libs.directories >>> spec["baz"].libs.directories
[ [
"/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6/baz/lib" "/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6/baz/lib"
@@ -3685,57 +3704,60 @@ the build system. The build systems currently supported by Spack are:
+----------------------------------------------------------+----------------------------------+ +----------------------------------------------------------+----------------------------------+
| **API docs** | **Description** | | **API docs** | **Description** |
+==========================================================+==================================+ +==========================================================+==================================+
| :class:`~spack_repo.builtin.build_systems.generic` | Generic build system without any | | :class:`~spack.build_systems.generic` | Generic build system without any |
| | base implementation | | | base implementation |
+----------------------------------------------------------+----------------------------------+ +----------------------------------------------------------+----------------------------------+
| :class:`~spack_repo.builtin.build_systems.makefile` | Specialized build system for | | :class:`~spack.build_systems.makefile` | Specialized build system for |
| | software built invoking | | | software built invoking |
| | hand-written Makefiles | | | hand-written Makefiles |
+----------------------------------------------------------+----------------------------------+ +----------------------------------------------------------+----------------------------------+
| :class:`~spack_repo.builtin.build_systems.autotools` | Specialized build system for | | :class:`~spack.build_systems.autotools` | Specialized build system for |
| | software built using | | | software built using |
| | GNU Autotools | | | GNU Autotools |
+----------------------------------------------------------+----------------------------------+ +----------------------------------------------------------+----------------------------------+
| :class:`~spack_repo.builtin.build_systems.cmake` | Specialized build system for | | :class:`~spack.build_systems.cmake` | Specialized build system for |
| | software built using CMake | | | software built using CMake |
+----------------------------------------------------------+----------------------------------+ +----------------------------------------------------------+----------------------------------+
| :class:`~spack_repo.builtin.build_systems.maven` | Specialized build system for | | :class:`~spack.build_systems.maven` | Specialized build system for |
| | software built using Maven | | | software built using Maven |
+----------------------------------------------------------+----------------------------------+ +----------------------------------------------------------+----------------------------------+
| :class:`~spack_repo.builtin.build_systems.meson` | Specialized build system for | | :class:`~spack.build_systems.meson` | Specialized build system for |
| | software built using Meson | | | software built using Meson |
+----------------------------------------------------------+----------------------------------+ +----------------------------------------------------------+----------------------------------+
| :class:`~spack_repo.builtin.build_systems.nmake` | Specialized build system for | | :class:`~spack.build_systems.nmake` | Specialized build system for |
| | software built using NMake | | | software built using NMake |
+----------------------------------------------------------+----------------------------------+ +----------------------------------------------------------+----------------------------------+
| :class:`~spack_repo.builtin.build_systems.qmake` | Specialized build system for | | :class:`~spack.build_systems.qmake` | Specialized build system for |
| | software built using QMake | | | software built using QMake |
+----------------------------------------------------------+----------------------------------+ +----------------------------------------------------------+----------------------------------+
| :class:`~spack_repo.builtin.build_systems.scons` | Specialized build system for | | :class:`~spack.build_systems.scons` | Specialized build system for |
| | software built using SCons | | | software built using SCons |
+----------------------------------------------------------+----------------------------------+ +----------------------------------------------------------+----------------------------------+
| :class:`~spack_repo.builtin.build_systems.waf` | Specialized build system for | | :class:`~spack.build_systems.waf` | Specialized build system for |
| | software built using Waf | | | software built using Waf |
+----------------------------------------------------------+----------------------------------+ +----------------------------------------------------------+----------------------------------+
| :class:`~spack_repo.builtin.build_systems.r` | Specialized build system for | | :class:`~spack.build_systems.r` | Specialized build system for |
| | R extensions | | | R extensions |
+----------------------------------------------------------+----------------------------------+ +----------------------------------------------------------+----------------------------------+
| :class:`~spack_repo.builtin.build_systems.octave` | Specialized build system for | | :class:`~spack.build_systems.octave` | Specialized build system for |
| | Octave packages | | | Octave packages |
+----------------------------------------------------------+----------------------------------+ +----------------------------------------------------------+----------------------------------+
| :class:`~spack_repo.builtin.build_systems.python` | Specialized build system for | | :class:`~spack.build_systems.python` | Specialized build system for |
| | Python extensions | | | Python extensions |
+----------------------------------------------------------+----------------------------------+ +----------------------------------------------------------+----------------------------------+
| :class:`~spack_repo.builtin.build_systems.perl` | Specialized build system for | | :class:`~spack.build_systems.perl` | Specialized build system for |
| | Perl extensions | | | Perl extensions |
+----------------------------------------------------------+----------------------------------+ +----------------------------------------------------------+----------------------------------+
| :class:`~spack_repo.builtin.build_systems.ruby` | Specialized build system for | | :class:`~spack.build_systems.ruby` | Specialized build system for |
| | Ruby extensions | | | Ruby extensions |
+----------------------------------------------------------+----------------------------------+ +----------------------------------------------------------+----------------------------------+
| :class:`~spack_repo.builtin.build_systems.oneapi` | Specialized build system for | | :class:`~spack.build_systems.intel` | Specialized build system for |
| | licensed Intel software |
+----------------------------------------------------------+----------------------------------+
| :class:`~spack.build_systems.oneapi` | Specialized build system for |
| | Intel oneAPI software | | | Intel oneAPI software |
+----------------------------------------------------------+----------------------------------+ +----------------------------------------------------------+----------------------------------+
| :class:`~spack_repo.builtin.build_systems.aspell_dict` | Specialized build system for | | :class:`~spack.build_systems.aspell_dict` | Specialized build system for |
| | Aspell dictionaries | | | Aspell dictionaries |
+----------------------------------------------------------+----------------------------------+ +----------------------------------------------------------+----------------------------------+
@@ -3747,7 +3769,7 @@ the build system. The build systems currently supported by Spack are:
rare cases where manual intervention is needed we need to stress that a rare cases where manual intervention is needed we need to stress that a
package base class depends on the *build system* being used, not the language of the package. package base class depends on the *build system* being used, not the language of the package.
For example, a Python extension installed with CMake would ``extends("python")`` and For example, a Python extension installed with CMake would ``extends("python")`` and
subclass from :class:`~spack_repo.builtin.build_systems.cmake.CMakePackage`. subclass from :class:`~spack.build_systems.cmake.CMakePackage`.
^^^^^^^^^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^^^^^^^^^
Overriding builder methods Overriding builder methods
@@ -3755,7 +3777,7 @@ Overriding builder methods
Build-system "phases" have default implementations that fit most of the common cases: Build-system "phases" have default implementations that fit most of the common cases:
.. literalinclude:: _spack_root/var/spack/repos/spack_repo/builtin/build_systems/autotools.py .. literalinclude:: _spack_root/lib/spack/spack/build_systems/autotools.py
:pyobject: AutotoolsBuilder.configure :pyobject: AutotoolsBuilder.configure
:linenos: :linenos:
@@ -3763,13 +3785,13 @@ It is usually sufficient for a packager to override a few
build system specific helper methods or attributes to provide, for instance, build system specific helper methods or attributes to provide, for instance,
configure arguments: configure arguments:
.. literalinclude:: _spack_root/var/spack/repos/spack_repo/builtin/packages/m4/package.py .. literalinclude:: _spack_root/var/spack/repos/builtin/packages/m4/package.py
:pyobject: M4.configure_args :pyobject: M4.configure_args
:linenos: :linenos:
Each specific build system has a list of attributes and methods that can be overridden to Each specific build system has a list of attributes and methods that can be overridden to
fine-tune the installation of a package without overriding an entire phase. To fine-tune the installation of a package without overriding an entire phase. To
have more information on them the place to go is the API docs of the :py:mod:`~.spack_repo.builtin.build_systems` have more information on them the place to go is the API docs of the :py:mod:`~.spack.build_systems`
module. module.
^^^^^^^^^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -3811,7 +3833,7 @@ If the ``package.py`` has build instructions in a separate
.. code-block:: python .. code-block:: python
class CMakeBuilder(spack_repo.builtin.build_systems.cmake.CMakeBuilder): class CMakeBuilder(spack.build_systems.cmake.CMakeBuilder):
def install(self, pkg, spec, prefix): def install(self, pkg, spec, prefix):
... ...
@@ -3824,32 +3846,31 @@ Mixin base classes
Besides build systems, there are other cases where common metadata and behavior can be extracted Besides build systems, there are other cases where common metadata and behavior can be extracted
and reused by many packages. For instance, packages that depend on ``Cuda`` or ``Rocm``, share and reused by many packages. For instance, packages that depend on ``Cuda`` or ``Rocm``, share
common dependencies and constraints. To factor these attributes into a single place, Spack provides common dependencies and constraints. To factor these attributes into a single place, Spack provides
a few mixin classes in the ``spack_repo.builtin.build_systems`` module: a few mixin classes in the ``spack.build_systems`` module:
+----------------------------------------------------------------------------+----------------------------------+ +---------------------------------------------------------------+----------------------------------+
| **API docs** | **Description** | | **API docs** | **Description** |
+============================================================================+==================================+ +===============================================================+==================================+
| :class:`~spack_repo.builtin.build_systems.cuda.CudaPackage` | A helper class for packages that | | :class:`~spack.build_systems.cuda.CudaPackage` | A helper class for packages that |
| | use CUDA | | | use CUDA |
+----------------------------------------------------------------------------+----------------------------------+ +---------------------------------------------------------------+----------------------------------+
| :class:`~spack_repo.builtin.build_systems.rocm.ROCmPackage` | A helper class for packages that | | :class:`~spack.build_systems.rocm.ROCmPackage` | A helper class for packages that |
| | use ROCm | | | use ROCm |
+----------------------------------------------------------------------------+----------------------------------+ +---------------------------------------------------------------+----------------------------------+
| :class:`~spack_repo.builtin.build_systems.gnu.GNUMirrorPackage` | A helper class for GNU packages | | :class:`~spack.build_systems.gnu.GNUMirrorPackage` | A helper class for GNU packages |
| | | +---------------------------------------------------------------+----------------------------------+
+----------------------------------------------------------------------------+----------------------------------+ | :class:`~spack.build_systems.python.PythonExtension` | A helper class for Python |
| :class:`~spack_repo.builtin.build_systems.python.PythonExtension` | A helper class for Python | | | extensions |
| | extensions | +---------------------------------------------------------------+----------------------------------+
+----------------------------------------------------------------------------+----------------------------------+ | :class:`~spack.build_systems.sourceforge.SourceforgePackage` | A helper class for packages |
| :class:`~spack_repo.builtin.build_systems.sourceforge.SourceforgePackage` | A helper class for packages | | | from sourceforge.org |
| | from sourceforge.org | +---------------------------------------------------------------+----------------------------------+
+----------------------------------------------------------------------------+----------------------------------+ | :class:`~spack.build_systems.sourceware.SourcewarePackage` | A helper class for packages |
| :class:`~spack_repo.builtin.build_systems.sourceware.SourcewarePackage` | A helper class for packages | | | from sourceware.org |
| | from sourceware.org | +---------------------------------------------------------------+----------------------------------+
+----------------------------------------------------------------------------+----------------------------------+ | :class:`~spack.build_systems.xorg.XorgPackage` | A helper class for x.org |
| :class:`~spack_repo.builtin.build_systems.xorg.XorgPackage` | A helper class for x.org | | | packages |
| | packages | +---------------------------------------------------------------+----------------------------------+
+----------------------------------------------------------------------------+----------------------------------+
These classes should be used by adding them to the inheritance tree of the package that needs them, These classes should be used by adding them to the inheritance tree of the package that needs them,
for instance: for instance:
@@ -3893,13 +3914,13 @@ Additional build instructions are split into separate builder classes:
.. code-block:: python .. code-block:: python
class CMakeBuilder(spack_repo.builtin.build_systems.cmake.CMakeBuilder): class CMakeBuilder(spack.build_systems.cmake.CMakeBuilder):
def cmake_args(self): def cmake_args(self):
return [ return [
self.define_from_variant("MY_FEATURE", "my_feature") self.define_from_variant("MY_FEATURE", "my_feature")
] ]
class AutotoolsBuilder(spack_repo.builtin.build_systems.autotools.AutotoolsBuilder): class AutotoolsBuilder(spack.build_systems.autotools.AutotoolsBuilder):
def configure_args(self): def configure_args(self):
return self.with_or_without("my-feature", variant="my_feature") return self.with_or_without("my-feature", variant="my_feature")
@@ -4089,7 +4110,7 @@ Shell command functions
Recall the install method from ``libelf``: Recall the install method from ``libelf``:
.. literalinclude:: _spack_root/var/spack/repos/spack_repo/builtin/packages/libelf/package.py .. literalinclude:: _spack_root/var/spack/repos/builtin/packages/libelf/package.py
:pyobject: Libelf.install :pyobject: Libelf.install
:linenos: :linenos:
@@ -4880,7 +4901,7 @@ the one passed to install, only the MPI implementations all set some
additional properties on it to help you out. E.g., in openmpi, you'll additional properties on it to help you out. E.g., in openmpi, you'll
find this: find this:
.. literalinclude:: _spack_root/var/spack/repos/spack_repo/builtin/packages/openmpi/package.py .. literalinclude:: _spack_root/var/spack/repos/builtin/packages/openmpi/package.py
:pyobject: Openmpi.setup_dependent_package :pyobject: Openmpi.setup_dependent_package
That code allows the ``openmpi`` package to associate an ``mpicc`` property That code allows the ``openmpi`` package to associate an ``mpicc`` property
@@ -5728,7 +5749,7 @@ running each executable, ``foo`` and ``bar``, as independent test parts.
.. note:: .. note::
The method name ``copy_test_files`` here is for illustration purposes. The method name ``copy_test_files`` here is for illustration purposes.
You are free to use a name that is better suited to your package. You are free to use a name that is more suited to your package.
The key to copying files for stand-alone testing at build time is use The key to copying files for stand-alone testing at build time is use
of the ``run_after`` directive, which ensures the associated files are of the ``run_after`` directive, which ensures the associated files are
@@ -5980,16 +6001,16 @@ with those implemented in the package itself.
* - Parent/Provider Package * - Parent/Provider Package
- Stand-alone Tests - Stand-alone Tests
* - `C * - `C
<https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/c>`_ <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/c>`_
- Compiles ``hello.c`` and runs it - Compiles ``hello.c`` and runs it
* - `Cxx * - `Cxx
<https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/cxx>`_ <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/cxx>`_
- Compiles and runs several ``hello`` programs - Compiles and runs several ``hello`` programs
* - `Fortran * - `Fortran
<https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/fortran>`_ <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/fortran>`_
- Compiles and runs ``hello`` programs (``F`` and ``f90``) - Compiles and runs ``hello`` programs (``F`` and ``f90``)
* - `Mpi * - `Mpi
<https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/mpi>`_ <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/mpi>`_
- Compiles and runs ``mpi_hello`` (``c``, ``fortran``) - Compiles and runs ``mpi_hello`` (``c``, ``fortran``)
* - :ref:`PythonPackage <pythonpackage>` * - :ref:`PythonPackage <pythonpackage>`
- Imports modules listed in the ``self.import_modules`` property with defaults derived from the tarball - Imports modules listed in the ``self.import_modules`` property with defaults derived from the tarball
@@ -6010,7 +6031,7 @@ maintainers provide additional stand-alone tests customized to the package.
One example of a package that adds its own stand-alone tests to those One example of a package that adds its own stand-alone tests to those
"inherited" by the virtual package it provides an implementation for is "inherited" by the virtual package it provides an implementation for is
the `Openmpi package the `Openmpi package
<https://github.com/spack/spack/blob/develop/var/spack/repos/spack_repo/builtin/packages/openmpi/package.py>`_. <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/openmpi/package.py>`_.
Below are snippets from running and viewing the stand-alone test results Below are snippets from running and viewing the stand-alone test results
for ``openmpi``: for ``openmpi``:
@@ -6162,7 +6183,7 @@ running:
.. code-block:: python .. code-block:: python
from spack.package import * from spack import *
This is already part of the boilerplate for packages created with This is already part of the boilerplate for packages created with
``spack create``. ``spack create``.
@@ -7237,7 +7258,7 @@ which are not, there is the `checked_by` parameter in the license directive:
license("<license>", when="<when>", checked_by="<github username>") license("<license>", when="<when>", checked_by="<github username>")
When you have validated a package license, either when doing so explicitly or When you have validated a github license, either when doing so explicitly or
as part of packaging a new package, please set the `checked_by` parameter as part of packaging a new package, please set the `checked_by` parameter
to your Github username to signal that the license has been manually to your Github username to signal that the license has been manually
verified. verified.

View File

@@ -214,7 +214,7 @@ package versions, simply run the following commands:
Running ``spack mark -i --all`` tells Spack to mark all of the existing Running ``spack mark -i --all`` tells Spack to mark all of the existing
packages within an environment as "implicitly" installed. This tells packages within an environment as "implicitly" installed. This tells
Spack's garbage collection system that these packages should be cleaned up. spack's garbage collection system that these packages should be cleaned up.
Don't worry however, this will not remove your entire environment. Don't worry however, this will not remove your entire environment.
Running ``spack install`` will reexamine your spack environment after Running ``spack install`` will reexamine your spack environment after

View File

@@ -9,7 +9,7 @@ Package Repositories (repos.yaml)
================================= =================================
Spack comes with thousands of built-in package recipes in Spack comes with thousands of built-in package recipes in
``var/spack/repos/spack_repo/builtin/``. This is a **package repository** -- a ``var/spack/repos/builtin/``. This is a **package repository** -- a
directory that Spack searches when it needs to find a package by name. directory that Spack searches when it needs to find a package by name.
You may need to maintain packages for restricted, proprietary or You may need to maintain packages for restricted, proprietary or
experimental software separately from the built-in repository. Spack experimental software separately from the built-in repository. Spack
@@ -69,7 +69,7 @@ The default ``etc/spack/defaults/repos.yaml`` file looks like this:
.. code-block:: yaml .. code-block:: yaml
repos: repos:
- $spack/var/spack/repos/spack_repo/builtin - $spack/var/spack/repos/builtin
The file starts with ``repos:`` and contains a single ordered list of The file starts with ``repos:`` and contains a single ordered list of
paths to repositories. Each path is on a separate line starting with paths to repositories. Each path is on a separate line starting with
@@ -78,16 +78,16 @@ paths to repositories. Each path is on a separate line starting with
.. code-block:: yaml .. code-block:: yaml
repos: repos:
- /opt/repos/spack_repo/local_repo - /opt/local-repo
- $spack/var/spack/repos/spack_repo/builtin - $spack/var/spack/repos/builtin
When Spack interprets a spec, e.g., ``mpich`` in ``spack install mpich``, When Spack interprets a spec, e.g., ``mpich`` in ``spack install mpich``,
it searches these repositories in order (first to last) to resolve each it searches these repositories in order (first to last) to resolve each
package name. In this example, Spack will look for the following package name. In this example, Spack will look for the following
packages and use the first valid file: packages and use the first valid file:
1. ``/opt/repos/spack_repo/local_repo/packages/mpich/package.py`` 1. ``/opt/local-repo/packages/mpich/package.py``
2. ``$spack/var/spack/repos/spack_repo/builtin/packages/mpich/package.py`` 2. ``$spack/var/spack/repos/builtin/packages/mpich/package.py``
.. note:: .. note::
@@ -101,15 +101,14 @@ Namespaces
Every repository in Spack has an associated **namespace** defined in its Every repository in Spack has an associated **namespace** defined in its
top-level ``repo.yaml`` file. If you look at top-level ``repo.yaml`` file. If you look at
``var/spack/repos/spack_repo/builtin/repo.yaml`` in the built-in repository, you'll ``var/spack/repos/builtin/repo.yaml`` in the built-in repository, you'll
see that its namespace is ``builtin``: see that its namespace is ``builtin``:
.. code-block:: console .. code-block:: console
$ cat var/spack/repos/spack_repo/builtin/repo.yaml $ cat var/spack/repos/builtin/repo.yaml
repo: repo:
namespace: builtin namespace: builtin
api: v2.0
Spack records the repository namespace of each installed package. For Spack records the repository namespace of each installed package. For
example, if you install the ``mpich`` package from the ``builtin`` repo, example, if you install the ``mpich`` package from the ``builtin`` repo,
@@ -218,15 +217,15 @@ Suppose you have three repositories: the builtin Spack repo
repo containing your own prototype packages (``proto``). Suppose they repo containing your own prototype packages (``proto``). Suppose they
contain packages as follows: contain packages as follows:
+--------------+-----------------------------------------------+-----------------------------+ +--------------+------------------------------------+-----------------------------+
| Namespace | Path to repo | Packages | | Namespace | Path to repo | Packages |
+==============+===============================================+=============================+ +==============+====================================+=============================+
| ``proto`` | ``~/my_spack_repos/spack_repo/proto`` | ``mpich`` | | ``proto`` | ``~/proto`` | ``mpich`` |
+--------------+-----------------------------------------------+-----------------------------+ +--------------+------------------------------------+-----------------------------+
| ``llnl`` | ``/usr/local/repos/spack_repo/llnl`` | ``hdf5`` | | ``llnl`` | ``/usr/local/llnl`` | ``hdf5`` |
+--------------+-----------------------------------------------+-----------------------------+ +--------------+------------------------------------+-----------------------------+
| ``builtin`` | ``$spack/var/spack/repos/spack_repo/builtin`` | ``mpich``, ``hdf5``, others | | ``builtin`` | ``$spack/var/spack/repos/builtin`` | ``mpich``, ``hdf5``, others |
+--------------+-----------------------------------------------+-----------------------------+ +--------------+------------------------------------+-----------------------------+
Suppose that ``hdf5`` depends on ``mpich``. You can override the Suppose that ``hdf5`` depends on ``mpich``. You can override the
built-in ``hdf5`` by adding the ``llnl`` repo to ``repos.yaml``: built-in ``hdf5`` by adding the ``llnl`` repo to ``repos.yaml``:
@@ -234,8 +233,8 @@ built-in ``hdf5`` by adding the ``llnl`` repo to ``repos.yaml``:
.. code-block:: yaml .. code-block:: yaml
repos: repos:
- /usr/local/repos/spack_repo/llnl - /usr/local/llnl
- $spack/var/spack/repos/spack_repo/builtin - $spack/var/spack/repos/builtin
``spack install hdf5`` will install ``llnl.hdf5 ^builtin.mpich``. ``spack install hdf5`` will install ``llnl.hdf5 ^builtin.mpich``.
@@ -244,9 +243,9 @@ If, instead, ``repos.yaml`` looks like this:
.. code-block:: yaml .. code-block:: yaml
repos: repos:
- ~/my_spack_repos/spack_repo/proto - ~/proto
- /usr/local/repos/spack_repo/llnl - /usr/local/llnl
- $spack/var/spack/repos/spack_repo/builtin - $spack/var/spack/repos/builtin
``spack install hdf5`` will install ``llnl.hdf5 ^proto.mpich``. ``spack install hdf5`` will install ``llnl.hdf5 ^proto.mpich``.
@@ -327,8 +326,8 @@ files, use ``spack repo list``.
$ spack repo list $ spack repo list
==> 2 package repositories. ==> 2 package repositories.
myrepo v2.0 ~/my_spack_repos/spack_repo/myrepo myrepo ~/myrepo
builtin v2.0 ~/spack/var/spack/repos/spack_repo/builtin builtin ~/spack/var/spack/repos/builtin
Each repository is listed with its associated namespace. To get the raw, Each repository is listed with its associated namespace. To get the raw,
merged YAML from all configuration files, use ``spack config get repos``: merged YAML from all configuration files, use ``spack config get repos``:
@@ -336,9 +335,9 @@ merged YAML from all configuration files, use ``spack config get repos``:
.. code-block:: console .. code-block:: console
$ spack config get repos $ spack config get repos
repos: repos:srepos:
- ~/my_spack_repos/spack_repo/myrepo - ~/myrepo
- $spack/var/spack/repos/spack_repo/builtin - $spack/var/spack/repos/builtin
Note that, unlike ``spack repo list``, this does not include the Note that, unlike ``spack repo list``, this does not include the
namespace, which is read from each repo's ``repo.yaml``. namespace, which is read from each repo's ``repo.yaml``.
@@ -352,54 +351,66 @@ yourself; you can use the ``spack repo create`` command.
.. code-block:: console .. code-block:: console
$ spack repo create ~/my_spack_repos myrepo $ spack repo create myrepo
==> Created repo with namespace 'myrepo'. ==> Created repo with namespace 'myrepo'.
==> To register it with spack, run this command: ==> To register it with spack, run this command:
spack repo add ~/my_spack_repos/spack_repo/myrepo spack repo add ~/myrepo
$ ls ~/my_spack_repos/spack_repo/myrepo $ ls myrepo
packages/ repo.yaml packages/ repo.yaml
$ cat ~/my_spack_repos/spack_repo/myrepo/repo.yaml $ cat myrepo/repo.yaml
repo: repo:
namespace: 'myrepo' namespace: 'myrepo'
api: v2.0
Namespaces can also be nested, which can be useful if you have By default, the namespace of a new repo matches its directory's name.
multiple package repositories for an organization. Spack will You can supply a custom namespace with a second argument, e.g.:
create the corresponding directory structure for you:
.. code-block:: console .. code-block:: console
$ spack repo create ~/my_spack_repos llnl.comp $ spack repo create myrepo llnl.comp
==> Created repo with namespace 'llnl.comp'. ==> Created repo with namespace 'llnl.comp'.
==> To register it with spack, run this command: ==> To register it with spack, run this command:
spack repo add ~/my_spack_repos/spack_repo/llnl/comp spack repo add ~/myrepo
$ cat myrepo/repo.yaml
$ cat ~/my_spack_repos/spack_repo/llnl/comp/repo.yaml
repo: repo:
namespace: 'llnl.comp' namespace: 'llnl.comp'
api: v2.0
You can also create repositories with custom structure with the ``-d/--subdirectory``
argument, e.g.:
.. code-block:: console
$ spack repo create -d applications myrepo apps
==> Created repo with namespace 'apps'.
==> To register it with Spack, run this command:
spack repo add ~/myrepo
$ ls myrepo
applications/ repo.yaml
$ cat myrepo/repo.yaml
repo:
namespace: apps
subdirectory: applications
^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^
``spack repo add`` ``spack repo add``
^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^
Once your repository is created, you can register it with Spack with Once your repository is created, you can register it with Spack with
``spack repo add``. You need to specify the path to the directory that ``spack repo add``:
contains the ``repo.yaml`` file.
.. code-block:: console .. code-block:: console
$ spack repo add ~/my_spack_repos/spack_repo/llnl/comp $ spack repo add ./myrepo
==> Added repo with namespace 'llnl.comp'. ==> Added repo with namespace 'llnl.comp'.
$ spack repo list $ spack repo list
==> 2 package repositories. ==> 2 package repositories.
llnl.comp v2.0 ~/my_spack_repos/spack_repo/llnl/comp llnl.comp ~/myrepo
builtin v2.0 ~/spack/var/spack/repos/spack_repo/builtin builtin ~/spack/var/spack/repos/builtin
This simply adds the repo to your ``repos.yaml`` file. This simply adds the repo to your ``repos.yaml`` file.
@@ -421,43 +432,46 @@ By namespace:
.. code-block:: console .. code-block:: console
$ spack repo rm llnl.comp $ spack repo rm llnl.comp
==> Removed repository ~/my_spack_repos/spack_repo/llnl/comp with namespace 'llnl.comp'. ==> Removed repository ~/myrepo with namespace 'llnl.comp'.
$ spack repo list $ spack repo list
==> 1 package repository. ==> 1 package repository.
builtin ~/spack/var/spack/repos/spack_repo/builtin builtin ~/spack/var/spack/repos/builtin
By path: By path:
.. code-block:: console .. code-block:: console
$ spack repo rm ~/my_spack_repos/spack_repo/llnl/comp $ spack repo rm ~/myrepo
==> Removed repository ~/my_spack_repos/spack_repo/llnl/comp ==> Removed repository ~/myrepo
$ spack repo list $ spack repo list
==> 1 package repository. ==> 1 package repository.
builtin ~/spack/var/spack/repos/spack_repo/builtin builtin ~/spack/var/spack/repos/builtin
-------------------------------- --------------------------------
Repo namespaces and Python Repo namespaces and Python
-------------------------------- --------------------------------
Package repositories are implemented as Python packages. To be precise, You may have noticed that namespace notation for repositories is similar
they are `namespace packages to the notation for namespaces in Python. As it turns out, you *can*
<https://packaging.python.org/en/latest/guides/packaging-namespace-packages/>`_ treat Spack repositories like Python packages; this is how they are
with ``spack_repo`` as the top-level namespace, followed by the repository implemented.
namespace as submodules. For example, the builtin repository corresponds
to the Python module ``spack_repo.builtin.packages``.
This structure allows you to extend a ``builtin`` package in your own You could, for example, extend a ``builtin`` package in your own
repository: repository:
.. code-block:: python .. code-block:: python
from spack_repo.builtin.packages.mpich.package import Mpich from spack.pkg.builtin.mpich import Mpich
class MyPackage(Mpich): class MyPackage(Mpich):
... ...
Spack populates ``sys.path`` at runtime with the path to the root of your Spack repo namespaces are actually Python namespaces tacked on under
package repository's ``spack_repo`` directory. ``spack.pkg``. The search semantics of ``repos.yaml`` are actually
implemented using Python's built-in `sys.path
<https://docs.python.org/2/library/sys.html#sys.path>`_ search. The
:py:mod:`spack.repo` module implements a custom `Python importer
<https://docs.python.org/2/library/imp.html>`_.

View File

@@ -5,9 +5,9 @@ sphinx-rtd-theme==3.0.2
python-levenshtein==0.27.1 python-levenshtein==0.27.1
docutils==0.21.2 docutils==0.21.2
pygments==2.19.1 pygments==2.19.1
urllib3==2.4.0 urllib3==2.3.0
pytest==8.3.5 pytest==8.3.5
isort==6.0.1 isort==6.0.1
black==25.1.0 black==25.1.0
flake8==7.2.0 flake8==7.1.2
mypy==1.11.1 mypy==1.11.1

View File

@@ -176,72 +176,92 @@ community without needing deep familiarity with GnuPG or Public Key
Infrastructure. Infrastructure.
.. _build_cache_signing: .. _build_cache_format:
------------------- ------------------
Build Cache Signing Build Cache Format
------------------- ------------------
For an in-depth description of the layout of a binary mirror, see A binary package consists of a metadata file unambiguously defining the
the :ref:`documentation<build_cache_layout>` covering binary caches. The built package (and including other details such as how to relocate it)
key takeaway from that discussion that applies here is that the entry point and the installation directory of the package stored as a compressed
to a binary package is its manifest. The manifest refers unambiguously to the archive file. The metadata files can either be unsigned, in which case
spec metadata and compressed archive, which are stored as content-addressed the contents are simply the json-serialized concrete spec plus metadata,
blobs. or they can be signed, in which case the json-serialized concrete spec
plus metadata is wrapped in a gpg cleartext signature. Built package
metadata files are named to indicate the operating system and
architecture for which the package was built as well as the compiler
used to build it and the packages name and version. For example::
The manifest files can either be signed or unsigned, but are always given linux-ubuntu18.04-haswell-gcc-7.5.0-zlib-1.2.12-llv2ysfdxnppzjrt5ldybb5c52qbmoow.spec.json.sig
a name ending with ``.spec.manifest.json`` regardless. The difference between
signed and unsigned manifests is simply that the signed version is wrapped in would contain the concrete spec and binary metadata for a binary package
a gpg cleartext signature, as illustrated below:: of ``zlib@1.2.12``, built for the ``ubuntu`` operating system and ``haswell``
architecture. The id of the built package exists in the name of the file
as well (after the package name and version) and in this case begins
with ``llv2ys``. The id distinguishes a particular built package from all
other built packages with the same os/arch, compiler, name, and version.
Below is an example of a signed binary package metadata file. Such a
file would live in the ``build_cache`` directory of a binary mirror::
-----BEGIN PGP SIGNED MESSAGE----- -----BEGIN PGP SIGNED MESSAGE-----
Hash: SHA512 Hash: SHA512
{ {
"version": 3, "spec": {
"data": [ <concrete-spec-contents-omitted>
{ },
"contentLength": 10731083,
"mediaType": "application/vnd.spack.install.v2.tar+gzip",
"compression": "gzip",
"checksumAlgorithm": "sha256",
"checksum": "0f24aa6b5dd7150067349865217acd3f6a383083f9eca111d2d2fed726c88210"
},
{
"contentLength": 1000,
"mediaType": "application/vnd.spack.spec.v5+json",
"compression": "gzip",
"checksumAlgorithm": "sha256",
"checksum": "fba751c4796536737c9acbb718dad7429be1fa485f5585d450ab8b25d12ae041"
}
]
}
-----BEGIN PGP SIGNATURE-----
iQGzBAEBCgAdFiEEdbwFKBFJCcB24mB0GAEP+tc8mwcFAmf2rr4ACgkQGAEP+tc8 "buildcache_layout_version": 1,
mwfefwv+KJs8MsQ5ovFaBdmyx5H/3k4rO4QHBzuSPOB6UaxErA9IyOB31iP6vNTU "binary_cache_checksum": {
HzYpxz6F5dJCJWmmNEMN/0+vjhMHEOkqd7M1l5reVcxduTF2yc4tBZUO2gienEHL "hash_algorithm": "sha256",
W0e+SnUznl1yc/aVpChUiahO2zToCsI8HZRNT4tu6iCnE/OpghqjsSdBOZHmSNDD "hash": "4f1e46452c35a5e61bcacca205bae1bfcd60a83a399af201a29c95b7cc3e1423"
5wuuCxfDUyWI6ZlLclaaB7RdbCUUJf/iqi711J+wubvnDFhc6Ynwm1xai5laJ1bD }
ev3NrSb2AAroeNFVo4iECA0fZC1OZQYzaRmAEhBXtCideGJ5Zf2Cp9hmCwNK8Hq6 }
bNt94JP9LqC3FCCJJOMsPyOOhMSA5MU44zyyzloRwEQpHHLuFzVdbTHA3dmTc18n
HxNLkZoEMYRc8zNr40g0yb2lCbc+P11TtL1E+5NlE34MX15mPewRCiIFTMwhCnE3 -----BEGIN PGP SIGNATURE-----
gFSKtW1MKustZE35/RUwd2mpJRf+mSRVCl1f1RiFjktLjz7vWQq7imIUSam0fPDr iQGzBAEBCgAdFiEETZn0sLle8jIrdAPLx/P+voVcifMFAmKAGvwACgkQx/P+voVc
XD4aDogm ifNoVgv/VrhA+wurVs5GB9PhmMA1m5U/AfXZb4BElDRwpT8ZcTPIv5X8xtv60eyn
=RrFX 4EOneGVbZoMThVxgev/NKARorGmhFXRqhWf+jknJZ1dicpqn/qpv34rELKUpgXU+
QDQ4d1P64AIdTczXe2GI9ZvhOo6+bPvK7LIsTkBbtWmopkomVxF0LcMuxAVIbA6b
887yBvVO0VGlqRnkDW7nXx49r3AG2+wDcoU1f8ep8QtjOcMNaPTPJ0UnjD0VQGW6
4ZFaGZWzdo45MY6tF3o5mqM7zJkVobpoW3iUz6J5tjz7H/nMlGgMkUwY9Kxp2PVH
qoj6Zip3LWplnl2OZyAY+vflPFdFh12Xpk4FG7Sxm/ux0r+l8tCAPvtw+G38a5P7
QEk2JBr8qMGKASmnRlJUkm1vwz0a95IF3S9YDfTAA2vz6HH3PtsNLFhtorfx8eBi
Wn5aPJAGEPOawEOvXGGbsH4cDEKPeN0n6cy1k92uPEmBLDVsdnur8q42jk5c2Qyx
j3DXty57
=3gvm
-----END PGP SIGNATURE----- -----END PGP SIGNATURE-----
If a user has trusted the public key associated with the private key If a user has trusted the public key associated with the private key
used to sign the above manifest file, the signature can be verified with used to sign the above spec file, the signature can be verified with
gpg, as follows:: gpg, as follows::
$ gpg --verify gcc-runtime-12.3.0-s2nqujezsce4x6uhtvxscu7jhewqzztx.spec.manifest.json $ gpg verify linux-ubuntu18.04-haswell-gcc-7.5.0-zlib-1.2.12-llv2ysfdxnppzjrt5ldybb5c52qbmoow.spec.json.sig
When attempting to install a binary package that has been signed, spack will The metadata (regardless whether signed or unsigned) contains the checksum
attempt to verify the signature with one of the trusted keys in its keyring, of the ``.spack`` file containing the actual installation. The checksum should
and will fail if unable to do so. While not recommended, it is possible to be compared to a checksum computed locally on the ``.spack`` file to ensure the
force installation of a signed package without verification by providing the contents have not changed since the binary spec plus metadata were signed. The
``--no-check-signature`` argument to ``spack install ...``. ``.spack`` files are actually tarballs containing the compressed archive of the
install tree. These files, along with the metadata files, live within the
``build_cache`` directory of the mirror, and together are organized as follows::
build_cache/
# unsigned metadata (for indexing, contains sha256 of .spack file)
<arch>-<compiler>-<name>-<ver>-24zvipcqgg2wyjpvdq2ajy5jnm564hen.spec.json
# clearsigned metadata (same as above, but signed)
<arch>-<compiler>-<name>-<ver>-24zvipcqgg2wyjpvdq2ajy5jnm564hen.spec.json.sig
<arch>/
<compiler>/
<name>-<ver>/
# tar.gz-compressed prefix (may support more compression formats later)
<arch>-<compiler>-<name>-<ver>-24zvipcqgg2wyjpvdq2ajy5jnm564hen.spack
Uncompressing and extracting the ``.spack`` file results in the install tree.
This is in contrast to previous versions of spack, where the ``.spack`` file
contained a (duplicated) metadata file, a signature file and a nested tarball
containing the install tree.
.. _internal_implementation: .. _internal_implementation:
@@ -300,10 +320,10 @@ the following way:
Reputational Public Key are imported into a keyring by the ``spack gpg …`` Reputational Public Key are imported into a keyring by the ``spack gpg …``
sub-command. This is initiated by the jobs build script which is created by sub-command. This is initiated by the jobs build script which is created by
the generate job at the beginning of the pipeline. the generate job at the beginning of the pipeline.
4. Assuming the package has dependencies those spec manifests are verified using 4. Assuming the package has dependencies those specs are verified using
the keyring. the keyring.
5. The package is built and the spec manifest is generated 5. The package is built and the spec.json is generated
6. The spec manifest is signed by the keyring and uploaded to the mirrors 6. The spec.json is signed by the keyring and uploaded to the mirrors
build cache. build cache.
**Reputational Key** **Reputational Key**
@@ -356,24 +376,24 @@ following way:
4. In addition to the secret, the runner creates a tmpfs memory mounted 4. In addition to the secret, the runner creates a tmpfs memory mounted
directory where the GnuPG keyring will be created to verify, and directory where the GnuPG keyring will be created to verify, and
then resign the package specs. then resign the package specs.
5. The job script syncs all spec manifest files from the build cache to 5. The job script syncs all spec.json.sig files from the build cache to
a working directory in the jobs execution environment. a working directory in the jobs execution environment.
6. The job script then runs the ``sign.sh`` script built into the 6. The job script then runs the ``sign.sh`` script built into the
notary Docker image. notary Docker image.
7. The ``sign.sh`` script imports the public components of the 7. The ``sign.sh`` script imports the public components of the
Reputational and Intermediate CI Keys and uses them to verify good Reputational and Intermediate CI Keys and uses them to verify good
signatures on the spec.manifest.json files. If any signed manifest signatures on the spec.json.sig files. If any signed spec does not
does not verify, the job immediately fails. verify the job immediately fails.
8. Assuming all manifests are verified, the ``sign.sh`` script then unpacks 8. Assuming all specs are verified, the ``sign.sh`` script then unpacks
the manifest json data from the signed file in preparation for being the spec json data from the signed file in preparation for being
re-signed with the Reputational Key. re-signed with the Reputational Key.
9. The private components of the Reputational Key are decrypted to 9. The private components of the Reputational Key are decrypted to
standard out using ``aws-encryption-cli`` directly into a ``gpg standard out using ``aws-encryption-cli`` directly into a ``gpg
import …`` statement which imports the key into the import …`` statement which imports the key into the
keyring mounted in-memory. keyring mounted in-memory.
10. The private key is then used to sign each of the manifests and the 10. The private key is then used to sign each of the json specs and the
keyring is removed from disk. keyring is removed from disk.
11. The re-signed manifests are resynced to the AWS S3 Mirror and the 11. The re-signed json specs are resynced to the AWS S3 Mirror and the
public signing of the packages for the develop or release pipeline public signing of the packages for the develop or release pipeline
that created them is complete. that created them is complete.

View File

@@ -11,7 +11,6 @@
* Homepage: https://altgraph.readthedocs.io/en/latest/index.html * Homepage: https://altgraph.readthedocs.io/en/latest/index.html
* Usage: dependency of macholib * Usage: dependency of macholib
* Version: 0.17.3 * Version: 0.17.3
* License: MIT
archspec archspec
-------- --------
@@ -19,7 +18,6 @@
* Homepage: https://pypi.python.org/pypi/archspec * Homepage: https://pypi.python.org/pypi/archspec
* Usage: Labeling, comparison and detection of microarchitectures * Usage: Labeling, comparison and detection of microarchitectures
* Version: 0.2.5 (commit 38ce485258ffc4fc6dd6688f8dc90cb269478c47) * Version: 0.2.5 (commit 38ce485258ffc4fc6dd6688f8dc90cb269478c47)
* License: Apache-2.0 or MIT
astunparse astunparse
---------------- ----------------
@@ -27,7 +25,6 @@
* Homepage: https://github.com/simonpercivall/astunparse * Homepage: https://github.com/simonpercivall/astunparse
* Usage: Unparsing Python ASTs for package hashes in Spack * Usage: Unparsing Python ASTs for package hashes in Spack
* Version: 1.6.3 (plus modifications) * Version: 1.6.3 (plus modifications)
* License: PSF-2.0
* Note: This is in ``spack.util.unparse`` because it's very heavily * Note: This is in ``spack.util.unparse`` because it's very heavily
modified, and we want to track coverage for it. modified, and we want to track coverage for it.
Specifically, we have modified this library to generate consistent unparsed ASTs Specifically, we have modified this library to generate consistent unparsed ASTs
@@ -44,7 +41,6 @@
* Homepage: https://github.com/python-attrs/attrs * Homepage: https://github.com/python-attrs/attrs
* Usage: Needed by jsonschema. * Usage: Needed by jsonschema.
* Version: 22.1.0 * Version: 22.1.0
* License: MIT
ctest_log_parser ctest_log_parser
---------------- ----------------
@@ -52,7 +48,6 @@
* Homepage: https://github.com/Kitware/CMake/blob/master/Source/CTest/cmCTestBuildHandler.cxx * Homepage: https://github.com/Kitware/CMake/blob/master/Source/CTest/cmCTestBuildHandler.cxx
* Usage: Functions to parse build logs and extract error messages. * Usage: Functions to parse build logs and extract error messages.
* Version: Unversioned * Version: Unversioned
* License: BSD-3-Clause
* Note: This is a homemade port of Kitware's CTest build handler. * Note: This is a homemade port of Kitware's CTest build handler.
distro distro
@@ -61,7 +56,6 @@
* Homepage: https://pypi.python.org/pypi/distro * Homepage: https://pypi.python.org/pypi/distro
* Usage: Provides a more stable linux distribution detection. * Usage: Provides a more stable linux distribution detection.
* Version: 1.8.0 * Version: 1.8.0
* License: Apache-2.0
jinja2 jinja2
------ ------
@@ -69,7 +63,6 @@
* Homepage: https://pypi.python.org/pypi/Jinja2 * Homepage: https://pypi.python.org/pypi/Jinja2
* Usage: A modern and designer-friendly templating language for Python. * Usage: A modern and designer-friendly templating language for Python.
* Version: 3.0.3 (last version supporting Python 3.6) * Version: 3.0.3 (last version supporting Python 3.6)
* License: BSD-3-Clause
jsonschema jsonschema
---------- ----------
@@ -77,7 +70,6 @@
* Homepage: https://pypi.python.org/pypi/jsonschema * Homepage: https://pypi.python.org/pypi/jsonschema
* Usage: An implementation of JSON Schema for Python. * Usage: An implementation of JSON Schema for Python.
* Version: 3.2.0 (last version before 2.7 and 3.6 support was dropped) * Version: 3.2.0 (last version before 2.7 and 3.6 support was dropped)
* License: MIT
* Note: We don't include tests or benchmarks; just what Spack needs. * Note: We don't include tests or benchmarks; just what Spack needs.
macholib macholib
@@ -86,7 +78,6 @@
* Homepage: https://macholib.readthedocs.io/en/latest/index.html# * Homepage: https://macholib.readthedocs.io/en/latest/index.html#
* Usage: Manipulation of Mach-o binaries for relocating macOS buildcaches on Linux * Usage: Manipulation of Mach-o binaries for relocating macOS buildcaches on Linux
* Version: 1.16.2 * Version: 1.16.2
* License: MIT
markupsafe markupsafe
---------- ----------
@@ -94,7 +85,6 @@
* Homepage: https://pypi.python.org/pypi/MarkupSafe * Homepage: https://pypi.python.org/pypi/MarkupSafe
* Usage: Implements a XML/HTML/XHTML Markup safe string for Python. * Usage: Implements a XML/HTML/XHTML Markup safe string for Python.
* Version: 2.0.1 (last version supporting Python 3.6) * Version: 2.0.1 (last version supporting Python 3.6)
* License: BSD-3-Clause
pyrsistent pyrsistent
---------- ----------
@@ -102,7 +92,6 @@
* Homepage: http://github.com/tobgu/pyrsistent/ * Homepage: http://github.com/tobgu/pyrsistent/
* Usage: Needed by `jsonschema` * Usage: Needed by `jsonschema`
* Version: 0.18.0 * Version: 0.18.0
* License: MIT
ruamel.yaml ruamel.yaml
------ ------
@@ -112,7 +101,6 @@
actively maintained and has more features, including round-tripping actively maintained and has more features, including round-tripping
comments read from config files. comments read from config files.
* Version: 0.17.21 * Version: 0.17.21
* License: MIT
six six
--- ---
@@ -120,6 +108,5 @@
* Homepage: https://pypi.python.org/pypi/six * Homepage: https://pypi.python.org/pypi/six
* Usage: Python 2 and 3 compatibility utilities. * Usage: Python 2 and 3 compatibility utilities.
* Version: 1.16.0 * Version: 1.16.0
* License: MIT
""" """

View File

@@ -0,0 +1 @@
from _pyrsistent_version import *

View File

@@ -0,0 +1 @@
from altgraph import *

View File

@@ -1,8 +1,8 @@
""" """
_vendoring.altgraph.Dot - Interface to the dot language altgraph.Dot - Interface to the dot language
============================================ ============================================
The :py:mod:`~_vendoring.altgraph.Dot` module provides a simple interface to the The :py:mod:`~altgraph.Dot` module provides a simple interface to the
file format used in the file format used in the
`graphviz <http://www.research.att.com/sw/tools/graphviz/>`_ `graphviz <http://www.research.att.com/sw/tools/graphviz/>`_
program. The module is intended to offload the most tedious part of the process program. The module is intended to offload the most tedious part of the process
@@ -20,7 +20,7 @@
Here is a typical usage:: Here is a typical usage::
from _vendoring.altgraph import Graph, Dot from altgraph import Graph, Dot
# create a graph # create a graph
edges = [ (1,2), (1,3), (3,4), (3,5), (4,5), (5,4) ] edges = [ (1,2), (1,3), (3,4), (3,5), (4,5), (5,4) ]
@@ -77,7 +77,7 @@
.. note:: .. note::
dotty (invoked via :py:func:`~_vendoring.altgraph.Dot.display`) may not be able to dotty (invoked via :py:func:`~altgraph.Dot.display`) may not be able to
display all graphics styles. To verify the output save it to an image file display all graphics styles. To verify the output save it to an image file
and look at it that way. and look at it that way.
@@ -111,7 +111,7 @@
import os import os
import warnings import warnings
from _vendoring.altgraph import GraphError from altgraph import GraphError
class Dot(object): class Dot(object):

View File

@@ -1,5 +1,5 @@
""" """
_vendoring.altgraph.Graph - Base Graph class altgraph.Graph - Base Graph class
================================= =================================
.. ..
@@ -15,7 +15,7 @@
from collections import deque from collections import deque
from _vendoring.altgraph import GraphError from altgraph import GraphError
class Graph(object): class Graph(object):

View File

@@ -1,8 +1,8 @@
""" """
_vendoring.altgraph.GraphAlgo - Graph algorithms altgraph.GraphAlgo - Graph algorithms
===================================== =====================================
""" """
from _vendoring.altgraph import GraphError from altgraph import GraphError
def dijkstra(graph, start, end=None): def dijkstra(graph, start, end=None):
@@ -25,7 +25,7 @@ def dijkstra(graph, start, end=None):
and will raise an exception if it discovers that a negative edge has and will raise an exception if it discovers that a negative edge has
caused it to make a mistake. caused it to make a mistake.
Adapted to _vendoring.altgraph by Istvan Albert, Pennsylvania State University - Adapted to altgraph by Istvan Albert, Pennsylvania State University -
June, 9 2004 June, 9 2004
""" """
D = {} # dictionary of final distances D = {} # dictionary of final distances

View File

@@ -1,5 +1,5 @@
""" """
_vendoring.altgraph.GraphStat - Functions providing various graph statistics altgraph.GraphStat - Functions providing various graph statistics
================================================================= =================================================================
""" """

View File

@@ -1,17 +1,17 @@
""" """
_vendoring.altgraph.GraphUtil - Utility classes and functions altgraph.GraphUtil - Utility classes and functions
================================================== ==================================================
""" """
import random import random
from collections import deque from collections import deque
from _vendoring.altgraph import Graph, GraphError from altgraph import Graph, GraphError
def generate_random_graph(node_num, edge_num, self_loops=False, multi_edges=False): def generate_random_graph(node_num, edge_num, self_loops=False, multi_edges=False):
""" """
Generates and returns a :py:class:`~_vendoring.altgraph.Graph.Graph` instance with Generates and returns a :py:class:`~altgraph.Graph.Graph` instance with
*node_num* nodes randomly connected by *edge_num* edges. *node_num* nodes randomly connected by *edge_num* edges.
""" """
g = Graph.Graph() g = Graph.Graph()
@@ -52,7 +52,7 @@ def generate_random_graph(node_num, edge_num, self_loops=False, multi_edges=Fals
def generate_scale_free_graph(steps, growth_num, self_loops=False, multi_edges=False): def generate_scale_free_graph(steps, growth_num, self_loops=False, multi_edges=False):
""" """
Generates and returns a :py:class:`~_vendoring.altgraph.Graph.Graph` instance that Generates and returns a :py:class:`~altgraph.Graph.Graph` instance that
will have *steps* \\* *growth_num* nodes and a scale free (powerlaw) will have *steps* \\* *growth_num* nodes and a scale free (powerlaw)
connectivity. Starting with a fully connected graph with *growth_num* connectivity. Starting with a fully connected graph with *growth_num*
nodes at every step *growth_num* nodes are added to the graph and are nodes at every step *growth_num* nodes are added to the graph and are

View File

@@ -1,14 +1,14 @@
""" """
_vendoring.altgraph.ObjectGraph - Graph of objects with an identifier altgraph.ObjectGraph - Graph of objects with an identifier
========================================================== ==========================================================
A graph of objects that have a "graphident" attribute. A graph of objects that have a "graphident" attribute.
graphident is the key for the object in the graph graphident is the key for the object in the graph
""" """
from _vendoring.altgraph import GraphError from altgraph import GraphError
from _vendoring.altgraph.Graph import Graph from altgraph.Graph import Graph
from _vendoring.altgraph.GraphUtil import filter_stack from altgraph.GraphUtil import filter_stack
class ObjectGraph(object): class ObjectGraph(object):

View File

@@ -1,18 +1,18 @@
""" """
_vendoring.altgraph - a python graph library altgraph - a python graph library
================================= =================================
_vendoring.altgraph is a fork of `graphlib <http://pygraphlib.sourceforge.net>`_ tailored altgraph is a fork of `graphlib <http://pygraphlib.sourceforge.net>`_ tailored
to use newer Python 2.3+ features, including additional support used by the to use newer Python 2.3+ features, including additional support used by the
py2app suite (modulegraph and _vendoring.macholib, specifically). py2app suite (modulegraph and macholib, specifically).
_vendoring.altgraph is a python based graph (network) representation and manipulation altgraph is a python based graph (network) representation and manipulation
package. It has started out as an extension to the package. It has started out as an extension to the
`graph_lib module `graph_lib module
<http://www.ece.arizona.edu/~denny/python_nest/graph_lib_1.0.1.html>`_ <http://www.ece.arizona.edu/~denny/python_nest/graph_lib_1.0.1.html>`_
written by Nathan Denny it has been significantly optimized and expanded. written by Nathan Denny it has been significantly optimized and expanded.
The :class:`_vendoring.altgraph.Graph.Graph` class is loosely modeled after the The :class:`altgraph.Graph.Graph` class is loosely modeled after the
`LEDA <http://www.algorithmic-solutions.com/enleda.htm>`_ `LEDA <http://www.algorithmic-solutions.com/enleda.htm>`_
(Library of Efficient Datatypes) representation. The library (Library of Efficient Datatypes) representation. The library
includes methods for constructing graphs, BFS and DFS traversals, includes methods for constructing graphs, BFS and DFS traversals,
@@ -22,22 +22,22 @@
The package contains the following modules: The package contains the following modules:
- the :py:mod:`_vendoring.altgraph.Graph` module contains the - the :py:mod:`altgraph.Graph` module contains the
:class:`~_vendoring.altgraph.Graph.Graph` class that stores the graph data :class:`~altgraph.Graph.Graph` class that stores the graph data
- the :py:mod:`_vendoring.altgraph.GraphAlgo` module implements graph algorithms - the :py:mod:`altgraph.GraphAlgo` module implements graph algorithms
operating on graphs (:py:class:`~_vendoring.altgraph.Graph.Graph`} instances) operating on graphs (:py:class:`~altgraph.Graph.Graph`} instances)
- the :py:mod:`_vendoring.altgraph.GraphStat` module contains functions for - the :py:mod:`altgraph.GraphStat` module contains functions for
computing statistical measures on graphs computing statistical measures on graphs
- the :py:mod:`_vendoring.altgraph.GraphUtil` module contains functions for - the :py:mod:`altgraph.GraphUtil` module contains functions for
generating, reading and saving graphs generating, reading and saving graphs
- the :py:mod:`_vendoring.altgraph.Dot` module contains functions for displaying - the :py:mod:`altgraph.Dot` module contains functions for displaying
graphs via `graphviz <http://www.research.att.com/sw/tools/graphviz/>`_ graphs via `graphviz <http://www.research.att.com/sw/tools/graphviz/>`_
- the :py:mod:`_vendoring.altgraph.ObjectGraph` module implements a graph of - the :py:mod:`altgraph.ObjectGraph` module implements a graph of
objects with a unique identifier objects with a unique identifier
Installation Installation
@@ -62,7 +62,7 @@
Lets assume that we want to analyze the graph below (links to the full picture) Lets assume that we want to analyze the graph below (links to the full picture)
GRAPH_IMG. Our script then might look the following way:: GRAPH_IMG. Our script then might look the following way::
from _vendoring.altgraph import Graph, GraphAlgo, Dot from altgraph import Graph, GraphAlgo, Dot
# these are the edges # these are the edges
edges = [ (1,2), (2,4), (1,3), (2,4), (3,4), (4,5), (6,5), edges = [ (1,2), (2,4), (1,3), (2,4), (3,4), (4,5), (6,5),
@@ -141,7 +141,7 @@
""" """
import pkg_resources import pkg_resources
__version__ = pkg_resources.require("_vendoring.altgraph")[0].version __version__ = pkg_resources.require("altgraph")[0].version
class GraphError(ValueError): class GraphError(ValueError):

View File

@@ -1,3 +0,0 @@
"""Init file to avoid namespace packages"""
__version__ = "0.2.5"

View File

@@ -1,20 +0,0 @@
The MIT License (MIT)
Copyright (c) 2014 Anders Høst
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

View File

@@ -38,7 +38,7 @@
"typing.ClassVar", "typing.ClassVar",
"t.ClassVar", "t.ClassVar",
"ClassVar", "ClassVar",
"_vendoring.typing_extensions.ClassVar", "typing_extensions.ClassVar",
) )
# we don't use a double-underscore prefix because that triggers # we don't use a double-underscore prefix because that triggers
# name mangling when trying to create a slot for the field # name mangling when trying to create a slot for the field

View File

@@ -1,6 +1,6 @@
# SPDX-License-Identifier: MIT # SPDX-License-Identifier: MIT
from _vendoring.attr import ( from attr import (
NOTHING, NOTHING,
Attribute, Attribute,
Factory, Factory,
@@ -28,7 +28,7 @@
resolve_types, resolve_types,
validate, validate,
) )
from _vendoring.attr._next_gen import asdict, astuple from attr._next_gen import asdict, astuple
from . import converters, exceptions, filters, setters, validators from . import converters, exceptions, filters, setters, validators

View File

@@ -1,3 +1,3 @@
# SPDX-License-Identifier: MIT # SPDX-License-Identifier: MIT
from _vendoring.attr.converters import * # noqa from attr.converters import * # noqa

View File

@@ -1,3 +1,3 @@
# SPDX-License-Identifier: MIT # SPDX-License-Identifier: MIT
from _vendoring.attr.exceptions import * # noqa from attr.exceptions import * # noqa

View File

@@ -1,3 +1,3 @@
# SPDX-License-Identifier: MIT # SPDX-License-Identifier: MIT
from _vendoring.attr.filters import * # noqa from attr.filters import * # noqa

View File

@@ -1,3 +1,3 @@
# SPDX-License-Identifier: MIT # SPDX-License-Identifier: MIT
from _vendoring.attr.setters import * # noqa from attr.setters import * # noqa

View File

@@ -1,3 +1,3 @@
# SPDX-License-Identifier: MIT # SPDX-License-Identifier: MIT
from _vendoring.attr.validators import * # noqa from attr.validators import * # noqa

View File

@@ -19,7 +19,7 @@
from types import CodeType from types import CodeType
if t.TYPE_CHECKING: if t.TYPE_CHECKING:
import _vendoring.typing_extensions as te import typing_extensions as te
from .environment import Environment from .environment import Environment
class _MemcachedClient(te.Protocol): class _MemcachedClient(te.Protocol):
@@ -101,7 +101,7 @@ def bytecode_to_string(self) -> bytes:
class BytecodeCache: class BytecodeCache:
"""To implement your own bytecode cache you have to subclass this class """To implement your own bytecode cache you have to subclass this class
and override :meth:`load_bytecode` and :meth:`dump_bytecode`. Both of and override :meth:`load_bytecode` and :meth:`dump_bytecode`. Both of
these methods are passed a :class:`~_vendoring.jinja2.bccache.Bucket`. these methods are passed a :class:`~jinja2.bccache.Bucket`.
A very basic bytecode cache that saves the bytecode on the file system:: A very basic bytecode cache that saves the bytecode on the file system::
@@ -193,7 +193,7 @@ class FileSystemBytecodeCache(BytecodeCache):
is created for the user in the system temp directory. is created for the user in the system temp directory.
The pattern can be used to have multiple separate caches operate on the The pattern can be used to have multiple separate caches operate on the
same directory. The default pattern is ``'___vendoring.jinja2_%s.cache'``. ``%s`` same directory. The default pattern is ``'__jinja2_%s.cache'``. ``%s``
is replaced with the cache key. is replaced with the cache key.
>>> bcc = FileSystemBytecodeCache('/tmp/jinja_cache', '%s.cache') >>> bcc = FileSystemBytecodeCache('/tmp/jinja_cache', '%s.cache')
@@ -202,7 +202,7 @@ class FileSystemBytecodeCache(BytecodeCache):
""" """
def __init__( def __init__(
self, directory: t.Optional[str] = None, pattern: str = "___vendoring.jinja2_%s.cache" self, directory: t.Optional[str] = None, pattern: str = "__jinja2_%s.cache"
) -> None: ) -> None:
if directory is None: if directory is None:
directory = self._get_default_cache_dir() directory = self._get_default_cache_dir()
@@ -225,7 +225,7 @@ def _unsafe_dir() -> "te.NoReturn":
if not hasattr(os, "getuid"): if not hasattr(os, "getuid"):
_unsafe_dir() _unsafe_dir()
dirname = f"__vendoring.jinja2-cache-{os.getuid()}" dirname = f"_jinja2-cache-{os.getuid()}"
actual_dir = os.path.join(tmpdir, dirname) actual_dir = os.path.join(tmpdir, dirname)
try: try:
@@ -332,7 +332,7 @@ class MemcachedBytecodeCache(BytecodeCache):
def __init__( def __init__(
self, self,
client: "_MemcachedClient", client: "_MemcachedClient",
prefix: str = "_vendoring.jinja2/bytecode/", prefix: str = "jinja2/bytecode/",
timeout: t.Optional[int] = None, timeout: t.Optional[int] = None,
ignore_memcache_errors: bool = True, ignore_memcache_errors: bool = True,
): ):

View File

@@ -6,8 +6,8 @@
from itertools import chain from itertools import chain
from keyword import iskeyword as is_python_keyword from keyword import iskeyword as is_python_keyword
from _vendoring.markupsafe import escape from markupsafe import escape
from _vendoring.markupsafe import Markup from markupsafe import Markup
from . import nodes from . import nodes
from .exceptions import TemplateAssertionError from .exceptions import TemplateAssertionError
@@ -23,7 +23,7 @@
from .visitor import NodeVisitor from .visitor import NodeVisitor
if t.TYPE_CHECKING: if t.TYPE_CHECKING:
import _vendoring.typing_extensions as te import typing_extensions as te
from .environment import Environment from .environment import Environment
F = t.TypeVar("F", bound=t.Callable[..., t.Any]) F = t.TypeVar("F", bound=t.Callable[..., t.Any])
@@ -836,7 +836,7 @@ def visit_Template(
exported_names = sorted(exported) exported_names = sorted(exported)
self.writeline("from __future__ import generator_stop") # Python < 3.7 self.writeline("from __future__ import generator_stop") # Python < 3.7
self.writeline("from _vendoring.jinja2.runtime import " + ", ".join(exported_names)) self.writeline("from jinja2.runtime import " + ", ".join(exported_names))
# if we want a deferred initialization we cannot move the # if we want a deferred initialization we cannot move the
# environment into a local name # environment into a local name

View File

@@ -8,7 +8,7 @@
from .utils import Namespace from .utils import Namespace
if t.TYPE_CHECKING: if t.TYPE_CHECKING:
import _vendoring.typing_extensions as te import typing_extensions as te
# defaults for the parser / lexer # defaults for the parser / lexer
BLOCK_START_STRING = "{%" BLOCK_START_STRING = "{%"

View File

@@ -12,7 +12,7 @@
from functools import reduce from functools import reduce
from types import CodeType from types import CodeType
from _vendoring.markupsafe import Markup from markupsafe import Markup
from . import nodes from . import nodes
from .compiler import CodeGenerator from .compiler import CodeGenerator
@@ -55,7 +55,7 @@
from .utils import missing from .utils import missing
if t.TYPE_CHECKING: if t.TYPE_CHECKING:
import _vendoring.typing_extensions as te import typing_extensions as te
from .bccache import BytecodeCache from .bccache import BytecodeCache
from .ext import Extension from .ext import Extension
from .loaders import BaseLoader from .loaders import BaseLoader
@@ -126,7 +126,7 @@ def _environment_config_check(environment: "Environment") -> "Environment":
"""Perform a sanity check on the environment.""" """Perform a sanity check on the environment."""
assert issubclass( assert issubclass(
environment.undefined, Undefined environment.undefined, Undefined
), "'undefined' must be a subclass of '_vendoring.jinja2.Undefined'." ), "'undefined' must be a subclass of 'jinja2.Undefined'."
assert ( assert (
environment.block_start_string environment.block_start_string
!= environment.variable_start_string != environment.variable_start_string
@@ -221,7 +221,7 @@ class Environment:
`autoescape` `autoescape`
If set to ``True`` the XML/HTML autoescaping feature is enabled by If set to ``True`` the XML/HTML autoescaping feature is enabled by
default. For more details about autoescaping see default. For more details about autoescaping see
:class:`~_vendoring.markupsafe.Markup`. As of Jinja 2.4 this can also :class:`~markupsafe.Markup`. As of Jinja 2.4 this can also
be a callable that is passed the template name and has to be a callable that is passed the template name and has to
return ``True`` or ``False`` depending on autoescape should be return ``True`` or ``False`` depending on autoescape should be
enabled by default. enabled by default.
@@ -264,7 +264,7 @@ class Environment:
#: if this environment is sandboxed. Modifying this variable won't make #: if this environment is sandboxed. Modifying this variable won't make
#: the environment sandboxed though. For a real sandboxed environment #: the environment sandboxed though. For a real sandboxed environment
#: have a look at _vendoring.jinja2.sandbox. This flag alone controls the code #: have a look at jinja2.sandbox. This flag alone controls the code
#: generation by the compiler. #: generation by the compiler.
sandboxed = False sandboxed = False
@@ -279,11 +279,11 @@ class Environment:
shared = False shared = False
#: the class that is used for code generation. See #: the class that is used for code generation. See
#: :class:`~_vendoring.jinja2.compiler.CodeGenerator` for more information. #: :class:`~jinja2.compiler.CodeGenerator` for more information.
code_generator_class: t.Type["CodeGenerator"] = CodeGenerator code_generator_class: t.Type["CodeGenerator"] = CodeGenerator
#: the context class that is used for templates. See #: the context class that is used for templates. See
#: :class:`~_vendoring.jinja2.runtime.Context` for more information. #: :class:`~jinja2.runtime.Context` for more information.
context_class: t.Type[Context] = Context context_class: t.Type[Context] = Context
template_class: t.Type["Template"] template_class: t.Type["Template"]
@@ -650,7 +650,7 @@ def _tokenize(
state: t.Optional[str] = None, state: t.Optional[str] = None,
) -> TokenStream: ) -> TokenStream:
"""Called by the parser to do the preprocessing and filtering """Called by the parser to do the preprocessing and filtering
for all the extensions. Returns a :class:`~_vendoring.jinja2.lexer.TokenStream`. for all the extensions. Returns a :class:`~jinja2.lexer.TokenStream`.
""" """
source = self.preprocess(source, name, filename) source = self.preprocess(source, name, filename)
stream = self.lexer.tokenize(source, name, filename, state) stream = self.lexer.tokenize(source, name, filename, state)
@@ -1547,7 +1547,7 @@ def __repr__(self) -> str:
class TemplateExpression: class TemplateExpression:
"""The :meth:`_vendoring.jinja2.Environment.compile_expression` method returns an """The :meth:`jinja2.Environment.compile_expression` method returns an
instance of this object. It encapsulates the expression-like access instance of this object. It encapsulates the expression-like access
to the template with an expression it wraps. to the template with an expression it wraps.
""" """

View File

@@ -4,7 +4,7 @@
import typing as t import typing as t
import warnings import warnings
from _vendoring.markupsafe import Markup from markupsafe import Markup
from . import defaults from . import defaults
from . import nodes from . import nodes
@@ -18,7 +18,7 @@
from .utils import pass_context from .utils import pass_context
if t.TYPE_CHECKING: if t.TYPE_CHECKING:
import _vendoring.typing_extensions as te import typing_extensions as te
from .lexer import Token from .lexer import Token
from .lexer import TokenStream from .lexer import TokenStream
from .parser import Parser from .parser import Parser
@@ -108,10 +108,10 @@ def preprocess(
def filter_stream( def filter_stream(
self, stream: "TokenStream" self, stream: "TokenStream"
) -> t.Union["TokenStream", t.Iterable["Token"]]: ) -> t.Union["TokenStream", t.Iterable["Token"]]:
"""It's passed a :class:`~_vendoring.jinja2.lexer.TokenStream` that can be used """It's passed a :class:`~jinja2.lexer.TokenStream` that can be used
to filter tokens returned. This method has to return an iterable of to filter tokens returned. This method has to return an iterable of
:class:`~_vendoring.jinja2.lexer.Token`\\s, but it doesn't have to return a :class:`~jinja2.lexer.Token`\\s, but it doesn't have to return a
:class:`~_vendoring.jinja2.lexer.TokenStream`. :class:`~jinja2.lexer.TokenStream`.
""" """
return stream return stream
@@ -145,7 +145,7 @@ def call_method(
lineno: t.Optional[int] = None, lineno: t.Optional[int] = None,
) -> nodes.Call: ) -> nodes.Call:
"""Call a method of the extension. This is a shortcut for """Call a method of the extension. This is a shortcut for
:meth:`attr` + :class:`_vendoring.jinja2.nodes.Call`. :meth:`attr` + :class:`jinja2.nodes.Call`.
""" """
if args is None: if args is None:
args = [] args = []
@@ -629,9 +629,9 @@ class DebugExtension(Extension):
.. code-block:: text .. code-block:: text
{'context': {'cycler': <class '_vendoring.jinja2.utils.Cycler'>, {'context': {'cycler': <class 'jinja2.utils.Cycler'>,
..., ...,
'namespace': <class '_vendoring.jinja2.utils.Namespace'>}, 'namespace': <class 'jinja2.utils.Namespace'>},
'filters': ['abs', 'attr', 'batch', 'capitalize', 'center', 'count', 'd', 'filters': ['abs', 'attr', 'batch', 'capitalize', 'center', 'count', 'd',
..., 'urlencode', 'urlize', 'wordcount', 'wordwrap', 'xmlattr'], ..., 'urlencode', 'urlize', 'wordcount', 'wordwrap', 'xmlattr'],
'tests': ['!=', '<', '<=', '==', '>', '>=', 'callable', 'defined', 'tests': ['!=', '<', '<=', '==', '>', '>=', 'callable', 'defined',
@@ -679,7 +679,7 @@ def extract_from_ast(
This example explains the behavior: This example explains the behavior:
>>> from _vendoring.jinja2 import Environment >>> from jinja2 import Environment
>>> env = Environment() >>> env = Environment()
>>> node = env.parse('{{ (_("foo"), _(), ngettext("foo", "bar", 42)) }}') >>> node = env.parse('{{ (_("foo"), _(), ngettext("foo", "bar", 42)) }}')
>>> list(extract_from_ast(node)) >>> list(extract_from_ast(node))

View File

@@ -9,9 +9,9 @@
from itertools import chain from itertools import chain
from itertools import groupby from itertools import groupby
from _vendoring.markupsafe import escape from markupsafe import escape
from _vendoring.markupsafe import Markup from markupsafe import Markup
from _vendoring.markupsafe import soft_str from markupsafe import soft_str
from .async_utils import async_variant from .async_utils import async_variant
from .async_utils import auto_aiter from .async_utils import auto_aiter
@@ -28,7 +28,7 @@
from .utils import urlize from .utils import urlize
if t.TYPE_CHECKING: if t.TYPE_CHECKING:
import _vendoring.typing_extensions as te import typing_extensions as te
from .environment import Environment from .environment import Environment
from .nodes import EvalContext from .nodes import EvalContext
from .runtime import Context from .runtime import Context
@@ -48,7 +48,7 @@ def contextfilter(f: F) -> F:
"""Pass the context as the first argument to the decorated function. """Pass the context as the first argument to the decorated function.
.. deprecated:: 3.0 .. deprecated:: 3.0
Will be removed in Jinja 3.1. Use :func:`~_vendoring.jinja2.pass_context` Will be removed in Jinja 3.1. Use :func:`~jinja2.pass_context`
instead. instead.
""" """
warnings.warn( warnings.warn(
@@ -66,7 +66,7 @@ def evalcontextfilter(f: F) -> F:
.. deprecated:: 3.0 .. deprecated:: 3.0
Will be removed in Jinja 3.1. Use Will be removed in Jinja 3.1. Use
:func:`~_vendoring.jinja2.pass_eval_context` instead. :func:`~jinja2.pass_eval_context` instead.
.. versionadded:: 2.4 .. versionadded:: 2.4
""" """
@@ -85,7 +85,7 @@ def environmentfilter(f: F) -> F:
.. deprecated:: 3.0 .. deprecated:: 3.0
Will be removed in Jinja 3.1. Use Will be removed in Jinja 3.1. Use
:func:`~_vendoring.jinja2.pass_environment` instead. :func:`~jinja2.pass_environment` instead.
""" """
warnings.warn( warnings.warn(
"'environmentfilter' is renamed to 'pass_environment', the old" "'environmentfilter' is renamed to 'pass_environment', the old"
@@ -547,10 +547,10 @@ def do_default(
{{ ''|default('the string was empty', true) }} {{ ''|default('the string was empty', true) }}
.. versionchanged:: 2.11 .. versionchanged:: 2.11
It's now possible to configure the :class:`~_vendoring.jinja2.Environment` with It's now possible to configure the :class:`~jinja2.Environment` with
:class:`~_vendoring.jinja2.ChainableUndefined` to make the `default` filter work :class:`~jinja2.ChainableUndefined` to make the `default` filter work
on nested elements and attributes that may contain undefined values on nested elements and attributes that may contain undefined values
in the chain without getting an :exc:`~_vendoring.jinja2.UndefinedError`. in the chain without getting an :exc:`~jinja2.UndefinedError`.
""" """
if isinstance(value, Undefined) or (boolean and not value): if isinstance(value, Undefined) or (boolean and not value):
return default_value return default_value

View File

@@ -14,7 +14,7 @@
from .utils import LRUCache from .utils import LRUCache
if t.TYPE_CHECKING: if t.TYPE_CHECKING:
import _vendoring.typing_extensions as te import typing_extensions as te
from .environment import Environment from .environment import Environment
# cache for the lexers. Exists in order to be able to have multiple # cache for the lexers. Exists in order to be able to have multiple
@@ -400,7 +400,7 @@ def close(self) -> None:
def expect(self, expr: str) -> Token: def expect(self, expr: str) -> Token:
"""Expect a given token type and return it. This accepts the same """Expect a given token type and return it. This accepts the same
argument as :meth:`_vendoring.jinja2.lexer.Token.test`. argument as :meth:`jinja2.lexer.Token.test`.
""" """
if not self.current.test(expr): if not self.current.test(expr):
expr = describe_token_expr(expr) expr = describe_token_expr(expr)

View File

@@ -47,7 +47,7 @@ class BaseLoader:
A very basic example for a loader that looks up templates on the file A very basic example for a loader that looks up templates on the file
system could look like this:: system could look like this::
from _vendoring.jinja2 import BaseLoader, TemplateNotFound from jinja2 import BaseLoader, TemplateNotFound
from os.path import join, exists, getmtime from os.path import join, exists, getmtime
class MyLoader(BaseLoader): class MyLoader(BaseLoader):
@@ -594,7 +594,7 @@ class ModuleLoader(BaseLoader):
def __init__( def __init__(
self, path: t.Union[str, os.PathLike, t.Sequence[t.Union[str, os.PathLike]]] self, path: t.Union[str, os.PathLike, t.Sequence[t.Union[str, os.PathLike]]]
) -> None: ) -> None:
package_name = f"__vendoring.jinja2_module_templates_{id(self):x}" package_name = f"_jinja2_module_templates_{id(self):x}"
# create a fake module that looks for the templates in the # create a fake module that looks for the templates in the
# path given. # path given.

View File

@@ -36,7 +36,7 @@ def find_undeclared_variables(ast: nodes.Template) -> t.Set[str]:
variables will be used depending on the path the execution takes at variables will be used depending on the path the execution takes at
runtime, all variables are returned. runtime, all variables are returned.
>>> from _vendoring.jinja2 import Environment, meta >>> from jinja2 import Environment, meta
>>> env = Environment() >>> env = Environment()
>>> ast = env.parse('{% set foo = 42 %}{{ bar + foo }}') >>> ast = env.parse('{% set foo = 42 %}{{ bar + foo }}')
>>> meta.find_undeclared_variables(ast) == {'bar'} >>> meta.find_undeclared_variables(ast) == {'bar'}
@@ -64,7 +64,7 @@ def find_referenced_templates(ast: nodes.Template) -> t.Iterator[t.Optional[str]
imports. If dynamic inheritance or inclusion is used, `None` will be imports. If dynamic inheritance or inclusion is used, `None` will be
yielded. yielded.
>>> from _vendoring.jinja2 import Environment, meta >>> from jinja2 import Environment, meta
>>> env = Environment() >>> env = Environment()
>>> ast = env.parse('{% extends "layout.html" %}{% include helper %}') >>> ast = env.parse('{% extends "layout.html" %}{% include helper %}')
>>> list(meta.find_referenced_templates(ast)) >>> list(meta.find_referenced_templates(ast))

View File

@@ -7,12 +7,12 @@
import typing as t import typing as t
from collections import deque from collections import deque
from _vendoring.markupsafe import Markup from markupsafe import Markup
from .utils import _PassArg from .utils import _PassArg
if t.TYPE_CHECKING: if t.TYPE_CHECKING:
import _vendoring.typing_extensions as te import typing_extensions as te
from .environment import Environment from .environment import Environment
_NodeBound = t.TypeVar("_NodeBound", bound="Node") _NodeBound = t.TypeVar("_NodeBound", bound="Node")
@@ -1041,7 +1041,7 @@ class ExtensionAttribute(Expr):
The identifier is the identifier of the :class:`Extension`. The identifier is the identifier of the :class:`Extension`.
This node is usually constructed by calling the This node is usually constructed by calling the
:meth:`~_vendoring.jinja2.ext.Extension.attr` method on an extension. :meth:`~jinja2.ext.Extension.attr` method on an extension.
""" """
fields = ("identifier", "name") fields = ("identifier", "name")
@@ -1063,7 +1063,7 @@ class ImportedName(Expr):
class InternalName(Expr): class InternalName(Expr):
"""An internal name in the compiler. You cannot create these nodes """An internal name in the compiler. You cannot create these nodes
yourself but the parser provides a yourself but the parser provides a
:meth:`~_vendoring.jinja2.parser.Parser.free_identifier` method that creates :meth:`~jinja2.parser.Parser.free_identifier` method that creates
a new identifier for you. This identifier is not available from the a new identifier for you. This identifier is not available from the
template and is not treated specially by the compiler. template and is not treated specially by the compiler.
""" """
@@ -1114,7 +1114,7 @@ def as_const(
class ContextReference(Expr): class ContextReference(Expr):
"""Returns the current template context. It can be used like a """Returns the current template context. It can be used like a
:class:`Name` node, with a ``'load'`` ctx and will return the :class:`Name` node, with a ``'load'`` ctx and will return the
current :class:`~_vendoring.jinja2.runtime.Context` object. current :class:`~jinja2.runtime.Context` object.
Here an example that assigns the current template name to a Here an example that assigns the current template name to a
variable named `foo`:: variable named `foo`::
@@ -1123,7 +1123,7 @@ class ContextReference(Expr):
Getattr(ContextReference(), 'name')) Getattr(ContextReference(), 'name'))
This is basically equivalent to using the This is basically equivalent to using the
:func:`~_vendoring.jinja2.pass_context` decorator when using the high-level :func:`~jinja2.pass_context` decorator when using the high-level
API, which causes a reference to the context to be passed as the API, which causes a reference to the context to be passed as the
first argument to a function. first argument to a function.
""" """
@@ -1188,7 +1188,7 @@ class EvalContextModifier(Stmt):
class ScopedEvalContextModifier(EvalContextModifier): class ScopedEvalContextModifier(EvalContextModifier):
"""Modifies the eval context and reverts it later. Works exactly like """Modifies the eval context and reverts it later. Works exactly like
:class:`EvalContextModifier` but will only modify the :class:`EvalContextModifier` but will only modify the
:class:`~_vendoring.jinja2.nodes.EvalContext` for nodes in the :attr:`body`. :class:`~jinja2.nodes.EvalContext` for nodes in the :attr:`body`.
""" """
fields = ("body",) fields = ("body",)

View File

@@ -9,7 +9,7 @@
from .lexer import describe_token_expr from .lexer import describe_token_expr
if t.TYPE_CHECKING: if t.TYPE_CHECKING:
import _vendoring.typing_extensions as te import typing_extensions as te
from .environment import Environment from .environment import Environment
_ImportInclude = t.TypeVar("_ImportInclude", nodes.Import, nodes.Include) _ImportInclude = t.TypeVar("_ImportInclude", nodes.Import, nodes.Include)
@@ -156,7 +156,7 @@ def is_tuple_end(
return False return False
def free_identifier(self, lineno: t.Optional[int] = None) -> nodes.InternalName: def free_identifier(self, lineno: t.Optional[int] = None) -> nodes.InternalName:
"""Return a new free identifier as :class:`~_vendoring.jinja2.nodes.InternalName`.""" """Return a new free identifier as :class:`~jinja2.nodes.InternalName`."""
self._last_identifier += 1 self._last_identifier += 1
rv = object.__new__(nodes.InternalName) rv = object.__new__(nodes.InternalName)
nodes.Node.__init__(rv, f"fi{self._last_identifier}", lineno=lineno) nodes.Node.__init__(rv, f"fi{self._last_identifier}", lineno=lineno)
@@ -687,7 +687,7 @@ def parse_tuple(
explicit_parentheses: bool = False, explicit_parentheses: bool = False,
) -> t.Union[nodes.Tuple, nodes.Expr]: ) -> t.Union[nodes.Tuple, nodes.Expr]:
"""Works like `parse_expression` but if multiple expressions are """Works like `parse_expression` but if multiple expressions are
delimited by a comma a :class:`~_vendoring.jinja2.nodes.Tuple` node is created. delimited by a comma a :class:`~jinja2.nodes.Tuple` node is created.
This method could also return a regular expression instead of a tuple This method could also return a regular expression instead of a tuple
if no commas where found. if no commas where found.

View File

@@ -5,9 +5,9 @@
from collections import abc from collections import abc
from itertools import chain from itertools import chain
from _vendoring.markupsafe import escape # noqa: F401 from markupsafe import escape # noqa: F401
from _vendoring.markupsafe import Markup from markupsafe import Markup
from _vendoring.markupsafe import soft_str from markupsafe import soft_str
from .async_utils import auto_aiter from .async_utils import auto_aiter
from .async_utils import auto_await # noqa: F401 from .async_utils import auto_await # noqa: F401
@@ -28,7 +28,7 @@
if t.TYPE_CHECKING: if t.TYPE_CHECKING:
import logging import logging
import _vendoring.typing_extensions as te import typing_extensions as te
from .environment import Environment from .environment import Environment
class LoopRenderFunc(te.Protocol): class LoopRenderFunc(te.Protocol):
@@ -849,7 +849,7 @@ class Undefined:
>>> foo + 42 >>> foo + 42
Traceback (most recent call last): Traceback (most recent call last):
... ...
_vendoring.jinja2.exceptions.UndefinedError: 'foo' is undefined jinja2.exceptions.UndefinedError: 'foo' is undefined
""" """
__slots__ = ( __slots__ = (
@@ -1020,7 +1020,7 @@ class ChainableUndefined(Undefined):
>>> foo.bar['baz'] + 42 >>> foo.bar['baz'] + 42
Traceback (most recent call last): Traceback (most recent call last):
... ...
_vendoring.jinja2.exceptions.UndefinedError: 'foo' is undefined jinja2.exceptions.UndefinedError: 'foo' is undefined
.. versionadded:: 2.11.0 .. versionadded:: 2.11.0
""" """
@@ -1047,7 +1047,7 @@ class DebugUndefined(Undefined):
>>> foo + 42 >>> foo + 42
Traceback (most recent call last): Traceback (most recent call last):
... ...
_vendoring.jinja2.exceptions.UndefinedError: 'foo' is undefined jinja2.exceptions.UndefinedError: 'foo' is undefined
""" """
__slots__ = () __slots__ = ()
@@ -1077,15 +1077,15 @@ class StrictUndefined(Undefined):
>>> str(foo) >>> str(foo)
Traceback (most recent call last): Traceback (most recent call last):
... ...
_vendoring.jinja2.exceptions.UndefinedError: 'foo' is undefined jinja2.exceptions.UndefinedError: 'foo' is undefined
>>> not foo >>> not foo
Traceback (most recent call last): Traceback (most recent call last):
... ...
_vendoring.jinja2.exceptions.UndefinedError: 'foo' is undefined jinja2.exceptions.UndefinedError: 'foo' is undefined
>>> foo + 42 >>> foo + 42
Traceback (most recent call last): Traceback (most recent call last):
... ...
_vendoring.jinja2.exceptions.UndefinedError: 'foo' is undefined jinja2.exceptions.UndefinedError: 'foo' is undefined
""" """
__slots__ = () __slots__ = ()

View File

@@ -9,8 +9,8 @@
from collections import deque from collections import deque
from string import Formatter from string import Formatter
from _vendoring.markupsafe import EscapeFormatter from markupsafe import EscapeFormatter
from _vendoring.markupsafe import Markup from markupsafe import Markup
from .environment import Environment from .environment import Environment
from .exceptions import SecurityError from .exceptions import SecurityError
@@ -128,7 +128,7 @@ def is_internal_attribute(obj: t.Any, attr: str) -> bool:
python objects. This is useful if the environment method python objects. This is useful if the environment method
:meth:`~SandboxedEnvironment.is_safe_attribute` is overridden. :meth:`~SandboxedEnvironment.is_safe_attribute` is overridden.
>>> from _vendoring.jinja2.sandbox import is_internal_attribute >>> from jinja2.sandbox import is_internal_attribute
>>> is_internal_attribute(str, "mro") >>> is_internal_attribute(str, "mro")
True True
>>> is_internal_attribute(str, "upper") >>> is_internal_attribute(str, "upper")

View File

@@ -12,10 +12,10 @@
from types import CodeType from types import CodeType
from urllib.parse import quote_from_bytes from urllib.parse import quote_from_bytes
import _vendoring.markupsafe import markupsafe
if t.TYPE_CHECKING: if t.TYPE_CHECKING:
import _vendoring.typing_extensions as te import typing_extensions as te
F = t.TypeVar("F", bound=t.Callable[..., t.Any]) F = t.TypeVar("F", bound=t.Callable[..., t.Any])
@@ -28,7 +28,7 @@
def pass_context(f: F) -> F: def pass_context(f: F) -> F:
"""Pass the :class:`~_vendoring.jinja2.runtime.Context` as the first argument """Pass the :class:`~jinja2.runtime.Context` as the first argument
to the decorated function when called while rendering a template. to the decorated function when called while rendering a template.
Can be used on functions, filters, and tests. Can be used on functions, filters, and tests.
@@ -45,7 +45,7 @@ def pass_context(f: F) -> F:
def pass_eval_context(f: F) -> F: def pass_eval_context(f: F) -> F:
"""Pass the :class:`~_vendoring.jinja2.nodes.EvalContext` as the first argument """Pass the :class:`~jinja2.nodes.EvalContext` as the first argument
to the decorated function when called while rendering a template. to the decorated function when called while rendering a template.
See :ref:`eval-context`. See :ref:`eval-context`.
@@ -62,7 +62,7 @@ def pass_eval_context(f: F) -> F:
def pass_environment(f: F) -> F: def pass_environment(f: F) -> F:
"""Pass the :class:`~_vendoring.jinja2.Environment` as the first argument to """Pass the :class:`~jinja2.Environment` as the first argument to
the decorated function when called while rendering a template. the decorated function when called while rendering a template.
Can be used on functions, filters, and tests. Can be used on functions, filters, and tests.
@@ -104,7 +104,7 @@ def contextfunction(f: F) -> F:
"""Pass the context as the first argument to the decorated function. """Pass the context as the first argument to the decorated function.
.. deprecated:: 3.0 .. deprecated:: 3.0
Will be removed in Jinja 3.1. Use :func:`~_vendoring.jinja2.pass_context` Will be removed in Jinja 3.1. Use :func:`~jinja2.pass_context`
instead. instead.
""" """
warnings.warn( warnings.warn(
@@ -122,7 +122,7 @@ def evalcontextfunction(f: F) -> F:
.. deprecated:: 3.0 .. deprecated:: 3.0
Will be removed in Jinja 3.1. Use Will be removed in Jinja 3.1. Use
:func:`~_vendoring.jinja2.pass_eval_context` instead. :func:`~jinja2.pass_eval_context` instead.
.. versionadded:: 2.4 .. versionadded:: 2.4
""" """
@@ -141,7 +141,7 @@ def environmentfunction(f: F) -> F:
.. deprecated:: 3.0 .. deprecated:: 3.0
Will be removed in Jinja 3.1. Use Will be removed in Jinja 3.1. Use
:func:`~_vendoring.jinja2.pass_environment` instead. :func:`~jinja2.pass_environment` instead.
""" """
warnings.warn( warnings.warn(
"'environmentfunction' is renamed to 'pass_environment', the" "'environmentfunction' is renamed to 'pass_environment', the"
@@ -335,9 +335,9 @@ def trim_url(x: str) -> str:
def trim_url(x: str) -> str: def trim_url(x: str) -> str:
return x return x
words = re.split(r"(\s+)", str(_vendoring.markupsafe.escape(text))) words = re.split(r"(\s+)", str(markupsafe.escape(text)))
rel_attr = f' rel="{_vendoring.markupsafe.escape(rel)}"' if rel else "" rel_attr = f' rel="{markupsafe.escape(rel)}"' if rel else ""
target_attr = f' target="{_vendoring.markupsafe.escape(target)}"' if target else "" target_attr = f' target="{markupsafe.escape(target)}"' if target else ""
for i, word in enumerate(words): for i, word in enumerate(words):
head, middle, tail = "", word, "" head, middle, tail = "", word, ""
@@ -455,8 +455,8 @@ def generate_lorem_ipsum(
if not html: if not html:
return "\n\n".join(result) return "\n\n".join(result)
return _vendoring.markupsafe.Markup( return markupsafe.Markup(
"\n".join(f"<p>{_vendoring.markupsafe.escape(x)}</p>" for x in result) "\n".join(f"<p>{markupsafe.escape(x)}</p>" for x in result)
) )
@@ -658,7 +658,7 @@ def select_autoescape(
If you want to enable it for all templates created from strings or If you want to enable it for all templates created from strings or
for all templates with `.html` and `.xml` extensions:: for all templates with `.html` and `.xml` extensions::
from _vendoring.jinja2 import Environment, select_autoescape from jinja2 import Environment, select_autoescape
env = Environment(autoescape=select_autoescape( env = Environment(autoescape=select_autoescape(
enabled_extensions=('html', 'xml'), enabled_extensions=('html', 'xml'),
default_for_string=True, default_for_string=True,
@@ -667,7 +667,7 @@ def select_autoescape(
Example configuration to turn it on at all times except if the template Example configuration to turn it on at all times except if the template
ends with `.txt`:: ends with `.txt`::
from _vendoring.jinja2 import Environment, select_autoescape from jinja2 import Environment, select_autoescape
env = Environment(autoescape=select_autoescape( env = Environment(autoescape=select_autoescape(
disabled_extensions=('txt',), disabled_extensions=('txt',),
default_for_string=True, default_for_string=True,
@@ -703,10 +703,10 @@ def autoescape(template_name: t.Optional[str]) -> bool:
def htmlsafe_json_dumps( def htmlsafe_json_dumps(
obj: t.Any, dumps: t.Optional[t.Callable[..., str]] = None, **kwargs: t.Any obj: t.Any, dumps: t.Optional[t.Callable[..., str]] = None, **kwargs: t.Any
) -> _vendoring.markupsafe.Markup: ) -> markupsafe.Markup:
"""Serialize an object to a string of JSON with :func:`json.dumps`, """Serialize an object to a string of JSON with :func:`json.dumps`,
then replace HTML-unsafe characters with Unicode escapes and mark then replace HTML-unsafe characters with Unicode escapes and mark
the result safe with :class:`~_vendoring.markupsafe.Markup`. the result safe with :class:`~markupsafe.Markup`.
This is available in templates as the ``|tojson`` filter. This is available in templates as the ``|tojson`` filter.
@@ -732,7 +732,7 @@ def htmlsafe_json_dumps(
if dumps is None: if dumps is None:
dumps = json.dumps dumps = json.dumps
return _vendoring.markupsafe.Markup( return markupsafe.Markup(
dumps(obj, **kwargs) dumps(obj, **kwargs)
.replace("<", "\\u003c") .replace("<", "\\u003c")
.replace(">", "\\u003e") .replace(">", "\\u003e")
@@ -833,11 +833,11 @@ def __repr__(self) -> str:
return f"<Namespace {self.__attrs!r}>" return f"<Namespace {self.__attrs!r}>"
class Markup(_vendoring.markupsafe.Markup): class Markup(markupsafe.Markup):
def __new__(cls, base="", encoding=None, errors="strict"): # type: ignore def __new__(cls, base="", encoding=None, errors="strict"): # type: ignore
warnings.warn( warnings.warn(
"'_vendoring.jinja2.Markup' is deprecated and will be removed in Jinja" "'jinja2.Markup' is deprecated and will be removed in Jinja"
" 3.1. Import '_vendoring.markupsafe.Markup' instead.", " 3.1. Import 'markupsafe.Markup' instead.",
DeprecationWarning, DeprecationWarning,
stacklevel=2, stacklevel=2,
) )
@@ -846,9 +846,9 @@ def __new__(cls, base="", encoding=None, errors="strict"): # type: ignore
def escape(s: t.Any) -> str: def escape(s: t.Any) -> str:
warnings.warn( warnings.warn(
"'_vendoring.jinja2.escape' is deprecated and will be removed in Jinja" "'jinja2.escape' is deprecated and will be removed in Jinja"
" 3.1. Import '_vendoring.markupsafe.escape' instead.", " 3.1. Import 'markupsafe.escape' instead.",
DeprecationWarning, DeprecationWarning,
stacklevel=2, stacklevel=2,
) )
return _vendoring.markupsafe.escape(s) return markupsafe.escape(s)

View File

@@ -6,7 +6,7 @@
from .nodes import Node from .nodes import Node
if t.TYPE_CHECKING: if t.TYPE_CHECKING:
import _vendoring.typing_extensions as te import typing_extensions as te
class VisitCallable(te.Protocol): class VisitCallable(te.Protocol):
def __call__(self, node: Node, *args: t.Any, **kwargs: t.Any) -> t.Any: def __call__(self, node: Node, *args: t.Any, **kwargs: t.Any) -> t.Any:

View File

@@ -0,0 +1 @@
from jsonschema import *

View File

@@ -8,18 +8,18 @@
instance under a schema, and will create a validator for you. instance under a schema, and will create a validator for you.
""" """
from _vendoring.jsonschema.exceptions import ( from jsonschema.exceptions import (
ErrorTree, FormatError, RefResolutionError, SchemaError, ValidationError ErrorTree, FormatError, RefResolutionError, SchemaError, ValidationError
) )
from _vendoring.jsonschema._format import ( from jsonschema._format import (
FormatChecker, FormatChecker,
draft3_format_checker, draft3_format_checker,
draft4_format_checker, draft4_format_checker,
draft6_format_checker, draft6_format_checker,
draft7_format_checker, draft7_format_checker,
) )
from _vendoring.jsonschema._types import TypeChecker from jsonschema._types import TypeChecker
from _vendoring.jsonschema.validators import ( from jsonschema.validators import (
Draft3Validator, Draft3Validator,
Draft4Validator, Draft4Validator,
Draft6Validator, Draft6Validator,

View File

@@ -1,2 +1,2 @@
from _vendoring.jsonschema.cli import main from jsonschema.cli import main
main() main()

View File

@@ -3,8 +3,8 @@
import socket import socket
import struct import struct
from _vendoring.jsonschema.compat import str_types from jsonschema.compat import str_types
from _vendoring.jsonschema.exceptions import FormatError from jsonschema.exceptions import FormatError
class FormatChecker(object): class FormatChecker(object):

View File

@@ -1,6 +1,6 @@
from _vendoring.jsonschema import _utils from jsonschema import _utils
from _vendoring.jsonschema.compat import iteritems from jsonschema.compat import iteritems
from _vendoring.jsonschema.exceptions import ValidationError from jsonschema.exceptions import ValidationError
def dependencies_draft3(validator, dependencies, instance, schema): def dependencies_draft3(validator, dependencies, instance, schema):

View File

@@ -9,7 +9,7 @@
import sys import sys
from _vendoring.jsonschema.compat import PY3 from jsonschema.compat import PY3
class _NoModuleFound(Exception): class _NoModuleFound(Exception):

View File

@@ -1,10 +1,10 @@
import numbers import numbers
from _vendoring.pyrsistent import pmap from pyrsistent import pmap
import _vendoring.attr import attr
from _vendoring.jsonschema.compat import int_types, str_types from jsonschema.compat import int_types, str_types
from _vendoring.jsonschema.exceptions import UndefinedTypeCheck from jsonschema.exceptions import UndefinedTypeCheck
def is_array(checker, instance): def is_array(checker, instance):
@@ -45,7 +45,7 @@ def is_any(checker, instance):
return True return True
@_vendoring.attr.s(frozen=True) @attr.s(frozen=True)
class TypeChecker(object): class TypeChecker(object):
""" """
A ``type`` property checker. A ``type`` property checker.
@@ -61,7 +61,7 @@ class TypeChecker(object):
The initial mapping of types to their checking functions. The initial mapping of types to their checking functions.
""" """
_type_checkers = _vendoring.attr.ib(default=pmap(), converter=pmap) _type_checkers = attr.ib(default=pmap(), converter=pmap)
def is_type(self, instance, type): def is_type(self, instance, type):
""" """
@@ -131,7 +131,7 @@ def redefine_many(self, definitions=()):
A new `TypeChecker` instance. A new `TypeChecker` instance.
""" """
return _vendoring.attr.evolve( return attr.evolve(
self, type_checkers=self._type_checkers.update(definitions), self, type_checkers=self._type_checkers.update(definitions),
) )
@@ -162,7 +162,7 @@ def remove(self, *types):
checkers = checkers.remove(each) checkers = checkers.remove(each)
except KeyError: except KeyError:
raise UndefinedTypeCheck(each) raise UndefinedTypeCheck(each)
return _vendoring.attr.evolve(self, type_checkers=checkers) return attr.evolve(self, type_checkers=checkers)
draft3_type_checker = TypeChecker( draft3_type_checker = TypeChecker(

View File

@@ -3,7 +3,7 @@
import pkgutil import pkgutil
import re import re
from _vendoring.jsonschema.compat import MutableMapping, str_types, urlsplit from jsonschema.compat import MutableMapping, str_types, urlsplit
class URIDict(MutableMapping): class URIDict(MutableMapping):
@@ -51,7 +51,7 @@ def load_schema(name):
Load a schema from ./schemas/``name``.json and return it. Load a schema from ./schemas/``name``.json and return it.
""" """
data = pkgutil.get_data("_vendoring.jsonschema", "schemas/{0}.json".format(name)) data = pkgutil.get_data("jsonschema", "schemas/{0}.json".format(name))
return json.loads(data.decode("utf-8")) return json.loads(data.decode("utf-8"))

View File

@@ -1,6 +1,6 @@
import re import re
from _vendoring.jsonschema._utils import ( from jsonschema._utils import (
ensure_list, ensure_list,
equal, equal,
extras_msg, extras_msg,
@@ -9,8 +9,8 @@
unbool, unbool,
uniq, uniq,
) )
from _vendoring.jsonschema.exceptions import FormatError, ValidationError from jsonschema.exceptions import FormatError, ValidationError
from _vendoring.jsonschema.compat import iteritems from jsonschema.compat import iteritems
def patternProperties(validator, patternProperties, instance, schema): def patternProperties(validator, patternProperties, instance, schema):

View File

@@ -6,10 +6,10 @@
""" """
from twisted.python.filepath import FilePath from twisted.python.filepath import FilePath
from pyperf import Runner from pyperf import Runner
from _vendoring.pyrsistent import m from pyrsistent import m
from _vendoring.jsonschema.tests._suite import Version from jsonschema.tests._suite import Version
import _vendoring.jsonschema import jsonschema
issue232 = Version( issue232 = Version(

View File

@@ -7,7 +7,7 @@
""" """
from pyperf import Runner from pyperf import Runner
from _vendoring.jsonschema.tests._suite import Suite from jsonschema.tests._suite import Suite
if __name__ == "__main__": if __name__ == "__main__":

View File

@@ -6,9 +6,9 @@
import json import json
import sys import sys
from _vendoring.jsonschema import __version__ from jsonschema import __version__
from _vendoring.jsonschema._reflect import namedAny from jsonschema._reflect import namedAny
from _vendoring.jsonschema.validators import validator_for from jsonschema.validators import validator_for
def _namedAnyWithDefault(name): def _namedAnyWithDefault(name):

View File

@@ -6,10 +6,10 @@
import pprint import pprint
import textwrap import textwrap
import _vendoring.attr import attr
from _vendoring.jsonschema import _utils from jsonschema import _utils
from _vendoring.jsonschema.compat import PY3, iteritems from jsonschema.compat import PY3, iteritems
WEAK_MATCHES = frozenset(["anyOf", "oneOf"]) WEAK_MATCHES = frozenset(["anyOf", "oneOf"])
@@ -149,13 +149,13 @@ class SchemaError(_Error):
_word_for_instance_in_error_message = "schema" _word_for_instance_in_error_message = "schema"
@_vendoring.attr.s(hash=True) @attr.s(hash=True)
class RefResolutionError(Exception): class RefResolutionError(Exception):
""" """
A ref could not be resolved. A ref could not be resolved.
""" """
_cause = _vendoring.attr.ib() _cause = attr.ib()
def __str__(self): def __str__(self):
return str(self._cause) return str(self._cause)

View File

@@ -0,0 +1,5 @@
def bug(issue=None):
message = "A known bug."
if issue is not None:
message += " See issue #{issue}.".format(issue=issue)
return message

View File

@@ -0,0 +1,239 @@
"""
Python representations of the JSON Schema Test Suite tests.
"""
from functools import partial
import json
import os
import re
import subprocess
import sys
import unittest
from twisted.python.filepath import FilePath
import attr
from jsonschema.compat import PY3
from jsonschema.validators import validators
import jsonschema
def _find_suite():
root = os.environ.get("JSON_SCHEMA_TEST_SUITE")
if root is not None:
return FilePath(root)
root = FilePath(jsonschema.__file__).parent().sibling("json")
if not root.isdir(): # pragma: no cover
raise ValueError(
(
"Can't find the JSON-Schema-Test-Suite directory. "
"Set the 'JSON_SCHEMA_TEST_SUITE' environment "
"variable or run the tests from alongside a checkout "
"of the suite."
),
)
return root
@attr.s(hash=True)
class Suite(object):
_root = attr.ib(default=attr.Factory(_find_suite))
def _remotes(self):
jsonschema_suite = self._root.descendant(["bin", "jsonschema_suite"])
remotes = subprocess.check_output(
[sys.executable, jsonschema_suite.path, "remotes"],
)
return {
"http://localhost:1234/" + name: schema
for name, schema in json.loads(remotes.decode("utf-8")).items()
}
def benchmark(self, runner): # pragma: no cover
for name in validators:
self.version(name=name).benchmark(runner=runner)
def version(self, name):
return Version(
name=name,
path=self._root.descendant(["tests", name]),
remotes=self._remotes(),
)
@attr.s(hash=True)
class Version(object):
_path = attr.ib()
_remotes = attr.ib()
name = attr.ib()
def benchmark(self, runner, **kwargs): # pragma: no cover
for suite in self.tests():
for test in suite:
runner.bench_func(
test.fully_qualified_name,
partial(test.validate_ignoring_errors, **kwargs),
)
def tests(self):
return (
test
for child in self._path.globChildren("*.json")
for test in self._tests_in(
subject=child.basename()[:-5],
path=child,
)
)
def format_tests(self):
path = self._path.descendant(["optional", "format"])
return (
test
for child in path.globChildren("*.json")
for test in self._tests_in(
subject=child.basename()[:-5],
path=child,
)
)
def tests_of(self, name):
return self._tests_in(
subject=name,
path=self._path.child(name + ".json"),
)
def optional_tests_of(self, name):
return self._tests_in(
subject=name,
path=self._path.descendant(["optional", name + ".json"]),
)
def to_unittest_testcase(self, *suites, **kwargs):
name = kwargs.pop("name", "Test" + self.name.title())
methods = {
test.method_name: test.to_unittest_method(**kwargs)
for suite in suites
for tests in suite
for test in tests
}
cls = type(name, (unittest.TestCase,), methods)
try:
cls.__module__ = _someone_save_us_the_module_of_the_caller()
except Exception: # pragma: no cover
# We're doing crazy things, so if they go wrong, like a function
# behaving differently on some other interpreter, just make them
# not happen.
pass
return cls
def _tests_in(self, subject, path):
for each in json.loads(path.getContent().decode("utf-8")):
yield (
_Test(
version=self,
subject=subject,
case_description=each["description"],
schema=each["schema"],
remotes=self._remotes,
**test
) for test in each["tests"]
)
@attr.s(hash=True, repr=False)
class _Test(object):
version = attr.ib()
subject = attr.ib()
case_description = attr.ib()
description = attr.ib()
data = attr.ib()
schema = attr.ib(repr=False)
valid = attr.ib()
_remotes = attr.ib()
def __repr__(self): # pragma: no cover
return "<Test {}>".format(self.fully_qualified_name)
@property
def fully_qualified_name(self): # pragma: no cover
return " > ".join(
[
self.version.name,
self.subject,
self.case_description,
self.description,
]
)
@property
def method_name(self):
delimiters = r"[\W\- ]+"
name = "test_%s_%s_%s" % (
re.sub(delimiters, "_", self.subject),
re.sub(delimiters, "_", self.case_description),
re.sub(delimiters, "_", self.description),
)
if not PY3: # pragma: no cover
name = name.encode("utf-8")
return name
def to_unittest_method(self, skip=lambda test: None, **kwargs):
if self.valid:
def fn(this):
self.validate(**kwargs)
else:
def fn(this):
with this.assertRaises(jsonschema.ValidationError):
self.validate(**kwargs)
fn.__name__ = self.method_name
reason = skip(self)
return unittest.skipIf(reason is not None, reason)(fn)
def validate(self, Validator, **kwargs):
resolver = jsonschema.RefResolver.from_schema(
schema=self.schema,
store=self._remotes,
id_of=Validator.ID_OF,
)
jsonschema.validate(
instance=self.data,
schema=self.schema,
cls=Validator,
resolver=resolver,
**kwargs
)
def validate_ignoring_errors(self, Validator): # pragma: no cover
try:
self.validate(Validator=Validator)
except jsonschema.ValidationError:
pass
def _someone_save_us_the_module_of_the_caller():
"""
The FQON of the module 2nd stack frames up from here.
This is intended to allow us to dynamicallly return test case classes that
are indistinguishable from being defined in the module that wants them.
Otherwise, trial will mis-print the FQON, and copy pasting it won't re-run
the class that really is running.
Save us all, this is all so so so so so terrible.
"""
return sys._getframe(2).f_globals["__name__"]

View File

@@ -0,0 +1,151 @@
from unittest import TestCase
import json
import subprocess
import sys
from jsonschema import Draft4Validator, ValidationError, cli, __version__
from jsonschema.compat import NativeIO
from jsonschema.exceptions import SchemaError
def fake_validator(*errors):
errors = list(reversed(errors))
class FakeValidator(object):
def __init__(self, *args, **kwargs):
pass
def iter_errors(self, instance):
if errors:
return errors.pop()
return []
def check_schema(self, schema):
pass
return FakeValidator
class TestParser(TestCase):
FakeValidator = fake_validator()
instance_file = "foo.json"
schema_file = "schema.json"
def setUp(self):
cli.open = self.fake_open
self.addCleanup(delattr, cli, "open")
def fake_open(self, path):
if path == self.instance_file:
contents = ""
elif path == self.schema_file:
contents = {}
else: # pragma: no cover
self.fail("What is {!r}".format(path))
return NativeIO(json.dumps(contents))
def test_find_validator_by_fully_qualified_object_name(self):
arguments = cli.parse_args(
[
"--validator",
"jsonschema.tests.test_cli.TestParser.FakeValidator",
"--instance", self.instance_file,
self.schema_file,
]
)
self.assertIs(arguments["validator"], self.FakeValidator)
def test_find_validator_in_jsonschema(self):
arguments = cli.parse_args(
[
"--validator", "Draft4Validator",
"--instance", self.instance_file,
self.schema_file,
]
)
self.assertIs(arguments["validator"], Draft4Validator)
class TestCLI(TestCase):
def test_draft3_schema_draft4_validator(self):
stdout, stderr = NativeIO(), NativeIO()
with self.assertRaises(SchemaError):
cli.run(
{
"validator": Draft4Validator,
"schema": {
"anyOf": [
{"minimum": 20},
{"type": "string"},
{"required": True},
],
},
"instances": [1],
"error_format": "{error.message}",
},
stdout=stdout,
stderr=stderr,
)
def test_successful_validation(self):
stdout, stderr = NativeIO(), NativeIO()
exit_code = cli.run(
{
"validator": fake_validator(),
"schema": {},
"instances": [1],
"error_format": "{error.message}",
},
stdout=stdout,
stderr=stderr,
)
self.assertFalse(stdout.getvalue())
self.assertFalse(stderr.getvalue())
self.assertEqual(exit_code, 0)
def test_unsuccessful_validation(self):
error = ValidationError("I am an error!", instance=1)
stdout, stderr = NativeIO(), NativeIO()
exit_code = cli.run(
{
"validator": fake_validator([error]),
"schema": {},
"instances": [1],
"error_format": "{error.instance} - {error.message}",
},
stdout=stdout,
stderr=stderr,
)
self.assertFalse(stdout.getvalue())
self.assertEqual(stderr.getvalue(), "1 - I am an error!")
self.assertEqual(exit_code, 1)
def test_unsuccessful_validation_multiple_instances(self):
first_errors = [
ValidationError("9", instance=1),
ValidationError("8", instance=1),
]
second_errors = [ValidationError("7", instance=2)]
stdout, stderr = NativeIO(), NativeIO()
exit_code = cli.run(
{
"validator": fake_validator(first_errors, second_errors),
"schema": {},
"instances": [1, 2],
"error_format": "{error.instance} - {error.message}\t",
},
stdout=stdout,
stderr=stderr,
)
self.assertFalse(stdout.getvalue())
self.assertEqual(stderr.getvalue(), "1 - 9\t1 - 8\t2 - 7\t")
self.assertEqual(exit_code, 1)
def test_version(self):
version = subprocess.check_output(
[sys.executable, "-m", "jsonschema", "--version"],
stderr=subprocess.STDOUT,
)
version = version.decode("utf-8").strip()
self.assertEqual(version, __version__)

View File

@@ -0,0 +1,462 @@
from unittest import TestCase
import textwrap
from jsonschema import Draft4Validator, exceptions
from jsonschema.compat import PY3
class TestBestMatch(TestCase):
def best_match(self, errors):
errors = list(errors)
best = exceptions.best_match(errors)
reversed_best = exceptions.best_match(reversed(errors))
msg = "Didn't return a consistent best match!\nGot: {0}\n\nThen: {1}"
self.assertEqual(
best._contents(), reversed_best._contents(),
msg=msg.format(best, reversed_best),
)
return best
def test_shallower_errors_are_better_matches(self):
validator = Draft4Validator(
{
"properties": {
"foo": {
"minProperties": 2,
"properties": {"bar": {"type": "object"}},
},
},
},
)
best = self.best_match(validator.iter_errors({"foo": {"bar": []}}))
self.assertEqual(best.validator, "minProperties")
def test_oneOf_and_anyOf_are_weak_matches(self):
"""
A property you *must* match is probably better than one you have to
match a part of.
"""
validator = Draft4Validator(
{
"minProperties": 2,
"anyOf": [{"type": "string"}, {"type": "number"}],
"oneOf": [{"type": "string"}, {"type": "number"}],
}
)
best = self.best_match(validator.iter_errors({}))
self.assertEqual(best.validator, "minProperties")
def test_if_the_most_relevant_error_is_anyOf_it_is_traversed(self):
"""
If the most relevant error is an anyOf, then we traverse its context
and select the otherwise *least* relevant error, since in this case
that means the most specific, deep, error inside the instance.
I.e. since only one of the schemas must match, we look for the most
relevant one.
"""
validator = Draft4Validator(
{
"properties": {
"foo": {
"anyOf": [
{"type": "string"},
{"properties": {"bar": {"type": "array"}}},
],
},
},
},
)
best = self.best_match(validator.iter_errors({"foo": {"bar": 12}}))
self.assertEqual(best.validator_value, "array")
def test_if_the_most_relevant_error_is_oneOf_it_is_traversed(self):
"""
If the most relevant error is an oneOf, then we traverse its context
and select the otherwise *least* relevant error, since in this case
that means the most specific, deep, error inside the instance.
I.e. since only one of the schemas must match, we look for the most
relevant one.
"""
validator = Draft4Validator(
{
"properties": {
"foo": {
"oneOf": [
{"type": "string"},
{"properties": {"bar": {"type": "array"}}},
],
},
},
},
)
best = self.best_match(validator.iter_errors({"foo": {"bar": 12}}))
self.assertEqual(best.validator_value, "array")
def test_if_the_most_relevant_error_is_allOf_it_is_traversed(self):
"""
Now, if the error is allOf, we traverse but select the *most* relevant
error from the context, because all schemas here must match anyways.
"""
validator = Draft4Validator(
{
"properties": {
"foo": {
"allOf": [
{"type": "string"},
{"properties": {"bar": {"type": "array"}}},
],
},
},
},
)
best = self.best_match(validator.iter_errors({"foo": {"bar": 12}}))
self.assertEqual(best.validator_value, "string")
def test_nested_context_for_oneOf(self):
validator = Draft4Validator(
{
"properties": {
"foo": {
"oneOf": [
{"type": "string"},
{
"oneOf": [
{"type": "string"},
{
"properties": {
"bar": {"type": "array"},
},
},
],
},
],
},
},
},
)
best = self.best_match(validator.iter_errors({"foo": {"bar": 12}}))
self.assertEqual(best.validator_value, "array")
def test_one_error(self):
validator = Draft4Validator({"minProperties": 2})
error, = validator.iter_errors({})
self.assertEqual(
exceptions.best_match(validator.iter_errors({})).validator,
"minProperties",
)
def test_no_errors(self):
validator = Draft4Validator({})
self.assertIsNone(exceptions.best_match(validator.iter_errors({})))
class TestByRelevance(TestCase):
def test_short_paths_are_better_matches(self):
shallow = exceptions.ValidationError("Oh no!", path=["baz"])
deep = exceptions.ValidationError("Oh yes!", path=["foo", "bar"])
match = max([shallow, deep], key=exceptions.relevance)
self.assertIs(match, shallow)
match = max([deep, shallow], key=exceptions.relevance)
self.assertIs(match, shallow)
def test_global_errors_are_even_better_matches(self):
shallow = exceptions.ValidationError("Oh no!", path=[])
deep = exceptions.ValidationError("Oh yes!", path=["foo"])
errors = sorted([shallow, deep], key=exceptions.relevance)
self.assertEqual(
[list(error.path) for error in errors],
[["foo"], []],
)
errors = sorted([deep, shallow], key=exceptions.relevance)
self.assertEqual(
[list(error.path) for error in errors],
[["foo"], []],
)
def test_weak_validators_are_lower_priority(self):
weak = exceptions.ValidationError("Oh no!", path=[], validator="a")
normal = exceptions.ValidationError("Oh yes!", path=[], validator="b")
best_match = exceptions.by_relevance(weak="a")
match = max([weak, normal], key=best_match)
self.assertIs(match, normal)
match = max([normal, weak], key=best_match)
self.assertIs(match, normal)
def test_strong_validators_are_higher_priority(self):
weak = exceptions.ValidationError("Oh no!", path=[], validator="a")
normal = exceptions.ValidationError("Oh yes!", path=[], validator="b")
strong = exceptions.ValidationError("Oh fine!", path=[], validator="c")
best_match = exceptions.by_relevance(weak="a", strong="c")
match = max([weak, normal, strong], key=best_match)
self.assertIs(match, strong)
match = max([strong, normal, weak], key=best_match)
self.assertIs(match, strong)
class TestErrorTree(TestCase):
def test_it_knows_how_many_total_errors_it_contains(self):
# FIXME: https://github.com/Julian/jsonschema/issues/442
errors = [
exceptions.ValidationError("Something", validator=i)
for i in range(8)
]
tree = exceptions.ErrorTree(errors)
self.assertEqual(tree.total_errors, 8)
def test_it_contains_an_item_if_the_item_had_an_error(self):
errors = [exceptions.ValidationError("a message", path=["bar"])]
tree = exceptions.ErrorTree(errors)
self.assertIn("bar", tree)
def test_it_does_not_contain_an_item_if_the_item_had_no_error(self):
errors = [exceptions.ValidationError("a message", path=["bar"])]
tree = exceptions.ErrorTree(errors)
self.assertNotIn("foo", tree)
def test_validators_that_failed_appear_in_errors_dict(self):
error = exceptions.ValidationError("a message", validator="foo")
tree = exceptions.ErrorTree([error])
self.assertEqual(tree.errors, {"foo": error})
def test_it_creates_a_child_tree_for_each_nested_path(self):
errors = [
exceptions.ValidationError("a bar message", path=["bar"]),
exceptions.ValidationError("a bar -> 0 message", path=["bar", 0]),
]
tree = exceptions.ErrorTree(errors)
self.assertIn(0, tree["bar"])
self.assertNotIn(1, tree["bar"])
def test_children_have_their_errors_dicts_built(self):
e1, e2 = (
exceptions.ValidationError("1", validator="foo", path=["bar", 0]),
exceptions.ValidationError("2", validator="quux", path=["bar", 0]),
)
tree = exceptions.ErrorTree([e1, e2])
self.assertEqual(tree["bar"][0].errors, {"foo": e1, "quux": e2})
def test_multiple_errors_with_instance(self):
e1, e2 = (
exceptions.ValidationError(
"1",
validator="foo",
path=["bar", "bar2"],
instance="i1"),
exceptions.ValidationError(
"2",
validator="quux",
path=["foobar", 2],
instance="i2"),
)
exceptions.ErrorTree([e1, e2])
def test_it_does_not_contain_subtrees_that_are_not_in_the_instance(self):
error = exceptions.ValidationError("123", validator="foo", instance=[])
tree = exceptions.ErrorTree([error])
with self.assertRaises(IndexError):
tree[0]
def test_if_its_in_the_tree_anyhow_it_does_not_raise_an_error(self):
"""
If a validator is dumb (like :validator:`required` in draft 3) and
refers to a path that isn't in the instance, the tree still properly
returns a subtree for that path.
"""
error = exceptions.ValidationError(
"a message", validator="foo", instance={}, path=["foo"],
)
tree = exceptions.ErrorTree([error])
self.assertIsInstance(tree["foo"], exceptions.ErrorTree)
class TestErrorInitReprStr(TestCase):
def make_error(self, **kwargs):
defaults = dict(
message=u"hello",
validator=u"type",
validator_value=u"string",
instance=5,
schema={u"type": u"string"},
)
defaults.update(kwargs)
return exceptions.ValidationError(**defaults)
def assertShows(self, expected, **kwargs):
if PY3: # pragma: no cover
expected = expected.replace("u'", "'")
expected = textwrap.dedent(expected).rstrip("\n")
error = self.make_error(**kwargs)
message_line, _, rest = str(error).partition("\n")
self.assertEqual(message_line, error.message)
self.assertEqual(rest, expected)
def test_it_calls_super_and_sets_args(self):
error = self.make_error()
self.assertGreater(len(error.args), 1)
def test_repr(self):
self.assertEqual(
repr(exceptions.ValidationError(message="Hello!")),
"<ValidationError: %r>" % "Hello!",
)
def test_unset_error(self):
error = exceptions.ValidationError("message")
self.assertEqual(str(error), "message")
kwargs = {
"validator": "type",
"validator_value": "string",
"instance": 5,
"schema": {"type": "string"},
}
# Just the message should show if any of the attributes are unset
for attr in kwargs:
k = dict(kwargs)
del k[attr]
error = exceptions.ValidationError("message", **k)
self.assertEqual(str(error), "message")
def test_empty_paths(self):
self.assertShows(
"""
Failed validating u'type' in schema:
{u'type': u'string'}
On instance:
5
""",
path=[],
schema_path=[],
)
def test_one_item_paths(self):
self.assertShows(
"""
Failed validating u'type' in schema:
{u'type': u'string'}
On instance[0]:
5
""",
path=[0],
schema_path=["items"],
)
def test_multiple_item_paths(self):
self.assertShows(
"""
Failed validating u'type' in schema[u'items'][0]:
{u'type': u'string'}
On instance[0][u'a']:
5
""",
path=[0, u"a"],
schema_path=[u"items", 0, 1],
)
def test_uses_pprint(self):
self.assertShows(
"""
Failed validating u'maxLength' in schema:
{0: 0,
1: 1,
2: 2,
3: 3,
4: 4,
5: 5,
6: 6,
7: 7,
8: 8,
9: 9,
10: 10,
11: 11,
12: 12,
13: 13,
14: 14,
15: 15,
16: 16,
17: 17,
18: 18,
19: 19}
On instance:
[0,
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
17,
18,
19,
20,
21,
22,
23,
24]
""",
instance=list(range(25)),
schema=dict(zip(range(20), range(20))),
validator=u"maxLength",
)
def test_str_works_with_instances_having_overriden_eq_operator(self):
"""
Check for https://github.com/Julian/jsonschema/issues/164 which
rendered exceptions unusable when a `ValidationError` involved
instances with an `__eq__` method that returned truthy values.
"""
class DontEQMeBro(object):
def __eq__(this, other): # pragma: no cover
self.fail("Don't!")
def __ne__(this, other): # pragma: no cover
self.fail("Don't!")
instance = DontEQMeBro()
error = exceptions.ValidationError(
"a message",
validator="foo",
instance=instance,
validator_value="some",
schema="schema",
)
self.assertIn(repr(instance), str(error))
class TestHashable(TestCase):
def test_hashable(self):
set([exceptions.ValidationError("")])
set([exceptions.SchemaError("")])

Some files were not shown because too many files have changed in this diff Show More