Compare commits
345 Commits
bugfix/rel...bug/stop_a
| Author | SHA1 | Date |
|---|---|---|
|  | deb3f0f43a |  |
|  | 5958306466 |  |
|  | 464d294b63 |  |
|  | b7f4a421e4 |  |
|  | fe64afb479 |  |
|  | 0277067e5f |  |
|  | 334102addd |  |
|  | 52d475e89b |  |
|  | 2bb844ee0e |  |
|  | 897616f073 |  |
|  | 34f0222fdd |  |
|  | d125bdfaa0 |  |
|  | 6a60201e80 |  |
|  | a3f5b712d3 |  |
|  | b2336402a7 |  |
|  | 5091dc2124 |  |
|  | 7bed76786d |  |
|  | 0e45ad2d7f |  |
|  | 43e064880e |  |
|  | 04a95e3425 |  |
|  | 496b28bb58 |  |
|  | 94bd667ba6 |  |
|  | 186730a284 |  |
|  | 6e31094d77 |  |
|  | 27cbf46dd6 |  |
|  | 83a98c3807 |  |
|  | 2262bd1797 |  |
|  | f0b70b7c8e |  |
|  | f8d4b4746c |  |
|  | f33f67ebd3 |  |
|  | cc866f3c02 |  |
|  | a43ed9286f |  |
|  | 071778b919 |  |
|  | a4f0fbafbb |  |
|  | 52cfd17917 |  |
|  | f34598214b |  |
|  | f064c5ee4f |  |
|  | 3e992aa283 |  |
|  | c226835aa5 |  |
|  | 9e616ca7b7 |  |
|  | b23bd81cd9 |  |
|  | 39a346bde0 |  |
|  | 89d3590300 |  |
|  | 0f74b7ec44 |  |
|  | 422ae20a45 |  |
|  | efc5d0a3b4 |  |
|  | 431473ea9b |  |
|  | cbe47f2066 |  |
|  | e43795de28 |  |
|  | dc1b6aa8c6 |  |
|  | b924a5db14 |  |
|  | 0264bb8343 |  |
|  | c24bb64792 |  |
|  | cdb04931ab |  |
|  | dce4f161d4 |  |
|  | ffc3272a2b |  |
|  | 8d19db8628 |  |
|  | 1250b77607 |  |
|  | 1b5eaf62de |  |
|  | dd03a1a91e |  |
|  | 7cd8bae799 |  |
|  | ae2509339d |  |
|  | 7a8862e316 |  |
|  | 20780ec61c |  |
|  | 98dfe7e7e0 |  |
|  | 94f5d0a3f4 |  |
|  | 997dba8ffd |  |
|  | 6f95296972 |  |
|  | c85c725b08 |  |
|  | a06d93f6cb |  |
|  | 8daaee3ed6 |  |
|  | e40eceba4c |  |
|  | c220abb011 |  |
|  | 71cb9fd9fd |  |
|  | 8fdb6e2b61 |  |
|  | 676d321b28 |  |
|  | d6d1d33e88 |  |
|  | 89830c2117 |  |
|  | 4e16948a1f |  |
|  | c530672f0e |  |
|  | 38af2c6c9b |  |
|  | a99d485016 |  |
|  | fc9bfe5317 |  |
|  | 4e80b18851 |  |
|  | fee855030d |  |
|  | c06aaebefd |  |
|  | c2e1a12cdf |  |
|  | 27202b2240 |  |
|  | c2015f6a9c |  |
|  | e09ab1a01c |  |
|  | 2018606c32 |  |
|  | eba3e1a20c |  |
|  | 95f2b10b4f |  |
|  | 04f298a34f |  |
|  | bcf497a5fa |  |
|  | e0d8964453 |  |
|  | ae15c87bf0 |  |
|  | cd19782943 |  |
|  | 9d5ce4962d |  |
|  | b5b54dbd02 |  |
|  | 3cd599d6f6 |  |
|  | 2f5c8ee699 |  |
|  | e028ee0d59 |  |
|  | 7fa31911fe |  |
|  | f181b6fef6 |  |
|  | e2bda65947 |  |
|  | 50a3e3bfd3 |  |
|  | eda8151fa0 |  |
|  | edd55bc73d |  |
|  | 0863148c4f |  |
|  | d098f92c4a |  |
|  | 6ed282bb2d |  |
|  | e945d3763b |  |
|  | 262c680997 |  |
|  | 054149698b |  |
|  | a294acfb73 |  |
|  | 9602d0a03c |  |
|  | a3048438c3 |  |
|  | 6331778552 |  |
|  | 1513e57eee |  |
|  | d52e55d8de |  |
|  | 9c8a75ff9c |  |
|  | f3035f98e1 |  |
|  | 15951f0e6b |  |
|  | 1c3881b48a |  |
|  | 704c81eda4 |  |
|  | 60a5b2a85e |  |
|  | 4381cb5957 |  |
|  | 74d64fd61a |  |
|  | 82bdda9a89 |  |
|  | e974b44e86 |  |
|  | 69b8cddb1b |  |
|  | 8e659f512e |  |
|  | 5daf023aec |  |
|  | 87abda4cdd |  |
|  | fa5e186d4a |  |
|  | e1cc28a30a |  |
|  | 17edf1ae90 |  |
|  | 79fd1c5114 |  |
|  | 13e36c5457 |  |
|  | b2694013d4 |  |
|  | 8f3b025b55 |  |
|  | 37fbe30c4a |  |
|  | 314867e635 |  |
|  | 9345bf81b9 |  |
|  | adf4e91658 |  |
|  | 20ad47f9e1 |  |
|  | 7e5de95a30 |  |
|  | e9f7fb03c9 |  |
|  | 9d4291e590 |  |
|  | 8f98f1d182 |  |
|  | 654f6839eb |  |
|  | c8daa7218d |  |
|  | d862507bcf |  |
|  | 7c6b253d89 |  |
|  | abd418cc31 |  |
|  | 544826c825 |  |
|  | afd65a4cd0 |  |
|  | b6da473800 |  |
|  | eee0f2b9d5 |  |
|  | cb5d53e5a9 |  |
|  | b5ba20ada4 |  |
|  | d236b9438d |  |
|  | b7b6542804 |  |
|  | 34873f5fe7 |  |
|  | 522a7c8ee0 |  |
|  | 433f743074 |  |
|  | 38196894db |  |
|  | db69a291d4 |  |
|  | 389beaef97 |  |
|  | 8ebad6963b |  |
|  | 43042c14e0 |  |
|  | 6b91d3700b |  |
|  | 7aa2bdca44 |  |
|  | 5005e9a856 |  |
|  | 0a43dd1019 |  |
|  | a6d89853d4 |  |
|  | 116deb0386 |  |
|  | 6647b2a4ca |  |
|  | 6c62fa461e |  |
|  | e9edb5c96b |  |
|  | 9869a2d0da |  |
|  | 024c2b690a |  |
|  | cd211acd80 |  |
|  | 494f0053ae |  |
|  | d45280369f |  |
|  | c68652c88c |  |
|  | a94b4eef79 |  |
|  | 94dd9685cc |  |
|  | a0f175f327 |  |
|  | ea692ca6f7 |  |
|  | 02aab770db |  |
|  | cdc5c44bb1 |  |
|  | da9e152ed1 |  |
|  | ece1e3905e |  |
|  | bfc69f0e4b |  |
|  | 7703043195 |  |
|  | f83e0fb81a |  |
|  | 90592b3cbe |  |
|  | 04536db387 |  |
|  | e470131a77 |  |
|  | 1e35d06bf0 |  |
|  | 1a576e732e |  |
|  | 30244c2c40 |  |
|  | 9ace418371 |  |
|  | 87978f7989 |  |
|  | e96b91dd37 |  |
|  | 541ccd6669 |  |
|  | 140e8085dd |  |
|  | 2597fff76b |  |
|  | 225595235c |  |
|  | d9c4b91af3 |  |
|  | ee2974869c |  |
|  | c940e9fc7e |  |
|  | c3a929fdfc |  |
|  | 6313d84d38 |  |
|  | 10ccbc9a2c |  |
|  | bbe69aa725 |  |
|  | 8b3858081d |  |
|  | 809d0523d9 |  |
|  | e1f6a2e6b5 |  |
|  | 956bb91cfe |  |
|  | 23cb7d761e |  |
|  | 1813a809ff |  |
|  | e3f0be41d4 |  |
|  | 618fd04729 |  |
|  | 04bed42831 |  |
|  | 661fd7bb67 |  |
|  | 18615b1485 |  |
|  | f754ed9788 |  |
|  | 6443086222 |  |
|  | 556565ca0d |  |
|  | 40886599ce |  |
|  | 2310da0070 |  |
|  | e76de4138e |  |
|  | 43aeb4e48e |  |
|  | 305f1d68d6 |  |
|  | 1b273ed5c9 |  |
|  | bb1d0d1900 |  |
|  | f262693983 |  |
|  | 8c81469f94 |  |
|  | 6c250640c4 |  |
|  | fb93979b94 |  |
|  | 6357de4e61 |  |
|  | 96535cc4f9 |  |
|  | bd0ffa8a3c |  |
|  | ac49ce8b3b |  |
|  | 9240614928 |  |
|  | 89044ff5d5 |  |
|  | 315f01fc96 |  |
|  | e99aff93e1 |  |
|  | 42677ded8e |  |
|  | d375a7ea0a |  |
|  | 3b4c622661 |  |
|  | ba87593afd |  |
|  | af1ec6275d |  |
|  | 3ff2fb43cf |  |
|  | a148e37c4c |  |
|  | 4d876bac0c |  |
|  | 114b35144e |  |
|  | 69184dc390 |  |
|  | 70aea4cd82 |  |
|  | e091eacde0 |  |
|  | 9447ec9600 |  |
|  | 351a1d8d37 |  |
|  | b0f996c8ef |  |
|  | 4de197f778 |  |
|  | bd7d3faa10 |  |
|  | bc152da68b |  |
|  | 72ca7d6ee5 |  |
|  | acc688b5e1 |  |
|  | 0cf3176462 |  |
|  | 116e1bb6cb |  |
|  | 9c1be2a510 |  |
|  | 5319b6e3b1 |  |
|  | ce94a786ae |  |
|  | 25c29d277f |  |
|  | e51337fcd9 |  |
|  | 3139894794 |  |
|  | 5bbb63ed93 |  |
|  | bc618c2c6c |  |
|  | 97a1d48512 |  |
|  | 9ec638c7bb |  |
|  | c1d51593d1 |  |
|  | 948bc98fa6 |  |
|  | 28d31316b7 |  |
|  | f876813531 |  |
|  | ec8ef9d292 |  |
|  | 7836b60825 |  |
|  | 17367a702e |  |
|  | 1bd2c40228 |  |
|  | 71e71c0b60 |  |
|  | f920976fa6 |  |
|  | 26dcb0ffc4 |  |
|  | f6802b733a |  |
|  | 7b9ce3a940 |  |
|  | dc88538e80 |  |
|  | 9875a0e228 |  |
|  | ee46a1077c |  |
|  | 5d11bc9a77 |  |
|  | a9b2a18880 |  |
|  | 27ba835236 |  |
|  | e4da7db567 |  |
|  | 2f35b0487b |  |
|  | 81bb380916 |  |
|  | 08573b4cb8 |  |
|  | b9717ca261 |  |
|  | 5d6a9a70c8 |  |
|  | f3b5704663 |  |
|  | d5773ac5bf |  |
|  | 363a263114 |  |
|  | 140a2721e6 |  |
|  | 8b6bb245db |  |
|  | 059bc53760 |  |
|  | 0960c0810c |  |
|  | dc87157e80 |  |
|  | d17511a806 |  |
|  | e199d7ef6b |  |
|  | b286c62feb |  |
|  | 32c442afdb |  |
|  | 75154318af |  |
|  | 74f4c73c24 |  |
|  | 9445b931ec |  |
|  | 621760a8b8 |  |
|  | 065f349813 |  |
|  | 48b1531966 |  |
|  | 262d5e6791 |  |
|  | 361a2ccdf6 |  |
|  | c930f871f3 |  |
|  | b9a2e71a8b |  |
|  | 01d077d4bc |  |
|  | 79a04605d3 |  |
|  | a873c7f456 |  |
|  | 5881ec5389 |  |
|  | 50cb742377 |  |
|  | 4ccfe5108c |  |
|  | 39e6f987bb |  |
|  | d458e82286 |  |
|  | 1314273a42 |  |
|  | 0337bb50d9 |  |
|  | 4227a546a1 |  |
|  | 5f4af3ac8a |  |
|  | 4a53536464 |  |
|  | 3b491ccbbd |  |
|  | f92a2d688d |  |
.github/workflows/build-containers.yml (6 changes, vendored)
@@ -12,6 +12,7 @@ on:
       - develop
     paths:
       - '.github/workflows/build-containers.yml'
       - 'share/spack/docker/*'
     # Let's also build & tag Spack containers on releases.
     release:
       types: [published]
@@ -66,14 +67,15 @@ jobs:
         uses: docker/setup-buildx-action@94ab11c41e45d028884a99163086648e898eed25 # @v1

       - name: Log in to GitHub Container Registry
-        uses: docker/login-action@f054a8b539a109f9f41c372932f1ae047eff08c9 # @v1
+        uses: docker/login-action@42d299face0c5c43a0487c477f595ac9cf22f1a7 # @v1
         with:
           registry: ghcr.io
           username: ${{ github.actor }}
           password: ${{ secrets.GITHUB_TOKEN }}

       - name: Log in to DockerHub
-        uses: docker/login-action@f054a8b539a109f9f41c372932f1ae047eff08c9 # @v1
+        if: ${{ github.event_name != 'pull_request' }}
+        uses: docker/login-action@42d299face0c5c43a0487c477f595ac9cf22f1a7 # @v1
         with:
           username: ${{ secrets.DOCKERHUB_USERNAME }}
           password: ${{ secrets.DOCKERHUB_TOKEN }}

.github/workflows/unit_tests.yaml (38 changes, vendored)
@@ -97,7 +97,14 @@ jobs:
     strategy:
       matrix:
         python-version: [2.7, 3.5, 3.6, 3.7, 3.8, 3.9]
-        concretizer: ['original', 'clingo']
+        concretizer: ['clingo']
+        include:
+        - python-version: 2.7
+          concretizer: original
+        - python-version: 3.6
+          concretizer: original
+        - python-version: 3.9
+          concretizer: original
     steps:
     - uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2
       with:
@@ -314,3 +321,32 @@ jobs:
       with:
         files: ./coverage.xml
         flags: unittests,macos
+
+  # Run audits on all the packages in the built-in repository
+  package-audits:
+    needs: [ validate, style, changes ]
+    runs-on: ubuntu-latest
+    steps:
+    - uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2
+    - uses: actions/setup-python@dc73133d4da04e56a135ae2246682783cc7c7cb6 # @v2
+      with:
+        python-version: 3.9
+    - name: Install Python packages
+      run: |
+        pip install --upgrade pip six setuptools pytest codecov coverage[toml]
+    - name: Package audits (with coverage)
+      if: ${{ needs.changes.outputs.with_coverage == 'true' }}
+      run: |
+        . share/spack/setup-env.sh
+        coverage run $(which spack) audit packages
+        coverage combine
+        coverage xml
+    - name: Package audits (without coverage)
+      if: ${{ needs.changes.outputs.with_coverage == 'false' }}
+      run: |
+        . share/spack/setup-env.sh
+        $(which spack) audit packages
+    - uses: codecov/codecov-action@f32b3a3741e1053eb607407145bc9619351dc93b # @v2.1.0
+      if: ${{ needs.changes.outputs.with_coverage == 'true' }}
+      with:
+        flags: unittests,linux,audits

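For clarity, the matrix change above trades the full original-times-clingo product for clingo on every Python version plus three pinned original-concretizer jobs. A small illustrative Python sketch of the resulting job list (not part of the workflow; names are ad hoc):

    # Base matrix: every python-version paired with the clingo concretizer.
    pythons = ["2.7", "3.5", "3.6", "3.7", "3.8", "3.9"]
    jobs = [{"python-version": p, "concretizer": "clingo"} for p in pythons]

    # GitHub Actions `include:` entries add extra combinations on top.
    jobs += [
        {"python-version": p, "concretizer": "original"}
        for p in ("2.7", "3.6", "3.9")
    ]

    print(len(jobs))  # 9 jobs, down from the previous 6 x 2 = 12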
CHANGELOG.md (25 changes)
@@ -1,3 +1,28 @@
+# v0.17.1 (2021-12-23)
+
+### Spack Bugfixes
+* Allow locks to work under high contention (#27846)
+* Improve error messages from clingo (#27707 #27970)
+* Respect package permissions for sbang (#25764)
+* Fix --enable-locks behavior (#24675)
+* Fix log-format reporter ignoring install errors (#25961)
+* Fix overloaded argparse keys (#27379)
+* Allow style commands to run with targets other than "develop" (#27472)
+* Log lock messages to debug level, instead of verbose level (#27408)
+* Handle invalid unicode while logging (#21447)
+* spack audit: fix API calls to variants (#27713)
+* Provide meaningful message for empty environment installs (#28031)
+* Added opensuse leap containers to spack containerize (#27837)
+* Revert "patches: make re-applied patches idempotent" (#27625)
+* MANPATH can use system defaults (#21682)
+* Add "setdefault" subcommand to `spack module tcl` (#14686)
+* Regenerate views when specs already installed (#28113)
+
+### Package bugfixes
+* Fix external package detection for OpenMPI (#27255)
+* Update the UPC++ package to 2021.9.0 (#26996)
+* Added py-vermin v1.3.2 (#28072)
+
 # v0.17.0 (2021-11-05)

 `v0.17.0` is a major feature release.

COPYRIGHT (12 changes)
@@ -38,6 +38,10 @@ PackageName: argparse
 PackageHomePage: https://pypi.python.org/pypi/argparse
 PackageLicenseDeclared: Python-2.0

+PackageName: attrs
+PackageHomePage: https://github.com/python-attrs/attrs
+PackageLicenseDeclared: MIT
+
 PackageName: ctest_log_parser
 PackageHomePage: https://github.com/Kitware/CMake
 PackageLicenseDeclared: BSD-3-Clause
@@ -46,8 +50,8 @@ PackageName: distro
 PackageHomePage: https://pypi.python.org/pypi/distro
 PackageLicenseDeclared: Apache-2.0

-PackageName: functools
-PackageHomePage: https://github.com/python/cpython/blob/2.7/Lib/functools.py
+PackageName: functools32
+PackageHomePage: https://github.com/MiCHiLU/python-functools32
 PackageLicenseDeclared: Python-2.0

 PackageName: jinja2
@@ -70,6 +74,10 @@ PackageName: py
 PackageHomePage: https://pypi.python.org/pypi/py
 PackageLicenseDeclared: MIT

+PackageName: pyrsistent
+PackageHomePage: http://github.com/tobgu/pyrsistent
+PackageLicenseDeclared: MIT
+
 PackageName: pytest
 PackageHomePage: https://pypi.python.org/pypi/pytest
 PackageLicenseDeclared: MIT

@@ -59,7 +59,7 @@ are available:
     install_files         : install file listing read from install_manifest.json
     environment_variables : environment variables parsed from spack-build-env.txt
     config_args           : config args loaded from spack-configure-args.txt
-    abigail               : Application Binary Interface (ABI) features for objects
+    libabigail            : Application Binary Interface (ABI) features for objects


 In the above, the first three are fairly simple - parsing metadata files from

@@ -129,6 +129,9 @@ are currently supported are summarized in the table below:
    * - CentOS 7
      - ``centos:7``
      - ``spack/centos7``
+   * - openSUSE Leap
+     - ``opensuse/leap``
+     - ``spack/leap15``

 All the images are tagged with the corresponding release of Spack:

@@ -671,6 +671,13 @@ If you need to write a hook that is relevant to a failure within a build
 process, you would want to instead use ``on_phase_failure``.


+"""""""""""""""""""""""""""
+``on_install_cancel(spec)``
+"""""""""""""""""""""""""""
+
+The same, but triggered if a spec install is cancelled for any reason.
+
+
 """""""""""""""""""""""""""""""""""""""""""""""
 ``on_phase_success(pkg, phase_name, log_file)``
 """""""""""""""""""""""""""""""""""""""""""""""

@@ -2943,7 +2943,7 @@ The package base class, usually specialized for a given build system, determines
 actual set of entities available for overriding.
 The classes that are currently provided by Spack are:

-+-------------------------=--------------------------------+----------------------------------+
++----------------------------------------------------------+----------------------------------+
 | **Base Class**                                            | **Purpose**                      |
 +===========================================================+==================================+
 | :class:`~spack.package.Package`                           | General base class not           |

lib/spack/external/__init__.py (28 changes, vendored)
@@ -24,6 +24,13 @@
 vendored copy ever needs to be updated again:
 https://github.com/spack/spack/pull/6786/commits/dfcef577b77249106ea4e4c69a6cd9e64fa6c418

+attrs
+----------------
+
+* Homepage: https://github.com/python-attrs/attrs
+* Usage: Needed by jsonschema.
+* Version: 21.2.0 (83d3cd70f90a3f4d19ee8b508e58d1c58821c0ad)
+
 ctest_log_parser
 ----------------

@@ -40,6 +47,12 @@
 * Version: 1.6.0 (64946a1e2a9ff529047070657728600e006c99ff)
 * Note: Last version supporting Python 2.7

+functools32
+-----------
+* Homepage: https://github.com/MiCHiLU/python-functools32
+* Usage: Needed by jsonschema when using Python 2.7.
+* Version: 3.2.3-2
+
 jinja2
 ------

@@ -52,11 +65,8 @@

 * Homepage: https://pypi.python.org/pypi/jsonschema
 * Usage: An implementation of JSON Schema for Python.
-* Version: 2.4.0 (last version before functools32 dependency was added)
-* Note: functools32 doesn't support Python 2.6 or 3.0, so jsonschema
-  cannot be upgraded any further until we drop 2.6.
-  Also, jsonschema/validators.py has been modified NOT to try to import
-  requests (see 7a1dd517b8).
+* Version: 3.2.0 (last version before 2.7 and 3.6 support was dropped)
+* Note: We don't include tests or benchmarks; just what Spack needs.

 markupsafe
 ----------
@@ -75,6 +85,14 @@
 * Note: This package has been modified:
   * https://github.com/pytest-dev/py/pull/186 was backported

+pyrsistent
+----------
+
+* Homepage: http://github.com/tobgu/pyrsistent/
+* Usage: Needed by `jsonschema`
+* Version: 0.16.1 (last version supporting Python 2.7)
+* Note: We only include the parts needed for `jsonschema`.
+
 pytest
 ------

lib/spack/external/attr/LICENSE (21 additions, vendored, new file)
@@ -0,0 +1,21 @@
The MIT License (MIT)

Copyright (c) 2015 Hynek Schlawack

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
lib/spack/external/attr/__init__.py (78 additions, vendored, new file)
@@ -0,0 +1,78 @@
from __future__ import absolute_import, division, print_function

import sys

from functools import partial

from . import converters, exceptions, filters, setters, validators
from ._cmp import cmp_using
from ._config import get_run_validators, set_run_validators
from ._funcs import asdict, assoc, astuple, evolve, has, resolve_types
from ._make import (
    NOTHING,
    Attribute,
    Factory,
    attrib,
    attrs,
    fields,
    fields_dict,
    make_class,
    validate,
)
from ._version_info import VersionInfo


__version__ = "21.2.0"
__version_info__ = VersionInfo._from_version_string(__version__)

__title__ = "attrs"
__description__ = "Classes Without Boilerplate"
__url__ = "https://www.attrs.org/"
__uri__ = __url__
__doc__ = __description__ + " <" + __uri__ + ">"

__author__ = "Hynek Schlawack"
__email__ = "hs@ox.cx"

__license__ = "MIT"
__copyright__ = "Copyright (c) 2015 Hynek Schlawack"


s = attributes = attrs
ib = attr = attrib
dataclass = partial(attrs, auto_attribs=True)  # happy Easter ;)

__all__ = [
    "Attribute",
    "Factory",
    "NOTHING",
    "asdict",
    "assoc",
    "astuple",
    "attr",
    "attrib",
    "attributes",
    "attrs",
    "cmp_using",
    "converters",
    "evolve",
    "exceptions",
    "fields",
    "fields_dict",
    "filters",
    "get_run_validators",
    "has",
    "ib",
    "make_class",
    "resolve_types",
    "s",
    "set_run_validators",
    "setters",
    "validate",
    "validators",
]

if sys.version_info[:2] >= (3, 6):
    from ._next_gen import define, field, frozen, mutable

    __all__.extend((define, field, frozen, mutable))
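As a quick orientation for the aliases exported above (`attr.s`, `attr.ib`, `attr.dataclass`), a minimal usage sketch with the vendored package on the path (the class name is made up):

    import attr

    @attr.s
    class Point(object):
        x = attr.ib(default=0)
        y = attr.ib(default=0)

    # attrs generates __init__, __repr__, and __eq__ from the declared fields.
    assert Point(1, 2) == Point(1, 2)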
lib/spack/external/attr/_cmp.py (152 additions, vendored, new file)
@@ -0,0 +1,152 @@
from __future__ import absolute_import, division, print_function

import functools

from ._compat import new_class
from ._make import _make_ne


_operation_names = {"eq": "==", "lt": "<", "le": "<=", "gt": ">", "ge": ">="}


def cmp_using(
    eq=None,
    lt=None,
    le=None,
    gt=None,
    ge=None,
    require_same_type=True,
    class_name="Comparable",
):
    """
    Create a class that can be passed into `attr.ib`'s ``eq``, ``order``, and
    ``cmp`` arguments to customize field comparison.

    The resulting class will have a full set of ordering methods if
    at least one of ``{lt, le, gt, ge}`` and ``eq`` are provided.

    :param Optional[callable] eq: `callable` used to evaluate equality
        of two objects.
    :param Optional[callable] lt: `callable` used to evaluate whether
        one object is less than another object.
    :param Optional[callable] le: `callable` used to evaluate whether
        one object is less than or equal to another object.
    :param Optional[callable] gt: `callable` used to evaluate whether
        one object is greater than another object.
    :param Optional[callable] ge: `callable` used to evaluate whether
        one object is greater than or equal to another object.

    :param bool require_same_type: When `True`, equality and ordering methods
        will return `NotImplemented` if objects are not of the same type.

    :param Optional[str] class_name: Name of class. Defaults to 'Comparable'.

    See `comparison` for more details.

    .. versionadded:: 21.1.0
    """

    body = {
        "__slots__": ["value"],
        "__init__": _make_init(),
        "_requirements": [],
        "_is_comparable_to": _is_comparable_to,
    }

    # Add operations.
    num_order_functions = 0
    has_eq_function = False

    if eq is not None:
        has_eq_function = True
        body["__eq__"] = _make_operator("eq", eq)
        body["__ne__"] = _make_ne()

    if lt is not None:
        num_order_functions += 1
        body["__lt__"] = _make_operator("lt", lt)

    if le is not None:
        num_order_functions += 1
        body["__le__"] = _make_operator("le", le)

    if gt is not None:
        num_order_functions += 1
        body["__gt__"] = _make_operator("gt", gt)

    if ge is not None:
        num_order_functions += 1
        body["__ge__"] = _make_operator("ge", ge)

    type_ = new_class(class_name, (object,), {}, lambda ns: ns.update(body))

    # Add same type requirement.
    if require_same_type:
        type_._requirements.append(_check_same_type)

    # Add total ordering if at least one operation was defined.
    if 0 < num_order_functions < 4:
        if not has_eq_function:
            # functools.total_ordering requires __eq__ to be defined,
            # so raise early error here to keep a nice stack.
            raise ValueError(
                "eq must be defined in order to complete ordering from "
                "lt, le, gt, ge."
            )
        type_ = functools.total_ordering(type_)

    return type_


def _make_init():
    """
    Create __init__ method.
    """

    def __init__(self, value):
        """
        Initialize object with *value*.
        """
        self.value = value

    return __init__


def _make_operator(name, func):
    """
    Create operator method.
    """

    def method(self, other):
        if not self._is_comparable_to(other):
            return NotImplemented

        result = func(self.value, other.value)
        if result is NotImplemented:
            return NotImplemented

        return result

    method.__name__ = "__%s__" % (name,)
    method.__doc__ = "Return a %s b. Computed by attrs." % (
        _operation_names[name],
    )

    return method


def _is_comparable_to(self, other):
    """
    Check whether `other` is comparable to `self`.
    """
    for func in self._requirements:
        if not func(self, other):
            return False
    return True


def _check_same_type(self, other):
    """
    Return True if *self* and *other* are of the same type, False otherwise.
    """
    return other.value.__class__ is self.value.__class__
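A minimal usage sketch for `cmp_using` as vendored above (assuming `attr` resolves to this vendored copy; the tolerance is arbitrary):

    import attr

    # A wrapper class whose comparisons treat nearly-equal floats as equal.
    Approx = attr.cmp_using(
        eq=lambda a, b: abs(a - b) < 1e-9,
        lt=lambda a, b: a < b - 1e-9,
    )

    assert Approx(1.0) == Approx(1.0 + 1e-12)  # custom __eq__
    assert Approx(1.0) <= Approx(2.0)          # filled in by total_ordering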
lib/spack/external/attr/_compat.py (242 additions, vendored, new file)
@@ -0,0 +1,242 @@
from __future__ import absolute_import, division, print_function

import platform
import sys
import types
import warnings


PY2 = sys.version_info[0] == 2
PYPY = platform.python_implementation() == "PyPy"


if PYPY or sys.version_info[:2] >= (3, 6):
    ordered_dict = dict
else:
    from collections import OrderedDict

    ordered_dict = OrderedDict


if PY2:
    from collections import Mapping, Sequence

    from UserDict import IterableUserDict

    # We 'bundle' isclass instead of using inspect as importing inspect is
    # fairly expensive (order of 10-15 ms for a modern machine in 2016)
    def isclass(klass):
        return isinstance(klass, (type, types.ClassType))

    def new_class(name, bases, kwds, exec_body):
        """
        A minimal stub of types.new_class that we need for make_class.
        """
        ns = {}
        exec_body(ns)

        return type(name, bases, ns)

    # TYPE is used in exceptions, repr(int) is different on Python 2 and 3.
    TYPE = "type"

    def iteritems(d):
        return d.iteritems()

    # Python 2 is bereft of a read-only dict proxy, so we make one!
    class ReadOnlyDict(IterableUserDict):
        """
        Best-effort read-only dict wrapper.
        """

        def __setitem__(self, key, val):
            # We gently pretend we're a Python 3 mappingproxy.
            raise TypeError(
                "'mappingproxy' object does not support item assignment"
            )

        def update(self, _):
            # We gently pretend we're a Python 3 mappingproxy.
            raise AttributeError(
                "'mappingproxy' object has no attribute 'update'"
            )

        def __delitem__(self, _):
            # We gently pretend we're a Python 3 mappingproxy.
            raise TypeError(
                "'mappingproxy' object does not support item deletion"
            )

        def clear(self):
            # We gently pretend we're a Python 3 mappingproxy.
            raise AttributeError(
                "'mappingproxy' object has no attribute 'clear'"
            )

        def pop(self, key, default=None):
            # We gently pretend we're a Python 3 mappingproxy.
            raise AttributeError(
                "'mappingproxy' object has no attribute 'pop'"
            )

        def popitem(self):
            # We gently pretend we're a Python 3 mappingproxy.
            raise AttributeError(
                "'mappingproxy' object has no attribute 'popitem'"
            )

        def setdefault(self, key, default=None):
            # We gently pretend we're a Python 3 mappingproxy.
            raise AttributeError(
                "'mappingproxy' object has no attribute 'setdefault'"
            )

        def __repr__(self):
            # Override to be identical to the Python 3 version.
            return "mappingproxy(" + repr(self.data) + ")"

    def metadata_proxy(d):
        res = ReadOnlyDict()
        res.data.update(d)  # We blocked update, so we have to do it like this.
        return res

    def just_warn(*args, **kw):  # pragma: no cover
        """
        We only warn on Python 3 because we are not aware of any concrete
        consequences of not setting the cell on Python 2.
        """


else:  # Python 3 and later.
    from collections.abc import Mapping, Sequence  # noqa

    def just_warn(*args, **kw):
        """
        We only warn on Python 3 because we are not aware of any concrete
        consequences of not setting the cell on Python 2.
        """
        warnings.warn(
            "Running interpreter doesn't sufficiently support code object "
            "introspection. Some features like bare super() or accessing "
            "__class__ will not work with slotted classes.",
            RuntimeWarning,
            stacklevel=2,
        )

    def isclass(klass):
        return isinstance(klass, type)

    TYPE = "class"

    def iteritems(d):
        return d.items()

    new_class = types.new_class

    def metadata_proxy(d):
        return types.MappingProxyType(dict(d))


def make_set_closure_cell():
    """Return a function of two arguments (cell, value) which sets
    the value stored in the closure cell `cell` to `value`.
    """
    # pypy makes this easy. (It also supports the logic below, but
    # why not do the easy/fast thing?)
    if PYPY:

        def set_closure_cell(cell, value):
            cell.__setstate__((value,))

        return set_closure_cell

    # Otherwise gotta do it the hard way.

    # Create a function that will set its first cellvar to `value`.
    def set_first_cellvar_to(value):
        x = value
        return

        # This function will be eliminated as dead code, but
        # not before its reference to `x` forces `x` to be
        # represented as a closure cell rather than a local.
        def force_x_to_be_a_cell():  # pragma: no cover
            return x

    try:
        # Extract the code object and make sure our assumptions about
        # the closure behavior are correct.
        if PY2:
            co = set_first_cellvar_to.func_code
        else:
            co = set_first_cellvar_to.__code__
        if co.co_cellvars != ("x",) or co.co_freevars != ():
            raise AssertionError  # pragma: no cover

        # Convert this code object to a code object that sets the
        # function's first _freevar_ (not cellvar) to the argument.
        if sys.version_info >= (3, 8):
            # CPython 3.8+ has an incompatible CodeType signature
            # (added a posonlyargcount argument) but also added
            # CodeType.replace() to do this without counting parameters.
            set_first_freevar_code = co.replace(
                co_cellvars=co.co_freevars, co_freevars=co.co_cellvars
            )
        else:
            args = [co.co_argcount]
            if not PY2:
                args.append(co.co_kwonlyargcount)
            args.extend(
                [
                    co.co_nlocals,
                    co.co_stacksize,
                    co.co_flags,
                    co.co_code,
                    co.co_consts,
                    co.co_names,
                    co.co_varnames,
                    co.co_filename,
                    co.co_name,
                    co.co_firstlineno,
                    co.co_lnotab,
                    # These two arguments are reversed:
                    co.co_cellvars,
                    co.co_freevars,
                ]
            )
            set_first_freevar_code = types.CodeType(*args)

        def set_closure_cell(cell, value):
            # Create a function using the set_first_freevar_code,
            # whose first closure cell is `cell`. Calling it will
            # change the value of that cell.
            setter = types.FunctionType(
                set_first_freevar_code, {}, "setter", (), (cell,)
            )
            # And call it to set the cell.
            setter(value)

        # Make sure it works on this interpreter:
        def make_func_with_cell():
            x = None

            def func():
                return x  # pragma: no cover

            return func

        if PY2:
            cell = make_func_with_cell().func_closure[0]
        else:
            cell = make_func_with_cell().__closure__[0]
        set_closure_cell(cell, 100)
        if cell.cell_contents != 100:
            raise AssertionError  # pragma: no cover

    except Exception:
        return just_warn
    else:
        return set_closure_cell


set_closure_cell = make_set_closure_cell()
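A small sketch of what `set_closure_cell` does; it is an internal helper, so importing it from `attr._compat` is shown purely for illustration:

    from attr._compat import set_closure_cell  # internal helper, may move

    def make():
        x = None

        def f():
            return x

        return f

    f = make()
    set_closure_cell(f.__closure__[0], 42)  # rewrite the cell f closes over
    assert f() == 42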
lib/spack/external/attr/_config.py (23 additions, vendored, new file)
@@ -0,0 +1,23 @@
from __future__ import absolute_import, division, print_function


__all__ = ["set_run_validators", "get_run_validators"]

_run_validators = True


def set_run_validators(run):
    """
    Set whether or not validators are run. By default, they are run.
    """
    if not isinstance(run, bool):
        raise TypeError("'run' must be bool.")
    global _run_validators
    _run_validators = run


def get_run_validators():
    """
    Return whether or not validators are run.
    """
    return _run_validators
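Usage sketch for the global validator switch above (re-exported as `attr.set_run_validators` / `attr.get_run_validators`); the class is hypothetical:

    import attr

    @attr.s
    class C(object):
        x = attr.ib(validator=attr.validators.instance_of(int))

    attr.set_run_validators(False)  # globally disable validators
    C("not an int")                 # accepted while validators are off
    attr.set_run_validators(True)
    assert attr.get_run_validators()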
lib/spack/external/attr/_funcs.py (395 additions, vendored, new file)
@@ -0,0 +1,395 @@
from __future__ import absolute_import, division, print_function

import copy

from ._compat import iteritems
from ._make import NOTHING, _obj_setattr, fields
from .exceptions import AttrsAttributeNotFoundError


def asdict(
    inst,
    recurse=True,
    filter=None,
    dict_factory=dict,
    retain_collection_types=False,
    value_serializer=None,
):
    """
    Return the ``attrs`` attribute values of *inst* as a dict.

    Optionally recurse into other ``attrs``-decorated classes.

    :param inst: Instance of an ``attrs``-decorated class.
    :param bool recurse: Recurse into classes that are also
        ``attrs``-decorated.
    :param callable filter: A callable whose return code determines whether an
        attribute or element is included (``True``) or dropped (``False``). Is
        called with the `attr.Attribute` as the first argument and the
        value as the second argument.
    :param callable dict_factory: A callable to produce dictionaries from. For
        example, to produce ordered dictionaries instead of normal Python
        dictionaries, pass in ``collections.OrderedDict``.
    :param bool retain_collection_types: Do not convert to ``list`` when
        encountering an attribute whose type is ``tuple`` or ``set``. Only
        meaningful if ``recurse`` is ``True``.
    :param Optional[callable] value_serializer: A hook that is called for every
        attribute or dict key/value. It receives the current instance, field
        and value and must return the (updated) value. The hook is run *after*
        the optional *filter* has been applied.

    :rtype: return type of *dict_factory*

    :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
        class.

    .. versionadded:: 16.0.0 *dict_factory*
    .. versionadded:: 16.1.0 *retain_collection_types*
    .. versionadded:: 20.3.0 *value_serializer*
    """
    attrs = fields(inst.__class__)
    rv = dict_factory()
    for a in attrs:
        v = getattr(inst, a.name)
        if filter is not None and not filter(a, v):
            continue

        if value_serializer is not None:
            v = value_serializer(inst, a, v)

        if recurse is True:
            if has(v.__class__):
                rv[a.name] = asdict(
                    v,
                    True,
                    filter,
                    dict_factory,
                    retain_collection_types,
                    value_serializer,
                )
            elif isinstance(v, (tuple, list, set, frozenset)):
                cf = v.__class__ if retain_collection_types is True else list
                rv[a.name] = cf(
                    [
                        _asdict_anything(
                            i,
                            filter,
                            dict_factory,
                            retain_collection_types,
                            value_serializer,
                        )
                        for i in v
                    ]
                )
            elif isinstance(v, dict):
                df = dict_factory
                rv[a.name] = df(
                    (
                        _asdict_anything(
                            kk,
                            filter,
                            df,
                            retain_collection_types,
                            value_serializer,
                        ),
                        _asdict_anything(
                            vv,
                            filter,
                            df,
                            retain_collection_types,
                            value_serializer,
                        ),
                    )
                    for kk, vv in iteritems(v)
                )
            else:
                rv[a.name] = v
        else:
            rv[a.name] = v
    return rv


def _asdict_anything(
    val,
    filter,
    dict_factory,
    retain_collection_types,
    value_serializer,
):
    """
    ``asdict`` only works on attrs instances, this works on anything.
    """
    if getattr(val.__class__, "__attrs_attrs__", None) is not None:
        # Attrs class.
        rv = asdict(
            val,
            True,
            filter,
            dict_factory,
            retain_collection_types,
            value_serializer,
        )
    elif isinstance(val, (tuple, list, set, frozenset)):
        cf = val.__class__ if retain_collection_types is True else list
        rv = cf(
            [
                _asdict_anything(
                    i,
                    filter,
                    dict_factory,
                    retain_collection_types,
                    value_serializer,
                )
                for i in val
            ]
        )
    elif isinstance(val, dict):
        df = dict_factory
        rv = df(
            (
                _asdict_anything(
                    kk, filter, df, retain_collection_types, value_serializer
                ),
                _asdict_anything(
                    vv, filter, df, retain_collection_types, value_serializer
                ),
            )
            for kk, vv in iteritems(val)
        )
    else:
        rv = val
        if value_serializer is not None:
            rv = value_serializer(None, None, rv)

    return rv


def astuple(
    inst,
    recurse=True,
    filter=None,
    tuple_factory=tuple,
    retain_collection_types=False,
):
    """
    Return the ``attrs`` attribute values of *inst* as a tuple.

    Optionally recurse into other ``attrs``-decorated classes.

    :param inst: Instance of an ``attrs``-decorated class.
    :param bool recurse: Recurse into classes that are also
        ``attrs``-decorated.
    :param callable filter: A callable whose return code determines whether an
        attribute or element is included (``True``) or dropped (``False``). Is
        called with the `attr.Attribute` as the first argument and the
        value as the second argument.
    :param callable tuple_factory: A callable to produce tuples from. For
        example, to produce lists instead of tuples.
    :param bool retain_collection_types: Do not convert to ``list``
        or ``dict`` when encountering an attribute which type is
        ``tuple``, ``dict`` or ``set``. Only meaningful if ``recurse`` is
        ``True``.

    :rtype: return type of *tuple_factory*

    :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
        class.

    .. versionadded:: 16.2.0
    """
    attrs = fields(inst.__class__)
    rv = []
    retain = retain_collection_types  # Very long. :/
    for a in attrs:
        v = getattr(inst, a.name)
        if filter is not None and not filter(a, v):
            continue
        if recurse is True:
            if has(v.__class__):
                rv.append(
                    astuple(
                        v,
                        recurse=True,
                        filter=filter,
                        tuple_factory=tuple_factory,
                        retain_collection_types=retain,
                    )
                )
            elif isinstance(v, (tuple, list, set, frozenset)):
                cf = v.__class__ if retain is True else list
                rv.append(
                    cf(
                        [
                            astuple(
                                j,
                                recurse=True,
                                filter=filter,
                                tuple_factory=tuple_factory,
                                retain_collection_types=retain,
                            )
                            if has(j.__class__)
                            else j
                            for j in v
                        ]
                    )
                )
            elif isinstance(v, dict):
                df = v.__class__ if retain is True else dict
                rv.append(
                    df(
                        (
                            astuple(
                                kk,
                                tuple_factory=tuple_factory,
                                retain_collection_types=retain,
                            )
                            if has(kk.__class__)
                            else kk,
                            astuple(
                                vv,
                                tuple_factory=tuple_factory,
                                retain_collection_types=retain,
                            )
                            if has(vv.__class__)
                            else vv,
                        )
                        for kk, vv in iteritems(v)
                    )
                )
            else:
                rv.append(v)
        else:
            rv.append(v)

    return rv if tuple_factory is list else tuple_factory(rv)


def has(cls):
    """
    Check whether *cls* is a class with ``attrs`` attributes.

    :param type cls: Class to introspect.
    :raise TypeError: If *cls* is not a class.

    :rtype: bool
    """
    return getattr(cls, "__attrs_attrs__", None) is not None


def assoc(inst, **changes):
    """
    Copy *inst* and apply *changes*.

    :param inst: Instance of a class with ``attrs`` attributes.
    :param changes: Keyword changes in the new copy.

    :return: A copy of inst with *changes* incorporated.

    :raise attr.exceptions.AttrsAttributeNotFoundError: If *attr_name* couldn't
        be found on *cls*.
    :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
        class.

    .. deprecated:: 17.1.0
        Use `evolve` instead.
    """
    import warnings

    warnings.warn(
        "assoc is deprecated and will be removed after 2018/01.",
        DeprecationWarning,
        stacklevel=2,
    )
    new = copy.copy(inst)
    attrs = fields(inst.__class__)
    for k, v in iteritems(changes):
        a = getattr(attrs, k, NOTHING)
        if a is NOTHING:
            raise AttrsAttributeNotFoundError(
                "{k} is not an attrs attribute on {cl}.".format(
                    k=k, cl=new.__class__
                )
            )
        _obj_setattr(new, k, v)
    return new


def evolve(inst, **changes):
    """
    Create a new instance, based on *inst* with *changes* applied.

    :param inst: Instance of a class with ``attrs`` attributes.
    :param changes: Keyword changes in the new copy.

    :return: A copy of inst with *changes* incorporated.

    :raise TypeError: If *attr_name* couldn't be found in the class
        ``__init__``.
    :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
        class.

    .. versionadded:: 17.1.0
    """
    cls = inst.__class__
    attrs = fields(cls)
    for a in attrs:
        if not a.init:
            continue
        attr_name = a.name  # To deal with private attributes.
        init_name = attr_name if attr_name[0] != "_" else attr_name[1:]
        if init_name not in changes:
            changes[init_name] = getattr(inst, attr_name)

    return cls(**changes)


def resolve_types(cls, globalns=None, localns=None, attribs=None):
    """
    Resolve any strings and forward annotations in type annotations.

    This is only required if you need concrete types in `Attribute`'s *type*
    field. In other words, you don't need to resolve your types if you only
    use them for static type checking.

    With no arguments, names will be looked up in the module in which the class
    was created. If this is not what you want, e.g. if the name only exists
    inside a method, you may pass *globalns* or *localns* to specify other
    dictionaries in which to look up these names. See the docs of
    `typing.get_type_hints` for more details.

    :param type cls: Class to resolve.
    :param Optional[dict] globalns: Dictionary containing global variables.
    :param Optional[dict] localns: Dictionary containing local variables.
    :param Optional[list] attribs: List of attribs for the given class.
        This is necessary when calling from inside a ``field_transformer``
        since *cls* is not an ``attrs`` class yet.

    :raise TypeError: If *cls* is not a class.
    :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
        class and you didn't pass any attribs.
    :raise NameError: If types cannot be resolved because of missing variables.

    :returns: *cls* so you can use this function also as a class decorator.
        Please note that you have to apply it **after** `attr.s`. That means
        the decorator has to come in the line **before** `attr.s`.

    .. versionadded:: 20.1.0
    .. versionadded:: 21.1.0 *attribs*

    """
    try:
        # Since calling get_type_hints is expensive we cache whether we've
        # done it already.
        cls.__attrs_types_resolved__
    except AttributeError:
        import typing

        hints = typing.get_type_hints(cls, globalns=globalns, localns=localns)
        for field in fields(cls) if attribs is None else attribs:
            if field.name in hints:
                # Since fields have been frozen we must work around it.
                _obj_setattr(field, "type", hints[field.name])
        cls.__attrs_types_resolved__ = True

    # Return the class so you can use it as a decorator too.
    return cls
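Usage sketch for the helpers above (`asdict`, `astuple`, `evolve`); the class is hypothetical:

    import attr

    @attr.s
    class Spec(object):
        name = attr.ib()
        version = attr.ib()

    s = Spec("zlib", "1.2.11")
    assert attr.asdict(s) == {"name": "zlib", "version": "1.2.11"}
    assert attr.astuple(s) == ("zlib", "1.2.11")
    assert attr.evolve(s, version="1.2.12").version == "1.2.12"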
lib/spack/external/attr/_make.py (3052 additions, vendored, new file)
File diff suppressed because it is too large.
lib/spack/external/attr/_next_gen.py (158 additions, vendored, new file)
@@ -0,0 +1,158 @@
"""
These are Python 3.6+-only and keyword-only APIs that call `attr.s` and
`attr.ib` with different default values.
"""

from functools import partial

from attr.exceptions import UnannotatedAttributeError

from . import setters
from ._make import NOTHING, _frozen_setattrs, attrib, attrs


def define(
    maybe_cls=None,
    *,
    these=None,
    repr=None,
    hash=None,
    init=None,
    slots=True,
    frozen=False,
    weakref_slot=True,
    str=False,
    auto_attribs=None,
    kw_only=False,
    cache_hash=False,
    auto_exc=True,
    eq=None,
    order=False,
    auto_detect=True,
    getstate_setstate=None,
    on_setattr=None,
    field_transformer=None,
):
    r"""
    The only behavioral differences are the handling of the *auto_attribs*
    option:

    :param Optional[bool] auto_attribs: If set to `True` or `False`, it behaves
        exactly like `attr.s`. If left `None`, `attr.s` will try to guess:

        1. If any attributes are annotated and no unannotated `attr.ib`\ s
           are found, it assumes *auto_attribs=True*.
        2. Otherwise it assumes *auto_attribs=False* and tries to collect
           `attr.ib`\ s.

    and that mutable classes (``frozen=False``) validate on ``__setattr__``.

    .. versionadded:: 20.1.0
    """

    def do_it(cls, auto_attribs):
        return attrs(
            maybe_cls=cls,
            these=these,
            repr=repr,
            hash=hash,
            init=init,
            slots=slots,
            frozen=frozen,
            weakref_slot=weakref_slot,
            str=str,
            auto_attribs=auto_attribs,
            kw_only=kw_only,
            cache_hash=cache_hash,
            auto_exc=auto_exc,
            eq=eq,
            order=order,
            auto_detect=auto_detect,
            collect_by_mro=True,
            getstate_setstate=getstate_setstate,
            on_setattr=on_setattr,
            field_transformer=field_transformer,
        )

    def wrap(cls):
        """
        Making this a wrapper ensures this code runs during class creation.

        We also ensure that frozen-ness of classes is inherited.
        """
        nonlocal frozen, on_setattr

        had_on_setattr = on_setattr not in (None, setters.NO_OP)

        # By default, mutable classes validate on setattr.
        if frozen is False and on_setattr is None:
            on_setattr = setters.validate

        # However, if we subclass a frozen class, we inherit the immutability
        # and disable on_setattr.
        for base_cls in cls.__bases__:
            if base_cls.__setattr__ is _frozen_setattrs:
                if had_on_setattr:
                    raise ValueError(
                        "Frozen classes can't use on_setattr "
                        "(frozen-ness was inherited)."
                    )

                on_setattr = setters.NO_OP
                break

        if auto_attribs is not None:
            return do_it(cls, auto_attribs)

        try:
            return do_it(cls, True)
        except UnannotatedAttributeError:
            return do_it(cls, False)

    # maybe_cls's type depends on the usage of the decorator. It's a class
    # if it's used as `@attrs` but ``None`` if used as `@attrs()`.
    if maybe_cls is None:
        return wrap
    else:
        return wrap(maybe_cls)


mutable = define
frozen = partial(define, frozen=True, on_setattr=None)


def field(
    *,
    default=NOTHING,
    validator=None,
    repr=True,
    hash=None,
    init=True,
    metadata=None,
    converter=None,
    factory=None,
    kw_only=False,
    eq=None,
    order=None,
    on_setattr=None,
):
    """
    Identical to `attr.ib`, except keyword-only and with some arguments
    removed.

    .. versionadded:: 20.1.0
    """
    return attrib(
        default=default,
        validator=validator,
        repr=repr,
        hash=hash,
        init=init,
        metadata=metadata,
        converter=converter,
        factory=factory,
        kw_only=kw_only,
        eq=eq,
        order=order,
        on_setattr=on_setattr,
    )
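Usage sketch for the Python 3.6+ next-generation API above (`attr.define` / `attr.field`); the class is hypothetical:

    import attr

    @attr.define  # slots=True; auto_attribs is guessed from the annotations
    class Config:
        url: str
        retries: int = attr.field(default=3)

    assert Config("https://example.com").retries == 3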
lib/spack/external/attr/_version_info.py (85 additions, vendored, new file)
@@ -0,0 +1,85 @@
from __future__ import absolute_import, division, print_function

from functools import total_ordering

from ._funcs import astuple
from ._make import attrib, attrs


@total_ordering
@attrs(eq=False, order=False, slots=True, frozen=True)
class VersionInfo(object):
    """
    A version object that can be compared to tuple of length 1--4:

    >>> attr.VersionInfo(19, 1, 0, "final") <= (19, 2)
    True
    >>> attr.VersionInfo(19, 1, 0, "final") < (19, 1, 1)
    True
    >>> vi = attr.VersionInfo(19, 2, 0, "final")
    >>> vi < (19, 1, 1)
    False
    >>> vi < (19,)
    False
    >>> vi == (19, 2,)
    True
    >>> vi == (19, 2, 1)
    False

    .. versionadded:: 19.2
    """

    year = attrib(type=int)
    minor = attrib(type=int)
    micro = attrib(type=int)
    releaselevel = attrib(type=str)

    @classmethod
    def _from_version_string(cls, s):
        """
        Parse *s* and return a _VersionInfo.
        """
        v = s.split(".")
        if len(v) == 3:
            v.append("final")

        return cls(
            year=int(v[0]), minor=int(v[1]), micro=int(v[2]), releaselevel=v[3]
        )

    def _ensure_tuple(self, other):
        """
        Ensure *other* is a tuple of a valid length.

        Returns a possibly transformed *other* and ourselves as a tuple of
        the same length as *other*.
        """

        if self.__class__ is other.__class__:
            other = astuple(other)

        if not isinstance(other, tuple):
            raise NotImplementedError

        if not (1 <= len(other) <= 4):
            raise NotImplementedError

        return astuple(self)[: len(other)], other

    def __eq__(self, other):
        try:
            us, them = self._ensure_tuple(other)
        except NotImplementedError:
            return NotImplemented

        return us == them

    def __lt__(self, other):
        try:
            us, them = self._ensure_tuple(other)
        except NotImplementedError:
            return NotImplemented

        # Since alphabetically "dev0" < "final" < "post1" < "post2", we don't
        # have to do anything special with releaselevel for now.
        return us < them
lib/spack/external/attr/converters.py (111 additions, vendored, new file)
@@ -0,0 +1,111 @@
"""
Commonly useful converters.
"""

from __future__ import absolute_import, division, print_function

from ._compat import PY2
from ._make import NOTHING, Factory, pipe


if not PY2:
    import inspect
    import typing


__all__ = [
    "pipe",
    "optional",
    "default_if_none",
]


def optional(converter):
    """
    A converter that allows an attribute to be optional. An optional attribute
    is one which can be set to ``None``.

    Type annotations will be inferred from the wrapped converter's, if it
    has any.

    :param callable converter: the converter that is used for non-``None``
        values.

    .. versionadded:: 17.1.0
    """

    def optional_converter(val):
        if val is None:
            return None
        return converter(val)

    if not PY2:
        sig = None
        try:
            sig = inspect.signature(converter)
        except (ValueError, TypeError):  # inspect failed
            pass
        if sig:
            params = list(sig.parameters.values())
            if params and params[0].annotation is not inspect.Parameter.empty:
                optional_converter.__annotations__["val"] = typing.Optional[
                    params[0].annotation
                ]
            if sig.return_annotation is not inspect.Signature.empty:
                optional_converter.__annotations__["return"] = typing.Optional[
                    sig.return_annotation
                ]

    return optional_converter


def default_if_none(default=NOTHING, factory=None):
    """
    A converter that allows to replace ``None`` values by *default* or the
    result of *factory*.

    :param default: Value to be used if ``None`` is passed. Passing an instance
        of `attr.Factory` is supported, however the ``takes_self`` option
        is *not*.
    :param callable factory: A callable that takes no parameters whose result
        is used if ``None`` is passed.

    :raises TypeError: If **neither** *default* or *factory* is passed.
    :raises TypeError: If **both** *default* and *factory* are passed.
    :raises ValueError: If an instance of `attr.Factory` is passed with
        ``takes_self=True``.

    .. versionadded:: 18.2.0
    """
    if default is NOTHING and factory is None:
        raise TypeError("Must pass either `default` or `factory`.")

    if default is not NOTHING and factory is not None:
        raise TypeError(
            "Must pass either `default` or `factory` but not both."
        )

    if factory is not None:
        default = Factory(factory)

    if isinstance(default, Factory):
        if default.takes_self:
            raise ValueError(
                "`takes_self` is not supported by default_if_none."
            )

        def default_if_none_converter(val):
            if val is not None:
                return val

            return default.factory()

    else:

        def default_if_none_converter(val):
            if val is not None:
                return val

            return default

    return default_if_none_converter
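Usage sketch for the converters above (`optional` and `default_if_none`); the class is hypothetical:

    import attr

    @attr.s
    class C(object):
        port = attr.ib(converter=attr.converters.optional(int))
        name = attr.ib(converter=attr.converters.default_if_none("unknown"))

    c = C(port=None, name=None)
    assert c.port is None and c.name == "unknown"
    assert C(port="80", name="x").port == 80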
lib/spack/external/attr/exceptions.py (92 additions, vendored, new file)
@@ -0,0 +1,92 @@
from __future__ import absolute_import, division, print_function


class FrozenError(AttributeError):
    """
    A frozen/immutable instance or attribute has been attempted to be
    modified.

    It mirrors the behavior of ``namedtuples`` by using the same error message
    and subclassing `AttributeError`.

    .. versionadded:: 20.1.0
    """

    msg = "can't set attribute"
    args = [msg]


class FrozenInstanceError(FrozenError):
    """
    A frozen instance has been attempted to be modified.

    .. versionadded:: 16.1.0
    """


class FrozenAttributeError(FrozenError):
    """
    A frozen attribute has been attempted to be modified.

    .. versionadded:: 20.1.0
    """


class AttrsAttributeNotFoundError(ValueError):
    """
    An ``attrs`` function couldn't find an attribute that the user asked for.

    .. versionadded:: 16.2.0
    """


class NotAnAttrsClassError(ValueError):
    """
    A non-``attrs`` class has been passed into an ``attrs`` function.

    .. versionadded:: 16.2.0
    """


class DefaultAlreadySetError(RuntimeError):
    """
    A default has been set using ``attr.ib()`` and an attempt has been made to
    reset it using the decorator.

    .. versionadded:: 17.1.0
    """


class UnannotatedAttributeError(RuntimeError):
    """
    A class with ``auto_attribs=True`` has an ``attr.ib()`` without a type
    annotation.

    .. versionadded:: 17.3.0
    """


class PythonTooOldError(RuntimeError):
    """
    An attempt was made to use an ``attrs`` feature that requires a newer
    Python version.

    .. versionadded:: 18.2.0
    """


class NotCallableError(TypeError):
    """
    An ``attr.ib()`` requiring a callable has been set with a value
    that is not callable.

    .. versionadded:: 19.2.0
    """

    def __init__(self, msg, value):
        super(TypeError, self).__init__(msg, value)
        self.msg = msg
        self.value = value

    def __str__(self):
        return str(self.msg)
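
A small sketch of when attrs raises the frozen errors defined above (standard
attrs behavior; the class name is illustrative):

    import attr

    @attr.s(frozen=True)
    class Point(object):
        x = attr.ib()

    p = Point(1)
    try:
        p.x = 2  # frozen classes reject all mutation
    except attr.exceptions.FrozenInstanceError as e:
        print(e.msg)  # "can't set attribute"
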

52  lib/spack/external/attr/filters.py  (vendored, new file)
@@ -0,0 +1,52 @@
"""
Commonly useful filters for `attr.asdict`.
"""

from __future__ import absolute_import, division, print_function

from ._compat import isclass
from ._make import Attribute


def _split_what(what):
    """
    Returns a tuple of `frozenset`s of classes and attributes.
    """
    return (
        frozenset(cls for cls in what if isclass(cls)),
        frozenset(cls for cls in what if isinstance(cls, Attribute)),
    )


def include(*what):
    """
    Whitelist *what*.

    :param what: What to whitelist.
    :type what: `list` of `type` or `attr.Attribute`\\ s

    :rtype: `callable`
    """
    cls, attrs = _split_what(what)

    def include_(attribute, value):
        return value.__class__ in cls or attribute in attrs

    return include_


def exclude(*what):
    """
    Blacklist *what*.

    :param what: What to blacklist.
    :type what: `list` of classes or `attr.Attribute`\\ s.

    :rtype: `callable`
    """
    cls, attrs = _split_what(what)

    def exclude_(attribute, value):
        return value.__class__ not in cls and attribute not in attrs

    return exclude_
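
A short sketch of these filters with `attr.asdict` (public attrs API; the
class is illustrative):

    import attr
    from attr.filters import exclude

    @attr.s
    class User(object):
        name = attr.ib()
        password = attr.ib()

    u = User(name="alice", password="hunter2")
    # Drop the password field, matched by its Attribute object.
    attr.asdict(u, filter=exclude(attr.fields(User).password))
    # -> {"name": "alice"}
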

77  lib/spack/external/attr/setters.py  (vendored, new file)
@@ -0,0 +1,77 @@
"""
Commonly used hooks for on_setattr.
"""

from __future__ import absolute_import, division, print_function

from . import _config
from .exceptions import FrozenAttributeError


def pipe(*setters):
    """
    Run all *setters* and return the return value of the last one.

    .. versionadded:: 20.1.0
    """

    def wrapped_pipe(instance, attrib, new_value):
        rv = new_value

        for setter in setters:
            rv = setter(instance, attrib, rv)

        return rv

    return wrapped_pipe


def frozen(_, __, ___):
    """
    Prevent an attribute from being modified.

    .. versionadded:: 20.1.0
    """
    raise FrozenAttributeError()


def validate(instance, attrib, new_value):
    """
    Run *attrib*'s validator on *new_value* if it has one.

    .. versionadded:: 20.1.0
    """
    if _config._run_validators is False:
        return new_value

    v = attrib.validator
    if not v:
        return new_value

    v(instance, attrib, new_value)

    return new_value


def convert(instance, attrib, new_value):
    """
    Run *attrib*'s converter -- if it has one -- on *new_value* and return the
    result.

    .. versionadded:: 20.1.0
    """
    c = attrib.converter
    if c:
        return c(new_value)

    return new_value


NO_OP = object()
"""
Sentinel for disabling class-wide *on_setattr* hooks for certain attributes.

Does not work in `pipe` or within lists.

.. versionadded:: 20.1.0
"""
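
A sketch of wiring these hooks together via ``on_setattr`` (public attrs
20.1+ API; the class is illustrative):

    import attr
    from attr import setters

    @attr.s
    class Measured(object):
        # Convert, then validate, on every assignment -- in that order.
        value = attr.ib(
            converter=int,
            validator=attr.validators.instance_of(int),
            on_setattr=setters.pipe(setters.convert, setters.validate),
        )

    m = Measured("1")  # the converter also runs in __init__
    m.value = "2"      # pipe: convert -> 2, then validate
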

379  lib/spack/external/attr/validators.py  (vendored, new file)
@@ -0,0 +1,379 @@
"""
Commonly useful validators.
"""

from __future__ import absolute_import, division, print_function

import re

from ._make import _AndValidator, and_, attrib, attrs
from .exceptions import NotCallableError


__all__ = [
    "and_",
    "deep_iterable",
    "deep_mapping",
    "in_",
    "instance_of",
    "is_callable",
    "matches_re",
    "optional",
    "provides",
]


@attrs(repr=False, slots=True, hash=True)
class _InstanceOfValidator(object):
    type = attrib()

    def __call__(self, inst, attr, value):
        """
        We use a callable class to be able to change the ``__repr__``.
        """
        if not isinstance(value, self.type):
            raise TypeError(
                "'{name}' must be {type!r} (got {value!r} that is a "
                "{actual!r}).".format(
                    name=attr.name,
                    type=self.type,
                    actual=value.__class__,
                    value=value,
                ),
                attr,
                self.type,
                value,
            )

    def __repr__(self):
        return "<instance_of validator for type {type!r}>".format(
            type=self.type
        )


def instance_of(type):
    """
    A validator that raises a `TypeError` if the initializer is called
    with a wrong type for this particular attribute (checks are performed
    using `isinstance`, therefore it's also valid to pass a tuple of types).

    :param type: The type to check for.
    :type type: type or tuple of types

    :raises TypeError: With a human readable error message, the attribute
        (of type `attr.Attribute`), the expected type, and the value it
        got.
    """
    return _InstanceOfValidator(type)
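
For instance (public attrs API; names illustrative):

    import attr
    from attr.validators import instance_of

    @attr.s
    class Port(object):
        number = attr.ib(validator=instance_of(int))

    Port(8080)    # fine
    Port("8080")  # raises TypeError: 'number' must be <class 'int'> ...
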
@attrs(repr=False, frozen=True, slots=True)
class _MatchesReValidator(object):
    regex = attrib()
    flags = attrib()
    match_func = attrib()

    def __call__(self, inst, attr, value):
        """
        We use a callable class to be able to change the ``__repr__``.
        """
        if not self.match_func(value):
            raise ValueError(
                "'{name}' must match regex {regex!r}"
                " ({value!r} doesn't)".format(
                    name=attr.name, regex=self.regex.pattern, value=value
                ),
                attr,
                self.regex,
                value,
            )

    def __repr__(self):
        return "<matches_re validator for pattern {regex!r}>".format(
            regex=self.regex
        )


def matches_re(regex, flags=0, func=None):
    r"""
    A validator that raises `ValueError` if the initializer is called
    with a string that doesn't match *regex*.

    :param str regex: a regex string to match against
    :param int flags: flags that will be passed to the underlying re function
        (default 0)
    :param callable func: which underlying `re` function to call (options
        are `re.fullmatch`, `re.search`, `re.match`, default
        is ``None`` which means either `re.fullmatch` or an emulation of
        it on Python 2). For performance reasons, they won't be used directly
        but on a pre-`re.compile`\ ed pattern.

    .. versionadded:: 19.2.0
    """
    fullmatch = getattr(re, "fullmatch", None)
    valid_funcs = (fullmatch, None, re.search, re.match)
    if func not in valid_funcs:
        raise ValueError(
            "'func' must be one of %s."
            % (
                ", ".join(
                    sorted(
                        e and e.__name__ or "None" for e in set(valid_funcs)
                    )
                ),
            )
        )

    pattern = re.compile(regex, flags)
    if func is re.match:
        match_func = pattern.match
    elif func is re.search:
        match_func = pattern.search
    else:
        if fullmatch:
            match_func = pattern.fullmatch
        else:
            pattern = re.compile(r"(?:{})\Z".format(regex), flags)
            match_func = pattern.match

    return _MatchesReValidator(pattern, flags, match_func)
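
For instance (public attrs API; the pattern is illustrative):

    import attr
    from attr.validators import matches_re

    @attr.s
    class Release(object):
        tag = attr.ib(validator=matches_re(r"v\d+\.\d+\.\d+"))

    Release("v1.2.3")  # fine
    Release("1.2.3")   # raises ValueError: 'tag' must match regex ...
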
@attrs(repr=False, slots=True, hash=True)
class _ProvidesValidator(object):
    interface = attrib()

    def __call__(self, inst, attr, value):
        """
        We use a callable class to be able to change the ``__repr__``.
        """
        if not self.interface.providedBy(value):
            raise TypeError(
                "'{name}' must provide {interface!r} which {value!r} "
                "doesn't.".format(
                    name=attr.name, interface=self.interface, value=value
                ),
                attr,
                self.interface,
                value,
            )

    def __repr__(self):
        return "<provides validator for interface {interface!r}>".format(
            interface=self.interface
        )


def provides(interface):
    """
    A validator that raises a `TypeError` if the initializer is called
    with an object that does not provide the requested *interface* (checks
    are performed using ``interface.providedBy(value)``; see `zope.interface
    <https://zopeinterface.readthedocs.io/en/latest/>`_).

    :param interface: The interface to check for.
    :type interface: ``zope.interface.Interface``

    :raises TypeError: With a human readable error message, the attribute
        (of type `attr.Attribute`), the expected interface, and the
        value it got.
    """
    return _ProvidesValidator(interface)
@attrs(repr=False, slots=True, hash=True)
class _OptionalValidator(object):
    validator = attrib()

    def __call__(self, inst, attr, value):
        if value is None:
            return

        self.validator(inst, attr, value)

    def __repr__(self):
        return "<optional validator for {what} or None>".format(
            what=repr(self.validator)
        )


def optional(validator):
    """
    A validator that makes an attribute optional. An optional attribute is one
    which can be set to ``None`` in addition to satisfying the requirements of
    the sub-validator.

    :param validator: A validator (or a list of validators) that is used for
        non-``None`` values.
    :type validator: callable or `list` of callables.

    .. versionadded:: 15.1.0
    .. versionchanged:: 17.1.0 *validator* can be a list of validators.
    """
    if isinstance(validator, list):
        return _OptionalValidator(_AndValidator(validator))
    return _OptionalValidator(validator)
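
For instance (public attrs API; names illustrative):

    import attr
    from attr.validators import instance_of, optional

    @attr.s
    class Job(object):
        retries = attr.ib(validator=optional(instance_of(int)), default=None)

    Job()             # None is accepted
    Job(retries=3)    # non-None values hit the wrapped validator
    Job(retries="3")  # raises TypeError
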
@attrs(repr=False, slots=True, hash=True)
class _InValidator(object):
    options = attrib()

    def __call__(self, inst, attr, value):
        try:
            in_options = value in self.options
        except TypeError:  # e.g. `1 in "abc"`
            in_options = False

        if not in_options:
            raise ValueError(
                "'{name}' must be in {options!r} (got {value!r})".format(
                    name=attr.name, options=self.options, value=value
                )
            )

    def __repr__(self):
        return "<in_ validator with options {options!r}>".format(
            options=self.options
        )


def in_(options):
    """
    A validator that raises a `ValueError` if the initializer is called
    with a value that does not belong in the options provided. The check is
    performed using ``value in options``.

    :param options: Allowed options.
    :type options: list, tuple, `enum.Enum`, ...

    :raises ValueError: With a human readable error message, the attribute (of
        type `attr.Attribute`), the expected options, and the value it
        got.

    .. versionadded:: 17.1.0
    """
    return _InValidator(options)
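
For instance (public attrs API; names illustrative):

    import attr
    from attr.validators import in_

    @attr.s
    class Request(object):
        method = attr.ib(validator=in_(["GET", "POST", "PUT", "DELETE"]))

    Request("GET")    # fine
    Request("FETCH")  # raises ValueError: 'method' must be in [...]
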
@attrs(repr=False, slots=False, hash=True)
class _IsCallableValidator(object):
    def __call__(self, inst, attr, value):
        """
        We use a callable class to be able to change the ``__repr__``.
        """
        if not callable(value):
            message = (
                "'{name}' must be callable "
                "(got {value!r} that is a {actual!r})."
            )
            raise NotCallableError(
                msg=message.format(
                    name=attr.name, value=value, actual=value.__class__
                ),
                value=value,
            )

    def __repr__(self):
        return "<is_callable validator>"


def is_callable():
    """
    A validator that raises an `attr.exceptions.NotCallableError` if the
    initializer is called with a value for this particular attribute
    that is not callable.

    .. versionadded:: 19.1.0

    :raises `attr.exceptions.NotCallableError`: With a human readable error
        message containing the attribute (`attr.Attribute`) name,
        and the value it got.
    """
    return _IsCallableValidator()
@attrs(repr=False, slots=True, hash=True)
class _DeepIterable(object):
    member_validator = attrib(validator=is_callable())
    iterable_validator = attrib(
        default=None, validator=optional(is_callable())
    )

    def __call__(self, inst, attr, value):
        """
        We use a callable class to be able to change the ``__repr__``.
        """
        if self.iterable_validator is not None:
            self.iterable_validator(inst, attr, value)

        for member in value:
            self.member_validator(inst, attr, member)

    def __repr__(self):
        iterable_identifier = (
            ""
            if self.iterable_validator is None
            else " {iterable!r}".format(iterable=self.iterable_validator)
        )
        return (
            "<deep_iterable validator for{iterable_identifier}"
            " iterables of {member!r}>"
        ).format(
            iterable_identifier=iterable_identifier,
            member=self.member_validator,
        )


def deep_iterable(member_validator, iterable_validator=None):
    """
    A validator that performs deep validation of an iterable.

    :param member_validator: Validator to apply to iterable members
    :param iterable_validator: Validator to apply to iterable itself
        (optional)

    .. versionadded:: 19.1.0

    :raises TypeError: if any sub-validators fail
    """
    return _DeepIterable(member_validator, iterable_validator)
@attrs(repr=False, slots=True, hash=True)
class _DeepMapping(object):
    key_validator = attrib(validator=is_callable())
    value_validator = attrib(validator=is_callable())
    mapping_validator = attrib(default=None, validator=optional(is_callable()))

    def __call__(self, inst, attr, value):
        """
        We use a callable class to be able to change the ``__repr__``.
        """
        if self.mapping_validator is not None:
            self.mapping_validator(inst, attr, value)

        for key in value:
            self.key_validator(inst, attr, key)
            self.value_validator(inst, attr, value[key])

    def __repr__(self):
        return (
            "<deep_mapping validator for objects mapping {key!r} to {value!r}>"
        ).format(key=self.key_validator, value=self.value_validator)


def deep_mapping(key_validator, value_validator, mapping_validator=None):
    """
    A validator that performs deep validation of a dictionary.

    :param key_validator: Validator to apply to dictionary keys
    :param value_validator: Validator to apply to dictionary values
    :param mapping_validator: Validator to apply to top-level mapping
        attribute (optional)

    .. versionadded:: 19.1.0

    :raises TypeError: if any sub-validators fail
    """
    return _DeepMapping(key_validator, value_validator, mapping_validator)
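
A combined sketch of the two deep validators (public attrs API; names
illustrative):

    import attr
    from attr.validators import deep_iterable, deep_mapping, instance_of

    @attr.s
    class Inventory(object):
        # A list whose members must all be strings ...
        names = attr.ib(
            validator=deep_iterable(
                member_validator=instance_of(str),
                iterable_validator=instance_of(list),
            )
        )
        # ... and a dict mapping str keys to int values.
        counts = attr.ib(
            validator=deep_mapping(
                key_validator=instance_of(str),
                value_validator=instance_of(int),
            )
        )

    Inventory(names=["bolt"], counts={"bolt": 3})  # validates cleanly
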

104  lib/spack/external/jsonschema/README.rst  (vendored; deleted in this diff)
@@ -1,104 +0,0 @@
==========
jsonschema
==========

``jsonschema`` is an implementation of `JSON Schema <http://json-schema.org>`_
for Python (supporting 2.6+ including Python 3).

.. code-block:: python

    >>> from jsonschema import validate

    >>> # A sample schema, like what we'd get from json.load()
    >>> schema = {
    ...     "type" : "object",
    ...     "properties" : {
    ...         "price" : {"type" : "number"},
    ...         "name" : {"type" : "string"},
    ...     },
    ... }

    >>> # If no exception is raised by validate(), the instance is valid.
    >>> validate({"name" : "Eggs", "price" : 34.99}, schema)

    >>> validate(
    ...     {"name" : "Eggs", "price" : "Invalid"}, schema
    ... )  # doctest: +IGNORE_EXCEPTION_DETAIL
    Traceback (most recent call last):
        ...
    ValidationError: 'Invalid' is not of type 'number'


Features
--------

* Full support for
  `Draft 3 <https://python-jsonschema.readthedocs.org/en/latest/validate/#jsonschema.Draft3Validator>`_
  **and** `Draft 4 <https://python-jsonschema.readthedocs.org/en/latest/validate/#jsonschema.Draft4Validator>`_
  of the schema.

* `Lazy validation <https://python-jsonschema.readthedocs.org/en/latest/validate/#jsonschema.IValidator.iter_errors>`_
  that can iteratively report *all* validation errors.

* Small and extensible

* `Programmatic querying <https://python-jsonschema.readthedocs.org/en/latest/errors/#module-jsonschema>`_
  of which properties or items failed validation.


Release Notes
-------------

* A simple CLI was added for validation
* Validation errors now keep full absolute paths and absolute schema paths in
  their ``absolute_path`` and ``absolute_schema_path`` attributes. The ``path``
  and ``schema_path`` attributes are deprecated in favor of ``relative_path``
  and ``relative_schema_path``\ .

*Note:* Support for Python 3.2 was dropped in this release, and installation
now uses setuptools.


Running the Test Suite
----------------------

``jsonschema`` uses the wonderful `Tox <http://tox.readthedocs.org>`_ for its
test suite. (It really is wonderful; if for some reason you haven't heard of
it, you really should use it for your projects.)

Assuming you have ``tox`` installed (perhaps via ``pip install tox`` or your
package manager), just run ``tox`` in the directory of your source checkout to
run ``jsonschema``'s test suite on all of the versions of Python ``jsonschema``
supports. Note that you'll need to have all of those versions installed in
order to run the tests on each of them; otherwise ``tox`` will skip (and fail)
the tests on that version.

Of course you're also free to just run the tests on a single version with your
favorite test runner. The tests live in the ``jsonschema.tests`` package.


Community
---------

There's a `mailing list <https://groups.google.com/forum/#!forum/jsonschema>`_
for this implementation on Google Groups.

Please join, and feel free to send questions there.


Contributing
------------

I'm Julian Berman.

``jsonschema`` is on `GitHub <http://github.com/Julian/jsonschema>`_.

Get in touch, via GitHub or otherwise, if you've got something to contribute;
it'd be most welcome!

You can also generally find me on Freenode (nick: ``tos9``) in various
channels, including ``#python``.

If you feel overwhelmingly grateful, you can woo me with beer money on
`Gittip <https://www.gittip.com/Julian/>`_ or via Google Wallet with the email
in my GitHub profile.

31  lib/spack/external/jsonschema/__init__.py  (vendored)
@@ -4,23 +4,34 @@
The main functionality is provided by the validator classes for each of the
supported JSON Schema versions.

Most commonly, :func:`validate` is the quickest way to simply validate a given
Most commonly, `validate` is the quickest way to simply validate a given
instance under a schema, and will create a validator for you.
"""

from jsonschema.exceptions import (
    ErrorTree, FormatError, RefResolutionError, SchemaError, ValidationError
)
from jsonschema._format import (
    FormatChecker, draft3_format_checker, draft4_format_checker,
    FormatChecker,
    draft3_format_checker,
    draft4_format_checker,
    draft6_format_checker,
    draft7_format_checker,
)
from jsonschema._types import TypeChecker
from jsonschema.validators import (
    Draft3Validator, Draft4Validator, RefResolver, validate
    Draft3Validator,
    Draft4Validator,
    Draft6Validator,
    Draft7Validator,
    RefResolver,
    validate,
)


__version__ = "2.4.0"


# flake8: noqa
# try:
#     from importlib import metadata
# except ImportError: # for Python<3.8
#     import importlib_metadata as metadata
# __version__ = metadata.version("jsonschema")
# set the version manually here, as we don't install dist-info or egg-info
# files for vendored spack externals.
__version__ = '3.2.0'
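
The newly exported draft 6/7 entry points are used like the draft 3/4 ones;
a minimal sketch (public jsonschema 3.x API):

    from jsonschema import Draft7Validator, draft7_format_checker

    Draft7Validator(
        {"type": "string", "format": "ipv4"},
        format_checker=draft7_format_checker,
    ).validate("127.0.0.1")  # passes; "not-an-ip" would raise ValidationError
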

295  lib/spack/external/jsonschema/_format.py  (vendored)
@@ -1,6 +1,7 @@
import datetime
import re
import socket
import struct

from jsonschema.compat import str_types
from jsonschema.exceptions import FormatError
@@ -14,17 +15,19 @@ class FormatChecker(object):
    validation. If validation is desired however, instances of this class can
    be hooked into validators to enable format validation.

    :class:`FormatChecker` objects always return ``True`` when asked about
    `FormatChecker` objects always return ``True`` when asked about
    formats that they do not know how to validate.

    To check a custom format using a function that takes an instance and
    returns a ``bool``, use the :meth:`FormatChecker.checks` or
    :meth:`FormatChecker.cls_checks` decorators.
    returns a ``bool``, use the `FormatChecker.checks` or
    `FormatChecker.cls_checks` decorators.

    :argument iterable formats: the known formats to validate. This argument
        can be used to limit which formats will be used
        during validation.
    Arguments:

        formats (~collections.Iterable):

            The known formats to validate. This argument can be used to
            limit which formats will be used during validation.
    """

    checkers = {}
@@ -35,16 +38,27 @@ def __init__(self, formats=None):
        else:
            self.checkers = dict((k, self.checkers[k]) for k in formats)

    def __repr__(self):
        return "<FormatChecker checkers={}>".format(sorted(self.checkers))

    def checks(self, format, raises=()):
        """
        Register a decorated function as validating a new format.

        :argument str format: the format that the decorated function will check
        :argument Exception raises: the exception(s) raised by the decorated
            function when an invalid instance is found. The exception object
            will be accessible as the :attr:`ValidationError.cause` attribute
            of the resulting validation error.
        Arguments:

            format (str):

                The format that the decorated function will check.

            raises (Exception):

                The exception(s) raised by the decorated function when an
                invalid instance is found.

                The exception object will be accessible as the
                `jsonschema.exceptions.ValidationError.cause` attribute of the
                resulting validation error.
        """

        def _checks(func):
@@ -58,11 +72,20 @@ def check(self, instance, format):
        """
        Check whether the instance conforms to the given format.

        :argument instance: the instance to check
        :type: any primitive type (str, number, bool)
        :argument str format: the format that instance should conform to
        :raises: :exc:`FormatError` if instance does not conform to format
        Arguments:

            instance (*any primitive type*, i.e. str, number, bool):

                The instance to check

            format (str):

                The format that instance should conform to


        Raises:

            FormatError: if the instance does not conform to ``format``
        """

        if format not in self.checkers:
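
A sketch of the decorator this hunk documents (public jsonschema API; the
"even" format is an invented example):

    from jsonschema import FormatChecker, validate

    checker = FormatChecker()

    @checker.checks("even", raises=ValueError)
    def is_even(instance):
        # Non-integers are ignored, following the usual checker convention.
        return not isinstance(instance, int) or instance % 2 == 0

    validate(4, {"format": "even"}, format_checker=checker)  # passes
    # validate(3, ...) would raise a ValidationError instead.
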
@@ -83,11 +106,19 @@ def conforms(self, instance, format):
        """
        Check whether the instance conforms to the given format.

        :argument instance: the instance to check
        :type: any primitive type (str, number, bool)
        :argument str format: the format that instance should conform to
        :rtype: bool
        Arguments:

            instance (*any primitive type*, i.e. str, number, bool):

                The instance to check

            format (str):

                The format that instance should conform to

        Returns:

            bool: whether it conformed
        """

        try:
@@ -98,25 +129,55 @@ def conforms(self, instance, format):
        return True


_draft_checkers = {"draft3": [], "draft4": []}
draft3_format_checker = FormatChecker()
draft4_format_checker = FormatChecker()
draft6_format_checker = FormatChecker()
draft7_format_checker = FormatChecker()


def _checks_drafts(both=None, draft3=None, draft4=None, raises=()):
    draft3 = draft3 or both
    draft4 = draft4 or both
_draft_checkers = dict(
    draft3=draft3_format_checker,
    draft4=draft4_format_checker,
    draft6=draft6_format_checker,
    draft7=draft7_format_checker,
)


def _checks_drafts(
    name=None,
    draft3=None,
    draft4=None,
    draft6=None,
    draft7=None,
    raises=(),
):
    draft3 = draft3 or name
    draft4 = draft4 or name
    draft6 = draft6 or name
    draft7 = draft7 or name

    def wrap(func):
        if draft3:
            _draft_checkers["draft3"].append(draft3)
            func = FormatChecker.cls_checks(draft3, raises)(func)
            func = _draft_checkers["draft3"].checks(draft3, raises)(func)
        if draft4:
            _draft_checkers["draft4"].append(draft4)
            func = FormatChecker.cls_checks(draft4, raises)(func)
            func = _draft_checkers["draft4"].checks(draft4, raises)(func)
        if draft6:
            func = _draft_checkers["draft6"].checks(draft6, raises)(func)
        if draft7:
            func = _draft_checkers["draft7"].checks(draft7, raises)(func)

        # Oy. This is bad global state, but relied upon for now, until
        # deprecation. See https://github.com/Julian/jsonschema/issues/519
        # and test_format_checkers_come_with_defaults
        FormatChecker.cls_checks(draft7 or draft6 or draft4 or draft3, raises)(
            func,
        )
        return func
    return wrap


@_checks_drafts("email")
@_checks_drafts(name="idn-email")
@_checks_drafts(name="email")
def is_email(instance):
    if not isinstance(instance, str_types):
        return True
@@ -125,7 +186,10 @@ def is_email(instance):

_ipv4_re = re.compile(r"^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}$")

@_checks_drafts(draft3="ip-address", draft4="ipv4")

@_checks_drafts(
    draft3="ip-address", draft4="ipv4", draft6="ipv4", draft7="ipv4",
)
def is_ipv4(instance):
    if not isinstance(instance, str_types):
        return True
@@ -135,7 +199,11 @@ def is_ipv4(instance):


if hasattr(socket, "inet_pton"):
    @_checks_drafts("ipv6", raises=socket.error)
    # FIXME: Really this only should raise struct.error, but see the sadness
    # that is https://twistedmatrix.com/trac/ticket/9409
    @_checks_drafts(
        name="ipv6", raises=(socket.error, struct.error, ValueError),
    )
    def is_ipv6(instance):
        if not isinstance(instance, str_types):
            return True
@@ -144,7 +212,13 @@ def is_ipv6(instance):

_host_name_re = re.compile(r"^[A-Za-z0-9][A-Za-z0-9\.\-]{1,255}$")

@_checks_drafts(draft3="host-name", draft4="hostname")

@_checks_drafts(
    draft3="host-name",
    draft4="hostname",
    draft6="hostname",
    draft7="hostname",
)
def is_host_name(instance):
    if not isinstance(instance, str_types):
        return True
@@ -158,46 +232,103 @@ def is_host_name(instance):


try:
    import rfc3987
    # The built-in `idna` codec only implements RFC 3890, so we go elsewhere.
    import idna
except ImportError:
    pass
else:
    @_checks_drafts("uri", raises=ValueError)
    @_checks_drafts(draft7="idn-hostname", raises=idna.IDNAError)
    def is_idn_host_name(instance):
        if not isinstance(instance, str_types):
            return True
        idna.encode(instance)
        return True


try:
    import rfc3987
except ImportError:
    try:
        from rfc3986_validator import validate_rfc3986
    except ImportError:
        pass
    else:
        @_checks_drafts(name="uri")
        def is_uri(instance):
            if not isinstance(instance, str_types):
                return True
            return validate_rfc3986(instance, rule="URI")

        @_checks_drafts(
            draft6="uri-reference",
            draft7="uri-reference",
            raises=ValueError,
        )
        def is_uri_reference(instance):
            if not isinstance(instance, str_types):
                return True
            return validate_rfc3986(instance, rule="URI_reference")

else:
    @_checks_drafts(draft7="iri", raises=ValueError)
    def is_iri(instance):
        if not isinstance(instance, str_types):
            return True
        return rfc3987.parse(instance, rule="IRI")

    @_checks_drafts(draft7="iri-reference", raises=ValueError)
    def is_iri_reference(instance):
        if not isinstance(instance, str_types):
            return True
        return rfc3987.parse(instance, rule="IRI_reference")

    @_checks_drafts(name="uri", raises=ValueError)
    def is_uri(instance):
        if not isinstance(instance, str_types):
            return True
        return rfc3987.parse(instance, rule="URI")

    @_checks_drafts(
        draft6="uri-reference",
        draft7="uri-reference",
        raises=ValueError,
    )
    def is_uri_reference(instance):
        if not isinstance(instance, str_types):
            return True
        return rfc3987.parse(instance, rule="URI_reference")


try:
    import strict_rfc3339
    from strict_rfc3339 import validate_rfc3339
except ImportError:
    try:
        import isodate
        from rfc3339_validator import validate_rfc3339
    except ImportError:
        pass
    else:
        @_checks_drafts("date-time", raises=(ValueError, isodate.ISO8601Error))
        def is_date(instance):
            if not isinstance(instance, str_types):
                return True
            return isodate.parse_datetime(instance)
else:
    @_checks_drafts("date-time")
    def is_date(instance):
        if not isinstance(instance, str_types):
            return True
        return strict_rfc3339.validate_rfc3339(instance)
        validate_rfc3339 = None

if validate_rfc3339:
    @_checks_drafts(name="date-time")
    def is_datetime(instance):
        if not isinstance(instance, str_types):
            return True
        return validate_rfc3339(instance)

    @_checks_drafts(draft7="time")
    def is_time(instance):
        if not isinstance(instance, str_types):
            return True
        return is_datetime("1970-01-01T" + instance)


@_checks_drafts("regex", raises=re.error)
@_checks_drafts(name="regex", raises=re.error)
def is_regex(instance):
    if not isinstance(instance, str_types):
        return True
    return re.compile(instance)


@_checks_drafts(draft3="date", raises=ValueError)
@_checks_drafts(draft3="date", draft7="date", raises=ValueError)
def is_date(instance):
    if not isinstance(instance, str_types):
        return True
@@ -205,7 +336,7 @@ def is_date(instance):


@_checks_drafts(draft3="time", raises=ValueError)
def is_time(instance):
def is_draft3_time(instance):
    if not isinstance(instance, str_types):
        return True
    return datetime.datetime.strptime(instance, "%H:%M:%S")
@@ -219,7 +350,6 @@ def is_time(instance):
    def is_css_color_code(instance):
        return webcolors.normalize_hex(instance)


    @_checks_drafts(draft3="color", raises=(ValueError, TypeError))
    def is_css21_color(instance):
        if (
@@ -229,12 +359,67 @@ def is_css21_color(instance):
            return True
        return is_css_color_code(instance)


    def is_css3_color(instance):
        if instance.lower() in webcolors.css3_names_to_hex:
            return True
        return is_css_color_code(instance)


draft3_format_checker = FormatChecker(_draft_checkers["draft3"])
draft4_format_checker = FormatChecker(_draft_checkers["draft4"])
try:
    import jsonpointer
except ImportError:
    pass
else:
    @_checks_drafts(
        draft6="json-pointer",
        draft7="json-pointer",
        raises=jsonpointer.JsonPointerException,
    )
    def is_json_pointer(instance):
        if not isinstance(instance, str_types):
            return True
        return jsonpointer.JsonPointer(instance)

    # TODO: I don't want to maintain this, so it
    # needs to go either into jsonpointer (pending
    # https://github.com/stefankoegl/python-json-pointer/issues/34) or
    # into a new external library.
    @_checks_drafts(
        draft7="relative-json-pointer",
        raises=jsonpointer.JsonPointerException,
    )
    def is_relative_json_pointer(instance):
        # Definition taken from:
        # https://tools.ietf.org/html/draft-handrews-relative-json-pointer-01#section-3
        if not isinstance(instance, str_types):
            return True
        non_negative_integer, rest = [], ""
        for i, character in enumerate(instance):
            if character.isdigit():
                non_negative_integer.append(character)
                continue

            if not non_negative_integer:
                return False

            rest = instance[i:]
            break
        return (rest == "#") or jsonpointer.JsonPointer(rest)


try:
    import uritemplate.exceptions
except ImportError:
    pass
else:
    @_checks_drafts(
        draft6="uri-template",
        draft7="uri-template",
        raises=uritemplate.exceptions.InvalidTemplate,
    )
    def is_uri_template(
        instance,
        template_validator=uritemplate.Validator().force_balanced_braces(),
    ):
        template = uritemplate.URITemplate(instance)
        return template_validator.validate(template)

141  lib/spack/external/jsonschema/_legacy_validators.py  (vendored, new file)
@@ -0,0 +1,141 @@
from jsonschema import _utils
from jsonschema.compat import iteritems
from jsonschema.exceptions import ValidationError


def dependencies_draft3(validator, dependencies, instance, schema):
    if not validator.is_type(instance, "object"):
        return

    for property, dependency in iteritems(dependencies):
        if property not in instance:
            continue

        if validator.is_type(dependency, "object"):
            for error in validator.descend(
                instance, dependency, schema_path=property,
            ):
                yield error
        elif validator.is_type(dependency, "string"):
            if dependency not in instance:
                yield ValidationError(
                    "%r is a dependency of %r" % (dependency, property)
                )
        else:
            for each in dependency:
                if each not in instance:
                    message = "%r is a dependency of %r"
                    yield ValidationError(message % (each, property))


def disallow_draft3(validator, disallow, instance, schema):
    for disallowed in _utils.ensure_list(disallow):
        if validator.is_valid(instance, {"type": [disallowed]}):
            yield ValidationError(
                "%r is disallowed for %r" % (disallowed, instance)
            )


def extends_draft3(validator, extends, instance, schema):
    if validator.is_type(extends, "object"):
        for error in validator.descend(instance, extends):
            yield error
        return
    for index, subschema in enumerate(extends):
        for error in validator.descend(instance, subschema, schema_path=index):
            yield error


def items_draft3_draft4(validator, items, instance, schema):
    if not validator.is_type(instance, "array"):
        return

    if validator.is_type(items, "object"):
        for index, item in enumerate(instance):
            for error in validator.descend(item, items, path=index):
                yield error
    else:
        for (index, item), subschema in zip(enumerate(instance), items):
            for error in validator.descend(
                item, subschema, path=index, schema_path=index,
            ):
                yield error


def minimum_draft3_draft4(validator, minimum, instance, schema):
    if not validator.is_type(instance, "number"):
        return

    if schema.get("exclusiveMinimum", False):
        failed = instance <= minimum
        cmp = "less than or equal to"
    else:
        failed = instance < minimum
        cmp = "less than"

    if failed:
        yield ValidationError(
            "%r is %s the minimum of %r" % (instance, cmp, minimum)
        )


def maximum_draft3_draft4(validator, maximum, instance, schema):
    if not validator.is_type(instance, "number"):
        return

    if schema.get("exclusiveMaximum", False):
        failed = instance >= maximum
        cmp = "greater than or equal to"
    else:
        failed = instance > maximum
        cmp = "greater than"

    if failed:
        yield ValidationError(
            "%r is %s the maximum of %r" % (instance, cmp, maximum)
        )


def properties_draft3(validator, properties, instance, schema):
    if not validator.is_type(instance, "object"):
        return

    for property, subschema in iteritems(properties):
        if property in instance:
            for error in validator.descend(
                instance[property],
                subschema,
                path=property,
                schema_path=property,
            ):
                yield error
        elif subschema.get("required", False):
            error = ValidationError("%r is a required property" % property)
            error._set(
                validator="required",
                validator_value=subschema["required"],
                instance=instance,
                schema=schema,
            )
            error.path.appendleft(property)
            error.schema_path.extend([property, "required"])
            yield error


def type_draft3(validator, types, instance, schema):
    types = _utils.ensure_list(types)

    all_errors = []
    for index, type in enumerate(types):
        if validator.is_type(type, "object"):
            errors = list(validator.descend(instance, type, schema_path=index))
            if not errors:
                return
            all_errors.extend(errors)
        else:
            if validator.is_type(instance, type):
                return
    else:
        yield ValidationError(
            _utils.types_msg(instance, types), context=all_errors,
        )

188  lib/spack/external/jsonschema/_types.py  (vendored, new file)
@@ -0,0 +1,188 @@
import numbers

from pyrsistent import pmap
import attr

from jsonschema.compat import int_types, str_types
from jsonschema.exceptions import UndefinedTypeCheck


def is_array(checker, instance):
    return isinstance(instance, list)


def is_bool(checker, instance):
    return isinstance(instance, bool)


def is_integer(checker, instance):
    # bool inherits from int, so ensure bools aren't reported as ints
    if isinstance(instance, bool):
        return False
    return isinstance(instance, int_types)


def is_null(checker, instance):
    return instance is None


def is_number(checker, instance):
    # bool inherits from int, so ensure bools aren't reported as ints
    if isinstance(instance, bool):
        return False
    return isinstance(instance, numbers.Number)


def is_object(checker, instance):
    return isinstance(instance, dict)


def is_string(checker, instance):
    return isinstance(instance, str_types)


def is_any(checker, instance):
    return True


@attr.s(frozen=True)
class TypeChecker(object):
    """
    A ``type`` property checker.

    A `TypeChecker` performs type checking for an `IValidator`. Type
    checks to perform are updated using `TypeChecker.redefine` or
    `TypeChecker.redefine_many` and removed via `TypeChecker.remove`.
    Each of these returns a new `TypeChecker` object.

    Arguments:

        type_checkers (dict):

            The initial mapping of types to their checking functions.
    """
    _type_checkers = attr.ib(default=pmap(), converter=pmap)

    def is_type(self, instance, type):
        """
        Check if the instance is of the appropriate type.

        Arguments:

            instance (object):

                The instance to check

            type (str):

                The name of the type that is expected.

        Returns:

            bool: Whether it conformed.


        Raises:

            `jsonschema.exceptions.UndefinedTypeCheck`:
                if type is unknown to this object.
        """
        try:
            fn = self._type_checkers[type]
        except KeyError:
            raise UndefinedTypeCheck(type)

        return fn(self, instance)

    def redefine(self, type, fn):
        """
        Produce a new checker with the given type redefined.

        Arguments:

            type (str):

                The name of the type to check.

            fn (collections.Callable):

                A function taking exactly two parameters - the type
                checker calling the function and the instance to check.
                The function should return true if instance is of this
                type and false otherwise.

        Returns:

            A new `TypeChecker` instance.
        """
        return self.redefine_many({type: fn})

    def redefine_many(self, definitions=()):
        """
        Produce a new checker with the given types redefined.

        Arguments:

            definitions (dict):

                A dictionary mapping types to their checking functions.

        Returns:

            A new `TypeChecker` instance.
        """
        return attr.evolve(
            self, type_checkers=self._type_checkers.update(definitions),
        )

    def remove(self, *types):
        """
        Produce a new checker with the given types forgotten.

        Arguments:

            types (~collections.Iterable):

                the names of the types to remove.

        Returns:

            A new `TypeChecker` instance

        Raises:

            `jsonschema.exceptions.UndefinedTypeCheck`:

                if any given type is unknown to this object
        """

        checkers = self._type_checkers
        for each in types:
            try:
                checkers = checkers.remove(each)
            except KeyError:
                raise UndefinedTypeCheck(each)
        return attr.evolve(self, type_checkers=checkers)
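
A small sketch of extending a validator's type checker (public jsonschema
3.x API):

    from jsonschema import Draft7Validator, validators

    # Treat tuples as JSON arrays too.
    checker = Draft7Validator.TYPE_CHECKER.redefine(
        "array",
        lambda checker, instance: isinstance(instance, (list, tuple)),
    )
    TupleValidator = validators.extend(Draft7Validator, type_checker=checker)
    TupleValidator({"type": "array"}).validate((1, 2, 3))  # now passes
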
draft3_type_checker = TypeChecker(
    {
        u"any": is_any,
        u"array": is_array,
        u"boolean": is_bool,
        u"integer": is_integer,
        u"object": is_object,
        u"null": is_null,
        u"number": is_number,
        u"string": is_string,
    },
)
draft4_type_checker = draft3_type_checker.remove(u"any")
draft6_type_checker = draft4_type_checker.redefine(
    u"integer",
    lambda checker, instance: (
        is_integer(checker, instance) or
        isinstance(instance, float) and instance.is_integer()
    ),
)
draft7_type_checker = draft6_type_checker

29  lib/spack/external/jsonschema/_utils.py  (vendored)
@@ -3,13 +3,12 @@
import pkgutil
import re

from jsonschema.compat import str_types, MutableMapping, urlsplit
from jsonschema.compat import MutableMapping, str_types, urlsplit


class URIDict(MutableMapping):
    """
    Dictionary which uses normalized URIs as keys.

    """

    def normalize(self, uri):

@@ -41,7 +40,6 @@ def __repr__(self):
class Unset(object):
    """
    An as-of-yet unset attribute or unprovided default parameter.

    """

    def __repr__(self):

@@ -51,17 +49,15 @@ def __repr__(self):
def load_schema(name):
    """
    Load a schema from ./schemas/``name``.json and return it.

    """

    data = pkgutil.get_data(__package__, "schemas/{0}.json".format(name))
    data = pkgutil.get_data("jsonschema", "schemas/{0}.json".format(name))
    return json.loads(data.decode("utf-8"))


def indent(string, times=1):
    """
    A dumb version of :func:`textwrap.indent` from Python 3.3.

    A dumb version of `textwrap.indent` from Python 3.3.
    """

    return "\n".join(" " * (4 * times) + line for line in string.splitlines())

@@ -73,8 +69,11 @@ def format_as_index(indices):

    For example, [1, 2, "foo"] -> [1][2]["foo"]

    :type indices: sequence
    Arguments:

        indices (sequence):

            The indices to format.
    """

    if not indices:

@@ -90,7 +89,6 @@ def find_additional_properties(instance, schema):
    / or ``patternProperties``.

    Assumes ``instance`` is dict-like already.

    """

    properties = schema.get("properties", {})

@@ -105,7 +103,6 @@ def find_additional_properties(instance, schema):
def extras_msg(extras):
    """
    Create an error message for extra items or properties.

    """

    if len(extras) == 1:

@@ -123,7 +120,6 @@ def types_msg(instance, types):
    be considered to be a description of that object and used as its type.

    Otherwise the message is simply the reprs of the given ``types``.

    """

    reprs = []

@@ -143,7 +139,6 @@ def flatten(suitable_for_isinstance):
    * an arbitrary nested tree of tuples

    Return a flattened tuple of the given argument.

    """

    types = set()

@@ -163,7 +158,6 @@ def ensure_list(thing):
    Wrap ``thing`` in a list if it's a single str.

    Otherwise, return it unchanged.

    """

    if isinstance(thing, str_types):

@@ -171,10 +165,16 @@ def ensure_list(thing):
    return thing


def equal(one, two):
    """
    Check if two things are equal, but evade booleans and ints being equal.
    """
    return unbool(one) == unbool(two)


def unbool(element, true=object(), false=object()):
    """
    A hack to make True and 1 and False and 0 unique for ``uniq``.

    """

    if element is True:

@@ -191,7 +191,6 @@ def uniq(container):
    Successively tries first to rely that the elements are hashable, then
    falls back on them being sortable, and finally falls back on brute
    force.

    """

    try:

285  lib/spack/external/jsonschema/_validators.py  (vendored)
@@ -1,13 +1,18 @@
import re

from jsonschema import _utils
from jsonschema._utils import (
    ensure_list,
    equal,
    extras_msg,
    find_additional_properties,
    types_msg,
    unbool,
    uniq,
)
from jsonschema.exceptions import FormatError, ValidationError
from jsonschema.compat import iteritems


FLOAT_TOLERANCE = 10 ** -15


def patternProperties(validator, patternProperties, instance, schema):
    if not validator.is_type(instance, "object"):
        return
@@ -21,35 +26,60 @@ def patternProperties(validator, patternProperties, instance, schema):
            yield error


def propertyNames(validator, propertyNames, instance, schema):
    if not validator.is_type(instance, "object"):
        return

    for property in instance:
        for error in validator.descend(
            instance=property,
            schema=propertyNames,
        ):
            yield error


def additionalProperties(validator, aP, instance, schema):
    if not validator.is_type(instance, "object"):
        return

    extras = set(_utils.find_additional_properties(instance, schema))
    extras = set(find_additional_properties(instance, schema))

    if validator.is_type(aP, "object"):
        for extra in extras:
            for error in validator.descend(instance[extra], aP, path=extra):
                yield error
    elif not aP and extras:
        error = "Additional properties are not allowed (%s %s unexpected)"
        yield ValidationError(error % _utils.extras_msg(extras))
        if "patternProperties" in schema:
            patterns = sorted(schema["patternProperties"])
            if len(extras) == 1:
                verb = "does"
            else:
                verb = "do"
            error = "%s %s not match any of the regexes: %s" % (
                ", ".join(map(repr, sorted(extras))),
                verb,
                ", ".join(map(repr, patterns)),
            )
            yield ValidationError(error)
        else:
            error = "Additional properties are not allowed (%s %s unexpected)"
            yield ValidationError(error % extras_msg(extras))


def items(validator, items, instance, schema):
    if not validator.is_type(instance, "array"):
        return

    if validator.is_type(items, "object"):
        for index, item in enumerate(instance):
            for error in validator.descend(item, items, path=index):
                yield error
    else:
    if validator.is_type(items, "array"):
        for (index, item), subschema in zip(enumerate(instance), items):
            for error in validator.descend(
                item, subschema, path=index, schema_path=index,
            ):
                yield error
    else:
        for index, item in enumerate(instance):
            for error in validator.descend(item, items, path=index):
                yield error


def additionalItems(validator, aI, instance, schema):
@@ -68,7 +98,46 @@ def additionalItems(validator, aI, instance, schema):
        error = "Additional items are not allowed (%s %s unexpected)"
        yield ValidationError(
            error %
            _utils.extras_msg(instance[len(schema.get("items", [])):])
            extras_msg(instance[len(schema.get("items", [])):])
        )


def const(validator, const, instance, schema):
    if not equal(instance, const):
        yield ValidationError("%r was expected" % (const,))


def contains(validator, contains, instance, schema):
    if not validator.is_type(instance, "array"):
        return

    if not any(validator.is_valid(element, contains) for element in instance):
        yield ValidationError(
            "None of %r are valid under the given schema" % (instance,)
        )


def exclusiveMinimum(validator, minimum, instance, schema):
    if not validator.is_type(instance, "number"):
        return

    if instance <= minimum:
        yield ValidationError(
            "%r is less than or equal to the minimum of %r" % (
                instance, minimum,
            ),
        )


def exclusiveMaximum(validator, maximum, instance, schema):
    if not validator.is_type(instance, "number"):
        return

    if instance >= maximum:
        yield ValidationError(
            "%r is greater than or equal to the maximum of %r" % (
                instance, maximum,
            ),
        )
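
The draft 6+ keywords above can be exercised directly (public jsonschema 3.x
API):

    from jsonschema import Draft7Validator

    v = Draft7Validator({"exclusiveMinimum": 0, "const": 1})
    sorted(e.message for e in v.iter_errors(0))
    # -> ['0 is less than or equal to the minimum of 0', '1 was expected']
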
@@ -76,16 +145,9 @@ def minimum(validator, minimum, instance, schema):
    if not validator.is_type(instance, "number"):
        return

    if schema.get("exclusiveMinimum", False):
        failed = float(instance) <= minimum
        cmp = "less than or equal to"
    else:
        failed = float(instance) < minimum
        cmp = "less than"

    if failed:
    if instance < minimum:
        yield ValidationError(
            "%r is %s the minimum of %r" % (instance, cmp, minimum)
            "%r is less than the minimum of %r" % (instance, minimum)
        )


@@ -93,16 +155,9 @@ def maximum(validator, maximum, instance, schema):
    if not validator.is_type(instance, "number"):
        return

    if schema.get("exclusiveMaximum", False):
        failed = instance >= maximum
        cmp = "greater than or equal to"
    else:
        failed = instance > maximum
        cmp = "greater than"

    if failed:
    if instance > maximum:
        yield ValidationError(
            "%r is %s the maximum of %r" % (instance, cmp, maximum)
            "%r is greater than the maximum of %r" % (instance, maximum)
        )
@@ -111,8 +166,8 @@ def multipleOf(validator, dB, instance, schema):
        return

    if isinstance(dB, float):
        mod = instance % dB
        failed = (mod > FLOAT_TOLERANCE) and (dB - mod) > FLOAT_TOLERANCE
        quotient = instance / dB
        failed = int(quotient) != quotient
    else:
        failed = instance % dB
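
The rewritten float branch trades the old tolerance test for an exact
quotient check, which makes some borderline floats fail; e.g. (public
jsonschema 3.x API):

    from jsonschema import Draft7Validator

    v = Draft7Validator({"multipleOf": 0.1})
    v.is_valid(0.3)  # False: 0.3 / 0.1 is not exactly an integer in IEEE 754
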
@@ -134,9 +189,9 @@ def uniqueItems(validator, uI, instance, schema):
    if (
        uI and
        validator.is_type(instance, "array") and
        not _utils.uniq(instance)
        not uniq(instance)
    ):
        yield ValidationError("%r has non-unique elements" % instance)
        yield ValidationError("%r has non-unique elements" % (instance,))


def pattern(validator, patrn, instance, schema):
@@ -173,104 +228,52 @@ def dependencies(validator, dependencies, instance, schema):
        if property not in instance:
            continue

        if validator.is_type(dependency, "object"):
        if validator.is_type(dependency, "array"):
            for each in dependency:
                if each not in instance:
                    message = "%r is a dependency of %r"
                    yield ValidationError(message % (each, property))
        else:
            for error in validator.descend(
                instance, dependency, schema_path=property,
            ):
                yield error
        else:
            dependencies = _utils.ensure_list(dependency)
            for dependency in dependencies:
                if dependency not in instance:
                    yield ValidationError(
                        "%r is a dependency of %r" % (dependency, property)
                    )


def enum(validator, enums, instance, schema):
    if instance not in enums:
    if instance == 0 or instance == 1:
        unbooled = unbool(instance)
        if all(unbooled != unbool(each) for each in enums):
            yield ValidationError("%r is not one of %r" % (instance, enums))
    elif instance not in enums:
        yield ValidationError("%r is not one of %r" % (instance, enums))


def ref(validator, ref, instance, schema):
    with validator.resolver.resolving(ref) as resolved:
        for error in validator.descend(instance, resolved):
            yield error


def type_draft3(validator, types, instance, schema):
    types = _utils.ensure_list(types)

    all_errors = []
    for index, type in enumerate(types):
        if type == "any":
            return
        if validator.is_type(type, "object"):
            errors = list(validator.descend(instance, type, schema_path=index))
            if not errors:
                return
            all_errors.extend(errors)
        else:
            if validator.is_type(instance, type):
                return
    else:
        yield ValidationError(
            _utils.types_msg(instance, types), context=all_errors,
        )


def properties_draft3(validator, properties, instance, schema):
    if not validator.is_type(instance, "object"):
        return

    for property, subschema in iteritems(properties):
        if property in instance:
            for error in validator.descend(
                instance[property],
                subschema,
                path=property,
                schema_path=property,
            ):
    resolve = getattr(validator.resolver, "resolve", None)
    if resolve is None:
        with validator.resolver.resolving(ref) as resolved:
            for error in validator.descend(instance, resolved):
                yield error
        elif subschema.get("required", False):
            error = ValidationError("%r is a required property" % property)
            error._set(
                validator="required",
                validator_value=subschema["required"],
                instance=instance,
                schema=schema,
            )
            error.path.appendleft(property)
            error.schema_path.extend([property, "required"])
            yield error
    else:
        scope, resolved = validator.resolver.resolve(ref)
        validator.resolver.push_scope(scope)

        try:
            for error in validator.descend(instance, resolved):
                yield error
        finally:
            validator.resolver.pop_scope()


def disallow_draft3(validator, disallow, instance, schema):
    for disallowed in _utils.ensure_list(disallow):
        if validator.is_valid(instance, {"type" : [disallowed]}):
            yield ValidationError(
                "%r is disallowed for %r" % (disallowed, instance)
            )


def extends_draft3(validator, extends, instance, schema):
|
||||
if validator.is_type(extends, "object"):
|
||||
for error in validator.descend(instance, extends):
|
||||
yield error
|
||||
return
|
||||
for index, subschema in enumerate(extends):
|
||||
for error in validator.descend(instance, subschema, schema_path=index):
|
||||
yield error
|
||||
|
||||
|
||||
def type_draft4(validator, types, instance, schema):
|
||||
types = _utils.ensure_list(types)
|
||||
def type(validator, types, instance, schema):
|
||||
types = ensure_list(types)
|
||||
|
||||
if not any(validator.is_type(instance, type) for type in types):
|
||||
yield ValidationError(_utils.types_msg(instance, types))
|
||||
yield ValidationError(types_msg(instance, types))
|
||||
|
||||
|
||||
def properties_draft4(validator, properties, instance, schema):
|
||||
def properties(validator, properties, instance, schema):
|
||||
if not validator.is_type(instance, "object"):
|
||||
return
|
||||
|
||||
@@ -285,7 +288,7 @@ def properties_draft4(validator, properties, instance, schema):
|
||||
yield error
|
||||
|
||||
|
||||
def required_draft4(validator, required, instance, schema):
|
||||
def required(validator, required, instance, schema):
|
||||
if not validator.is_type(instance, "object"):
|
||||
return
|
||||
for property in required:
|
||||
@@ -293,27 +296,41 @@ def required_draft4(validator, required, instance, schema):
|
||||
yield ValidationError("%r is a required property" % property)
|
||||
|
||||
|
||||
def minProperties_draft4(validator, mP, instance, schema):
|
||||
def minProperties(validator, mP, instance, schema):
|
||||
if validator.is_type(instance, "object") and len(instance) < mP:
|
||||
yield ValidationError(
|
||||
"%r does not have enough properties" % (instance,)
|
||||
)
|
||||
|
||||
|
||||
def maxProperties_draft4(validator, mP, instance, schema):
|
||||
def maxProperties(validator, mP, instance, schema):
|
||||
if not validator.is_type(instance, "object"):
|
||||
return
|
||||
if validator.is_type(instance, "object") and len(instance) > mP:
|
||||
yield ValidationError("%r has too many properties" % (instance,))
|
||||
|
||||
|
||||
def allOf_draft4(validator, allOf, instance, schema):
|
||||
def allOf(validator, allOf, instance, schema):
|
||||
for index, subschema in enumerate(allOf):
|
||||
for error in validator.descend(instance, subschema, schema_path=index):
|
||||
yield error
|
||||
|
||||
|
||||
def oneOf_draft4(validator, oneOf, instance, schema):
|
||||
def anyOf(validator, anyOf, instance, schema):
|
||||
all_errors = []
|
||||
for index, subschema in enumerate(anyOf):
|
||||
errs = list(validator.descend(instance, subschema, schema_path=index))
|
||||
if not errs:
|
||||
break
|
||||
all_errors.extend(errs)
|
||||
else:
|
||||
yield ValidationError(
|
||||
"%r is not valid under any of the given schemas" % (instance,),
|
||||
context=all_errors,
|
||||
)
|
||||
|
||||
|
||||
def oneOf(validator, oneOf, instance, schema):
|
||||
subschemas = enumerate(oneOf)
|
||||
all_errors = []
|
||||
for index, subschema in subschemas:
|
||||
@@ -337,22 +354,20 @@ def oneOf_draft4(validator, oneOf, instance, schema):
|
||||
)
|
||||
|
||||
|
||||
def anyOf_draft4(validator, anyOf, instance, schema):
|
||||
all_errors = []
|
||||
for index, subschema in enumerate(anyOf):
|
||||
errs = list(validator.descend(instance, subschema, schema_path=index))
|
||||
if not errs:
|
||||
break
|
||||
all_errors.extend(errs)
|
||||
else:
|
||||
yield ValidationError(
|
||||
"%r is not valid under any of the given schemas" % (instance,),
|
||||
context=all_errors,
|
||||
)
|
||||
|
||||
|
||||
def not_draft4(validator, not_schema, instance, schema):
|
||||
def not_(validator, not_schema, instance, schema):
|
||||
if validator.is_valid(instance, not_schema):
|
||||
yield ValidationError(
|
||||
"%r is not allowed for %r" % (not_schema, instance)
|
||||
)
|
||||
|
||||
|
||||
def if_(validator, if_schema, instance, schema):
|
||||
if validator.is_valid(instance, if_schema):
|
||||
if u"then" in schema:
|
||||
then = schema[u"then"]
|
||||
for error in validator.descend(instance, then, schema_path="then"):
|
||||
yield error
|
||||
elif u"else" in schema:
|
||||
else_ = schema[u"else"]
|
||||
for error in validator.descend(instance, else_, schema_path="else"):
|
||||
yield error
|
||||
|
||||
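For reference, the validator functions added above (const, contains, and the standalone exclusiveMinimum/exclusiveMaximum) surface through jsonschema 3.x's public API roughly as follows. This is an illustrative sketch, not part of the diff; the schemas and instances are invented:

    from jsonschema import Draft6Validator

    # Draft 6 turns exclusiveMinimum/exclusiveMaximum into standalone numeric
    # keywords; the new validators above reject values on the boundary, while
    # the draft-4 boolean branches are dropped from minimum/maximum.
    bounded = Draft6Validator({"exclusiveMinimum": 5})
    assert bounded.is_valid(6)
    assert not bounded.is_valid(5)  # 5 <= 5 fails exclusiveMinimum

    # "const" and "contains" are likewise new in draft 6.
    assert Draft6Validator({"const": 12}).is_valid(12)
    assert Draft6Validator({"contains": {"type": "string"}}).is_valid([1, "a"])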
36 lib/spack/external/jsonschema/cli.py (vendored)
@@ -1,8 +1,12 @@
"""
The ``jsonschema`` command line.
"""
from __future__ import absolute_import
import argparse
import json
import sys

from jsonschema import __version__
from jsonschema._reflect import namedAny
from jsonschema.validators import validator_for

@@ -26,26 +30,37 @@ def _json_file(path):
    action="append",
    dest="instances",
    type=_json_file,
    help="a path to a JSON instance to validate "
         "(may be specified multiple times)",
    help=(
        "a path to a JSON instance (i.e. filename.json) "
        "to validate (may be specified multiple times)"
    ),
)
parser.add_argument(
    "-F", "--error-format",
    default="{error.instance}: {error.message}\n",
    help="the format to use for each error output message, specified in "
         "a form suitable for passing to str.format, which will be called "
         "with 'error' for each error",
    help=(
        "the format to use for each error output message, specified in "
        "a form suitable for passing to str.format, which will be called "
        "with 'error' for each error"
    ),
)
parser.add_argument(
    "-V", "--validator",
    type=_namedAnyWithDefault,
    help="the fully qualified object name of a validator to use, or, for "
         "validators that are registered with jsonschema, simply the name "
         "of the class.",
    help=(
        "the fully qualified object name of a validator to use, or, for "
        "validators that are registered with jsonschema, simply the name "
        "of the class."
    ),
)
parser.add_argument(
    "--version",
    action="version",
    version=__version__,
)
parser.add_argument(
    "schema",
    help="the JSON Schema to validate with",
    help="the JSON Schema to validate with (i.e. schema.json)",
    type=_json_file,
)

@@ -64,6 +79,9 @@ def main(args=sys.argv[1:]):
def run(arguments, stdout=sys.stdout, stderr=sys.stderr):
    error_format = arguments["error_format"]
    validator = arguments["validator"](schema=arguments["schema"])

    validator.check_schema(arguments["schema"])

    errored = False
    for instance in arguments["instances"] or ():
        for error in validator.iter_errors(instance):
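The -F/--error-format flag described above feeds each ValidationError to str.format under the name error. A minimal sketch of that formatting contract, using an invented schema and instance:

    from jsonschema import Draft4Validator

    validator = Draft4Validator({"type": "integer"})
    for error in validator.iter_errors("not-an-int"):
        # The CLI default format is "{error.instance}: {error.message}\n".
        print("{error.instance}: {error.message}".format(error=error))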
46 lib/spack/external/jsonschema/compat.py (vendored)
@@ -1,52 +1,54 @@
from __future__ import unicode_literals
import sys
"""
Python 2/3 compatibility helpers.

Note: This module is *not* public API.
"""
import contextlib
import operator
import sys


try:
    from collections import MutableMapping, Sequence  # noqa
except ImportError:
    from collections.abc import MutableMapping, Sequence  # noqa
except ImportError:
    from collections import MutableMapping, Sequence  # noqa

PY3 = sys.version_info[0] >= 3

if PY3:
    zip = zip
    from io import StringIO
    from functools import lru_cache
    from io import StringIO as NativeIO
    from urllib.parse import (
        unquote, urljoin, urlunsplit, SplitResult, urlsplit as _urlsplit
        unquote, urljoin, urlunsplit, SplitResult, urlsplit
    )
    from urllib.request import urlopen
    from urllib.request import pathname2url, urlopen
    str_types = str,
    int_types = int,
    iteritems = operator.methodcaller("items")
else:
    from itertools import izip as zip  # noqa
    from StringIO import StringIO
    from urlparse import (
        urljoin, urlunsplit, SplitResult, urlsplit as _urlsplit  # noqa
    )
    from urllib import unquote  # noqa
    from urllib2 import urlopen  # noqa
    from io import BytesIO as NativeIO
    from urlparse import urljoin, urlunsplit, SplitResult, urlsplit
    from urllib import pathname2url, unquote  # noqa
    import urllib2  # noqa
    def urlopen(*args, **kwargs):
        return contextlib.closing(urllib2.urlopen(*args, **kwargs))

    str_types = basestring
    int_types = int, long
    iteritems = operator.methodcaller("iteritems")


# On python < 3.3 fragments are not handled properly with unknown schemes
def urlsplit(url):
    scheme, netloc, path, query, fragment = _urlsplit(url)
    if "#" in path:
        path, fragment = path.split("#", 1)
    return SplitResult(scheme, netloc, path, query, fragment)
    from functools32 import lru_cache


def urldefrag(url):
    if "#" in url:
        s, n, p, q, frag = urlsplit(url)
        defrag = urlunsplit((s, n, p, q, ''))
        defrag = urlunsplit((s, n, p, q, ""))
    else:
        defrag = url
        frag = ''
        frag = ""
    return defrag, frag
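The urldefrag helper retained above splits a URI at its first "#" so the resolver can treat the fragment as a JSON pointer. Illustrative calls, assuming the module is importable as jsonschema.compat:

    from jsonschema.compat import urldefrag

    # The fragment is returned without its leading "#".
    assert urldefrag("http://example.com/schema#/definitions/foo") == (
        "http://example.com/schema", "/definitions/foo",
    )
    assert urldefrag("http://example.com/schema") == (
        "http://example.com/schema", "",
    )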
164 lib/spack/external/jsonschema/exceptions.py (vendored)
@@ -1,8 +1,13 @@
"""
Validation errors, and some surrounding helpers.
"""
from collections import defaultdict, deque
import itertools
import pprint
import textwrap

import attr

from jsonschema import _utils
from jsonschema.compat import PY3, iteritems

@@ -27,6 +32,18 @@ def __init__(
        schema_path=(),
        parent=None,
    ):
        super(_Error, self).__init__(
            message,
            validator,
            path,
            cause,
            context,
            validator_value,
            instance,
            schema,
            schema_path,
            parent,
        )
        self.message = message
        self.path = self.relative_path = deque(path)
        self.schema_path = self.relative_schema_path = deque(schema_path)
@@ -44,9 +61,6 @@ def __init__(
    def __repr__(self):
        return "<%s: %r>" % (self.__class__.__name__, self.message)

    def __str__(self):
        return unicode(self).encode("utf-8")

    def __unicode__(self):
        essential_for_verbose = (
            self.validator, self.validator_value, self.instance, self.schema,
@@ -58,22 +72,27 @@ def __unicode__(self):
        pinstance = pprint.pformat(self.instance, width=72)
        return self.message + textwrap.dedent("""

            Failed validating %r in schema%s:
            Failed validating %r in %s%s:
            %s

            On instance%s:
            On %s%s:
            %s
            """.rstrip()
        ) % (
            self.validator,
            self._word_for_schema_in_error_message,
            _utils.format_as_index(list(self.relative_schema_path)[:-1]),
            _utils.indent(pschema),
            self._word_for_instance_in_error_message,
            _utils.format_as_index(self.relative_path),
            _utils.indent(pinstance),
        )

    if PY3:
        __str__ = __unicode__
    else:
        def __str__(self):
            return unicode(self).encode("utf-8")

    @classmethod
    def create_from(cls, other):
@@ -86,7 +105,7 @@ def absolute_path(self):
            return self.relative_path

        path = deque(self.relative_path)
        path.extendleft(parent.absolute_path)
        path.extendleft(reversed(parent.absolute_path))
        return path

    @property
@@ -96,7 +115,7 @@ def absolute_schema_path(self):
            return self.relative_schema_path

        path = deque(self.relative_schema_path)
        path.extendleft(parent.absolute_schema_path)
        path.extendleft(reversed(parent.absolute_schema_path))
        return path

    def _set(self, **kwargs):
@@ -113,26 +132,63 @@ def _contents(self):


class ValidationError(_Error):
    pass
    """
    An instance was invalid under a provided schema.
    """

    _word_for_schema_in_error_message = "schema"
    _word_for_instance_in_error_message = "instance"


class SchemaError(_Error):
    pass
    """
    A schema was invalid under its corresponding metaschema.
    """

    _word_for_schema_in_error_message = "metaschema"
    _word_for_instance_in_error_message = "schema"


@attr.s(hash=True)
class RefResolutionError(Exception):
    pass
    """
    A ref could not be resolved.
    """

    _cause = attr.ib()

    def __str__(self):
        return str(self._cause)


class UndefinedTypeCheck(Exception):
    """
    A type checker was asked to check a type it did not have registered.
    """

    def __init__(self, type):
        self.type = type

    def __unicode__(self):
        return "Type %r is unknown to this type checker" % self.type

    if PY3:
        __str__ = __unicode__
    else:
        def __str__(self):
            return unicode(self).encode("utf-8")


class UnknownType(Exception):
    """
    A validator was asked to validate an instance against an unknown type.
    """

    def __init__(self, type, instance, schema):
        self.type = type
        self.instance = instance
        self.schema = schema

    def __str__(self):
        return unicode(self).encode("utf-8")

    def __unicode__(self):
        pschema = pprint.pformat(self.schema, width=72)
        pinstance = pprint.pformat(self.instance, width=72)
@@ -147,29 +203,34 @@ def __unicode__(self):

    if PY3:
        __str__ = __unicode__

    else:
        def __str__(self):
            return unicode(self).encode("utf-8")


class FormatError(Exception):
    """
    Validating a format failed.
    """

    def __init__(self, message, cause=None):
        super(FormatError, self).__init__(message, cause)
        self.message = message
        self.cause = self.__cause__ = cause

    def __str__(self):
        return self.message.encode("utf-8")

    def __unicode__(self):
        return self.message

    if PY3:
        __str__ = __unicode__
    else:
        def __str__(self):
            return self.message.encode("utf-8")


class ErrorTree(object):
    """
    ErrorTrees make it easier to check which validations failed.

    """

    _instance = _unset
@@ -184,12 +245,11 @@ def __init__(self, errors=()):
                container = container[element]
            container.errors[error.validator] = error

            self._instance = error.instance
            container._instance = error.instance

    def __contains__(self, index):
        """
        Check whether ``instance[index]`` has any errors.

        """

        return index in self._contents
@@ -201,8 +261,7 @@ def __getitem__(self, index):
        If the index is not in the instance that this tree corresponds to and
        is not known by this tree, whatever error would be raised by
        ``instance.__getitem__`` will be propagated (usually this is some
        subclass of :class:`LookupError`.

        subclass of `exceptions.LookupError`.
        """

        if self._instance is not _unset and index not in self:
@@ -210,22 +269,22 @@ def __getitem__(self, index):
        return self._contents[index]

    def __setitem__(self, index, value):
        """
        Add an error to the tree at the given ``index``.
        """
        self._contents[index] = value

    def __iter__(self):
        """
        Iterate (non-recursively) over the indices in the instance with errors.

        """

        return iter(self._contents)

    def __len__(self):
        """
        Same as :attr:`total_errors`.

        Return the `total_errors`.
        """

        return self.total_errors

    def __repr__(self):
@@ -235,7 +294,6 @@ def __repr__(self):
    def total_errors(self):
        """
        The total number of errors in the entire tree, including children.

        """

        child_errors = sum(len(tree) for _, tree in iteritems(self._contents))
@@ -243,6 +301,21 @@ def total_errors(self):


def by_relevance(weak=WEAK_MATCHES, strong=STRONG_MATCHES):
    """
    Create a key function that can be used to sort errors by relevance.

    Arguments:
        weak (set):
            a collection of validator names to consider to be "weak".
            If there are two errors at the same level of the instance
            and one is in the set of weak validator names, the other
            error will take priority. By default, :validator:`anyOf` and
            :validator:`oneOf` are considered weak validators and will
            be superseded by other same-level validation errors.

        strong (set):
            a collection of validator names to consider to be "strong"
    """
    def relevance(error):
        validator = error.validator
        return -len(error.path), validator not in weak, validator in strong
@@ -253,6 +326,43 @@ def relevance(error):


def best_match(errors, key=relevance):
    """
    Try to find an error that appears to be the best match among given errors.

    In general, errors that are higher up in the instance (i.e. for which
    `ValidationError.path` is shorter) are considered better matches,
    since they indicate "more" is wrong with the instance.

    If the resulting match is either :validator:`oneOf` or :validator:`anyOf`,
    the *opposite* assumption is made -- i.e. the deepest error is picked,
    since these validators only need to match once, and any other errors may
    not be relevant.

    Arguments:
        errors (collections.Iterable):

            the errors to select from. Do not provide a mixture of
            errors from different validation attempts (i.e. from
            different instances or schemas), since it won't produce
            sensical output.

        key (collections.Callable):

            the key to use when sorting errors. See `relevance` and
            transitively `by_relevance` for more details (the default is
            to sort with the defaults of that function). Changing the
            default is only useful if you want to change the function
            that rates errors but still want the error context descent
            done by this function.

    Returns:
        the best matching error, or ``None`` if the iterable was empty

    .. note::

        This function is a heuristic. Its return value may change for a given
        set of inputs from version to version if better heuristics are added.
    """
    errors = iter(errors)
    best = next(errors, None)
    if best is None:
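The best_match docstring above describes a heuristic; a short usage sketch consistent with it (schema and instance invented for illustration):

    from jsonschema import Draft4Validator
    from jsonschema.exceptions import best_match

    validator = Draft4Validator({"properties": {"foo": {"type": "integer"}}})
    error = best_match(validator.iter_errors({"foo": "not-an-int"}))
    if error is not None:
        # Shallower errors win by default; see by_relevance above.
        print(error.validator, list(error.path))  # type ['foo']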
@@ -80,9 +80,7 @@
        "type": "number"
    },
    "enum": {
        "minItems": 1,
        "type": "array",
        "uniqueItems": true
        "type": "array"
    },
    "exclusiveMaximum": {
        "default": false,

@@ -111,9 +111,7 @@
        "type": "string"
    },
    "enum": {
        "minItems": 1,
        "type": "array",
        "uniqueItems": true
        "type": "array"
    },
    "exclusiveMaximum": {
        "default": false,
@@ -123,6 +121,9 @@
        "default": false,
        "type": "boolean"
    },
    "format": {
        "type": "string"
    },
    "id": {
        "format": "uri",
        "type": "string"
    }
153 lib/spack/external/jsonschema/schemas/draft6.json (vendored, new file)
@@ -0,0 +1,153 @@
{
    "$schema": "http://json-schema.org/draft-06/schema#",
    "$id": "http://json-schema.org/draft-06/schema#",
    "title": "Core schema meta-schema",
    "definitions": {
        "schemaArray": {
            "type": "array",
            "minItems": 1,
            "items": { "$ref": "#" }
        },
        "nonNegativeInteger": {
            "type": "integer",
            "minimum": 0
        },
        "nonNegativeIntegerDefault0": {
            "allOf": [
                { "$ref": "#/definitions/nonNegativeInteger" },
                { "default": 0 }
            ]
        },
        "simpleTypes": {
            "enum": [
                "array",
                "boolean",
                "integer",
                "null",
                "number",
                "object",
                "string"
            ]
        },
        "stringArray": {
            "type": "array",
            "items": { "type": "string" },
            "uniqueItems": true,
            "default": []
        }
    },
    "type": ["object", "boolean"],
    "properties": {
        "$id": {
            "type": "string",
            "format": "uri-reference"
        },
        "$schema": {
            "type": "string",
            "format": "uri"
        },
        "$ref": {
            "type": "string",
            "format": "uri-reference"
        },
        "title": {
            "type": "string"
        },
        "description": {
            "type": "string"
        },
        "default": {},
        "examples": {
            "type": "array",
            "items": {}
        },
        "multipleOf": {
            "type": "number",
            "exclusiveMinimum": 0
        },
        "maximum": {
            "type": "number"
        },
        "exclusiveMaximum": {
            "type": "number"
        },
        "minimum": {
            "type": "number"
        },
        "exclusiveMinimum": {
            "type": "number"
        },
        "maxLength": { "$ref": "#/definitions/nonNegativeInteger" },
        "minLength": { "$ref": "#/definitions/nonNegativeIntegerDefault0" },
        "pattern": {
            "type": "string",
            "format": "regex"
        },
        "additionalItems": { "$ref": "#" },
        "items": {
            "anyOf": [
                { "$ref": "#" },
                { "$ref": "#/definitions/schemaArray" }
            ],
            "default": {}
        },
        "maxItems": { "$ref": "#/definitions/nonNegativeInteger" },
        "minItems": { "$ref": "#/definitions/nonNegativeIntegerDefault0" },
        "uniqueItems": {
            "type": "boolean",
            "default": false
        },
        "contains": { "$ref": "#" },
        "maxProperties": { "$ref": "#/definitions/nonNegativeInteger" },
        "minProperties": { "$ref": "#/definitions/nonNegativeIntegerDefault0" },
        "required": { "$ref": "#/definitions/stringArray" },
        "additionalProperties": { "$ref": "#" },
        "definitions": {
            "type": "object",
            "additionalProperties": { "$ref": "#" },
            "default": {}
        },
        "properties": {
            "type": "object",
            "additionalProperties": { "$ref": "#" },
            "default": {}
        },
        "patternProperties": {
            "type": "object",
            "additionalProperties": { "$ref": "#" },
            "propertyNames": { "format": "regex" },
            "default": {}
        },
        "dependencies": {
            "type": "object",
            "additionalProperties": {
                "anyOf": [
                    { "$ref": "#" },
                    { "$ref": "#/definitions/stringArray" }
                ]
            }
        },
        "propertyNames": { "$ref": "#" },
        "const": {},
        "enum": {
            "type": "array"
        },
        "type": {
            "anyOf": [
                { "$ref": "#/definitions/simpleTypes" },
                {
                    "type": "array",
                    "items": { "$ref": "#/definitions/simpleTypes" },
                    "minItems": 1,
                    "uniqueItems": true
                }
            ]
        },
        "format": { "type": "string" },
        "allOf": { "$ref": "#/definitions/schemaArray" },
        "anyOf": { "$ref": "#/definitions/schemaArray" },
        "oneOf": { "$ref": "#/definitions/schemaArray" },
        "not": { "$ref": "#" }
    },
    "default": {}
}
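One notable feature of this new draft-6 metaschema is "type": ["object", "boolean"]: a schema may itself be a bare boolean. A hedged sketch of what that permits through the jsonschema 3.x API:

    from jsonschema import Draft6Validator

    # check_schema validates a schema against the metaschema above.
    Draft6Validator.check_schema(True)  # boolean schemas are legal in draft 6
    Draft6Validator.check_schema({"contains": {"const": 3}})

    # As schemas, true accepts every instance and false rejects every instance.
    assert Draft6Validator(True).is_valid("anything")
    assert not Draft6Validator(False).is_valid("anything")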
166 lib/spack/external/jsonschema/schemas/draft7.json (vendored, new file)
@@ -0,0 +1,166 @@
{
    "$schema": "http://json-schema.org/draft-07/schema#",
    "$id": "http://json-schema.org/draft-07/schema#",
    "title": "Core schema meta-schema",
    "definitions": {
        "schemaArray": {
            "type": "array",
            "minItems": 1,
            "items": { "$ref": "#" }
        },
        "nonNegativeInteger": {
            "type": "integer",
            "minimum": 0
        },
        "nonNegativeIntegerDefault0": {
            "allOf": [
                { "$ref": "#/definitions/nonNegativeInteger" },
                { "default": 0 }
            ]
        },
        "simpleTypes": {
            "enum": [
                "array",
                "boolean",
                "integer",
                "null",
                "number",
                "object",
                "string"
            ]
        },
        "stringArray": {
            "type": "array",
            "items": { "type": "string" },
            "uniqueItems": true,
            "default": []
        }
    },
    "type": ["object", "boolean"],
    "properties": {
        "$id": {
            "type": "string",
            "format": "uri-reference"
        },
        "$schema": {
            "type": "string",
            "format": "uri"
        },
        "$ref": {
            "type": "string",
            "format": "uri-reference"
        },
        "$comment": {
            "type": "string"
        },
        "title": {
            "type": "string"
        },
        "description": {
            "type": "string"
        },
        "default": true,
        "readOnly": {
            "type": "boolean",
            "default": false
        },
        "examples": {
            "type": "array",
            "items": true
        },
        "multipleOf": {
            "type": "number",
            "exclusiveMinimum": 0
        },
        "maximum": {
            "type": "number"
        },
        "exclusiveMaximum": {
            "type": "number"
        },
        "minimum": {
            "type": "number"
        },
        "exclusiveMinimum": {
            "type": "number"
        },
        "maxLength": { "$ref": "#/definitions/nonNegativeInteger" },
        "minLength": { "$ref": "#/definitions/nonNegativeIntegerDefault0" },
        "pattern": {
            "type": "string",
            "format": "regex"
        },
        "additionalItems": { "$ref": "#" },
        "items": {
            "anyOf": [
                { "$ref": "#" },
                { "$ref": "#/definitions/schemaArray" }
            ],
            "default": true
        },
        "maxItems": { "$ref": "#/definitions/nonNegativeInteger" },
        "minItems": { "$ref": "#/definitions/nonNegativeIntegerDefault0" },
        "uniqueItems": {
            "type": "boolean",
            "default": false
        },
        "contains": { "$ref": "#" },
        "maxProperties": { "$ref": "#/definitions/nonNegativeInteger" },
        "minProperties": { "$ref": "#/definitions/nonNegativeIntegerDefault0" },
        "required": { "$ref": "#/definitions/stringArray" },
        "additionalProperties": { "$ref": "#" },
        "definitions": {
            "type": "object",
            "additionalProperties": { "$ref": "#" },
            "default": {}
        },
        "properties": {
            "type": "object",
            "additionalProperties": { "$ref": "#" },
            "default": {}
        },
        "patternProperties": {
            "type": "object",
            "additionalProperties": { "$ref": "#" },
            "propertyNames": { "format": "regex" },
            "default": {}
        },
        "dependencies": {
            "type": "object",
            "additionalProperties": {
                "anyOf": [
                    { "$ref": "#" },
                    { "$ref": "#/definitions/stringArray" }
                ]
            }
        },
        "propertyNames": { "$ref": "#" },
        "const": true,
        "enum": {
            "type": "array",
            "items": true
        },
        "type": {
            "anyOf": [
                { "$ref": "#/definitions/simpleTypes" },
                {
                    "type": "array",
                    "items": { "$ref": "#/definitions/simpleTypes" },
                    "minItems": 1,
                    "uniqueItems": true
                }
            ]
        },
        "format": { "type": "string" },
        "contentMediaType": { "type": "string" },
        "contentEncoding": { "type": "string" },
        "if": {"$ref": "#"},
        "then": {"$ref": "#"},
        "else": {"$ref": "#"},
        "allOf": { "$ref": "#/definitions/schemaArray" },
        "anyOf": { "$ref": "#/definitions/schemaArray" },
        "oneOf": { "$ref": "#/definitions/schemaArray" },
        "not": { "$ref": "#" }
    },
    "default": true
}
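This draft-7 metaschema adds the if/then/else keywords, which pair with the if_ validator added to _validators.py earlier in this diff. An illustrative sketch with an invented schema:

    from jsonschema import Draft7Validator

    schema = {
        "if": {"properties": {"kind": {"const": "int"}}},
        "then": {"properties": {"value": {"type": "integer"}}},
        "else": {"properties": {"value": {"type": "string"}}},
    }
    v = Draft7Validator(schema)
    assert v.is_valid({"kind": "int", "value": 3})
    assert v.is_valid({"kind": "str", "value": "three"})
    assert not v.is_valid({"kind": "int", "value": "three"})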
15 lib/spack/external/jsonschema/tests/compat.py (vendored)
@@ -1,15 +0,0 @@
import sys


if sys.version_info[:2] < (2, 7):  # pragma: no cover
    import unittest2 as unittest
else:
    import unittest

try:
    from unittest import mock
except ImportError:
    import mock


# flake8: noqa
110 lib/spack/external/jsonschema/tests/test_cli.py (vendored)
@@ -1,110 +0,0 @@
from jsonschema import Draft4Validator, ValidationError, cli
from jsonschema.compat import StringIO
from jsonschema.tests.compat import mock, unittest


def fake_validator(*errors):
    errors = list(reversed(errors))

    class FakeValidator(object):
        def __init__(self, *args, **kwargs):
            pass

        def iter_errors(self, instance):
            if errors:
                return errors.pop()
            return []
    return FakeValidator


class TestParser(unittest.TestCase):
    FakeValidator = fake_validator()

    def setUp(self):
        mock_open = mock.mock_open()
        patch_open = mock.patch.object(cli, "open", mock_open, create=True)
        patch_open.start()
        self.addCleanup(patch_open.stop)

        mock_json_load = mock.Mock()
        mock_json_load.return_value = {}
        patch_json_load = mock.patch("json.load")
        patch_json_load.start()
        self.addCleanup(patch_json_load.stop)

    def test_find_validator_by_fully_qualified_object_name(self):
        arguments = cli.parse_args(
            [
                "--validator",
                "jsonschema.tests.test_cli.TestParser.FakeValidator",
                "--instance", "foo.json",
                "schema.json",
            ]
        )
        self.assertIs(arguments["validator"], self.FakeValidator)

    def test_find_validator_in_jsonschema(self):
        arguments = cli.parse_args(
            [
                "--validator", "Draft4Validator",
                "--instance", "foo.json",
                "schema.json",
            ]
        )
        self.assertIs(arguments["validator"], Draft4Validator)


class TestCLI(unittest.TestCase):
    def test_successful_validation(self):
        stdout, stderr = StringIO(), StringIO()
        exit_code = cli.run(
            {
                "validator": fake_validator(),
                "schema": {},
                "instances": [1],
                "error_format": "{error.message}",
            },
            stdout=stdout,
            stderr=stderr,
        )
        self.assertFalse(stdout.getvalue())
        self.assertFalse(stderr.getvalue())
        self.assertEqual(exit_code, 0)

    def test_unsuccessful_validation(self):
        error = ValidationError("I am an error!", instance=1)
        stdout, stderr = StringIO(), StringIO()
        exit_code = cli.run(
            {
                "validator": fake_validator([error]),
                "schema": {},
                "instances": [1],
                "error_format": "{error.instance} - {error.message}",
            },
            stdout=stdout,
            stderr=stderr,
        )
        self.assertFalse(stdout.getvalue())
        self.assertEqual(stderr.getvalue(), "1 - I am an error!")
        self.assertEqual(exit_code, 1)

    def test_unsuccessful_validation_multiple_instances(self):
        first_errors = [
            ValidationError("9", instance=1),
            ValidationError("8", instance=1),
        ]
        second_errors = [ValidationError("7", instance=2)]
        stdout, stderr = StringIO(), StringIO()
        exit_code = cli.run(
            {
                "validator": fake_validator(first_errors, second_errors),
                "schema": {},
                "instances": [1, 2],
                "error_format": "{error.instance} - {error.message}\t",
            },
            stdout=stdout,
            stderr=stderr,
        )
        self.assertFalse(stdout.getvalue())
        self.assertEqual(stderr.getvalue(), "1 - 9\t1 - 8\t2 - 7\t")
        self.assertEqual(exit_code, 1)
@@ -1,382 +0,0 @@
import textwrap

from jsonschema import Draft4Validator, exceptions
from jsonschema.compat import PY3
from jsonschema.tests.compat import mock, unittest


class TestBestMatch(unittest.TestCase):
    def best_match(self, errors):
        errors = list(errors)
        best = exceptions.best_match(errors)
        reversed_best = exceptions.best_match(reversed(errors))
        self.assertEqual(
            best,
            reversed_best,
            msg="Didn't return a consistent best match!\n"
                "Got: {0}\n\nThen: {1}".format(best, reversed_best),
        )
        return best

    def test_shallower_errors_are_better_matches(self):
        validator = Draft4Validator(
            {
                "properties" : {
                    "foo" : {
                        "minProperties" : 2,
                        "properties" : {"bar" : {"type" : "object"}},
                    }
                }
            }
        )
        best = self.best_match(validator.iter_errors({"foo" : {"bar" : []}}))
        self.assertEqual(best.validator, "minProperties")

    def test_oneOf_and_anyOf_are_weak_matches(self):
        """
        A property you *must* match is probably better than one you have to
        match a part of.

        """

        validator = Draft4Validator(
            {
                "minProperties" : 2,
                "anyOf" : [{"type" : "string"}, {"type" : "number"}],
                "oneOf" : [{"type" : "string"}, {"type" : "number"}],
            }
        )
        best = self.best_match(validator.iter_errors({}))
        self.assertEqual(best.validator, "minProperties")

    def test_if_the_most_relevant_error_is_anyOf_it_is_traversed(self):
        """
        If the most relevant error is an anyOf, then we traverse its context
        and select the otherwise *least* relevant error, since in this case
        that means the most specific, deep, error inside the instance.

        I.e. since only one of the schemas must match, we look for the most
        relevant one.

        """

        validator = Draft4Validator(
            {
                "properties" : {
                    "foo" : {
                        "anyOf" : [
                            {"type" : "string"},
                            {"properties" : {"bar" : {"type" : "array"}}},
                        ],
                    },
                },
            },
        )
        best = self.best_match(validator.iter_errors({"foo" : {"bar" : 12}}))
        self.assertEqual(best.validator_value, "array")

    def test_if_the_most_relevant_error_is_oneOf_it_is_traversed(self):
        """
        If the most relevant error is an oneOf, then we traverse its context
        and select the otherwise *least* relevant error, since in this case
        that means the most specific, deep, error inside the instance.

        I.e. since only one of the schemas must match, we look for the most
        relevant one.

        """

        validator = Draft4Validator(
            {
                "properties" : {
                    "foo" : {
                        "oneOf" : [
                            {"type" : "string"},
                            {"properties" : {"bar" : {"type" : "array"}}},
                        ],
                    },
                },
            },
        )
        best = self.best_match(validator.iter_errors({"foo" : {"bar" : 12}}))
        self.assertEqual(best.validator_value, "array")

    def test_if_the_most_relevant_error_is_allOf_it_is_traversed(self):
        """
        Now, if the error is allOf, we traverse but select the *most* relevant
        error from the context, because all schemas here must match anyways.

        """

        validator = Draft4Validator(
            {
                "properties" : {
                    "foo" : {
                        "allOf" : [
                            {"type" : "string"},
                            {"properties" : {"bar" : {"type" : "array"}}},
                        ],
                    },
                },
            },
        )
        best = self.best_match(validator.iter_errors({"foo" : {"bar" : 12}}))
        self.assertEqual(best.validator_value, "string")

    def test_nested_context_for_oneOf(self):
        validator = Draft4Validator(
            {
                "properties" : {
                    "foo" : {
                        "oneOf" : [
                            {"type" : "string"},
                            {
                                "oneOf" : [
                                    {"type" : "string"},
                                    {
                                        "properties" : {
                                            "bar" : {"type" : "array"}
                                        },
                                    },
                                ],
                            },
                        ],
                    },
                },
            },
        )
        best = self.best_match(validator.iter_errors({"foo" : {"bar" : 12}}))
        self.assertEqual(best.validator_value, "array")

    def test_one_error(self):
        validator = Draft4Validator({"minProperties" : 2})
        error, = validator.iter_errors({})
        self.assertEqual(
            exceptions.best_match(validator.iter_errors({})).validator,
            "minProperties",
        )

    def test_no_errors(self):
        validator = Draft4Validator({})
        self.assertIsNone(exceptions.best_match(validator.iter_errors({})))


class TestByRelevance(unittest.TestCase):
    def test_short_paths_are_better_matches(self):
        shallow = exceptions.ValidationError("Oh no!", path=["baz"])
        deep = exceptions.ValidationError("Oh yes!", path=["foo", "bar"])
        match = max([shallow, deep], key=exceptions.relevance)
        self.assertIs(match, shallow)

        match = max([deep, shallow], key=exceptions.relevance)
        self.assertIs(match, shallow)

    def test_global_errors_are_even_better_matches(self):
        shallow = exceptions.ValidationError("Oh no!", path=[])
        deep = exceptions.ValidationError("Oh yes!", path=["foo"])

        errors = sorted([shallow, deep], key=exceptions.relevance)
        self.assertEqual(
            [list(error.path) for error in errors],
            [["foo"], []],
        )

        errors = sorted([deep, shallow], key=exceptions.relevance)
        self.assertEqual(
            [list(error.path) for error in errors],
            [["foo"], []],
        )

    def test_weak_validators_are_lower_priority(self):
        weak = exceptions.ValidationError("Oh no!", path=[], validator="a")
        normal = exceptions.ValidationError("Oh yes!", path=[], validator="b")

        best_match = exceptions.by_relevance(weak="a")

        match = max([weak, normal], key=best_match)
        self.assertIs(match, normal)

        match = max([normal, weak], key=best_match)
        self.assertIs(match, normal)

    def test_strong_validators_are_higher_priority(self):
        weak = exceptions.ValidationError("Oh no!", path=[], validator="a")
        normal = exceptions.ValidationError("Oh yes!", path=[], validator="b")
        strong = exceptions.ValidationError("Oh fine!", path=[], validator="c")

        best_match = exceptions.by_relevance(weak="a", strong="c")

        match = max([weak, normal, strong], key=best_match)
        self.assertIs(match, strong)

        match = max([strong, normal, weak], key=best_match)
        self.assertIs(match, strong)


class TestErrorTree(unittest.TestCase):
    def test_it_knows_how_many_total_errors_it_contains(self):
        errors = [mock.MagicMock() for _ in range(8)]
        tree = exceptions.ErrorTree(errors)
        self.assertEqual(tree.total_errors, 8)

    def test_it_contains_an_item_if_the_item_had_an_error(self):
        errors = [exceptions.ValidationError("a message", path=["bar"])]
        tree = exceptions.ErrorTree(errors)
        self.assertIn("bar", tree)

    def test_it_does_not_contain_an_item_if_the_item_had_no_error(self):
        errors = [exceptions.ValidationError("a message", path=["bar"])]
        tree = exceptions.ErrorTree(errors)
        self.assertNotIn("foo", tree)

    def test_validators_that_failed_appear_in_errors_dict(self):
        error = exceptions.ValidationError("a message", validator="foo")
        tree = exceptions.ErrorTree([error])
        self.assertEqual(tree.errors, {"foo" : error})

    def test_it_creates_a_child_tree_for_each_nested_path(self):
        errors = [
            exceptions.ValidationError("a bar message", path=["bar"]),
            exceptions.ValidationError("a bar -> 0 message", path=["bar", 0]),
        ]
        tree = exceptions.ErrorTree(errors)
        self.assertIn(0, tree["bar"])
        self.assertNotIn(1, tree["bar"])

    def test_children_have_their_errors_dicts_built(self):
        e1, e2 = (
            exceptions.ValidationError("1", validator="foo", path=["bar", 0]),
            exceptions.ValidationError("2", validator="quux", path=["bar", 0]),
        )
        tree = exceptions.ErrorTree([e1, e2])
        self.assertEqual(tree["bar"][0].errors, {"foo" : e1, "quux" : e2})

    def test_it_does_not_contain_subtrees_that_are_not_in_the_instance(self):
        error = exceptions.ValidationError("123", validator="foo", instance=[])
        tree = exceptions.ErrorTree([error])

        with self.assertRaises(IndexError):
            tree[0]

    def test_if_its_in_the_tree_anyhow_it_does_not_raise_an_error(self):
        """
        If a validator is dumb (like :validator:`required` in draft 3) and
        refers to a path that isn't in the instance, the tree still properly
        returns a subtree for that path.

        """

        error = exceptions.ValidationError(
            "a message", validator="foo", instance={}, path=["foo"],
        )
        tree = exceptions.ErrorTree([error])
        self.assertIsInstance(tree["foo"], exceptions.ErrorTree)


class TestErrorReprStr(unittest.TestCase):
    def make_error(self, **kwargs):
        defaults = dict(
            message=u"hello",
            validator=u"type",
            validator_value=u"string",
            instance=5,
            schema={u"type": u"string"},
        )
        defaults.update(kwargs)
        return exceptions.ValidationError(**defaults)

    def assertShows(self, expected, **kwargs):
        if PY3:
            expected = expected.replace("u'", "'")
        expected = textwrap.dedent(expected).rstrip("\n")

        error = self.make_error(**kwargs)
        message_line, _, rest = str(error).partition("\n")
        self.assertEqual(message_line, error.message)
        self.assertEqual(rest, expected)

    def test_repr(self):
        self.assertEqual(
            repr(exceptions.ValidationError(message="Hello!")),
            "<ValidationError: %r>" % "Hello!",
        )

    def test_unset_error(self):
        error = exceptions.ValidationError("message")
        self.assertEqual(str(error), "message")

        kwargs = {
            "validator": "type",
            "validator_value": "string",
            "instance": 5,
            "schema": {"type": "string"}
        }
        # Just the message should show if any of the attributes are unset
        for attr in kwargs:
            k = dict(kwargs)
            del k[attr]
            error = exceptions.ValidationError("message", **k)
            self.assertEqual(str(error), "message")

    def test_empty_paths(self):
        self.assertShows(
            """
            Failed validating u'type' in schema:
                {u'type': u'string'}

            On instance:
                5
            """,
            path=[],
            schema_path=[],
        )

    def test_one_item_paths(self):
        self.assertShows(
            """
            Failed validating u'type' in schema:
                {u'type': u'string'}

            On instance[0]:
                5
            """,
            path=[0],
            schema_path=["items"],
        )

    def test_multiple_item_paths(self):
        self.assertShows(
            """
            Failed validating u'type' in schema[u'items'][0]:
                {u'type': u'string'}

            On instance[0][u'a']:
                5
            """,
            path=[0, u"a"],
            schema_path=[u"items", 0, 1],
        )

    def test_uses_pprint(self):
        with mock.patch("pprint.pformat") as pformat:
            str(self.make_error())
            self.assertEqual(pformat.call_count, 2)  # schema + instance

    def test_str_works_with_instances_having_overriden_eq_operator(self):
        """
        Check for https://github.com/Julian/jsonschema/issues/164 which
        rendered exceptions unusable when a `ValidationError` involved
        instances with an `__eq__` method that returned truthy values.

        """

        instance = mock.MagicMock()
        error = exceptions.ValidationError(
            "a message",
            validator="foo",
            instance=instance,
            validator_value="some",
            schema="schema",
        )
        str(error)
        self.assertFalse(instance.__eq__.called)
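The deleted tests above document ErrorTree's indexing contract; the same API in a compact usage sketch (schema and instance invented):

    from jsonschema import Draft4Validator
    from jsonschema.exceptions import ErrorTree

    validator = Draft4Validator({"items": {"type": "number"}})
    tree = ErrorTree(validator.iter_errors([1, "spam", 58]))

    assert 1 in tree                 # index 1 had an error
    assert "type" in tree[1].errors  # recorded under its validator name
    assert tree.total_errors == 1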
@@ -1,63 +0,0 @@
"""
Tests for the parts of jsonschema related to the :validator:`format` property.

"""

from jsonschema.tests.compat import mock, unittest

from jsonschema import FormatError, ValidationError, FormatChecker
from jsonschema.validators import Draft4Validator


class TestFormatChecker(unittest.TestCase):
    def setUp(self):
        self.fn = mock.Mock()

    def test_it_can_validate_no_formats(self):
        checker = FormatChecker(formats=())
        self.assertFalse(checker.checkers)

    def test_it_raises_a_key_error_for_unknown_formats(self):
        with self.assertRaises(KeyError):
            FormatChecker(formats=["o noes"])

    def test_it_can_register_cls_checkers(self):
        with mock.patch.dict(FormatChecker.checkers, clear=True):
            FormatChecker.cls_checks("new")(self.fn)
            self.assertEqual(FormatChecker.checkers, {"new" : (self.fn, ())})

    def test_it_can_register_checkers(self):
        checker = FormatChecker()
        checker.checks("new")(self.fn)
        self.assertEqual(
            checker.checkers,
            dict(FormatChecker.checkers, new=(self.fn, ()))
        )

    def test_it_catches_registered_errors(self):
        checker = FormatChecker()
        cause = self.fn.side_effect = ValueError()

        checker.checks("foo", raises=ValueError)(self.fn)

        with self.assertRaises(FormatError) as cm:
            checker.check("bar", "foo")

        self.assertIs(cm.exception.cause, cause)
        self.assertIs(cm.exception.__cause__, cause)

        # Unregistered errors should not be caught
        self.fn.side_effect = AttributeError
        with self.assertRaises(AttributeError):
            checker.check("bar", "foo")

    def test_format_error_causes_become_validation_error_causes(self):
        checker = FormatChecker()
        checker.checks("foo", raises=ValueError)(self.fn)
        cause = self.fn.side_effect = ValueError()
        validator = Draft4Validator({"format" : "foo"}, format_checker=checker)

        with self.assertRaises(ValidationError) as cm:
            validator.validate("bar")

        self.assertIs(cm.exception.__cause__, cause)
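The deleted format tests exercise checker registration; the same pattern as a minimal sketch (the "even" format name is invented for illustration):

    from jsonschema import Draft4Validator, FormatChecker

    checker = FormatChecker()

    @checker.checks("even", raises=ValueError)
    def is_even(value):
        return value % 2 == 0

    validator = Draft4Validator({"format": "even"}, format_checker=checker)
    assert validator.is_valid(4)
    assert not validator.is_valid(3)  # is_even returned False -> error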
@@ -1,290 +0,0 @@
|
||||
"""
|
||||
Test runner for the JSON Schema official test suite
|
||||
|
||||
Tests comprehensive correctness of each draft's validator.
|
||||
|
||||
See https://github.com/json-schema/JSON-Schema-Test-Suite for details.
|
||||
|
||||
"""
|
||||
|
||||
from contextlib import closing
|
||||
from decimal import Decimal
|
||||
import glob
|
||||
import json
|
||||
import io
|
||||
import itertools
|
||||
import os
|
||||
import re
|
||||
import subprocess
|
||||
import sys
|
||||
|
||||
try:
|
||||
from sys import pypy_version_info
|
||||
except ImportError:
|
||||
pypy_version_info = None
|
||||
|
||||
from jsonschema import (
|
||||
FormatError, SchemaError, ValidationError, Draft3Validator,
|
||||
Draft4Validator, FormatChecker, draft3_format_checker,
|
||||
draft4_format_checker, validate,
|
||||
)
|
||||
from jsonschema.compat import PY3
|
||||
from jsonschema.tests.compat import mock, unittest
|
||||
import jsonschema
|
||||
|
||||
|
||||
REPO_ROOT = os.path.join(os.path.dirname(jsonschema.__file__), os.path.pardir)
|
||||
SUITE = os.getenv("JSON_SCHEMA_TEST_SUITE", os.path.join(REPO_ROOT, "json"))
|
||||
|
||||
if not os.path.isdir(SUITE):
|
||||
raise ValueError(
|
||||
"Can't find the JSON-Schema-Test-Suite directory. Set the "
|
||||
"'JSON_SCHEMA_TEST_SUITE' environment variable or run the tests from "
|
||||
"alongside a checkout of the suite."
|
||||
)
|
||||
|
||||
TESTS_DIR = os.path.join(SUITE, "tests")
|
||||
JSONSCHEMA_SUITE = os.path.join(SUITE, "bin", "jsonschema_suite")
|
||||
|
||||
remotes_stdout = subprocess.Popen(
|
||||
["python", JSONSCHEMA_SUITE, "remotes"], stdout=subprocess.PIPE,
|
||||
).stdout
|
||||
|
||||
with closing(remotes_stdout):
|
||||
if PY3:
|
||||
remotes_stdout = io.TextIOWrapper(remotes_stdout)
|
||||
REMOTES = json.load(remotes_stdout)
|
||||
|
||||
|
||||
def make_case(schema, data, valid, name):
|
||||
if valid:
|
||||
def test_case(self):
|
||||
kwargs = getattr(self, "validator_kwargs", {})
|
||||
validate(data, schema, cls=self.validator_class, **kwargs)
|
||||
else:
|
||||
def test_case(self):
|
||||
kwargs = getattr(self, "validator_kwargs", {})
|
||||
with self.assertRaises(ValidationError):
|
||||
validate(data, schema, cls=self.validator_class, **kwargs)
|
||||
|
||||
if not PY3:
|
||||
name = name.encode("utf-8")
|
||||
test_case.__name__ = name
|
||||
|
||||
return test_case
|
||||
|
||||
|
||||
def maybe_skip(skip, test_case, case, test):
|
||||
if skip is not None:
|
||||
reason = skip(case, test)
|
||||
if reason is not None:
|
||||
test_case = unittest.skip(reason)(test_case)
|
||||
return test_case
|
||||
|
||||
|
||||
def load_json_cases(tests_glob, ignore_glob="", basedir=TESTS_DIR, skip=None):
|
||||
if ignore_glob:
|
||||
ignore_glob = os.path.join(basedir, ignore_glob)
|
||||
|
||||
def add_test_methods(test_class):
|
||||
ignored = set(glob.iglob(ignore_glob))
|
||||
|
||||
for filename in glob.iglob(os.path.join(basedir, tests_glob)):
|
||||
if filename in ignored:
|
||||
continue
|
||||
|
||||
validating, _ = os.path.splitext(os.path.basename(filename))
|
||||
id = itertools.count(1)
|
||||
|
||||
with open(filename) as test_file:
|
||||
for case in json.load(test_file):
|
||||
for test in case["tests"]:
|
||||
name = "test_%s_%s_%s" % (
|
||||
validating,
|
||||
next(id),
|
||||
re.sub(r"[\W ]+", "_", test["description"]),
|
||||
)
|
||||
assert not hasattr(test_class, name), name
|
||||
|
||||
test_case = make_case(
|
||||
data=test["data"],
|
||||
schema=case["schema"],
|
||||
valid=test["valid"],
|
||||
name=name,
|
||||
)
|
||||
test_case = maybe_skip(skip, test_case, case, test)
|
||||
setattr(test_class, name, test_case)
|
||||
|
||||
return test_class
|
||||
return add_test_methods
|
||||


class TypesMixin(object):
    @unittest.skipIf(PY3, "In Python 3 json.load always produces unicode")
    def test_string_a_bytestring_is_a_string(self):
        self.validator_class({"type" : "string"}).validate(b"foo")


class DecimalMixin(object):
    def test_it_can_validate_with_decimals(self):
        schema = {"type" : "number"}
        validator = self.validator_class(
            schema, types={"number" : (int, float, Decimal)}
        )

        for valid in [1, 1.1, Decimal(1) / Decimal(8)]:
            validator.validate(valid)

        for invalid in ["foo", {}, [], True, None]:
            with self.assertRaises(ValidationError):
                validator.validate(invalid)


def missing_format(checker):
    def missing_format(case, test):
        format = case["schema"].get("format")
        if format not in checker.checkers:
            return "Format checker {0!r} not found.".format(format)
        elif (
            format == "date-time" and
            pypy_version_info is not None and
            pypy_version_info[:2] <= (1, 9)
        ):
            # datetime.datetime is overzealous about typechecking in <=1.9
            return "datetime.datetime is broken on this version of PyPy."
    return missing_format


class FormatMixin(object):
    def test_it_returns_true_for_formats_it_does_not_know_about(self):
        validator = self.validator_class(
            {"format" : "carrot"}, format_checker=FormatChecker(),
        )
        validator.validate("bugs")

    def test_it_does_not_validate_formats_by_default(self):
        validator = self.validator_class({})
        self.assertIsNone(validator.format_checker)

    def test_it_validates_formats_if_a_checker_is_provided(self):
        checker = mock.Mock(spec=FormatChecker)
        validator = self.validator_class(
            {"format" : "foo"}, format_checker=checker,
        )

        validator.validate("bar")

        checker.check.assert_called_once_with("bar", "foo")

        cause = ValueError()
        checker.check.side_effect = FormatError('aoeu', cause=cause)

        with self.assertRaises(ValidationError) as cm:
            validator.validate("bar")
        # Make sure original cause is attached
        self.assertIs(cm.exception.cause, cause)

    def test_it_validates_formats_of_any_type(self):
        checker = mock.Mock(spec=FormatChecker)
        validator = self.validator_class(
            {"format" : "foo"}, format_checker=checker,
        )

        validator.validate([1, 2, 3])

        checker.check.assert_called_once_with([1, 2, 3], "foo")

        cause = ValueError()
        checker.check.side_effect = FormatError('aoeu', cause=cause)

        with self.assertRaises(ValidationError) as cm:
            validator.validate([1, 2, 3])
        # Make sure original cause is attached
        self.assertIs(cm.exception.cause, cause)


if sys.maxunicode == 2 ** 16 - 1:  # This is a narrow build.
    def narrow_unicode_build(case, test):
        if "supplementary Unicode" in test["description"]:
            return "Not running surrogate Unicode case, this Python is narrow."
else:
    def narrow_unicode_build(case, test):  # This isn't, skip nothing.
        return
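
# [Editor's note - not part of the vendored file.] On a "narrow" CPython
# build, strings are stored as UTF-16 code units, so sys.maxunicode is
# 0xFFFF and supplementary characters (code points above U+FFFF) appear as
# surrogate pairs; the suite's supplementary-Unicode cases would fail there
# through no fault of the validator. Defining the skip predicate once at
# import time avoids re-checking the build type for every generated test.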


@load_json_cases(
    "draft3/*.json",
    skip=narrow_unicode_build,
    ignore_glob="draft3/refRemote.json",
)
@load_json_cases(
    "draft3/optional/format.json", skip=missing_format(draft3_format_checker)
)
@load_json_cases("draft3/optional/bignum.json")
@load_json_cases("draft3/optional/zeroTerminatedFloats.json")
class TestDraft3(unittest.TestCase, TypesMixin, DecimalMixin, FormatMixin):
    validator_class = Draft3Validator
    validator_kwargs = {"format_checker" : draft3_format_checker}

    def test_any_type_is_valid_for_type_any(self):
        validator = self.validator_class({"type" : "any"})
        validator.validate(mock.Mock())

    # TODO: we're in need of more meta schema tests
    def test_invalid_properties(self):
        with self.assertRaises(SchemaError):
            validate({}, {"properties": {"test": True}},
                     cls=self.validator_class)

    def test_minItems_invalid_string(self):
        with self.assertRaises(SchemaError):
            # needs to be an integer
            validate([1], {"minItems" : "1"}, cls=self.validator_class)


@load_json_cases(
    "draft4/*.json",
    skip=narrow_unicode_build,
    ignore_glob="draft4/refRemote.json",
)
@load_json_cases(
    "draft4/optional/format.json", skip=missing_format(draft4_format_checker)
)
@load_json_cases("draft4/optional/bignum.json")
@load_json_cases("draft4/optional/zeroTerminatedFloats.json")
class TestDraft4(unittest.TestCase, TypesMixin, DecimalMixin, FormatMixin):
    validator_class = Draft4Validator
    validator_kwargs = {"format_checker" : draft4_format_checker}

    # TODO: we're in need of more meta schema tests
    def test_invalid_properties(self):
        with self.assertRaises(SchemaError):
            validate({}, {"properties": {"test": True}},
                     cls=self.validator_class)

    def test_minItems_invalid_string(self):
        with self.assertRaises(SchemaError):
            # needs to be an integer
            validate([1], {"minItems" : "1"}, cls=self.validator_class)


class RemoteRefResolutionMixin(object):
    def setUp(self):
        patch = mock.patch("jsonschema.validators.requests")
        requests = patch.start()
        requests.get.side_effect = self.resolve
        self.addCleanup(patch.stop)

    def resolve(self, reference):
        _, _, reference = reference.partition("http://localhost:1234/")
        return mock.Mock(**{"json.return_value" : REMOTES.get(reference)})


@load_json_cases("draft3/refRemote.json")
class Draft3RemoteResolution(RemoteRefResolutionMixin, unittest.TestCase):
    validator_class = Draft3Validator


@load_json_cases("draft4/refRemote.json")
class Draft4RemoteResolution(RemoteRefResolutionMixin, unittest.TestCase):
    validator_class = Draft4Validator
@@ -1,786 +0,0 @@
from collections import deque
from contextlib import contextmanager
import json

from jsonschema import FormatChecker, ValidationError
from jsonschema.tests.compat import mock, unittest
from jsonschema.validators import (
    RefResolutionError, UnknownType, Draft3Validator,
    Draft4Validator, RefResolver, create, extend, validator_for, validate,
)


class TestCreateAndExtend(unittest.TestCase):
    def setUp(self):
        self.meta_schema = {u"properties" : {u"smelly" : {}}}
        self.smelly = mock.MagicMock()
        self.validators = {u"smelly" : self.smelly}
        self.types = {u"dict" : dict}
        self.Validator = create(
            meta_schema=self.meta_schema,
            validators=self.validators,
            default_types=self.types,
        )

        self.validator_value = 12
        self.schema = {u"smelly" : self.validator_value}
        self.validator = self.Validator(self.schema)

    def test_attrs(self):
        self.assertEqual(self.Validator.VALIDATORS, self.validators)
        self.assertEqual(self.Validator.META_SCHEMA, self.meta_schema)
        self.assertEqual(self.Validator.DEFAULT_TYPES, self.types)

    def test_init(self):
        self.assertEqual(self.validator.schema, self.schema)

    def test_iter_errors(self):
        instance = "hello"

        self.smelly.return_value = []
        self.assertEqual(list(self.validator.iter_errors(instance)), [])

        error = mock.Mock()
        self.smelly.return_value = [error]
        self.assertEqual(list(self.validator.iter_errors(instance)), [error])

        self.smelly.assert_called_with(
            self.validator, self.validator_value, instance, self.schema,
        )

    def test_if_a_version_is_provided_it_is_registered(self):
        with mock.patch("jsonschema.validators.validates") as validates:
            validates.side_effect = lambda version : lambda cls : cls
            Validator = create(meta_schema={u"id" : ""}, version="my version")
        validates.assert_called_once_with("my version")
        self.assertEqual(Validator.__name__, "MyVersionValidator")

    def test_if_a_version_is_not_provided_it_is_not_registered(self):
        with mock.patch("jsonschema.validators.validates") as validates:
            create(meta_schema={u"id" : "id"})
        self.assertFalse(validates.called)

    def test_extend(self):
        validators = dict(self.Validator.VALIDATORS)
        new = mock.Mock()

        Extended = extend(self.Validator, validators={u"a new one" : new})

        validators.update([(u"a new one", new)])
        self.assertEqual(Extended.VALIDATORS, validators)
        self.assertNotIn(u"a new one", self.Validator.VALIDATORS)

        self.assertEqual(Extended.META_SCHEMA, self.Validator.META_SCHEMA)
        self.assertEqual(Extended.DEFAULT_TYPES, self.Validator.DEFAULT_TYPES)
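
# [Editor's note - illustrative sketch, not part of the vendored file.]
# `create` builds a validator class from a meta-schema plus a mapping of
# keyword name -> callable, and `extend` copies an existing class with extra
# keywords. A validator callable takes (validator, value, instance, schema)
# and yields ValidationErrors; e.g., with a hypothetical keyword name:
#
#     def multiple_of(validator, value, instance, schema):
#         if isinstance(instance, (int, float)) and instance % value:
#             yield ValidationError(
#                 "%r is not a multiple of %r" % (instance, value))
#
#     MyValidator = extend(Draft4Validator,
#                          validators={"myMultipleOf": multiple_of})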


class TestIterErrors(unittest.TestCase):
    def setUp(self):
        self.validator = Draft3Validator({})

    def test_iter_errors(self):
        instance = [1, 2]
        schema = {
            u"disallow" : u"array",
            u"enum" : [["a", "b", "c"], ["d", "e", "f"]],
            u"minItems" : 3
        }

        got = (e.message for e in self.validator.iter_errors(instance, schema))
        expected = [
            "%r is disallowed for [1, 2]" % (schema["disallow"],),
            "[1, 2] is too short",
            "[1, 2] is not one of %r" % (schema["enum"],),
        ]
        self.assertEqual(sorted(got), sorted(expected))

    def test_iter_errors_multiple_failures_one_validator(self):
        instance = {"foo" : 2, "bar" : [1], "baz" : 15, "quux" : "spam"}
        schema = {
            u"properties" : {
                "foo" : {u"type" : "string"},
                "bar" : {u"minItems" : 2},
                "baz" : {u"maximum" : 10, u"enum" : [2, 4, 6, 8]},
            }
        }

        errors = list(self.validator.iter_errors(instance, schema))
        self.assertEqual(len(errors), 4)


class TestValidationErrorMessages(unittest.TestCase):
    def message_for(self, instance, schema, *args, **kwargs):
        kwargs.setdefault("cls", Draft3Validator)
        with self.assertRaises(ValidationError) as e:
            validate(instance, schema, *args, **kwargs)
        return e.exception.message

    def test_single_type_failure(self):
        message = self.message_for(instance=1, schema={u"type" : u"string"})
        self.assertEqual(message, "1 is not of type %r" % u"string")

    def test_single_type_list_failure(self):
        message = self.message_for(instance=1, schema={u"type" : [u"string"]})
        self.assertEqual(message, "1 is not of type %r" % u"string")

    def test_multiple_type_failure(self):
        types = u"string", u"object"
        message = self.message_for(instance=1, schema={u"type" : list(types)})
        self.assertEqual(message, "1 is not of type %r, %r" % types)

    def test_object_without_title_type_failure(self):
        type = {u"type" : [{u"minimum" : 3}]}
        message = self.message_for(instance=1, schema={u"type" : [type]})
        self.assertEqual(message, "1 is not of type %r" % (type,))

    def test_object_with_name_type_failure(self):
        name = "Foo"
        schema = {u"type" : [{u"name" : name, u"minimum" : 3}]}
        message = self.message_for(instance=1, schema=schema)
        self.assertEqual(message, "1 is not of type %r" % (name,))

    def test_minimum(self):
        message = self.message_for(instance=1, schema={"minimum" : 2})
        self.assertEqual(message, "1 is less than the minimum of 2")

    def test_maximum(self):
        message = self.message_for(instance=1, schema={"maximum" : 0})
        self.assertEqual(message, "1 is greater than the maximum of 0")

    def test_dependencies_failure_has_single_element_not_list(self):
        depend, on = "bar", "foo"
        schema = {u"dependencies" : {depend : on}}
        message = self.message_for({"bar" : 2}, schema)
        self.assertEqual(message, "%r is a dependency of %r" % (on, depend))

    def test_additionalItems_single_failure(self):
        message = self.message_for(
            [2], {u"items" : [], u"additionalItems" : False},
        )
        self.assertIn("(2 was unexpected)", message)

    def test_additionalItems_multiple_failures(self):
        message = self.message_for(
            [1, 2, 3], {u"items" : [], u"additionalItems" : False}
        )
        self.assertIn("(1, 2, 3 were unexpected)", message)

    def test_additionalProperties_single_failure(self):
        additional = "foo"
        schema = {u"additionalProperties" : False}
        message = self.message_for({additional : 2}, schema)
        self.assertIn("(%r was unexpected)" % (additional,), message)

    def test_additionalProperties_multiple_failures(self):
        schema = {u"additionalProperties" : False}
        message = self.message_for(dict.fromkeys(["foo", "bar"]), schema)

        self.assertIn(repr("foo"), message)
        self.assertIn(repr("bar"), message)
        self.assertIn("were unexpected)", message)

    def test_invalid_format_default_message(self):
        checker = FormatChecker(formats=())
        check_fn = mock.Mock(return_value=False)
        checker.checks(u"thing")(check_fn)

        schema = {u"format" : u"thing"}
        message = self.message_for("bla", schema, format_checker=checker)

        self.assertIn(repr("bla"), message)
        self.assertIn(repr("thing"), message)
        self.assertIn("is not a", message)


class TestValidationErrorDetails(unittest.TestCase):
    # TODO: These really need unit tests for each individual validator, rather
    #       than just these higher level tests.
    def test_anyOf(self):
        instance = 5
        schema = {
            "anyOf": [
                {"minimum": 20},
                {"type": "string"}
            ]
        }

        validator = Draft4Validator(schema)
        errors = list(validator.iter_errors(instance))
        self.assertEqual(len(errors), 1)
        e = errors[0]

        self.assertEqual(e.validator, "anyOf")
        self.assertEqual(e.validator_value, schema["anyOf"])
        self.assertEqual(e.instance, instance)
        self.assertEqual(e.schema, schema)
        self.assertIsNone(e.parent)

        self.assertEqual(e.path, deque([]))
        self.assertEqual(e.relative_path, deque([]))
        self.assertEqual(e.absolute_path, deque([]))

        self.assertEqual(e.schema_path, deque(["anyOf"]))
        self.assertEqual(e.relative_schema_path, deque(["anyOf"]))
        self.assertEqual(e.absolute_schema_path, deque(["anyOf"]))

        self.assertEqual(len(e.context), 2)

        e1, e2 = sorted_errors(e.context)

        self.assertEqual(e1.validator, "minimum")
        self.assertEqual(e1.validator_value, schema["anyOf"][0]["minimum"])
        self.assertEqual(e1.instance, instance)
        self.assertEqual(e1.schema, schema["anyOf"][0])
        self.assertIs(e1.parent, e)

        self.assertEqual(e1.path, deque([]))
        self.assertEqual(e1.absolute_path, deque([]))
        self.assertEqual(e1.relative_path, deque([]))

        self.assertEqual(e1.schema_path, deque([0, "minimum"]))
        self.assertEqual(e1.relative_schema_path, deque([0, "minimum"]))
        self.assertEqual(
            e1.absolute_schema_path, deque(["anyOf", 0, "minimum"]),
        )

        self.assertFalse(e1.context)

        self.assertEqual(e2.validator, "type")
        self.assertEqual(e2.validator_value, schema["anyOf"][1]["type"])
        self.assertEqual(e2.instance, instance)
        self.assertEqual(e2.schema, schema["anyOf"][1])
        self.assertIs(e2.parent, e)

        self.assertEqual(e2.path, deque([]))
        self.assertEqual(e2.relative_path, deque([]))
        self.assertEqual(e2.absolute_path, deque([]))

        self.assertEqual(e2.schema_path, deque([1, "type"]))
        self.assertEqual(e2.relative_schema_path, deque([1, "type"]))
        self.assertEqual(e2.absolute_schema_path, deque(["anyOf", 1, "type"]))

        self.assertEqual(len(e2.context), 0)

    def test_type(self):
        instance = {"foo": 1}
        schema = {
            "type": [
                {"type": "integer"},
                {
                    "type": "object",
                    "properties": {
                        "foo": {"enum": [2]}
                    }
                }
            ]
        }

        validator = Draft3Validator(schema)
        errors = list(validator.iter_errors(instance))
        self.assertEqual(len(errors), 1)
        e = errors[0]

        self.assertEqual(e.validator, "type")
        self.assertEqual(e.validator_value, schema["type"])
        self.assertEqual(e.instance, instance)
        self.assertEqual(e.schema, schema)
        self.assertIsNone(e.parent)

        self.assertEqual(e.path, deque([]))
        self.assertEqual(e.relative_path, deque([]))
        self.assertEqual(e.absolute_path, deque([]))

        self.assertEqual(e.schema_path, deque(["type"]))
        self.assertEqual(e.relative_schema_path, deque(["type"]))
        self.assertEqual(e.absolute_schema_path, deque(["type"]))

        self.assertEqual(len(e.context), 2)

        e1, e2 = sorted_errors(e.context)

        self.assertEqual(e1.validator, "type")
        self.assertEqual(e1.validator_value, schema["type"][0]["type"])
        self.assertEqual(e1.instance, instance)
        self.assertEqual(e1.schema, schema["type"][0])
        self.assertIs(e1.parent, e)

        self.assertEqual(e1.path, deque([]))
        self.assertEqual(e1.relative_path, deque([]))
        self.assertEqual(e1.absolute_path, deque([]))

        self.assertEqual(e1.schema_path, deque([0, "type"]))
        self.assertEqual(e1.relative_schema_path, deque([0, "type"]))
        self.assertEqual(e1.absolute_schema_path, deque(["type", 0, "type"]))

        self.assertFalse(e1.context)

        self.assertEqual(e2.validator, "enum")
        self.assertEqual(e2.validator_value, [2])
        self.assertEqual(e2.instance, 1)
        self.assertEqual(e2.schema, {u"enum" : [2]})
        self.assertIs(e2.parent, e)

        self.assertEqual(e2.path, deque(["foo"]))
        self.assertEqual(e2.relative_path, deque(["foo"]))
        self.assertEqual(e2.absolute_path, deque(["foo"]))

        self.assertEqual(
            e2.schema_path, deque([1, "properties", "foo", "enum"]),
        )
        self.assertEqual(
            e2.relative_schema_path, deque([1, "properties", "foo", "enum"]),
        )
        self.assertEqual(
            e2.absolute_schema_path,
            deque(["type", 1, "properties", "foo", "enum"]),
        )

        self.assertFalse(e2.context)

    def test_single_nesting(self):
        instance = {"foo" : 2, "bar" : [1], "baz" : 15, "quux" : "spam"}
        schema = {
            "properties" : {
                "foo" : {"type" : "string"},
                "bar" : {"minItems" : 2},
                "baz" : {"maximum" : 10, "enum" : [2, 4, 6, 8]},
            }
        }

        validator = Draft3Validator(schema)
        errors = validator.iter_errors(instance)
        e1, e2, e3, e4 = sorted_errors(errors)

        self.assertEqual(e1.path, deque(["bar"]))
        self.assertEqual(e2.path, deque(["baz"]))
        self.assertEqual(e3.path, deque(["baz"]))
        self.assertEqual(e4.path, deque(["foo"]))

        self.assertEqual(e1.relative_path, deque(["bar"]))
        self.assertEqual(e2.relative_path, deque(["baz"]))
        self.assertEqual(e3.relative_path, deque(["baz"]))
        self.assertEqual(e4.relative_path, deque(["foo"]))

        self.assertEqual(e1.absolute_path, deque(["bar"]))
        self.assertEqual(e2.absolute_path, deque(["baz"]))
        self.assertEqual(e3.absolute_path, deque(["baz"]))
        self.assertEqual(e4.absolute_path, deque(["foo"]))

        self.assertEqual(e1.validator, "minItems")
        self.assertEqual(e2.validator, "enum")
        self.assertEqual(e3.validator, "maximum")
        self.assertEqual(e4.validator, "type")

    def test_multiple_nesting(self):
        instance = [1, {"foo" : 2, "bar" : {"baz" : [1]}}, "quux"]
        schema = {
            "type" : "string",
            "items" : {
                "type" : ["string", "object"],
                "properties" : {
                    "foo" : {"enum" : [1, 3]},
                    "bar" : {
                        "type" : "array",
                        "properties" : {
                            "bar" : {"required" : True},
                            "baz" : {"minItems" : 2},
                        }
                    }
                }
            }
        }

        validator = Draft3Validator(schema)
        errors = validator.iter_errors(instance)
        e1, e2, e3, e4, e5, e6 = sorted_errors(errors)

        self.assertEqual(e1.path, deque([]))
        self.assertEqual(e2.path, deque([0]))
        self.assertEqual(e3.path, deque([1, "bar"]))
        self.assertEqual(e4.path, deque([1, "bar", "bar"]))
        self.assertEqual(e5.path, deque([1, "bar", "baz"]))
        self.assertEqual(e6.path, deque([1, "foo"]))

        self.assertEqual(e1.schema_path, deque(["type"]))
        self.assertEqual(e2.schema_path, deque(["items", "type"]))
        self.assertEqual(
            list(e3.schema_path), ["items", "properties", "bar", "type"],
        )
        self.assertEqual(
            list(e4.schema_path),
            ["items", "properties", "bar", "properties", "bar", "required"],
        )
        self.assertEqual(
            list(e5.schema_path),
            ["items", "properties", "bar", "properties", "baz", "minItems"]
        )
        self.assertEqual(
            list(e6.schema_path), ["items", "properties", "foo", "enum"],
        )

        self.assertEqual(e1.validator, "type")
        self.assertEqual(e2.validator, "type")
        self.assertEqual(e3.validator, "type")
        self.assertEqual(e4.validator, "required")
        self.assertEqual(e5.validator, "minItems")
        self.assertEqual(e6.validator, "enum")

    def test_additionalProperties(self):
        instance = {"bar": "bar", "foo": 2}
        schema = {
            "additionalProperties" : {"type": "integer", "minimum": 5}
        }

        validator = Draft3Validator(schema)
        errors = validator.iter_errors(instance)
        e1, e2 = sorted_errors(errors)

        self.assertEqual(e1.path, deque(["bar"]))
        self.assertEqual(e2.path, deque(["foo"]))

        self.assertEqual(e1.validator, "type")
        self.assertEqual(e2.validator, "minimum")

    def test_patternProperties(self):
        instance = {"bar": 1, "foo": 2}
        schema = {
            "patternProperties" : {
                "bar": {"type": "string"},
                "foo": {"minimum": 5}
            }
        }

        validator = Draft3Validator(schema)
        errors = validator.iter_errors(instance)
        e1, e2 = sorted_errors(errors)

        self.assertEqual(e1.path, deque(["bar"]))
        self.assertEqual(e2.path, deque(["foo"]))

        self.assertEqual(e1.validator, "type")
        self.assertEqual(e2.validator, "minimum")

    def test_additionalItems(self):
        instance = ["foo", 1]
        schema = {
            "items": [],
            "additionalItems" : {"type": "integer", "minimum": 5}
        }

        validator = Draft3Validator(schema)
        errors = validator.iter_errors(instance)
        e1, e2 = sorted_errors(errors)

        self.assertEqual(e1.path, deque([0]))
        self.assertEqual(e2.path, deque([1]))

        self.assertEqual(e1.validator, "type")
        self.assertEqual(e2.validator, "minimum")

    def test_additionalItems_with_items(self):
        instance = ["foo", "bar", 1]
        schema = {
            "items": [{}],
            "additionalItems" : {"type": "integer", "minimum": 5}
        }

        validator = Draft3Validator(schema)
        errors = validator.iter_errors(instance)
        e1, e2 = sorted_errors(errors)

        self.assertEqual(e1.path, deque([1]))
        self.assertEqual(e2.path, deque([2]))

        self.assertEqual(e1.validator, "type")
        self.assertEqual(e2.validator, "minimum")


class ValidatorTestMixin(object):
    def setUp(self):
        self.instance = mock.Mock()
        self.schema = {}
        self.resolver = mock.Mock()
        self.validator = self.validator_class(self.schema)

    def test_valid_instances_are_valid(self):
        errors = iter([])

        with mock.patch.object(
            self.validator, "iter_errors", return_value=errors,
        ):
            self.assertTrue(
                self.validator.is_valid(self.instance, self.schema)
            )

    def test_invalid_instances_are_not_valid(self):
        errors = iter([mock.Mock()])

        with mock.patch.object(
            self.validator, "iter_errors", return_value=errors,
        ):
            self.assertFalse(
                self.validator.is_valid(self.instance, self.schema)
            )

    def test_non_existent_properties_are_ignored(self):
        instance, my_property, my_value = mock.Mock(), mock.Mock(), mock.Mock()
        validate(instance=instance, schema={my_property : my_value})

    def test_it_creates_a_ref_resolver_if_not_provided(self):
        self.assertIsInstance(self.validator.resolver, RefResolver)

    def test_it_delegates_to_a_ref_resolver(self):
        resolver = RefResolver("", {})
        schema = {"$ref" : mock.Mock()}

        @contextmanager
        def resolving():
            yield {"type": "integer"}

        with mock.patch.object(resolver, "resolving") as resolve:
            resolve.return_value = resolving()
            with self.assertRaises(ValidationError):
                self.validator_class(schema, resolver=resolver).validate(None)

        resolve.assert_called_once_with(schema["$ref"])

    def test_is_type_is_true_for_valid_type(self):
        self.assertTrue(self.validator.is_type("foo", "string"))

    def test_is_type_is_false_for_invalid_type(self):
        self.assertFalse(self.validator.is_type("foo", "array"))

    def test_is_type_evades_bool_inheriting_from_int(self):
        self.assertFalse(self.validator.is_type(True, "integer"))
        self.assertFalse(self.validator.is_type(True, "number"))

    def test_is_type_raises_exception_for_unknown_type(self):
        with self.assertRaises(UnknownType):
            self.validator.is_type("foo", object())


class TestDraft3Validator(ValidatorTestMixin, unittest.TestCase):
    validator_class = Draft3Validator

    def test_is_type_is_true_for_any_type(self):
        self.assertTrue(self.validator.is_valid(mock.Mock(), {"type": "any"}))

    def test_is_type_does_not_evade_bool_if_it_is_being_tested(self):
        self.assertTrue(self.validator.is_type(True, "boolean"))
        self.assertTrue(self.validator.is_valid(True, {"type": "any"}))

    def test_non_string_custom_types(self):
        schema = {'type': [None]}
        cls = self.validator_class(schema, types={None: type(None)})
        cls.validate(None, schema)


class TestDraft4Validator(ValidatorTestMixin, unittest.TestCase):
    validator_class = Draft4Validator


class TestBuiltinFormats(unittest.TestCase):
    """
    The built-in (specification-defined) formats do not raise type errors.

    If an instance or value is not a string, it should be ignored.

    """


for format in FormatChecker.checkers:
    def test(self, format=format):
        v = Draft4Validator({"format": format}, format_checker=FormatChecker())
        v.validate(123)

    name = "test_{0}_ignores_non_strings".format(format)
    test.__name__ = name
    setattr(TestBuiltinFormats, name, test)
    del test  # Ugh py.test. Stop discovering top level tests.


class TestValidatorFor(unittest.TestCase):
    def test_draft_3(self):
        schema = {"$schema" : "http://json-schema.org/draft-03/schema"}
        self.assertIs(validator_for(schema), Draft3Validator)

        schema = {"$schema" : "http://json-schema.org/draft-03/schema#"}
        self.assertIs(validator_for(schema), Draft3Validator)

    def test_draft_4(self):
        schema = {"$schema" : "http://json-schema.org/draft-04/schema"}
        self.assertIs(validator_for(schema), Draft4Validator)

        schema = {"$schema" : "http://json-schema.org/draft-04/schema#"}
        self.assertIs(validator_for(schema), Draft4Validator)

    def test_custom_validator(self):
        Validator = create(meta_schema={"id" : "meta schema id"}, version="12")
        schema = {"$schema" : "meta schema id"}
        self.assertIs(validator_for(schema), Validator)

    def test_validator_for_jsonschema_default(self):
        self.assertIs(validator_for({}), Draft4Validator)

    def test_validator_for_custom_default(self):
        self.assertIs(validator_for({}, default=None), None)


class TestValidate(unittest.TestCase):
    def test_draft3_validator_is_chosen(self):
        schema = {"$schema" : "http://json-schema.org/draft-03/schema#"}
        with mock.patch.object(Draft3Validator, "check_schema") as chk_schema:
            validate({}, schema)
            chk_schema.assert_called_once_with(schema)
        # Make sure it works without the empty fragment
        schema = {"$schema" : "http://json-schema.org/draft-03/schema"}
        with mock.patch.object(Draft3Validator, "check_schema") as chk_schema:
            validate({}, schema)
            chk_schema.assert_called_once_with(schema)

    def test_draft4_validator_is_chosen(self):
        schema = {"$schema" : "http://json-schema.org/draft-04/schema#"}
        with mock.patch.object(Draft4Validator, "check_schema") as chk_schema:
            validate({}, schema)
            chk_schema.assert_called_once_with(schema)

    def test_draft4_validator_is_the_default(self):
        with mock.patch.object(Draft4Validator, "check_schema") as chk_schema:
            validate({}, {})
            chk_schema.assert_called_once_with({})


class TestRefResolver(unittest.TestCase):

    base_uri = ""
    stored_uri = "foo://stored"
    stored_schema = {"stored" : "schema"}

    def setUp(self):
        self.referrer = {}
        self.store = {self.stored_uri : self.stored_schema}
        self.resolver = RefResolver(self.base_uri, self.referrer, self.store)

    def test_it_does_not_retrieve_schema_urls_from_the_network(self):
        ref = Draft3Validator.META_SCHEMA["id"]
        with mock.patch.object(self.resolver, "resolve_remote") as remote:
            with self.resolver.resolving(ref) as resolved:
                self.assertEqual(resolved, Draft3Validator.META_SCHEMA)
        self.assertFalse(remote.called)

    def test_it_resolves_local_refs(self):
        ref = "#/properties/foo"
        self.referrer["properties"] = {"foo" : object()}
        with self.resolver.resolving(ref) as resolved:
            self.assertEqual(resolved, self.referrer["properties"]["foo"])

    def test_it_resolves_local_refs_with_id(self):
        schema = {"id": "foo://bar/schema#", "a": {"foo": "bar"}}
        resolver = RefResolver.from_schema(schema)
        with resolver.resolving("#/a") as resolved:
            self.assertEqual(resolved, schema["a"])
        with resolver.resolving("foo://bar/schema#/a") as resolved:
            self.assertEqual(resolved, schema["a"])

    def test_it_retrieves_stored_refs(self):
        with self.resolver.resolving(self.stored_uri) as resolved:
            self.assertIs(resolved, self.stored_schema)

        self.resolver.store["cached_ref"] = {"foo" : 12}
        with self.resolver.resolving("cached_ref#/foo") as resolved:
            self.assertEqual(resolved, 12)

    def test_it_retrieves_unstored_refs_via_requests(self):
        ref = "http://bar#baz"
        schema = {"baz" : 12}

        with mock.patch("jsonschema.validators.requests") as requests:
            requests.get.return_value.json.return_value = schema
            with self.resolver.resolving(ref) as resolved:
                self.assertEqual(resolved, 12)
        requests.get.assert_called_once_with("http://bar")

    def test_it_retrieves_unstored_refs_via_urlopen(self):
        ref = "http://bar#baz"
        schema = {"baz" : 12}

        with mock.patch("jsonschema.validators.requests", None):
            with mock.patch("jsonschema.validators.urlopen") as urlopen:
                urlopen.return_value.read.return_value = (
                    json.dumps(schema).encode("utf8"))
                with self.resolver.resolving(ref) as resolved:
                    self.assertEqual(resolved, 12)
        urlopen.assert_called_once_with("http://bar")

    def test_it_can_construct_a_base_uri_from_a_schema(self):
        schema = {"id" : "foo"}
        resolver = RefResolver.from_schema(schema)
        self.assertEqual(resolver.base_uri, "foo")
        with resolver.resolving("") as resolved:
            self.assertEqual(resolved, schema)
        with resolver.resolving("#") as resolved:
            self.assertEqual(resolved, schema)
        with resolver.resolving("foo") as resolved:
            self.assertEqual(resolved, schema)
        with resolver.resolving("foo#") as resolved:
            self.assertEqual(resolved, schema)

    def test_it_can_construct_a_base_uri_from_a_schema_without_id(self):
        schema = {}
        resolver = RefResolver.from_schema(schema)
        self.assertEqual(resolver.base_uri, "")
        with resolver.resolving("") as resolved:
            self.assertEqual(resolved, schema)
        with resolver.resolving("#") as resolved:
            self.assertEqual(resolved, schema)

    def test_custom_uri_scheme_handlers(self):
        schema = {"foo": "bar"}
        ref = "foo://bar"
        foo_handler = mock.Mock(return_value=schema)
        resolver = RefResolver("", {}, handlers={"foo": foo_handler})
        with resolver.resolving(ref) as resolved:
            self.assertEqual(resolved, schema)
        foo_handler.assert_called_once_with(ref)

    def test_cache_remote_on(self):
        ref = "foo://bar"
        foo_handler = mock.Mock()
        resolver = RefResolver(
            "", {}, cache_remote=True, handlers={"foo" : foo_handler},
        )
        with resolver.resolving(ref):
            pass
        with resolver.resolving(ref):
            pass
        foo_handler.assert_called_once_with(ref)

    def test_cache_remote_off(self):
        ref = "foo://bar"
        foo_handler = mock.Mock()
        resolver = RefResolver(
            "", {}, cache_remote=False, handlers={"foo" : foo_handler},
        )
        with resolver.resolving(ref):
            pass
        with resolver.resolving(ref):
            pass
        self.assertEqual(foo_handler.call_count, 2)

    def test_if_you_give_it_junk_you_get_a_resolution_error(self):
        ref = "foo://bar"
        foo_handler = mock.Mock(side_effect=ValueError("Oh no! What's this?"))
        resolver = RefResolver("", {}, handlers={"foo" : foo_handler})
        with self.assertRaises(RefResolutionError) as err:
            with resolver.resolving(ref):
                pass
        self.assertEqual(str(err.exception), "Oh no! What's this?")


def sorted_errors(errors):
    def key(error):
        return (
            [str(e) for e in error.path],
            [str(e) for e in error.schema_path]
        )
    return sorted(errors, key=key)
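
# [Editor's note - not part of the vendored file.] `sorted_errors` stringifies
# each element of `path` and `schema_path` before comparing because the paths
# mix property names (str) and array indices (int), which are not mutually
# orderable on Python 3: sorted([1, "bar"]) raises TypeError there, while
# sorted(["1", "bar"]) works, at the cost of lexicographic index ordering.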
897 lib/spack/external/jsonschema/validators.py vendored
File diff suppressed because it is too large
289 lib/spack/external/py2/functools32/LICENSE vendored Normal file
@@ -0,0 +1,289 @@
A. HISTORY OF THE SOFTWARE
==========================

Python was created in the early 1990s by Guido van Rossum at Stichting
Mathematisch Centrum (CWI, see http://www.cwi.nl) in the Netherlands
as a successor of a language called ABC.  Guido remains Python's
principal author, although it includes many contributions from others.

In 1995, Guido continued his work on Python at the Corporation for
National Research Initiatives (CNRI, see http://www.cnri.reston.va.us)
in Reston, Virginia where he released several versions of the
software.

In May 2000, Guido and the Python core development team moved to
BeOpen.com to form the BeOpen PythonLabs team.  In October of the same
year, the PythonLabs team moved to Digital Creations (now Zope
Corporation, see http://www.zope.com).  In 2001, the Python Software
Foundation (PSF, see http://www.python.org/psf/) was formed, a
non-profit organization created specifically to own Python-related
Intellectual Property.  Zope Corporation is a sponsoring member of
the PSF.

All Python releases are Open Source (see http://www.opensource.org for
the Open Source Definition).  Historically, most, but not all, Python
releases have also been GPL-compatible; the table below summarizes
the various releases.

    Release         Derived     Year        Owner       GPL-
                    from                                compatible? (1)

    0.9.0 thru 1.2              1991-1995   CWI         yes
    1.3 thru 1.5.2  1.2         1995-1999   CNRI        yes
    1.6             1.5.2       2000        CNRI        no
    2.0             1.6         2000        BeOpen.com  no
    1.6.1           1.6         2001        CNRI        yes (2)
    2.1             2.0+1.6.1   2001        PSF         no
    2.0.1           2.0+1.6.1   2001        PSF         yes
    2.1.1           2.1+2.0.1   2001        PSF         yes
    2.2             2.1.1       2001        PSF         yes
    2.1.2           2.1.1       2002        PSF         yes
    2.1.3           2.1.2       2002        PSF         yes
    2.2.1           2.2         2002        PSF         yes
    2.2.2           2.2.1       2002        PSF         yes
    2.2.3           2.2.2       2003        PSF         yes
    2.3             2.2.2       2002-2003   PSF         yes
    2.3.1           2.3         2002-2003   PSF         yes
    2.3.2           2.3.1       2002-2003   PSF         yes
    2.3.3           2.3.2       2002-2003   PSF         yes
    2.3.4           2.3.3       2004        PSF         yes
    2.3.5           2.3.4       2005        PSF         yes
    2.4             2.3         2004        PSF         yes
    2.4.1           2.4         2005        PSF         yes
    2.4.2           2.4.1       2005        PSF         yes
    2.4.3           2.4.2       2006        PSF         yes
    2.4.4           2.4.3       2006        PSF         yes
    2.5             2.4         2006        PSF         yes
    2.5.1           2.5         2007        PSF         yes
    2.5.2           2.5.1       2008        PSF         yes
    2.5.3           2.5.2       2008        PSF         yes
    2.6             2.5         2008        PSF         yes
    2.6.1           2.6         2008        PSF         yes
    2.6.2           2.6.1       2009        PSF         yes
    2.6.3           2.6.2       2009        PSF         yes
    2.6.4           2.6.3       2009        PSF         yes
    2.6.5           2.6.4       2010        PSF         yes
    3.0             2.6         2008        PSF         yes
    3.0.1           3.0         2009        PSF         yes
    3.1             3.0.1       2009        PSF         yes
    3.1.1           3.1         2009        PSF         yes
    3.1.2           3.1.1       2010        PSF         yes
    3.1.3           3.1.2       2010        PSF         yes
    3.1.4           3.1.3       2011        PSF         yes
    3.2             3.1         2011        PSF         yes
    3.2.1           3.2         2011        PSF         yes
    3.2.2           3.2.1       2011        PSF         yes
    3.2.3           3.2.2       2012        PSF         yes

Footnotes:

(1) GPL-compatible doesn't mean that we're distributing Python under
    the GPL.  All Python licenses, unlike the GPL, let you distribute
    a modified version without making your changes open source.  The
    GPL-compatible licenses make it possible to combine Python with
    other software that is released under the GPL; the others don't.

(2) According to Richard Stallman, 1.6.1 is not GPL-compatible,
    because its license has a choice of law clause.  According to
    CNRI, however, Stallman's lawyer has told CNRI's lawyer that 1.6.1
    is "not incompatible" with the GPL.

Thanks to the many outside volunteers who have worked under Guido's
direction to make these releases possible.


B. TERMS AND CONDITIONS FOR ACCESSING OR OTHERWISE USING PYTHON
===============================================================

PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2
--------------------------------------------

1. This LICENSE AGREEMENT is between the Python Software Foundation
("PSF"), and the Individual or Organization ("Licensee") accessing and
otherwise using this software ("Python") in source or binary form and
its associated documentation.

2. Subject to the terms and conditions of this License Agreement, PSF hereby
grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce,
analyze, test, perform and/or display publicly, prepare derivative works,
distribute, and otherwise use Python alone or in any derivative version,
provided, however, that PSF's License Agreement and PSF's notice of copyright,
i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
2011, 2012 Python Software Foundation; All Rights Reserved" are retained in Python
alone or in any derivative version prepared by Licensee.

3. In the event Licensee prepares a derivative work that is based on
or incorporates Python or any part thereof, and wants to make
the derivative work available to others as provided herein, then
Licensee hereby agrees to include in any such work a brief summary of
the changes made to Python.

4. PSF is making Python available to Licensee on an "AS IS"
basis.  PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
IMPLIED.  BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND
DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT
INFRINGE ANY THIRD PARTY RIGHTS.

5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON,
OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.

6. This License Agreement will automatically terminate upon a material
breach of its terms and conditions.

7. Nothing in this License Agreement shall be deemed to create any
relationship of agency, partnership, or joint venture between PSF and
Licensee.  This License Agreement does not grant permission to use PSF
trademarks or trade name in a trademark sense to endorse or promote
products or services of Licensee, or any third party.

8. By copying, installing or otherwise using Python, Licensee
agrees to be bound by the terms and conditions of this License
Agreement.


BEOPEN.COM LICENSE AGREEMENT FOR PYTHON 2.0
-------------------------------------------

BEOPEN PYTHON OPEN SOURCE LICENSE AGREEMENT VERSION 1

1. This LICENSE AGREEMENT is between BeOpen.com ("BeOpen"), having an
office at 160 Saratoga Avenue, Santa Clara, CA 95051, and the
Individual or Organization ("Licensee") accessing and otherwise using
this software in source or binary form and its associated
documentation ("the Software").

2. Subject to the terms and conditions of this BeOpen Python License
Agreement, BeOpen hereby grants Licensee a non-exclusive,
royalty-free, world-wide license to reproduce, analyze, test, perform
and/or display publicly, prepare derivative works, distribute, and
otherwise use the Software alone or in any derivative version,
provided, however, that the BeOpen Python License is retained in the
Software, alone or in any derivative version prepared by Licensee.

3. BeOpen is making the Software available to Licensee on an "AS IS"
basis.  BEOPEN MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
IMPLIED.  BY WAY OF EXAMPLE, BUT NOT LIMITATION, BEOPEN MAKES NO AND
DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE WILL NOT
INFRINGE ANY THIRD PARTY RIGHTS.

4. BEOPEN SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF THE
SOFTWARE FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS
AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THE SOFTWARE, OR ANY
DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.

5. This License Agreement will automatically terminate upon a material
breach of its terms and conditions.

6. This License Agreement shall be governed by and interpreted in all
respects by the law of the State of California, excluding conflict of
law provisions.  Nothing in this License Agreement shall be deemed to
create any relationship of agency, partnership, or joint venture
between BeOpen and Licensee.  This License Agreement does not grant
permission to use BeOpen trademarks or trade names in a trademark
sense to endorse or promote products or services of Licensee, or any
third party.  As an exception, the "BeOpen Python" logos available at
http://www.pythonlabs.com/logos.html may be used according to the
permissions granted on that web page.

7. By copying, installing or otherwise using the software, Licensee
agrees to be bound by the terms and conditions of this License
Agreement.


CNRI LICENSE AGREEMENT FOR PYTHON 1.6.1
---------------------------------------

1. This LICENSE AGREEMENT is between the Corporation for National
Research Initiatives, having an office at 1895 Preston White Drive,
Reston, VA 20191 ("CNRI"), and the Individual or Organization
("Licensee") accessing and otherwise using Python 1.6.1 software in
source or binary form and its associated documentation.

2. Subject to the terms and conditions of this License Agreement, CNRI
hereby grants Licensee a nonexclusive, royalty-free, world-wide
license to reproduce, analyze, test, perform and/or display publicly,
prepare derivative works, distribute, and otherwise use Python 1.6.1
alone or in any derivative version, provided, however, that CNRI's
License Agreement and CNRI's notice of copyright, i.e., "Copyright (c)
1995-2001 Corporation for National Research Initiatives; All Rights
Reserved" are retained in Python 1.6.1 alone or in any derivative
version prepared by Licensee.  Alternately, in lieu of CNRI's License
Agreement, Licensee may substitute the following text (omitting the
quotes): "Python 1.6.1 is made available subject to the terms and
conditions in CNRI's License Agreement.  This Agreement together with
Python 1.6.1 may be located on the Internet using the following
unique, persistent identifier (known as a handle): 1895.22/1013.  This
Agreement may also be obtained from a proxy server on the Internet
using the following URL: http://hdl.handle.net/1895.22/1013".

3. In the event Licensee prepares a derivative work that is based on
or incorporates Python 1.6.1 or any part thereof, and wants to make
the derivative work available to others as provided herein, then
Licensee hereby agrees to include in any such work a brief summary of
the changes made to Python 1.6.1.

4. CNRI is making Python 1.6.1 available to Licensee on an "AS IS"
basis.  CNRI MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
IMPLIED.  BY WAY OF EXAMPLE, BUT NOT LIMITATION, CNRI MAKES NO AND
DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 1.6.1 WILL NOT
INFRINGE ANY THIRD PARTY RIGHTS.

5. CNRI SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
1.6.1 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 1.6.1,
OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.

6. This License Agreement will automatically terminate upon a material
breach of its terms and conditions.

7. This License Agreement shall be governed by the federal
intellectual property law of the United States, including without
limitation the federal copyright law, and, to the extent such
U.S. federal law does not apply, by the law of the Commonwealth of
Virginia, excluding Virginia's conflict of law provisions.
Notwithstanding the foregoing, with regard to derivative works based
on Python 1.6.1 that incorporate non-separable material that was
previously distributed under the GNU General Public License (GPL), the
law of the Commonwealth of Virginia shall govern this License
Agreement only as to issues arising under or with respect to
Paragraphs 4, 5, and 7 of this License Agreement.  Nothing in this
License Agreement shall be deemed to create any relationship of
agency, partnership, or joint venture between CNRI and Licensee.  This
License Agreement does not grant permission to use CNRI trademarks or
trade name in a trademark sense to endorse or promote products or
services of Licensee, or any third party.

8. By clicking on the "ACCEPT" button where indicated, or by copying,
installing or otherwise using Python 1.6.1, Licensee agrees to be
bound by the terms and conditions of this License Agreement.

        ACCEPT


CWI LICENSE AGREEMENT FOR PYTHON 0.9.0 THROUGH 1.2
--------------------------------------------------

Copyright (c) 1991 - 1995, Stichting Mathematisch Centrum Amsterdam,
The Netherlands.  All rights reserved.

Permission to use, copy, modify, and distribute this software and its
documentation for any purpose and without fee is hereby granted,
provided that the above copyright notice appear in all copies and that
both that copyright notice and this permission notice appear in
supporting documentation, and that the name of Stichting Mathematisch
Centrum or CWI not be used in advertising or publicity pertaining to
distribution of the software without specific, written prior
permission.

STICHTING MATHEMATISCH CENTRUM DISCLAIMS ALL WARRANTIES WITH REGARD TO
THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
FITNESS, IN NO EVENT SHALL STICHTING MATHEMATISCH CENTRUM BE LIABLE
FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
1 lib/spack/external/py2/functools32/__init__.py vendored Normal file
@@ -0,0 +1 @@
from .functools32 import *
158 lib/spack/external/py2/functools32/_dummy_thread32.py vendored Normal file
@@ -0,0 +1,158 @@
"""Drop-in replacement for the thread module.

Meant to be used as a brain-dead substitute so that threaded code does
not need to be rewritten for when the thread module is not present.

Suggested usage is::

    try:
        try:
            import _thread  # Python >= 3
        except:
            import thread as _thread  # Python < 3
    except ImportError:
        import _dummy_thread as _thread

"""
# Exports only things specified by thread documentation;
# skipping obsolete synonyms allocate(), start_new(), exit_thread().
__all__ = ['error', 'start_new_thread', 'exit', 'get_ident', 'allocate_lock',
           'interrupt_main', 'LockType']

# A dummy value
TIMEOUT_MAX = 2**31

# NOTE: this module can be imported early in the extension building process,
# and so top level imports of other modules should be avoided.  Instead, all
# imports are done when needed on a function-by-function basis.  Since threads
# are disabled, the import lock should not be an issue anyway (??).

class error(Exception):
    """Dummy implementation of _thread.error."""

    def __init__(self, *args):
        self.args = args

def start_new_thread(function, args, kwargs={}):
    """Dummy implementation of _thread.start_new_thread().

    Compatibility is maintained by making sure that ``args`` is a
    tuple and ``kwargs`` is a dictionary.  If an exception is raised
    and it is SystemExit (which can be done by _thread.exit()) it is
    caught and nothing is done; all other exceptions are printed out
    by using traceback.print_exc().

    If the executed function calls interrupt_main the KeyboardInterrupt will be
    raised when the function returns.

    """
    if type(args) != type(tuple()):
        raise TypeError("2nd arg must be a tuple")
    if type(kwargs) != type(dict()):
        raise TypeError("3rd arg must be a dict")
    global _main
    _main = False
    try:
        function(*args, **kwargs)
    except SystemExit:
        pass
    except:
        import traceback
        traceback.print_exc()
    _main = True
    global _interrupt
    if _interrupt:
        _interrupt = False
        raise KeyboardInterrupt

def exit():
    """Dummy implementation of _thread.exit()."""
    raise SystemExit

def get_ident():
    """Dummy implementation of _thread.get_ident().

    Since this module should only be used when _threadmodule is not
    available, it is safe to assume that the current process is the
    only thread.  Thus a constant can be safely returned.
    """
    return -1

def allocate_lock():
    """Dummy implementation of _thread.allocate_lock()."""
    return LockType()

def stack_size(size=None):
    """Dummy implementation of _thread.stack_size()."""
    if size is not None:
        raise error("setting thread stack size not supported")
    return 0

class LockType(object):
    """Class implementing dummy implementation of _thread.LockType.

    Compatibility is maintained by maintaining self.locked_status
    which is a boolean that stores the state of the lock.  Pickling of
    the lock, though, should not be done since if the _thread module is
    then used with an unpickled ``lock()`` from here problems could
    occur from this class not having atomic methods.

    """

    def __init__(self):
        self.locked_status = False

    def acquire(self, waitflag=None, timeout=-1):
        """Dummy implementation of acquire().

        For blocking calls, self.locked_status is automatically set to
        True and returned appropriately based on value of
        ``waitflag``.  If it is non-blocking, then the value is
        actually checked and not set if it is already acquired.  This
        is all done so that threading.Condition's assert statements
        aren't triggered and throw a little fit.

        """
        if waitflag is None or waitflag:
            self.locked_status = True
            return True
        else:
            if not self.locked_status:
                self.locked_status = True
                return True
            else:
                if timeout > 0:
                    import time
                    time.sleep(timeout)
                return False

    __enter__ = acquire

    def __exit__(self, typ, val, tb):
        self.release()

    def release(self):
        """Release the dummy lock."""
        # XXX Perhaps shouldn't actually bother to test?  Could lead
        #     to problems for complex, threaded code.
        if not self.locked_status:
            raise error
        self.locked_status = False
        return True

    def locked(self):
        return self.locked_status
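
# [Editor's note - illustrative sketch, not part of the vendored file.]
# Because LockType aliases __enter__ to acquire() and defines __exit__ to
# call release(), the dummy lock supports the same `with` idiom as a real
# _thread lock:
#
#     lock = allocate_lock()
#     with lock:                # acquire() sets locked_status...
#         assert lock.locked()
#     assert not lock.locked()  # ...and __exit__ releases it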

# Used to signal that interrupt_main was called in a "thread"
_interrupt = False
# True when not executing in a "thread"
_main = True

def interrupt_main():
    """Set _interrupt flag to True to have start_new_thread raise
    KeyboardInterrupt upon exiting."""
    if _main:
        raise KeyboardInterrupt
    else:
        global _interrupt
        _interrupt = True
423 lib/spack/external/py2/functools32/functools32.py vendored Normal file
@@ -0,0 +1,423 @@
"""functools.py - Tools for working with functions and callable objects
"""
# Python module wrapper for _functools C module
# to allow utilities written in Python to be added
# to the functools module.
# Written by Nick Coghlan <ncoghlan at gmail.com>
#   and Raymond Hettinger <python at rcn.com>
#   Copyright (C) 2006-2010 Python Software Foundation.
# See C source code for _functools credits/copyright

__all__ = ['update_wrapper', 'wraps', 'WRAPPER_ASSIGNMENTS', 'WRAPPER_UPDATES',
           'total_ordering', 'cmp_to_key', 'lru_cache', 'reduce', 'partial']

from _functools import partial, reduce
from collections import MutableMapping, namedtuple
from .reprlib32 import recursive_repr as _recursive_repr
from weakref import proxy as _proxy
import sys as _sys
try:
    from thread import allocate_lock as Lock
except ImportError:
    from ._dummy_thread32 import allocate_lock as Lock
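
# [Editor's note - not part of the vendored file.] functools32 backports the
# Python 3.2 functools module (most importantly lru_cache) to Python 2.
# lru_cache guards its internal cache with a lock so cache updates stay
# consistent under threads; on interpreters built without the `thread`
# module, the dummy lock imported above provides the same acquire/release
# interface as a no-op stand-in.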

################################################################################
### OrderedDict
################################################################################

class _Link(object):
    __slots__ = 'prev', 'next', 'key', '__weakref__'

class OrderedDict(dict):
    'Dictionary that remembers insertion order'
    # An inherited dict maps keys to values.
    # The inherited dict provides __getitem__, __len__, __contains__, and get.
    # The remaining methods are order-aware.
    # Big-O running times for all methods are the same as regular dictionaries.

    # The internal self.__map dict maps keys to links in a doubly linked list.
    # The circular doubly linked list starts and ends with a sentinel element.
    # The sentinel element never gets deleted (this simplifies the algorithm).
    # The sentinel is in self.__hardroot with a weakref proxy in self.__root.
    # The prev links are weakref proxies (to prevent circular references).
    # Individual links are kept alive by the hard reference in self.__map.
    # Those hard references disappear when a key is deleted from an OrderedDict.

    def __init__(self, *args, **kwds):
        '''Initialize an ordered dictionary.  The signature is the same as
        regular dictionaries, but keyword arguments are not recommended because
        their insertion order is arbitrary.

        '''
        if len(args) > 1:
            raise TypeError('expected at most 1 arguments, got %d' % len(args))
        try:
            self.__root
        except AttributeError:
            self.__hardroot = _Link()
            self.__root = root = _proxy(self.__hardroot)
            root.prev = root.next = root
            self.__map = {}
        self.__update(*args, **kwds)

    def __setitem__(self, key, value,
                    dict_setitem=dict.__setitem__, proxy=_proxy, Link=_Link):
        'od.__setitem__(i, y) <==> od[i]=y'
        # Setting a new item creates a new link at the end of the linked list,
        # and the inherited dictionary is updated with the new key/value pair.
        if key not in self:
            self.__map[key] = link = Link()
            root = self.__root
            last = root.prev
            link.prev, link.next, link.key = last, root, key
            last.next = link
            root.prev = proxy(link)
        dict_setitem(self, key, value)

    def __delitem__(self, key, dict_delitem=dict.__delitem__):
        'od.__delitem__(y) <==> del od[y]'
        # Deleting an existing item uses self.__map to find the link which gets
        # removed by updating the links in the predecessor and successor nodes.
        dict_delitem(self, key)
        link = self.__map.pop(key)
        link_prev = link.prev
        link_next = link.next
        link_prev.next = link_next
        link_next.prev = link_prev

    def __iter__(self):
        'od.__iter__() <==> iter(od)'
        # Traverse the linked list in order.
        root = self.__root
        curr = root.next
        while curr is not root:
            yield curr.key
            curr = curr.next

    def __reversed__(self):
        'od.__reversed__() <==> reversed(od)'
        # Traverse the linked list in reverse order.
        root = self.__root
        curr = root.prev
        while curr is not root:
            yield curr.key
            curr = curr.prev

    def clear(self):
        'od.clear() -> None.  Remove all items from od.'
        root = self.__root
        root.prev = root.next = root
        self.__map.clear()
        dict.clear(self)

    def popitem(self, last=True):
        '''od.popitem() -> (k, v), return and remove a (key, value) pair.
        Pairs are returned in LIFO order if last is true or FIFO order if false.

        '''
        if not self:
            raise KeyError('dictionary is empty')
        root = self.__root
        if last:
            link = root.prev
            link_prev = link.prev
            link_prev.next = root
            root.prev = link_prev
        else:
            link = root.next
            link_next = link.next
            root.next = link_next
            link_next.prev = root
        key = link.key
        del self.__map[key]
        value = dict.pop(self, key)
        return key, value

    def move_to_end(self, key, last=True):
        '''Move an existing element to the end (or beginning if last==False).

        Raises KeyError if the element does not exist.
        When last=True, acts like a fast version of self[key]=self.pop(key).

        '''
        link = self.__map[key]
        link_prev = link.prev
        link_next = link.next
        link_prev.next = link_next
        link_next.prev = link_prev
        root = self.__root
|
||||
if last:
|
||||
last = root.prev
|
||||
link.prev = last
|
||||
link.next = root
|
||||
last.next = root.prev = link
|
||||
else:
|
||||
first = root.next
|
||||
link.prev = root
|
||||
link.next = first
|
||||
root.next = first.prev = link
|
||||
|
||||
def __sizeof__(self):
|
||||
sizeof = _sys.getsizeof
|
||||
n = len(self) + 1 # number of links including root
|
||||
size = sizeof(self.__dict__) # instance dictionary
|
||||
size += sizeof(self.__map) * 2 # internal dict and inherited dict
|
||||
size += sizeof(self.__hardroot) * n # link objects
|
||||
size += sizeof(self.__root) * n # proxy objects
|
||||
return size
|
||||
|
||||
update = __update = MutableMapping.update
|
||||
keys = MutableMapping.keys
|
||||
values = MutableMapping.values
|
||||
items = MutableMapping.items
|
||||
__ne__ = MutableMapping.__ne__
|
||||
|
||||
__marker = object()
|
||||
|
||||
def pop(self, key, default=__marker):
|
||||
'''od.pop(k[,d]) -> v, remove specified key and return the corresponding
|
||||
value. If key is not found, d is returned if given, otherwise KeyError
|
||||
is raised.
|
||||
|
||||
'''
|
||||
if key in self:
|
||||
result = self[key]
|
||||
del self[key]
|
||||
return result
|
||||
if default is self.__marker:
|
||||
raise KeyError(key)
|
||||
return default
|
||||
|
||||
def setdefault(self, key, default=None):
|
||||
'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od'
|
||||
if key in self:
|
||||
return self[key]
|
||||
self[key] = default
|
||||
return default
|
||||
|
||||
@_recursive_repr()
|
||||
def __repr__(self):
|
||||
'od.__repr__() <==> repr(od)'
|
||||
if not self:
|
||||
return '%s()' % (self.__class__.__name__,)
|
||||
return '%s(%r)' % (self.__class__.__name__, list(self.items()))
|
||||
|
||||
def __reduce__(self):
|
||||
'Return state information for pickling'
|
||||
items = [[k, self[k]] for k in self]
|
||||
inst_dict = vars(self).copy()
|
||||
for k in vars(OrderedDict()):
|
||||
inst_dict.pop(k, None)
|
||||
if inst_dict:
|
||||
return (self.__class__, (items,), inst_dict)
|
||||
return self.__class__, (items,)
|
||||
|
||||
def copy(self):
|
||||
'od.copy() -> a shallow copy of od'
|
||||
return self.__class__(self)
|
||||
|
||||
@classmethod
|
||||
def fromkeys(cls, iterable, value=None):
|
||||
'''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S.
|
||||
If not specified, the value defaults to None.
|
||||
|
||||
'''
|
||||
self = cls()
|
||||
for key in iterable:
|
||||
self[key] = value
|
||||
return self
|
||||
|
||||
def __eq__(self, other):
|
||||
'''od.__eq__(y) <==> od==y. Comparison to another OD is order-sensitive
|
||||
while comparison to a regular mapping is order-insensitive.
|
||||
|
||||
'''
|
||||
if isinstance(other, OrderedDict):
|
||||
return len(self)==len(other) and \
|
||||
all(p==q for p, q in zip(self.items(), other.items()))
|
||||
return dict.__eq__(self, other)
|
||||
|
||||
# update_wrapper() and wraps() are tools to help write
|
||||
# wrapper functions that can handle naive introspection
|
||||
|
||||
WRAPPER_ASSIGNMENTS = ('__module__', '__name__', '__doc__')
|
||||
WRAPPER_UPDATES = ('__dict__',)
|
||||
def update_wrapper(wrapper,
|
||||
wrapped,
|
||||
assigned = WRAPPER_ASSIGNMENTS,
|
||||
updated = WRAPPER_UPDATES):
|
||||
"""Update a wrapper function to look like the wrapped function
|
||||
|
||||
wrapper is the function to be updated
|
||||
wrapped is the original function
|
||||
assigned is a tuple naming the attributes assigned directly
|
||||
from the wrapped function to the wrapper function (defaults to
|
||||
functools.WRAPPER_ASSIGNMENTS)
|
||||
updated is a tuple naming the attributes of the wrapper that
|
||||
are updated with the corresponding attribute from the wrapped
|
||||
function (defaults to functools.WRAPPER_UPDATES)
|
||||
"""
|
||||
wrapper.__wrapped__ = wrapped
|
||||
for attr in assigned:
|
||||
try:
|
||||
value = getattr(wrapped, attr)
|
||||
except AttributeError:
|
||||
pass
|
||||
else:
|
||||
setattr(wrapper, attr, value)
|
||||
for attr in updated:
|
||||
getattr(wrapper, attr).update(getattr(wrapped, attr, {}))
|
||||
# Return the wrapper so this can be used as a decorator via partial()
|
||||
return wrapper
|
||||
|
||||
def wraps(wrapped,
|
||||
assigned = WRAPPER_ASSIGNMENTS,
|
||||
updated = WRAPPER_UPDATES):
|
||||
"""Decorator factory to apply update_wrapper() to a wrapper function
|
||||
|
||||
Returns a decorator that invokes update_wrapper() with the decorated
|
||||
function as the wrapper argument and the arguments to wraps() as the
|
||||
remaining arguments. Default arguments are as for update_wrapper().
|
||||
This is a convenience function to simplify applying partial() to
|
||||
update_wrapper().
|
||||
"""
|
||||
return partial(update_wrapper, wrapped=wrapped,
|
||||
assigned=assigned, updated=updated)
|
||||
|
||||
def total_ordering(cls):
|
||||
"""Class decorator that fills in missing ordering methods"""
|
||||
convert = {
|
||||
'__lt__': [('__gt__', lambda self, other: not (self < other or self == other)),
|
||||
('__le__', lambda self, other: self < other or self == other),
|
||||
('__ge__', lambda self, other: not self < other)],
|
||||
'__le__': [('__ge__', lambda self, other: not self <= other or self == other),
|
||||
('__lt__', lambda self, other: self <= other and not self == other),
|
||||
('__gt__', lambda self, other: not self <= other)],
|
||||
'__gt__': [('__lt__', lambda self, other: not (self > other or self == other)),
|
||||
('__ge__', lambda self, other: self > other or self == other),
|
||||
('__le__', lambda self, other: not self > other)],
|
||||
'__ge__': [('__le__', lambda self, other: (not self >= other) or self == other),
|
||||
('__gt__', lambda self, other: self >= other and not self == other),
|
||||
('__lt__', lambda self, other: not self >= other)]
|
||||
}
|
||||
roots = set(dir(cls)) & set(convert)
|
||||
if not roots:
|
||||
raise ValueError('must define at least one ordering operation: < > <= >=')
|
||||
root = max(roots) # prefer __lt__ to __le__ to __gt__ to __ge__
|
||||
for opname, opfunc in convert[root]:
|
||||
if opname not in roots:
|
||||
opfunc.__name__ = opname
|
||||
opfunc.__doc__ = getattr(int, opname).__doc__
|
||||
setattr(cls, opname, opfunc)
|
||||
return cls
|
||||
|
||||
def cmp_to_key(mycmp):
|
||||
"""Convert a cmp= function into a key= function"""
|
||||
class K(object):
|
||||
__slots__ = ['obj']
|
||||
def __init__(self, obj):
|
||||
self.obj = obj
|
||||
def __lt__(self, other):
|
||||
return mycmp(self.obj, other.obj) < 0
|
||||
def __gt__(self, other):
|
||||
return mycmp(self.obj, other.obj) > 0
|
||||
def __eq__(self, other):
|
||||
return mycmp(self.obj, other.obj) == 0
|
||||
def __le__(self, other):
|
||||
return mycmp(self.obj, other.obj) <= 0
|
||||
def __ge__(self, other):
|
||||
return mycmp(self.obj, other.obj) >= 0
|
||||
def __ne__(self, other):
|
||||
return mycmp(self.obj, other.obj) != 0
|
||||
__hash__ = None
|
||||
return K
|
||||
|
||||
_CacheInfo = namedtuple("CacheInfo", "hits misses maxsize currsize")
|
||||
|
||||
def lru_cache(maxsize=100):
|
||||
"""Least-recently-used cache decorator.
|
||||
|
||||
If *maxsize* is set to None, the LRU features are disabled and the cache
|
||||
can grow without bound.
|
||||
|
||||
Arguments to the cached function must be hashable.
|
||||
|
||||
View the cache statistics named tuple (hits, misses, maxsize, currsize) with
|
||||
f.cache_info(). Clear the cache and statistics with f.cache_clear().
|
||||
Access the underlying function with f.__wrapped__.
|
||||
|
||||
See: http://en.wikipedia.org/wiki/Cache_algorithms#Least_Recently_Used
|
||||
|
||||
"""
|
||||
# Users should only access the lru_cache through its public API:
|
||||
# cache_info, cache_clear, and f.__wrapped__
|
||||
# The internals of the lru_cache are encapsulated for thread safety and
|
||||
# to allow the implementation to change (including a possible C version).
|
||||
|
||||
def decorating_function(user_function,
|
||||
tuple=tuple, sorted=sorted, len=len, KeyError=KeyError):
|
||||
|
||||
hits, misses = [0], [0]
|
||||
kwd_mark = (object(),) # separates positional and keyword args
|
||||
lock = Lock() # needed because OrderedDict isn't threadsafe
|
||||
|
||||
if maxsize is None:
|
||||
cache = dict() # simple cache without ordering or size limit
|
||||
|
||||
@wraps(user_function)
|
||||
def wrapper(*args, **kwds):
|
||||
key = args
|
||||
if kwds:
|
||||
key += kwd_mark + tuple(sorted(kwds.items()))
|
||||
try:
|
||||
result = cache[key]
|
||||
hits[0] += 1
|
||||
return result
|
||||
except KeyError:
|
||||
pass
|
||||
result = user_function(*args, **kwds)
|
||||
cache[key] = result
|
||||
misses[0] += 1
|
||||
return result
|
||||
else:
|
||||
cache = OrderedDict() # ordered least recent to most recent
|
||||
cache_popitem = cache.popitem
|
||||
cache_renew = cache.move_to_end
|
||||
|
||||
@wraps(user_function)
|
||||
def wrapper(*args, **kwds):
|
||||
key = args
|
||||
if kwds:
|
||||
key += kwd_mark + tuple(sorted(kwds.items()))
|
||||
with lock:
|
||||
try:
|
||||
result = cache[key]
|
||||
cache_renew(key) # record recent use of this key
|
||||
hits[0] += 1
|
||||
return result
|
||||
except KeyError:
|
||||
pass
|
||||
result = user_function(*args, **kwds)
|
||||
with lock:
|
||||
cache[key] = result # record recent use of this key
|
||||
misses[0] += 1
|
||||
if len(cache) > maxsize:
|
||||
cache_popitem(0) # purge least recently used cache entry
|
||||
return result
|
||||
|
||||
def cache_info():
|
||||
"""Report cache statistics"""
|
||||
with lock:
|
||||
return _CacheInfo(hits[0], misses[0], maxsize, len(cache))
|
||||
|
||||
def cache_clear():
|
||||
"""Clear the cache and cache statistics"""
|
||||
with lock:
|
||||
cache.clear()
|
||||
hits[0] = misses[0] = 0
|
||||
|
||||
wrapper.cache_info = cache_info
|
||||
wrapper.cache_clear = cache_clear
|
||||
return wrapper
|
||||
|
||||
return decorating_function
|
||||
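Tying the pieces above together (wraps preserves the wrapped function's metadata, the OrderedDict's move_to_end renews entries, popitem(0) evicts in FIFO order), here is a small usage sketch of the backported lru_cache. The import assumes lib/spack/external/py2 is on sys.path; under Python 3 the stdlib functools offers the same API:

from functools32 import lru_cache

@lru_cache(maxsize=2)
def square(x):
    return x * x

square(2)                   # miss
square(3)                   # miss
square(2)                   # hit -- renewed as most recently used
print(square.cache_info())  # CacheInfo(hits=1, misses=2, maxsize=2, currsize=2)
square(4)                   # evicts square(3), the least recently used entry
square.cache_clear()        # resets both the cache and the statistics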
lib/spack/external/py2/functools32/reprlib32.py (new vendored file, 157 lines)
@@ -0,0 +1,157 @@
"""Redo the builtin repr() (representation) but with limits on most sizes."""
|
||||
|
||||
__all__ = ["Repr", "repr", "recursive_repr"]
|
||||
|
||||
import __builtin__ as builtins
|
||||
from itertools import islice
|
||||
try:
|
||||
from thread import get_ident
|
||||
except ImportError:
|
||||
from _dummy_thread32 import get_ident
|
||||
|
||||
def recursive_repr(fillvalue='...'):
|
||||
'Decorator to make a repr function return fillvalue for a recursive call'
|
||||
|
||||
def decorating_function(user_function):
|
||||
repr_running = set()
|
||||
|
||||
def wrapper(self):
|
||||
key = id(self), get_ident()
|
||||
if key in repr_running:
|
||||
return fillvalue
|
||||
repr_running.add(key)
|
||||
try:
|
||||
result = user_function(self)
|
||||
finally:
|
||||
repr_running.discard(key)
|
||||
return result
|
||||
|
||||
# Can't use functools.wraps() here because of bootstrap issues
|
||||
wrapper.__module__ = getattr(user_function, '__module__')
|
||||
wrapper.__doc__ = getattr(user_function, '__doc__')
|
||||
wrapper.__name__ = getattr(user_function, '__name__')
|
||||
wrapper.__annotations__ = getattr(user_function, '__annotations__', {})
|
||||
return wrapper
|
||||
|
||||
return decorating_function
|
||||
|
||||
class Repr:
|
||||
|
||||
def __init__(self):
|
||||
self.maxlevel = 6
|
||||
self.maxtuple = 6
|
||||
self.maxlist = 6
|
||||
self.maxarray = 5
|
||||
self.maxdict = 4
|
||||
self.maxset = 6
|
||||
self.maxfrozenset = 6
|
||||
self.maxdeque = 6
|
||||
self.maxstring = 30
|
||||
self.maxlong = 40
|
||||
self.maxother = 30
|
||||
|
||||
def repr(self, x):
|
||||
return self.repr1(x, self.maxlevel)
|
||||
|
||||
def repr1(self, x, level):
|
||||
typename = type(x).__name__
|
||||
if ' ' in typename:
|
||||
parts = typename.split()
|
||||
typename = '_'.join(parts)
|
||||
if hasattr(self, 'repr_' + typename):
|
||||
return getattr(self, 'repr_' + typename)(x, level)
|
||||
else:
|
||||
return self.repr_instance(x, level)
|
||||
|
||||
def _repr_iterable(self, x, level, left, right, maxiter, trail=''):
|
||||
n = len(x)
|
||||
if level <= 0 and n:
|
||||
s = '...'
|
||||
else:
|
||||
newlevel = level - 1
|
||||
repr1 = self.repr1
|
||||
pieces = [repr1(elem, newlevel) for elem in islice(x, maxiter)]
|
||||
if n > maxiter: pieces.append('...')
|
||||
s = ', '.join(pieces)
|
||||
if n == 1 and trail: right = trail + right
|
||||
return '%s%s%s' % (left, s, right)
|
||||
|
||||
def repr_tuple(self, x, level):
|
||||
return self._repr_iterable(x, level, '(', ')', self.maxtuple, ',')
|
||||
|
||||
def repr_list(self, x, level):
|
||||
return self._repr_iterable(x, level, '[', ']', self.maxlist)
|
||||
|
||||
def repr_array(self, x, level):
|
||||
header = "array('%s', [" % x.typecode
|
||||
return self._repr_iterable(x, level, header, '])', self.maxarray)
|
||||
|
||||
def repr_set(self, x, level):
|
||||
x = _possibly_sorted(x)
|
||||
return self._repr_iterable(x, level, 'set([', '])', self.maxset)
|
||||
|
||||
def repr_frozenset(self, x, level):
|
||||
x = _possibly_sorted(x)
|
||||
return self._repr_iterable(x, level, 'frozenset([', '])',
|
||||
self.maxfrozenset)
|
||||
|
||||
def repr_deque(self, x, level):
|
||||
return self._repr_iterable(x, level, 'deque([', '])', self.maxdeque)
|
||||
|
||||
def repr_dict(self, x, level):
|
||||
n = len(x)
|
||||
if n == 0: return '{}'
|
||||
if level <= 0: return '{...}'
|
||||
newlevel = level - 1
|
||||
repr1 = self.repr1
|
||||
pieces = []
|
||||
for key in islice(_possibly_sorted(x), self.maxdict):
|
||||
keyrepr = repr1(key, newlevel)
|
||||
valrepr = repr1(x[key], newlevel)
|
||||
pieces.append('%s: %s' % (keyrepr, valrepr))
|
||||
if n > self.maxdict: pieces.append('...')
|
||||
s = ', '.join(pieces)
|
||||
return '{%s}' % (s,)
|
||||
|
||||
def repr_str(self, x, level):
|
||||
s = builtins.repr(x[:self.maxstring])
|
||||
if len(s) > self.maxstring:
|
||||
i = max(0, (self.maxstring-3)//2)
|
||||
j = max(0, self.maxstring-3-i)
|
||||
s = builtins.repr(x[:i] + x[len(x)-j:])
|
||||
s = s[:i] + '...' + s[len(s)-j:]
|
||||
return s
|
||||
|
||||
def repr_int(self, x, level):
|
||||
s = builtins.repr(x) # XXX Hope this isn't too slow...
|
||||
if len(s) > self.maxlong:
|
||||
i = max(0, (self.maxlong-3)//2)
|
||||
j = max(0, self.maxlong-3-i)
|
||||
s = s[:i] + '...' + s[len(s)-j:]
|
||||
return s
|
||||
|
||||
def repr_instance(self, x, level):
|
||||
try:
|
||||
s = builtins.repr(x)
|
||||
# Bugs in x.__repr__() can cause arbitrary
|
||||
# exceptions -- then make up something
|
||||
except Exception:
|
||||
return '<%s instance at %x>' % (x.__class__.__name__, id(x))
|
||||
if len(s) > self.maxother:
|
||||
i = max(0, (self.maxother-3)//2)
|
||||
j = max(0, self.maxother-3-i)
|
||||
s = s[:i] + '...' + s[len(s)-j:]
|
||||
return s
|
||||
|
||||
|
||||
def _possibly_sorted(x):
|
||||
# Since not all sequences of items can be sorted and comparison
|
||||
# functions may raise arbitrary exceptions, return an unsorted
|
||||
# sequence in that case.
|
||||
try:
|
||||
return sorted(x)
|
||||
except Exception:
|
||||
return list(x)
|
||||
|
||||
aRepr = Repr()
|
||||
repr = aRepr.repr
|
||||
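A short sketch of how the size limits above are used in practice; the import path is an assumption, as with the other vendored modules:

from functools32.reprlib32 import Repr

short = Repr()
short.maxlist = 3                      # show at most three list elements
short.maxstring = 12                   # clamp long string representations
print(short.repr(list(range(10))))     # [0, 1, 2, ...]
print(short.repr('abcdefghijklmnop'))  # 'abc...mnop'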
lib/spack/external/pyrsistent/LICENSE (new vendored file, 22 lines)
@@ -0,0 +1,22 @@
Copyright (c) 2019 Tobias Gustafsson

Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:

The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
lib/spack/external/pyrsistent/__init__.py (new vendored file, 6 lines)
@@ -0,0 +1,6 @@
# -*- coding: utf-8 -*-

from pyrsistent._pmap import pmap


__all__ = ('pmap',)
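Only pmap is re-exported by this trimmed-down vendored copy (upstream pyrsistent exposes far more). A quick sketch, assuming lib/spack/external is on sys.path:

from pyrsistent import pmap

m1 = pmap({'a': 1})
m2 = m1.set('b', 2)   # returns a new map; m1 is unchanged
print(m1)             # pmap({'a': 1})
print(m2['b'])        # 2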
lib/spack/external/pyrsistent/_compat.py (new vendored file, 31 lines)
@@ -0,0 +1,31 @@
from six import string_types


# enum compat
try:
    from enum import Enum
except ImportError:
    class Enum(object): pass
    # no objects will be instances of this class

# collections compat
try:
    from collections.abc import (
        Container,
        Hashable,
        Iterable,
        Mapping,
        Sequence,
        Set,
        Sized,
    )
except ImportError:
    from collections import (
        Container,
        Hashable,
        Iterable,
        Mapping,
        Sequence,
        Set,
        Sized,
    )
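What the shim buys is a single import site: the ABCs resolve from collections.abc on Python 3 and from collections on Python 2. A minimal sketch:

from pyrsistent._compat import Mapping

print(isinstance({}, Mapping))   # True on both Python 2 and 3
print(isinstance([], Mapping))   # False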
lib/spack/external/pyrsistent/_pmap.py (new vendored file, 460 lines)
@@ -0,0 +1,460 @@
from ._compat import Mapping, Hashable
from itertools import chain
import six
from pyrsistent._pvector import pvector
from pyrsistent._transformations import transform


class PMap(object):
    """
    Persistent map/dict. Tries to follow the same naming conventions as the built in dict where feasible.

    Do not instantiate directly, instead use the factory functions :py:func:`m` or :py:func:`pmap` to
    create an instance.

    Was originally written as a very close copy of the Clojure equivalent but was later rewritten to more
    closely resemble the python dict. This means that a sparse vector (a PVector) of buckets is used. The keys are
    hashed and the elements inserted at position hash % len(bucket_vector). Whenever the map size exceeds 2/3 of
    the containing vector's size the map is reallocated to a vector of double the size. This is done to avoid
    excessive hash collisions.

    This structure corresponds most closely to the built in dict type and is intended as a replacement. Where the
    semantics are the same (more or less) the same function names have been used but for some cases it is not possible,
    for example assignments and deletion of values.

    PMap implements the Mapping protocol and is Hashable. It also supports dot-notation for
    element access.

    Random access and insert is log32(n) where n is the size of the map.

    The following are examples of some common operations on persistent maps

    >>> m1 = m(a=1, b=3)
    >>> m2 = m1.set('c', 3)
    >>> m3 = m2.remove('a')
    >>> m1
    pmap({'b': 3, 'a': 1})
    >>> m2
    pmap({'c': 3, 'b': 3, 'a': 1})
    >>> m3
    pmap({'c': 3, 'b': 3})
    >>> m3['c']
    3
    >>> m3.c
    3
    """
    __slots__ = ('_size', '_buckets', '__weakref__', '_cached_hash')

    def __new__(cls, size, buckets):
        self = super(PMap, cls).__new__(cls)
        self._size = size
        self._buckets = buckets
        return self

    @staticmethod
    def _get_bucket(buckets, key):
        index = hash(key) % len(buckets)
        bucket = buckets[index]
        return index, bucket

    @staticmethod
    def _getitem(buckets, key):
        _, bucket = PMap._get_bucket(buckets, key)
        if bucket:
            for k, v in bucket:
                if k == key:
                    return v

        raise KeyError(key)

    def __getitem__(self, key):
        return PMap._getitem(self._buckets, key)

    @staticmethod
    def _contains(buckets, key):
        _, bucket = PMap._get_bucket(buckets, key)
        if bucket:
            for k, _ in bucket:
                if k == key:
                    return True

            return False

        return False

    def __contains__(self, key):
        return self._contains(self._buckets, key)

    get = Mapping.get

    def __iter__(self):
        return self.iterkeys()

    def __getattr__(self, key):
        try:
            return self[key]
        except KeyError:
            raise AttributeError(
                "{0} has no attribute '{1}'".format(type(self).__name__, key)
            )

    def iterkeys(self):
        for k, _ in self.iteritems():
            yield k

    # These are more efficient implementations compared to the original
    # methods that are based on the keys iterator and then calls the
    # accessor functions to access the value for the corresponding key
    def itervalues(self):
        for _, v in self.iteritems():
            yield v

    def iteritems(self):
        for bucket in self._buckets:
            if bucket:
                for k, v in bucket:
                    yield k, v

    def values(self):
        return pvector(self.itervalues())

    def keys(self):
        return pvector(self.iterkeys())

    def items(self):
        return pvector(self.iteritems())

    def __len__(self):
        return self._size

    def __repr__(self):
        return 'pmap({0})'.format(str(dict(self)))

    def __eq__(self, other):
        if self is other:
            return True
        if not isinstance(other, Mapping):
            return NotImplemented
        if len(self) != len(other):
            return False
        if isinstance(other, PMap):
            if (hasattr(self, '_cached_hash') and hasattr(other, '_cached_hash')
                    and self._cached_hash != other._cached_hash):
                return False
            if self._buckets == other._buckets:
                return True
            return dict(self.iteritems()) == dict(other.iteritems())
        elif isinstance(other, dict):
            return dict(self.iteritems()) == other
        return dict(self.iteritems()) == dict(six.iteritems(other))

    __ne__ = Mapping.__ne__

    def __lt__(self, other):
        raise TypeError('PMaps are not orderable')

    __le__ = __lt__
    __gt__ = __lt__
    __ge__ = __lt__

    def __str__(self):
        return self.__repr__()

    def __hash__(self):
        if not hasattr(self, '_cached_hash'):
            self._cached_hash = hash(frozenset(self.iteritems()))
        return self._cached_hash

    def set(self, key, val):
        """
        Return a new PMap with key and val inserted.

        >>> m1 = m(a=1, b=2)
        >>> m2 = m1.set('a', 3)
        >>> m3 = m1.set('c' ,4)
        >>> m1
        pmap({'b': 2, 'a': 1})
        >>> m2
        pmap({'b': 2, 'a': 3})
        >>> m3
        pmap({'c': 4, 'b': 2, 'a': 1})
        """
        return self.evolver().set(key, val).persistent()

    def remove(self, key):
        """
        Return a new PMap without the element specified by key. Raises KeyError if the element
        is not present.

        >>> m1 = m(a=1, b=2)
        >>> m1.remove('a')
        pmap({'b': 2})
        """
        return self.evolver().remove(key).persistent()

    def discard(self, key):
        """
        Return a new PMap without the element specified by key. Returns reference to itself
        if element is not present.

        >>> m1 = m(a=1, b=2)
        >>> m1.discard('a')
        pmap({'b': 2})
        >>> m1 is m1.discard('c')
        True
        """
        try:
            return self.remove(key)
        except KeyError:
            return self

    def update(self, *maps):
        """
        Return a new PMap with the items in Mappings inserted. If the same key is present in multiple
        maps the rightmost (last) value is inserted.

        >>> m1 = m(a=1, b=2)
        >>> m1.update(m(a=2, c=3), {'a': 17, 'd': 35})
        pmap({'c': 3, 'b': 2, 'a': 17, 'd': 35})
        """
        return self.update_with(lambda l, r: r, *maps)

    def update_with(self, update_fn, *maps):
        """
        Return a new PMap with the items in Mappings maps inserted. If the same key is present in multiple
        maps the values will be merged using update_fn going from left to right.

        >>> from operator import add
        >>> m1 = m(a=1, b=2)
        >>> m1.update_with(add, m(a=2))
        pmap({'b': 2, 'a': 3})

        The reverse behaviour of the regular merge. Keep the leftmost element instead of the rightmost.

        >>> m1 = m(a=1)
        >>> m1.update_with(lambda l, r: l, m(a=2), {'a':3})
        pmap({'a': 1})
        """
        evolver = self.evolver()
        for map in maps:
            for key, value in map.items():
                evolver.set(key, update_fn(evolver[key], value) if key in evolver else value)

        return evolver.persistent()

    def __add__(self, other):
        return self.update(other)

    def __reduce__(self):
        # Pickling support
        return pmap, (dict(self),)

    def transform(self, *transformations):
        """
        Transform arbitrarily complex combinations of PVectors and PMaps. A transformation
        consists of two parts. One match expression that specifies which elements to transform
        and one transformation function that performs the actual transformation.

        >>> from pyrsistent import freeze, ny
        >>> news_paper = freeze({'articles': [{'author': 'Sara', 'content': 'A short article'},
        ...                                   {'author': 'Steve', 'content': 'A slightly longer article'}],
        ...                      'weather': {'temperature': '11C', 'wind': '5m/s'}})
        >>> short_news = news_paper.transform(['articles', ny, 'content'], lambda c: c[:25] + '...' if len(c) > 25 else c)
        >>> very_short_news = news_paper.transform(['articles', ny, 'content'], lambda c: c[:15] + '...' if len(c) > 15 else c)
        >>> very_short_news.articles[0].content
        'A short article'
        >>> very_short_news.articles[1].content
        'A slightly long...'

        When nothing has been transformed the original data structure is kept

        >>> short_news is news_paper
        True
        >>> very_short_news is news_paper
        False
        >>> very_short_news.articles[0] is news_paper.articles[0]
        True
        """
        return transform(self, transformations)

    def copy(self):
        return self

    class _Evolver(object):
        __slots__ = ('_buckets_evolver', '_size', '_original_pmap')

        def __init__(self, original_pmap):
            self._original_pmap = original_pmap
            self._buckets_evolver = original_pmap._buckets.evolver()
            self._size = original_pmap._size

        def __getitem__(self, key):
            return PMap._getitem(self._buckets_evolver, key)

        def __setitem__(self, key, val):
            self.set(key, val)

        def set(self, key, val):
            if len(self._buckets_evolver) < 0.67 * self._size:
                self._reallocate(2 * len(self._buckets_evolver))

            kv = (key, val)
            index, bucket = PMap._get_bucket(self._buckets_evolver, key)
            if bucket:
                for k, v in bucket:
                    if k == key:
                        if v is not val:
                            new_bucket = [(k2, v2) if k2 != k else (k2, val) for k2, v2 in bucket]
                            self._buckets_evolver[index] = new_bucket

                        return self

                new_bucket = [kv]
                new_bucket.extend(bucket)
                self._buckets_evolver[index] = new_bucket
                self._size += 1
            else:
                self._buckets_evolver[index] = [kv]
                self._size += 1

            return self

        def _reallocate(self, new_size):
            new_list = new_size * [None]
            buckets = self._buckets_evolver.persistent()
            for k, v in chain.from_iterable(x for x in buckets if x):
                index = hash(k) % new_size
                if new_list[index]:
                    new_list[index].append((k, v))
                else:
                    new_list[index] = [(k, v)]

            # A reallocation should always result in a dirty buckets evolver to avoid
            # possible loss of elements when doing the reallocation.
            self._buckets_evolver = pvector().evolver()
            self._buckets_evolver.extend(new_list)

        def is_dirty(self):
            return self._buckets_evolver.is_dirty()

        def persistent(self):
            if self.is_dirty():
                self._original_pmap = PMap(self._size, self._buckets_evolver.persistent())

            return self._original_pmap

        def __len__(self):
            return self._size

        def __contains__(self, key):
            return PMap._contains(self._buckets_evolver, key)

        def __delitem__(self, key):
            self.remove(key)

        def remove(self, key):
            index, bucket = PMap._get_bucket(self._buckets_evolver, key)

            if bucket:
                new_bucket = [(k, v) for (k, v) in bucket if k != key]
                if len(bucket) > len(new_bucket):
                    self._buckets_evolver[index] = new_bucket if new_bucket else None
                    self._size -= 1
                    return self

            raise KeyError('{0}'.format(key))

    def evolver(self):
        """
        Create a new evolver for this pmap. For a discussion on evolvers in general see the
        documentation for the pvector evolver.

        Create the evolver and perform various mutating updates to it:

        >>> m1 = m(a=1, b=2)
        >>> e = m1.evolver()
        >>> e['c'] = 3
        >>> len(e)
        3
        >>> del e['a']

        The underlying pmap remains the same:

        >>> m1
        pmap({'b': 2, 'a': 1})

        The changes are kept in the evolver. An updated pmap can be created using the
        persistent() function on the evolver.

        >>> m2 = e.persistent()
        >>> m2
        pmap({'c': 3, 'b': 2})

        The new pmap will share data with the original pmap in the same way that would have
        been done if only using operations on the pmap.
        """
        return self._Evolver(self)

Mapping.register(PMap)
Hashable.register(PMap)


def _turbo_mapping(initial, pre_size):
    if pre_size:
        size = pre_size
    else:
        try:
            size = 2 * len(initial) or 8
        except Exception:
            # Guess we can't figure out the length. Give up on length hinting,
            # we can always reallocate later.
            size = 8

    buckets = size * [None]

    if not isinstance(initial, Mapping):
        # Make a dictionary of the initial data if it isn't already,
        # that will save us some work further down since we can assume no
        # key collisions
        initial = dict(initial)

    for k, v in six.iteritems(initial):
        h = hash(k)
        index = h % size
        bucket = buckets[index]

        if bucket:
            bucket.append((k, v))
        else:
            buckets[index] = [(k, v)]

    return PMap(len(initial), pvector().extend(buckets))


_EMPTY_PMAP = _turbo_mapping({}, 0)


def pmap(initial={}, pre_size=0):
    """
    Create new persistent map, inserts all elements in initial into the newly created map.
    The optional argument pre_size may be used to specify an initial size of the underlying bucket vector. This
    may have a positive performance impact in the cases where you know beforehand that a large number of elements
    will be inserted into the map eventually since it will reduce the number of reallocations required.

    >>> pmap({'a': 13, 'b': 14})
    pmap({'b': 14, 'a': 13})
    """
    if not initial:
        return _EMPTY_PMAP

    return _turbo_mapping(initial, pre_size)


def m(**kwargs):
    """
    Creates a new persistent map. Inserts all key value arguments into the newly created map.

    >>> m(a=13, b=14)
    pmap({'b': 14, 'a': 13})
    """
    return pmap(kwargs)
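The class docstring above describes the open-hashing layout: elements live in the bucket at hash(key) % len(buckets), and the bucket vector is doubled as the map grows. A sketch of the evolver workflow, which batches many updates into a single new persistent map:

from pyrsistent import pmap

base = pmap()
e = base.evolver()
for i in range(100):
    e[i] = i * i           # mutates only the evolver; reallocations happen internally
snapshot = e.persistent()  # one new persistent map for all 100 inserts
print(len(snapshot), snapshot[7])   # 100 49
print(len(base))                    # 0 -- the original map is untouched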
lib/spack/external/pyrsistent/_pvector.py (new vendored file, 713 lines)
@@ -0,0 +1,713 @@
from abc import abstractmethod, ABCMeta
|
||||
from ._compat import Sequence, Hashable
|
||||
from numbers import Integral
|
||||
import operator
|
||||
import six
|
||||
from pyrsistent._transformations import transform
|
||||
|
||||
|
||||
def _bitcount(val):
|
||||
return bin(val).count("1")
|
||||
|
||||
BRANCH_FACTOR = 32
|
||||
BIT_MASK = BRANCH_FACTOR - 1
|
||||
SHIFT = _bitcount(BIT_MASK)
|
||||
|
||||
|
||||
def compare_pvector(v, other, operator):
|
||||
return operator(v.tolist(), other.tolist() if isinstance(other, PVector) else other)
|
||||
|
||||
|
||||
def _index_or_slice(index, stop):
|
||||
if stop is None:
|
||||
return index
|
||||
|
||||
return slice(index, stop)
|
||||
|
||||
|
||||
class PythonPVector(object):
|
||||
"""
|
||||
Support structure for PVector that implements structural sharing for vectors using a trie.
|
||||
"""
|
||||
__slots__ = ('_count', '_shift', '_root', '_tail', '_tail_offset', '__weakref__')
|
||||
|
||||
def __new__(cls, count, shift, root, tail):
|
||||
self = super(PythonPVector, cls).__new__(cls)
|
||||
self._count = count
|
||||
self._shift = shift
|
||||
self._root = root
|
||||
self._tail = tail
|
||||
|
||||
# Derived attribute stored for performance
|
||||
self._tail_offset = self._count - len(self._tail)
|
||||
return self
|
||||
|
||||
def __len__(self):
|
||||
return self._count
|
||||
|
||||
def __getitem__(self, index):
|
||||
if isinstance(index, slice):
|
||||
# There are more conditions than the below where it would be OK to
|
||||
# return ourselves, implement those...
|
||||
if index.start is None and index.stop is None and index.step is None:
|
||||
return self
|
||||
|
||||
# This is a bit nasty realizing the whole structure as a list before
|
||||
# slicing it but it is the fastest way I've found to date, and it's easy :-)
|
||||
return _EMPTY_PVECTOR.extend(self.tolist()[index])
|
||||
|
||||
if index < 0:
|
||||
index += self._count
|
||||
|
||||
return PythonPVector._node_for(self, index)[index & BIT_MASK]
|
||||
|
||||
def __add__(self, other):
|
||||
return self.extend(other)
|
||||
|
||||
def __repr__(self):
|
||||
return 'pvector({0})'.format(str(self.tolist()))
|
||||
|
||||
def __str__(self):
|
||||
return self.__repr__()
|
||||
|
||||
def __iter__(self):
|
||||
# This is kind of lazy and will produce some memory overhead but it is the fasted method
|
||||
# by far of those tried since it uses the speed of the built in python list directly.
|
||||
return iter(self.tolist())
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self.__eq__(other)
|
||||
|
||||
def __eq__(self, other):
|
||||
return self is other or (hasattr(other, '__len__') and self._count == len(other)) and compare_pvector(self, other, operator.eq)
|
||||
|
||||
def __gt__(self, other):
|
||||
return compare_pvector(self, other, operator.gt)
|
||||
|
||||
def __lt__(self, other):
|
||||
return compare_pvector(self, other, operator.lt)
|
||||
|
||||
def __ge__(self, other):
|
||||
return compare_pvector(self, other, operator.ge)
|
||||
|
||||
def __le__(self, other):
|
||||
return compare_pvector(self, other, operator.le)
|
||||
|
||||
def __mul__(self, times):
|
||||
if times <= 0 or self is _EMPTY_PVECTOR:
|
||||
return _EMPTY_PVECTOR
|
||||
|
||||
if times == 1:
|
||||
return self
|
||||
|
||||
return _EMPTY_PVECTOR.extend(times * self.tolist())
|
||||
|
||||
__rmul__ = __mul__
|
||||
|
||||
def _fill_list(self, node, shift, the_list):
|
||||
if shift:
|
||||
shift -= SHIFT
|
||||
for n in node:
|
||||
self._fill_list(n, shift, the_list)
|
||||
else:
|
||||
the_list.extend(node)
|
||||
|
||||
def tolist(self):
|
||||
"""
|
||||
The fastest way to convert the vector into a python list.
|
||||
"""
|
||||
the_list = []
|
||||
self._fill_list(self._root, self._shift, the_list)
|
||||
the_list.extend(self._tail)
|
||||
return the_list
|
||||
|
||||
def _totuple(self):
|
||||
"""
|
||||
Returns the content as a python tuple.
|
||||
"""
|
||||
return tuple(self.tolist())
|
||||
|
||||
def __hash__(self):
|
||||
# Taking the easy way out again...
|
||||
return hash(self._totuple())
|
||||
|
||||
def transform(self, *transformations):
|
||||
return transform(self, transformations)
|
||||
|
||||
def __reduce__(self):
|
||||
# Pickling support
|
||||
return pvector, (self.tolist(),)
|
||||
|
||||
def mset(self, *args):
|
||||
if len(args) % 2:
|
||||
raise TypeError("mset expected an even number of arguments")
|
||||
|
||||
evolver = self.evolver()
|
||||
for i in range(0, len(args), 2):
|
||||
evolver[args[i]] = args[i+1]
|
||||
|
||||
return evolver.persistent()
|
||||
|
||||
class Evolver(object):
|
||||
__slots__ = ('_count', '_shift', '_root', '_tail', '_tail_offset', '_dirty_nodes',
|
||||
'_extra_tail', '_cached_leafs', '_orig_pvector')
|
||||
|
||||
def __init__(self, v):
|
||||
self._reset(v)
|
||||
|
||||
def __getitem__(self, index):
|
||||
if not isinstance(index, Integral):
|
||||
raise TypeError("'%s' object cannot be interpreted as an index" % type(index).__name__)
|
||||
|
||||
if index < 0:
|
||||
index += self._count + len(self._extra_tail)
|
||||
|
||||
if self._count <= index < self._count + len(self._extra_tail):
|
||||
return self._extra_tail[index - self._count]
|
||||
|
||||
return PythonPVector._node_for(self, index)[index & BIT_MASK]
|
||||
|
||||
def _reset(self, v):
|
||||
self._count = v._count
|
||||
self._shift = v._shift
|
||||
self._root = v._root
|
||||
self._tail = v._tail
|
||||
self._tail_offset = v._tail_offset
|
||||
self._dirty_nodes = {}
|
||||
self._cached_leafs = {}
|
||||
self._extra_tail = []
|
||||
self._orig_pvector = v
|
||||
|
||||
def append(self, element):
|
||||
self._extra_tail.append(element)
|
||||
return self
|
||||
|
||||
def extend(self, iterable):
|
||||
self._extra_tail.extend(iterable)
|
||||
return self
|
||||
|
||||
def set(self, index, val):
|
||||
self[index] = val
|
||||
return self
|
||||
|
||||
def __setitem__(self, index, val):
|
||||
if not isinstance(index, Integral):
|
||||
raise TypeError("'%s' object cannot be interpreted as an index" % type(index).__name__)
|
||||
|
||||
if index < 0:
|
||||
index += self._count + len(self._extra_tail)
|
||||
|
||||
if 0 <= index < self._count:
|
||||
node = self._cached_leafs.get(index >> SHIFT)
|
||||
if node:
|
||||
node[index & BIT_MASK] = val
|
||||
elif index >= self._tail_offset:
|
||||
if id(self._tail) not in self._dirty_nodes:
|
||||
self._tail = list(self._tail)
|
||||
self._dirty_nodes[id(self._tail)] = True
|
||||
self._cached_leafs[index >> SHIFT] = self._tail
|
||||
self._tail[index & BIT_MASK] = val
|
||||
else:
|
||||
self._root = self._do_set(self._shift, self._root, index, val)
|
||||
elif self._count <= index < self._count + len(self._extra_tail):
|
||||
self._extra_tail[index - self._count] = val
|
||||
elif index == self._count + len(self._extra_tail):
|
||||
self._extra_tail.append(val)
|
||||
else:
|
||||
raise IndexError("Index out of range: %s" % (index,))
|
||||
|
||||
def _do_set(self, level, node, i, val):
|
||||
if id(node) in self._dirty_nodes:
|
||||
ret = node
|
||||
else:
|
||||
ret = list(node)
|
||||
self._dirty_nodes[id(ret)] = True
|
||||
|
||||
if level == 0:
|
||||
ret[i & BIT_MASK] = val
|
||||
self._cached_leafs[i >> SHIFT] = ret
|
||||
else:
|
||||
sub_index = (i >> level) & BIT_MASK # >>>
|
||||
ret[sub_index] = self._do_set(level - SHIFT, node[sub_index], i, val)
|
||||
|
||||
return ret
|
||||
|
||||
def delete(self, index):
|
||||
del self[index]
|
||||
return self
|
||||
|
||||
def __delitem__(self, key):
|
||||
if self._orig_pvector:
|
||||
# All structural sharing bets are off, base evolver on _extra_tail only
|
||||
l = PythonPVector(self._count, self._shift, self._root, self._tail).tolist()
|
||||
l.extend(self._extra_tail)
|
||||
self._reset(_EMPTY_PVECTOR)
|
||||
self._extra_tail = l
|
||||
|
||||
del self._extra_tail[key]
|
||||
|
||||
def persistent(self):
|
||||
result = self._orig_pvector
|
||||
if self.is_dirty():
|
||||
result = PythonPVector(self._count, self._shift, self._root, self._tail).extend(self._extra_tail)
|
||||
self._reset(result)
|
||||
|
||||
return result
|
||||
|
||||
def __len__(self):
|
||||
return self._count + len(self._extra_tail)
|
||||
|
||||
def is_dirty(self):
|
||||
return bool(self._dirty_nodes or self._extra_tail)
|
||||
|
||||
def evolver(self):
|
||||
return PythonPVector.Evolver(self)
|
||||
|
||||
def set(self, i, val):
|
||||
# This method could be implemented by a call to mset() but doing so would cause
|
||||
# a ~5 X performance penalty on PyPy (considered the primary platform for this implementation
|
||||
# of PVector) so we're keeping this implementation for now.
|
||||
|
||||
if not isinstance(i, Integral):
|
||||
raise TypeError("'%s' object cannot be interpreted as an index" % type(i).__name__)
|
||||
|
||||
if i < 0:
|
||||
i += self._count
|
||||
|
||||
if 0 <= i < self._count:
|
||||
if i >= self._tail_offset:
|
||||
new_tail = list(self._tail)
|
||||
new_tail[i & BIT_MASK] = val
|
||||
return PythonPVector(self._count, self._shift, self._root, new_tail)
|
||||
|
||||
return PythonPVector(self._count, self._shift, self._do_set(self._shift, self._root, i, val), self._tail)
|
||||
|
||||
if i == self._count:
|
||||
return self.append(val)
|
||||
|
||||
raise IndexError("Index out of range: %s" % (i,))
|
||||
|
||||
def _do_set(self, level, node, i, val):
|
||||
ret = list(node)
|
||||
if level == 0:
|
||||
ret[i & BIT_MASK] = val
|
||||
else:
|
||||
sub_index = (i >> level) & BIT_MASK # >>>
|
||||
ret[sub_index] = self._do_set(level - SHIFT, node[sub_index], i, val)
|
||||
|
||||
return ret
|
||||
|
||||
@staticmethod
|
||||
def _node_for(pvector_like, i):
|
||||
if 0 <= i < pvector_like._count:
|
||||
if i >= pvector_like._tail_offset:
|
||||
return pvector_like._tail
|
||||
|
||||
node = pvector_like._root
|
||||
for level in range(pvector_like._shift, 0, -SHIFT):
|
||||
node = node[(i >> level) & BIT_MASK] # >>>
|
||||
|
||||
return node
|
||||
|
||||
raise IndexError("Index out of range: %s" % (i,))
|
||||
|
||||
def _create_new_root(self):
|
||||
new_shift = self._shift
|
||||
|
||||
# Overflow root?
|
||||
if (self._count >> SHIFT) > (1 << self._shift): # >>>
|
||||
new_root = [self._root, self._new_path(self._shift, self._tail)]
|
||||
new_shift += SHIFT
|
||||
else:
|
||||
new_root = self._push_tail(self._shift, self._root, self._tail)
|
||||
|
||||
return new_root, new_shift
|
||||
|
||||
def append(self, val):
|
||||
if len(self._tail) < BRANCH_FACTOR:
|
||||
new_tail = list(self._tail)
|
||||
new_tail.append(val)
|
||||
return PythonPVector(self._count + 1, self._shift, self._root, new_tail)
|
||||
|
||||
# Full tail, push into tree
|
||||
new_root, new_shift = self._create_new_root()
|
||||
return PythonPVector(self._count + 1, new_shift, new_root, [val])
|
||||
|
||||
def _new_path(self, level, node):
|
||||
if level == 0:
|
||||
return node
|
||||
|
||||
return [self._new_path(level - SHIFT, node)]
|
||||
|
||||
def _mutating_insert_tail(self):
|
||||
self._root, self._shift = self._create_new_root()
|
||||
self._tail = []
|
||||
|
||||
def _mutating_fill_tail(self, offset, sequence):
|
||||
max_delta_len = BRANCH_FACTOR - len(self._tail)
|
||||
delta = sequence[offset:offset + max_delta_len]
|
||||
self._tail.extend(delta)
|
||||
delta_len = len(delta)
|
||||
self._count += delta_len
|
||||
return offset + delta_len
|
||||
|
||||
def _mutating_extend(self, sequence):
|
||||
offset = 0
|
||||
sequence_len = len(sequence)
|
||||
while offset < sequence_len:
|
||||
offset = self._mutating_fill_tail(offset, sequence)
|
||||
if len(self._tail) == BRANCH_FACTOR:
|
||||
self._mutating_insert_tail()
|
||||
|
||||
self._tail_offset = self._count - len(self._tail)
|
||||
|
||||
def extend(self, obj):
|
||||
# Mutates the new vector directly for efficiency but that's only an
|
||||
# implementation detail, once it is returned it should be considered immutable
|
||||
l = obj.tolist() if isinstance(obj, PythonPVector) else list(obj)
|
||||
if l:
|
||||
new_vector = self.append(l[0])
|
||||
new_vector._mutating_extend(l[1:])
|
||||
return new_vector
|
||||
|
||||
return self
|
||||
|
||||
def _push_tail(self, level, parent, tail_node):
|
||||
"""
|
||||
if parent is leaf, insert node,
|
||||
else does it map to an existing child? ->
|
||||
node_to_insert = push node one more level
|
||||
else alloc new path
|
||||
|
||||
return node_to_insert placed in copy of parent
|
||||
"""
|
||||
ret = list(parent)
|
||||
|
||||
if level == SHIFT:
|
||||
ret.append(tail_node)
|
||||
return ret
|
||||
|
||||
sub_index = ((self._count - 1) >> level) & BIT_MASK # >>>
|
||||
if len(parent) > sub_index:
|
||||
ret[sub_index] = self._push_tail(level - SHIFT, parent[sub_index], tail_node)
|
||||
return ret
|
||||
|
||||
ret.append(self._new_path(level - SHIFT, tail_node))
|
||||
return ret
|
||||
|
||||
def index(self, value, *args, **kwargs):
|
||||
return self.tolist().index(value, *args, **kwargs)
|
||||
|
||||
def count(self, value):
|
||||
return self.tolist().count(value)
|
||||
|
||||
def delete(self, index, stop=None):
|
||||
l = self.tolist()
|
||||
del l[_index_or_slice(index, stop)]
|
||||
return _EMPTY_PVECTOR.extend(l)
|
||||
|
||||
def remove(self, value):
|
||||
l = self.tolist()
|
||||
l.remove(value)
|
||||
return _EMPTY_PVECTOR.extend(l)
|
||||
|
||||
@six.add_metaclass(ABCMeta)
|
||||
class PVector(object):
|
||||
"""
|
||||
Persistent vector implementation. Meant as a replacement for the cases where you would normally
|
||||
use a Python list.
|
||||
|
||||
Do not instantiate directly, instead use the factory functions :py:func:`v` and :py:func:`pvector` to
|
||||
create an instance.
|
||||
|
||||
Heavily influenced by the persistent vector available in Clojure. Initially this was more or
|
||||
less just a port of the Java code for the Clojure vector. It has since been modified and to
|
||||
some extent optimized for usage in Python.
|
||||
|
||||
The vector is organized as a trie, any mutating method will return a new vector that contains the changes. No
|
||||
updates are done to the original vector. Structural sharing between vectors are applied where possible to save
|
||||
space and to avoid making complete copies.
|
||||
|
||||
This structure corresponds most closely to the built in list type and is intended as a replacement. Where the
|
||||
semantics are the same (more or less) the same function names have been used but for some cases it is not possible,
|
||||
for example assignments.
|
||||
|
||||
The PVector implements the Sequence protocol and is Hashable.
|
||||
|
||||
Inserts are amortized O(1). Random access is log32(n) where n is the size of the vector.
|
||||
|
||||
The following are examples of some common operations on persistent vectors:
|
||||
|
||||
>>> p = v(1, 2, 3)
|
||||
>>> p2 = p.append(4)
|
||||
>>> p3 = p2.extend([5, 6, 7])
|
||||
>>> p
|
||||
pvector([1, 2, 3])
|
||||
>>> p2
|
||||
pvector([1, 2, 3, 4])
|
||||
>>> p3
|
||||
pvector([1, 2, 3, 4, 5, 6, 7])
|
||||
>>> p3[5]
|
||||
6
|
||||
>>> p.set(1, 99)
|
||||
pvector([1, 99, 3])
|
||||
>>>
|
||||
"""
|
||||
|
||||
@abstractmethod
|
||||
def __len__(self):
|
||||
"""
|
||||
>>> len(v(1, 2, 3))
|
||||
3
|
||||
"""
|
||||
|
||||
@abstractmethod
|
||||
def __getitem__(self, index):
|
||||
"""
|
||||
Get value at index. Full slicing support.
|
||||
|
||||
>>> v1 = v(5, 6, 7, 8)
|
||||
>>> v1[2]
|
||||
7
|
||||
>>> v1[1:3]
|
||||
pvector([6, 7])
|
||||
"""
|
||||
|
||||
@abstractmethod
|
||||
def __add__(self, other):
|
||||
"""
|
||||
>>> v1 = v(1, 2)
|
||||
>>> v2 = v(3, 4)
|
||||
>>> v1 + v2
|
||||
pvector([1, 2, 3, 4])
|
||||
"""
|
||||
|
||||
@abstractmethod
|
||||
def __mul__(self, times):
|
||||
"""
|
||||
>>> v1 = v(1, 2)
|
||||
>>> 3 * v1
|
||||
pvector([1, 2, 1, 2, 1, 2])
|
||||
"""
|
||||
|
||||
@abstractmethod
|
||||
def __hash__(self):
|
||||
"""
|
||||
>>> v1 = v(1, 2, 3)
|
||||
>>> v2 = v(1, 2, 3)
|
||||
>>> hash(v1) == hash(v2)
|
||||
True
|
||||
"""
|
||||
|
||||
@abstractmethod
|
||||
def evolver(self):
|
||||
"""
|
||||
Create a new evolver for this pvector. The evolver acts as a mutable view of the vector
|
||||
with "transaction like" semantics. No part of the underlying vector i updated, it is still
|
||||
fully immutable. Furthermore multiple evolvers created from the same pvector do not
|
||||
interfere with each other.
|
||||
|
||||
You may want to use an evolver instead of working directly with the pvector in the
|
||||
following cases:
|
||||
|
||||
* Multiple updates are done to the same vector and the intermediate results are of no
|
||||
interest. In this case using an evolver may be a more efficient and easier to work with.
|
||||
* You need to pass a vector into a legacy function or a function that you have no control
|
||||
over which performs in place mutations of lists. In this case pass an evolver instance
|
||||
instead and then create a new pvector from the evolver once the function returns.
|
||||
|
||||
The following example illustrates a typical workflow when working with evolvers. It also
|
||||
displays most of the API (which i kept small by design, you should not be tempted to
|
||||
use evolvers in excess ;-)).
|
||||
|
||||
Create the evolver and perform various mutating updates to it:
|
||||
|
||||
>>> v1 = v(1, 2, 3, 4, 5)
|
||||
>>> e = v1.evolver()
|
||||
>>> e[1] = 22
|
||||
>>> _ = e.append(6)
|
||||
>>> _ = e.extend([7, 8, 9])
|
||||
>>> e[8] += 1
|
||||
>>> len(e)
|
||||
9
|
||||
|
||||
The underlying pvector remains the same:
|
||||
|
||||
>>> v1
|
||||
pvector([1, 2, 3, 4, 5])
|
||||
|
||||
The changes are kept in the evolver. An updated pvector can be created using the
|
||||
persistent() function on the evolver.
|
||||
|
||||
>>> v2 = e.persistent()
|
||||
>>> v2
|
||||
pvector([1, 22, 3, 4, 5, 6, 7, 8, 10])
|
||||
|
||||
The new pvector will share data with the original pvector in the same way that would have
|
||||
been done if only using operations on the pvector.
|
||||
"""

    @abstractmethod
    def mset(self, *args):
        """
        Return a new vector with elements in specified positions replaced by values (multi set).

        Elements on even positions in the argument list are interpreted as indexes while
        elements on odd positions are considered values.

        >>> v1 = v(1, 2, 3)
        >>> v1.mset(0, 11, 2, 33)
        pvector([11, 2, 33])
        """

    @abstractmethod
    def set(self, i, val):
        """
        Return a new vector with element at position i replaced with val. The original vector remains unchanged.

        Setting a value one step beyond the end of the vector is equal to appending. Setting beyond that will
        result in an IndexError.

        >>> v1 = v(1, 2, 3)
        >>> v1.set(1, 4)
        pvector([1, 4, 3])
        >>> v1.set(3, 4)
        pvector([1, 2, 3, 4])
        >>> v1.set(-1, 4)
        pvector([1, 2, 4])
        """

    @abstractmethod
    def append(self, val):
        """
        Return a new vector with val appended.

        >>> v1 = v(1, 2)
        >>> v1.append(3)
        pvector([1, 2, 3])
        """

    @abstractmethod
    def extend(self, obj):
        """
        Return a new vector with all values in obj appended to it. Obj may be another
        PVector or any other Iterable.

        >>> v1 = v(1, 2, 3)
        >>> v1.extend([4, 5])
        pvector([1, 2, 3, 4, 5])
        """

    @abstractmethod
    def index(self, value, *args, **kwargs):
        """
        Return first index of value. Additional indexes may be supplied to limit the search to a
        sub range of the vector.

        >>> v1 = v(1, 2, 3, 4, 3)
        >>> v1.index(3)
        2
        >>> v1.index(3, 3, 5)
        4
        """

    @abstractmethod
    def count(self, value):
        """
        Return the number of times that value appears in the vector.

        >>> v1 = v(1, 4, 3, 4)
        >>> v1.count(4)
        2
        """

    @abstractmethod
    def transform(self, *transformations):
        """
        Transform arbitrarily complex combinations of PVectors and PMaps. A transformation
        consists of two parts. One match expression that specifies which elements to transform
        and one transformation function that performs the actual transformation.

        >>> from pyrsistent import freeze, ny
        >>> news_paper = freeze({'articles': [{'author': 'Sara', 'content': 'A short article'},
        ...                                   {'author': 'Steve', 'content': 'A slightly longer article'}],
        ...                      'weather': {'temperature': '11C', 'wind': '5m/s'}})
        >>> short_news = news_paper.transform(['articles', ny, 'content'], lambda c: c[:25] + '...' if len(c) > 25 else c)
        >>> very_short_news = news_paper.transform(['articles', ny, 'content'], lambda c: c[:15] + '...' if len(c) > 15 else c)
        >>> very_short_news.articles[0].content
        'A short article'
        >>> very_short_news.articles[1].content
        'A slightly long...'

        When nothing has been transformed, the original data structure is kept

        >>> short_news is news_paper
        True
        >>> very_short_news is news_paper
        False
        >>> very_short_news.articles[0] is news_paper.articles[0]
        True
        """

    @abstractmethod
    def delete(self, index, stop=None):
        """
        Delete a portion of the vector by index or range.

        >>> v1 = v(1, 2, 3, 4, 5)
        >>> v1.delete(1)
        pvector([1, 3, 4, 5])
        >>> v1.delete(1, 3)
        pvector([1, 4, 5])
        """

    @abstractmethod
    def remove(self, value):
        """
        Remove the first occurrence of a value from the vector.

        >>> v1 = v(1, 2, 3, 2, 1)
        >>> v2 = v1.remove(1)
        >>> v2
        pvector([2, 3, 2, 1])
        >>> v2.remove(1)
        pvector([2, 3, 2])
        """


_EMPTY_PVECTOR = PythonPVector(0, SHIFT, [], [])
PVector.register(PythonPVector)
Sequence.register(PVector)
Hashable.register(PVector)


def python_pvector(iterable=()):
    """
    Create a new persistent vector containing the elements in iterable.

    >>> v1 = pvector([1, 2, 3])
    >>> v1
    pvector([1, 2, 3])
    """
    return _EMPTY_PVECTOR.extend(iterable)


try:
    # Use the C extension as the underlying trie implementation if it is available
    import os
    if os.environ.get('PYRSISTENT_NO_C_EXTENSION'):
        pvector = python_pvector
    else:
        from pvectorc import pvector
        PVector.register(type(pvector()))
except ImportError:
    pvector = python_pvector
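
For illustration, a minimal sketch of how the fallback above can be exercised; it assumes the package is importable as pyrsistent and that the environment variable is set before the first import:

import os
os.environ['PYRSISTENT_NO_C_EXTENSION'] = '1'  # must be set before pyrsistent is imported

from pyrsistent import pvector

# With the variable set, pvector is the pure-Python python_pvector shown above;
# without it, pvector comes from the pvectorc C extension when that is installed.
print(pvector([1, 2, 3]))  # pvector([1, 2, 3])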


def v(*elements):
    """
    Create a new persistent vector containing all parameters to this function.

    >>> v1 = v(1, 2, 3)
    >>> v1
    pvector([1, 2, 3])
    """
    return pvector(elements)

lib/spack/external/pyrsistent/_transformations.py (vendored, new file, 143 lines)
@@ -0,0 +1,143 @@
import re

import six

try:
    from inspect import Parameter, signature
except ImportError:
    signature = None
try:
    from inspect import getfullargspec as getargspec
except ImportError:
    from inspect import getargspec


_EMPTY_SENTINEL = object()


def inc(x):
    """ Add one to the current value """
    return x + 1


def dec(x):
    """ Subtract one from the current value """
    return x - 1


def discard(evolver, key):
    """ Discard the element and return a structure without the discarded element """
    try:
        del evolver[key]
    except KeyError:
        pass


# Matchers
def rex(expr):
    """ Regular expression matcher to use together with transform functions """
    r = re.compile(expr)
    return lambda key: isinstance(key, six.string_types) and r.match(key)


def ny(_):
    """ Matcher that matches any value """
    return True


# Support functions
def _chunks(l, n):
    for i in range(0, len(l), n):
        yield l[i:i + n]


def transform(structure, transformations):
    r = structure
    for path, command in _chunks(transformations, 2):
        r = _do_to_path(r, path, command)
    return r
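
The pairing of paths and commands relies on _chunks() walking the flat argument list two elements at a time — a small, self-contained sketch of that behaviour:

def _chunks(l, n):
    for i in range(0, len(l), n):
        yield l[i:i + n]

# A call like transform(structure, [path1, cmd1, path2, cmd2]) is processed
# as the (path, command) pairs produced here:
pairs = list(_chunks([['a', 0], str.upper, ['b'], str.lower], 2))
assert pairs == [[['a', 0], str.upper], [['b'], str.lower]]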


def _do_to_path(structure, path, command):
    if not path:
        return command(structure) if callable(command) else command

    kvs = _get_keys_and_values(structure, path[0])
    return _update_structure(structure, kvs, path[1:], command)


def _items(structure):
    try:
        return structure.items()
    except AttributeError:
        # Support wider range of structures by adding a transform_items() or similar?
        return list(enumerate(structure))


def _get(structure, key, default):
    try:
        if hasattr(structure, '__getitem__'):
            return structure[key]

        return getattr(structure, key)

    except (IndexError, KeyError):
        return default


def _get_keys_and_values(structure, key_spec):
    if callable(key_spec):
        # Support predicates as callable objects in the path
        arity = _get_arity(key_spec)
        if arity == 1:
            # Unary predicates are called with the "key" of the path
            # - eg a key in a mapping, an index in a sequence.
            return [(k, v) for k, v in _items(structure) if key_spec(k)]
        elif arity == 2:
            # Binary predicates are called with the key and the corresponding
            # value.
            return [(k, v) for k, v in _items(structure) if key_spec(k, v)]
        else:
            # Other arities are an error.
            raise ValueError(
                "callable in transform path must take 1 or 2 arguments"
            )

    # Non-callables are used as-is as a key.
    return [(key_spec, _get(structure, key_spec, _EMPTY_SENTINEL))]


if signature is None:
    def _get_arity(f):
        argspec = getargspec(f)
        return len(argspec.args) - len(argspec.defaults or ())
else:
    def _get_arity(f):
        return sum(
            1
            for p
            in signature(f).parameters.values()
            if p.default is Parameter.empty
            and p.kind in (Parameter.POSITIONAL_ONLY, Parameter.POSITIONAL_OR_KEYWORD)
        )
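
As an illustration, _get_arity() counts positional parameters without defaults, which is how unary key-predicates and binary key/value predicates in a path are told apart (a sketch, assuming the signature-based variant above):

>>> _get_arity(lambda k: True)
1
>>> _get_arity(lambda k, v: v is not None)
2
>>> _get_arity(lambda k, v=None: True)  # defaulted parameters are not counted
1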


def _update_structure(structure, kvs, path, command):
    from pyrsistent._pmap import pmap
    e = structure.evolver()
    if not path and command is discard:
        # Do this in reverse to avoid index problems with vectors. See #92.
        for k, v in reversed(kvs):
            discard(e, k)
    else:
        for k, v in kvs:
            is_empty = False
            if v is _EMPTY_SENTINEL:
                # Allow expansion of structure but make sure to cover the case
                # when an empty pmap is added as leaf node. See #154.
                is_empty = True
                v = pmap()

            result = _do_to_path(v, path, command)
            if result is not v or is_empty:
                e[k] = result

    return e.persistent()
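
Tying the helpers together, a hedged example of how transform(), the rex/ny matchers, and discard compose (assuming the package is importable as pyrsistent; the key order in the printed pmap may vary):

from pyrsistent import freeze, ny, rex, discard

data = freeze({'articles': [{'title': 'A', 'draft': True},
                            {'title': 'B', 'draft': False}],
               'weather': {'temp': '11C'}})

# Drop the 'draft' flag from every article, then delete any top-level
# key whose name matches the regular expression 'wea.*'.
cleaned = data.transform(['articles', ny, 'draft'], discard,
                         [rex('wea.*')], discard)
assert 'weather' not in cleaned
assert 'draft' not in cleaned['articles'][0]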

@@ -141,7 +141,7 @@ def filter_file(regex, repl, *filenames, **kwargs):
        file.
    """
    string = kwargs.get('string', False)
-    backup = kwargs.get('backup', True)
+    backup = kwargs.get('backup', False)
    ignore_absent = kwargs.get('ignore_absent', False)
    stop_at = kwargs.get('stop_at', None)
@@ -4,7 +4,7 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

#: major, minor, patch version for Spack, in a tuple
-spack_version_info = (0, 17, 0)
+spack_version_info = (0, 17, 1)

#: String containing Spack version joined with .'s
spack_version = '.'.join(str(v) for v in spack_version_info)
@@ -407,6 +407,46 @@ def _unknown_variants_in_dependencies(pkgs, error_cls):
    return errors


+@package_directives
+def _version_constraints_are_satisfiable_by_some_version_in_repo(pkgs, error_cls):
+    """Report if version constraints used in directives are not satisfiable"""
+    import spack.repo
+
+    errors = []
+    for pkg_name in pkgs:
+        pkg = spack.repo.get(pkg_name)
+        filename = spack.repo.path.filename_for_package_name(pkg_name)
+        dependencies_to_check = []
+        for dependency_name, dependency_data in pkg.dependencies.items():
+            # Skip virtual dependencies for the time being; checks on
+            # their versions can be added later
+            if spack.repo.path.is_virtual(dependency_name):
+                continue
+
+            dependencies_to_check.extend(
+                [edge.spec for edge in dependency_data.values()]
+            )
+
+        for s in dependencies_to_check:
+            dependency_pkg = None
+            try:
+                dependency_pkg = spack.repo.get(s.name)
+                assert any(
+                    v.satisfies(s.versions) for v in list(dependency_pkg.versions)
+                )
+            except Exception:
+                summary = ("{0}: dependency on {1} cannot be satisfied "
+                           "by known versions of {1.name}").format(pkg_name, s)
+                details = ['happening in ' + filename]
+                if dependency_pkg is not None:
+                    details.append('known versions of {0.name} are {1}'.format(
+                        s, ', '.join([str(x) for x in dependency_pkg.versions])
+                    ))
+                errors.append(error_cls(summary=summary, details=details))
+
+    return errors
+
+
def _analyze_variants_in_directive(pkg, constraint, directive, error_cls):
    import spack.variant
    variant_exceptions = (
@@ -13,6 +13,7 @@
import tarfile
import tempfile
import traceback
+import warnings
from contextlib import closing

import ruamel.yaml as yaml

@@ -27,10 +28,13 @@
import spack.database as spack_db
import spack.fetch_strategy as fs
import spack.hash_types as ht
+import spack.hooks
+import spack.hooks.sbang
import spack.mirror
+import spack.platforms
import spack.relocate as relocate
import spack.repo
import spack.store
import spack.util.file_cache as file_cache
import spack.util.gpg
import spack.util.spack_json as sjson
@@ -975,8 +979,11 @@ def generate_key_index(key_prefix, tmpdir=None):
        shutil.rmtree(tmpdir)


-def build_tarball(spec, outdir, force=False, rel=False, unsigned=False,
-                  allow_root=False, key=None, regenerate_index=False):
+def _build_tarball(
+        spec, outdir,
+        force=False, relative=False, unsigned=False,
+        allow_root=False, key=None, regenerate_index=False
+):
    """
    Build a tarball from given spec and put it into the directory structure
    used at the mirror (following <tarball_directory_name>).

@@ -1044,11 +1051,11 @@ def build_tarball(spec, outdir, force=False, rel=False, unsigned=False,
    os.remove(temp_tarfile_path)

    # create info for later relocation and create tar
-    write_buildinfo_file(spec, workdir, rel)
+    write_buildinfo_file(spec, workdir, relative)

    # optionally make the paths in the binaries relative to each other
    # in the spack install tree before creating tarball
-    if rel:
+    if relative:
        try:
            make_package_relative(workdir, spec, allow_root)
        except Exception as e:

@@ -1096,7 +1103,7 @@ def build_tarball(spec, outdir, force=False, rel=False, unsigned=False,
    buildinfo = {}
    buildinfo['relative_prefix'] = os.path.relpath(
        spec.prefix, spack.store.layout.root)
-    buildinfo['relative_rpaths'] = rel
+    buildinfo['relative_rpaths'] = relative
    spec_dict['buildinfo'] = buildinfo

    with open(specfile_path, 'w') as outfile:
@@ -1148,6 +1155,64 @@ def build_tarball(spec, outdir, force=False, rel=False, unsigned=False,
    return None


+def nodes_to_be_packaged(specs, include_root=True, include_dependencies=True):
+    """Return the list of nodes to be packaged, given a list of specs.
+
+    Args:
+        specs (List[spack.spec.Spec]): list of root specs to be processed
+        include_root (bool): include the root of each spec in the nodes
+        include_dependencies (bool): include the dependencies of each
+            spec in the nodes
+    """
+    if not include_root and not include_dependencies:
+        return set()
+
+    def skip_node(current_node):
+        if current_node.external or current_node.virtual:
+            return True
+        return spack.store.db.query_one(current_node) is None
+
+    expanded_set = set()
+    for current_spec in specs:
+        if not include_dependencies:
+            nodes = [current_spec]
+        else:
+            nodes = [n for n in current_spec.traverse(
+                order='post', root=include_root, deptype=('link', 'run')
+            )]
+
+        for node in nodes:
+            if not skip_node(node):
+                expanded_set.add(node)
+
+    return expanded_set
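
A hedged usage sketch based only on the signature above (specs would be concrete installed spack.spec.Spec objects):

# Package only the roots, skipping their link/run dependencies:
roots_only = nodes_to_be_packaged(specs, include_root=True,
                                  include_dependencies=False)

# Package only the dependency closure, not the roots themselves:
deps_only = nodes_to_be_packaged(specs, include_root=False,
                                 include_dependencies=True)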
+
+
+def push(specs, push_url, specs_kwargs=None, **kwargs):
+    """Create a binary package for each of the specs passed as input and push them
+    to a given push URL.
+
+    Args:
+        specs (List[spack.spec.Spec]): installed specs to be packaged
+        push_url (str): url where to push the binary package
+        specs_kwargs (dict): dictionary with two possible boolean keys, "include_root"
+            and "include_dependencies", which determine which part of each spec is
+            packaged and pushed to the mirror
+        **kwargs: TODO
+
+    """
+    specs_kwargs = specs_kwargs or {'include_root': True, 'include_dependencies': True}
+    nodes = nodes_to_be_packaged(specs, **specs_kwargs)
+
+    # TODO: This seems to be an easy target for task
+    # TODO: distribution using a parallel pool
+    for node in nodes:
+        try:
+            _build_tarball(node, push_url, **kwargs)
+        except NoOverwriteException as e:
+            warnings.warn(str(e))


def download_tarball(spec, preferred_mirrors=None):
    """
    Download binary tarball for given package into stage area, returning
@@ -1486,6 +1551,66 @@ def extract_tarball(spec, filename, allow_root=False, unsigned=False,
    os.remove(filename)


+def install_root_node(spec, allow_root, unsigned=False, force=False, sha256=None):
+    """Install the root node of a concrete spec from a buildcache.
+
+    Checking the sha256 sum of a node before installation is usually needed only
+    for software installed during Spack's bootstrapping (since we might not have
+    a proper signature verification mechanism available).
+
+    Args:
+        spec: spec to be installed (note that only the root node will be installed)
+        allow_root (bool): allows the root directory to be present in binaries
+            (may affect relocation)
+        unsigned (bool): if True allows installing unsigned binaries
+        force (bool): force installation if the spec is already present in the
+            local store
+        sha256 (str): optional sha256 of the binary package, to be checked
+            before installation
+    """
+    package = spack.repo.get(spec)
+    # Early termination
+    if spec.external or spec.virtual:
+        warnings.warn("Skipping external or virtual package {0}".format(spec.format()))
+        return
+    elif spec.concrete and package.installed and not force:
+        warnings.warn("Package for spec {0} already installed.".format(spec.format()))
+        return
+
+    tarball = download_tarball(spec)
+    if not tarball:
+        msg = 'download of binary cache file for spec "{0}" failed'
+        raise RuntimeError(msg.format(spec.format()))
+
+    if sha256:
+        checker = spack.util.crypto.Checker(sha256)
+        msg = 'cannot verify checksum for "{0}" [expected={1}]'
+        msg = msg.format(tarball, sha256)
+        if not checker.check(tarball):
+            raise spack.binary_distribution.NoChecksumException(msg)
+        tty.debug('Verified SHA256 checksum of the build cache')
+
+    tty.msg('Installing "{0}" from a buildcache'.format(spec.format()))
+    extract_tarball(spec, tarball, allow_root, unsigned, force)
+    spack.hooks.post_install(spec)
+    spack.store.db.add(spec, spack.store.layout)
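
A usage sketch grounded in the docstring above — this is how the bootstrapping code path can pin the tarball checksum when no signature verification is available (the sha256 value is a placeholder):

install_root_node(
    spec,                 # concrete spec; only its root node is installed
    allow_root=True,
    unsigned=True,        # bootstrap binaries may be unsigned ...
    sha256='e3b0c44...',  # ... so their checksum is verified instead (placeholder)
)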
+
+
+def install_single_spec(spec, allow_root=False, unsigned=False, force=False):
+    """Install a single concrete spec from a buildcache.
+
+    Args:
+        spec (spack.spec.Spec): spec to be installed
+        allow_root (bool): allows the root directory to be present in binaries
+            (may affect relocation)
+        unsigned (bool): if True allows installing unsigned binaries
+        force (bool): force installation if the spec is already present in the
+            local store
+    """
+    for node in spec.traverse(root=True, order='post', deptype=('link', 'run')):
+        install_root_node(node, allow_root=allow_root, unsigned=unsigned, force=force)


def try_direct_fetch(spec, full_hash_match=False, mirrors=None):
    """
    Try to find the spec directly on the configured mirrors
@@ -1937,3 +2062,73 @@ def download_buildcache_entry(file_descriptions, mirror_url=None):
            continue

    return False


+def download_single_spec(
+        concrete_spec, destination, require_cdashid=False, mirror_url=None
+):
+    """Download the buildcache files for a single concrete spec.
+
+    Args:
+        concrete_spec: concrete spec to be downloaded
+        destination (str): path where to put the downloaded buildcache
+        require_cdashid (bool): if False the `.cdashid` file is optional
+        mirror_url (str): url of the mirror from which to download
+    """
+    tarfile_name = tarball_name(concrete_spec, '.spack')
+    tarball_dir_name = tarball_directory_name(concrete_spec)
+    tarball_path_name = os.path.join(tarball_dir_name, tarfile_name)
+    local_tarball_path = os.path.join(destination, tarball_dir_name)
+
+    files_to_fetch = [
+        {
+            'url': [tarball_path_name],
+            'path': local_tarball_path,
+            'required': True,
+        }, {
+            'url': [tarball_name(concrete_spec, '.spec.json'),
+                    tarball_name(concrete_spec, '.spec.yaml')],
+            'path': destination,
+            'required': True,
+        }, {
+            'url': [tarball_name(concrete_spec, '.cdashid')],
+            'path': destination,
+            'required': require_cdashid,
+        },
+    ]
+
+    return download_buildcache_entry(files_to_fetch, mirror_url)
+
+
+class BinaryCacheQuery(object):
+    """Callable object to query if a spec is in a binary cache"""
+    def __init__(self, all_architectures):
+        """
+        Args:
+            all_architectures (bool): if True consider all the specs for querying,
+                otherwise restrict to the current default architecture
+        """
+        self.all_architectures = all_architectures
+
+        specs = update_cache_and_get_specs()
+
+        if not self.all_architectures:
+            arch = spack.spec.Spec.default_arch()
+            specs = [s for s in specs if s.satisfies(arch)]
+
+        self.possible_specs = specs
+
+    def __call__(self, spec, **kwargs):
+        matches = []
+        if spec.startswith('/'):
+            # Matching a DAG hash
+            query_hash = spec.replace('/', '')
+            for candidate_spec in self.possible_specs:
+                if candidate_spec.dag_hash().startswith(query_hash):
+                    matches.append(candidate_spec)
+        else:
+            # Matching a spec constraint
+            matches = [
+                s for s in self.possible_specs if s.satisfies(spec)
+            ]
+        return matches
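
A usage sketch of the two query forms the __call__ method accepts (the hash prefix is a placeholder):

query = BinaryCacheQuery(all_architectures=True)

by_hash = query('/abc1234')        # leading '/' means: match a DAG hash prefix
by_constraint = query('zlib@1.2')  # anything else is a spec constraint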

@@ -4,13 +4,13 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from __future__ import print_function

import argparse
import contextlib
import fnmatch
import functools
import json
import os
import os.path
import platform
import re
import sys
import sysconfig
@@ -255,11 +255,6 @@ def _read_metadata(self, package_name):
        return data

    def _install_by_hash(self, pkg_hash, pkg_sha256, index, bincache_platform):
-        # TODO: The local import is due to a circular import error. The
-        # TODO: correct fix for this is a refactor of the API used for
-        # TODO: binary relocation
-        import spack.cmd.buildcache
-
        index_spec = next(x for x in index if x.dag_hash() == pkg_hash)
        # Reconstruct the compiler that we need to use for bootstrapping
        compiler_entry = {

@@ -279,16 +274,18 @@ def _install_by_hash(self, pkg_hash, pkg_sha256, index, bincache_platform):
            'compilers', [{'compiler': compiler_entry}]
        ):
            spec_str = '/' + pkg_hash
-            parser = argparse.ArgumentParser()
-            spack.cmd.buildcache.setup_parser(parser)
-            install_args = [
-                'install',
-                '--sha256', pkg_sha256,
-                '--only-root',
-                '-a', '-u', '-o', '-f', spec_str
-            ]
-            args = parser.parse_args(install_args)
-            spack.cmd.buildcache.installtarball(args)
+            query = spack.binary_distribution.BinaryCacheQuery(
+                all_architectures=True
+            )
+            matches = spack.store.find([spec_str], multiple=False, query_fn=query)
+            for match in matches:
+                spack.binary_distribution.install_root_node(
+                    match,
+                    allow_root=True,
+                    unsigned=True,
+                    force=True,
+                    sha256=pkg_sha256
+                )

    def _install_and_test(
        self, abstract_spec, bincache_platform, bincache_data, test_fn
@@ -841,3 +838,142 @@ def ensure_flake8_in_path_or_raise():
    """Ensure that flake8 is in the PATH or raise."""
    executable, root_spec = 'flake8', flake8_root_spec()
    return ensure_executables_in_path_or_raise([executable], abstract_spec=root_spec)
+
+
+def _missing(name, purpose, system_only=True):
+    """Message to be printed if an executable is not found"""
+    msg = '[{2}] MISSING "{0}": {1}'
+    if not system_only:
+        return msg.format(name, purpose, '@*y{{B}}')
+    return msg.format(name, purpose, '@*y{{-}}')
+
+
+def _required_system_executable(exes, msg):
+    """Search for an executable in the system path only."""
+    if isinstance(exes, six.string_types):
+        exes = (exes,)
+    if spack.util.executable.which_string(*exes):
+        return True, None
+    return False, msg
+
+
+def _required_python_module(module, query_spec, msg):
+    """Check if a Python module is available in the current interpreter or
+    if it can be loaded from the bootstrap store
+    """
+    if _python_import(module) or _try_import_from_store(module, query_spec):
+        return True, None
+    return False, msg
+
+
+def _required_executable(exes, query_spec, msg):
+    """Search for an executable in the system path or in the bootstrap store."""
+    if isinstance(exes, six.string_types):
+        exes = (exes,)
+    if (spack.util.executable.which_string(*exes) or
+            _executables_in_store(exes, query_spec)):
+        return True, None
+    return False, msg
+
+
+def _core_requirements():
+    _core_system_exes = {
+        'make': _missing('make', 'required to build software from sources'),
+        'patch': _missing('patch', 'required to patch source code before building'),
+        'bash': _missing('bash', 'required for Spack compiler wrapper'),
+        'tar': _missing('tar', 'required to manage code archives'),
+        'gzip': _missing('gzip', 'required to compress/decompress code archives'),
+        'unzip': _missing('unzip', 'required to compress/decompress code archives'),
+        'bzip2': _missing('bzip2', 'required to compress/decompress code archives'),
+        'git': _missing('git', 'required to fetch/manage git repositories')
+    }
+    if platform.system().lower() == 'linux':
+        _core_system_exes['xz'] = _missing(
+            'xz', 'required to compress/decompress code archives'
+        )
+
+    # Executables that are not bootstrapped yet
+    result = [_required_system_executable(exe, msg)
+              for exe, msg in _core_system_exes.items()]
+    # Python modules
+    result.append(_required_python_module(
+        'clingo', clingo_root_spec(),
+        _missing('clingo', 'required to concretize specs', False)
+    ))
+    return result
+
+
+def _buildcache_requirements():
+    _buildcache_exes = {
+        'file': _missing('file', 'required to analyze files for buildcaches'),
+        ('gpg2', 'gpg'): _missing('gpg2', 'required to sign/verify buildcaches', False)
+    }
+    if platform.system().lower() == 'darwin':
+        _buildcache_exes['otool'] = _missing('otool', 'required to relocate binaries')
+
+    # Executables that are not bootstrapped yet
+    result = [_required_system_executable(exe, msg)
+              for exe, msg in _buildcache_exes.items()]
+
+    if platform.system().lower() == 'linux':
+        result.append(_required_executable(
+            'patchelf', patchelf_root_spec(),
+            _missing('patchelf', 'required to relocate binaries', False)
+        ))
+
+    return result
+
+
+def _optional_requirements():
+    _optional_exes = {
+        'zstd': _missing('zstd', 'required to compress/decompress code archives'),
+        'svn': _missing('svn', 'required to manage subversion repositories'),
+        'hg': _missing('hg', 'required to manage mercurial repositories')
+    }
+    # Executables that are not bootstrapped yet
+    result = [_required_system_executable(exe, msg)
+              for exe, msg in _optional_exes.items()]
+    return result
+
+
+def _development_requirements():
+    return [
+        _required_executable('isort', isort_root_spec(),
+                             _missing('isort', 'required for style checks', False)),
+        _required_executable('mypy', mypy_root_spec(),
+                             _missing('mypy', 'required for style checks', False)),
+        _required_executable('flake8', flake8_root_spec(),
+                             _missing('flake8', 'required for style checks', False)),
+        _required_executable('black', black_root_spec(),
+                             _missing('black', 'required for code formatting', False))
+    ]
+
+
+def status_message(section):
+    """Return a status message to be printed to screen that refers to the
+    section passed as argument and a bool which is True if there are missing
+    dependencies.
+
+    Args:
+        section (str): either 'core' or 'buildcache' or 'optional' or 'develop'
+    """
+    pass_token, fail_token = '@*g{[PASS]}', '@*r{[FAIL]}'
+
+    # Contain the header of the section and a list of requirements
+    spack_sections = {
+        'core': ("{0} @*{{Core Functionalities}}", _core_requirements),
+        'buildcache': ("{0} @*{{Binary packages}}", _buildcache_requirements),
+        'optional': ("{0} @*{{Optional Features}}", _optional_requirements),
+        'develop': ("{0} @*{{Development Dependencies}}", _development_requirements)
+    }
+    msg, required_software = spack_sections[section]
+
+    with ensure_bootstrap_configuration():
+        missing_software = False
+        for found, err_msg in required_software():
+            if not found:
+                missing_software = True
+                msg += "\n    " + err_msg
+        msg += '\n'
+        msg = msg.format(pass_token if not missing_software else fail_token)
+    return msg, missing_software
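
For illustration, how the function above can be consumed — the status subcommand later in this diff does essentially this (a sketch; colorize renders the '@*g{...}' markup):

import llnl.util.tty.color

for section in ('core', 'buildcache'):
    msg, missing = status_message(section)
    print(llnl.util.tty.color.colorize(msg))
    if missing:
        print('some {0} dependencies are missing'.format(section))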

@@ -1237,7 +1237,7 @@ def make_stack(tb, stack=None):
class InstallError(spack.error.SpackError):
    """Raised by packages when a package fails to install.

-    Any subclass of InstallError will be annotated by Spack wtih a
+    Any subclass of InstallError will be annotated by Spack with a
    ``pkg`` attribute on failure, which the caller can use to get the
    package for which the exception was raised.
    """
@@ -91,7 +91,7 @@ class ROCmPackage(PackageBase):
    # Possible architectures
    amdgpu_targets = (
        'gfx701', 'gfx801', 'gfx802', 'gfx803',
-        'gfx900', 'gfx906', 'gfx908', 'gfx1010',
+        'gfx900', 'gfx906', 'gfx908', 'gfx90a', 'gfx1010',
        'gfx1011', 'gfx1012'
    )
@@ -1271,6 +1271,7 @@ def get_concrete_specs(env, root_spec, job_name, related_builds,
def register_cdash_build(build_name, base_url, project, site, track):
    url = base_url + '/api/v1/addBuild.php'
    time_stamp = datetime.datetime.now().strftime('%Y%m%d-%H%M')
+    build_id = None
    build_stamp = '{0}-{1}'.format(time_stamp, track)
    payload = {
        "project": project,

@@ -1292,17 +1293,20 @@ def register_cdash_build(build_name, base_url, project, site, track):

    request = Request(url, data=enc_data, headers=headers)

-    response = opener.open(request)
-    response_code = response.getcode()
+    try:
+        response = opener.open(request)
+        response_code = response.getcode()

-    if response_code != 200 and response_code != 201:
-        msg = 'Adding build failed (response code = {0}'.format(response_code)
-        tty.warn(msg)
-        return (None, None)
+        if response_code != 200 and response_code != 201:
+            msg = 'Adding build failed (response code = {0}'.format(response_code)
+            tty.warn(msg)
+            return (None, None)

-    response_text = response.read()
-    response_json = json.loads(response_text)
-    build_id = response_json['buildid']
+        response_text = response.read()
+        response_json = json.loads(response_text)
+        build_id = response_json['buildid']
+    except Exception as e:
+        print("Registering build in CDash failed: {0}".format(e))

    return (build_id, build_stamp)
@@ -1412,15 +1416,26 @@ def read_cdashid_from_mirror(spec, mirror_url):
    return int(contents)


-def push_mirror_contents(env, spec, specfile_path, mirror_url, sign_binaries):
+def _push_mirror_contents(env, specfile_path, sign_binaries, mirror_url):
+    """Unchecked version of the public API, for easier mocking"""
+    unsigned = not sign_binaries
+    tty.debug('Creating buildcache ({0})'.format(
+        'unsigned' if unsigned else 'signed'))
+    hashes = env.all_hashes() if env else None
+    matches = spack.store.specfile_matches(specfile_path, hashes=hashes)
+    push_url = spack.mirror.push_url_from_mirror_url(mirror_url)
+    spec_kwargs = {'include_root': True, 'include_dependencies': False}
+    kwargs = {
+        'force': True,
+        'allow_root': True,
+        'unsigned': unsigned
+    }
+    bindist.push(matches, push_url, spec_kwargs, **kwargs)
+
+
+def push_mirror_contents(env, specfile_path, mirror_url, sign_binaries):
    try:
-        unsigned = not sign_binaries
-        tty.debug('Creating buildcache ({0})'.format(
-            'unsigned' if unsigned else 'signed'))
-        spack.cmd.buildcache._createtarball(
-            env, spec_file=specfile_path, add_deps=False,
-            output_location=mirror_url, force=True, allow_root=True,
-            unsigned=unsigned)
+        _push_mirror_contents(env, specfile_path, sign_binaries, mirror_url)
    except Exception as inst:
        # If the mirror we're pushing to is on S3 and there's some
        # permissions problem, for example, we can't just target
@@ -110,7 +110,6 @@ def analyze(parser, args, **kwargs):
    monitor = spack.monitor.get_client(
        host=args.monitor_host,
        prefix=args.monitor_prefix,
        disable_auth=args.monitor_disable_auth,
    )

    # Run the analysis
@@ -10,6 +10,8 @@
import llnl.util.tty
import llnl.util.tty.color

import spack
import spack.bootstrap
import spack.cmd.common.arguments
import spack.config
import spack.main

@@ -32,6 +34,16 @@ def _add_scope_option(parser):
def setup_parser(subparser):
    sp = subparser.add_subparsers(dest='subcommand')

+    status = sp.add_parser('status', help='get the status of Spack')
+    status.add_argument(
+        '--optional', action='store_true', default=False,
+        help='show the status of rarely used optional dependencies'
+    )
+    status.add_argument(
+        '--dev', action='store_true', default=False,
+        help='show the status of dependencies needed to develop Spack'
+    )
+
    enable = sp.add_parser('enable', help='enable bootstrapping')
    _add_scope_option(enable)

@@ -207,8 +219,39 @@ def _untrust(args):
    llnl.util.tty.msg(msg.format(args.name))


+def _status(args):
+    sections = ['core', 'buildcache']
+    if args.optional:
+        sections.append('optional')
+    if args.dev:
+        sections.append('develop')
+
+    header = "@*b{{Spack v{0} - {1}}}".format(
+        spack.spack_version, spack.bootstrap.spec_for_current_python()
+    )
+    print(llnl.util.tty.color.colorize(header))
+    print()
+    # Use the context manager here to avoid swapping between user and
+    # bootstrap config many times
+    missing = False
+    with spack.bootstrap.ensure_bootstrap_configuration():
+        for current_section in sections:
+            status_msg, fail = spack.bootstrap.status_message(section=current_section)
+            missing = missing or fail
+            if status_msg:
+                print(llnl.util.tty.color.colorize(status_msg))
+            print()
+    legend = ('Spack will take care of bootstrapping any missing dependency marked'
+              ' as [@*y{B}]. Dependencies marked as [@*y{-}] are instead required'
+              ' to be found on the system.')
+    if missing:
+        print(llnl.util.tty.color.colorize(legend))
+        print()
+
+
def bootstrap(parser, args):
    callbacks = {
+        'status': _status,
        'enable': _enable_or_disable,
        'disable': _enable_or_disable,
        'reset': _reset,
@@ -2,11 +2,11 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import argparse
import os
import shutil
import sys
import tempfile
import warnings

import llnl.util.tty as tty
@@ -40,7 +40,7 @@ def setup_parser(subparser):
    setup_parser.parser = subparser
    subparsers = subparser.add_subparsers(help='buildcache sub-commands')

-    create = subparsers.add_parser('create', help=createtarball.__doc__)
+    create = subparsers.add_parser('create', help=create_fn.__doc__)
    create.add_argument('-r', '--rel', action='store_true',
                        help="make all rpaths relative" +
                             " before creating tarballs.")

@@ -86,9 +86,9 @@ def setup_parser(subparser):
                              ' decide to build a cache for only the package'
                              ' or only the dependencies'))
    arguments.add_common_arguments(create, ['specs'])
-    create.set_defaults(func=createtarball)
+    create.set_defaults(func=create_fn)

-    install = subparsers.add_parser('install', help=installtarball.__doc__)
+    install = subparsers.add_parser('install', help=install_fn.__doc__)
    install.add_argument('-f', '--force', action='store_true',
                         help="overwrite install directory if it exists.")
    install.add_argument('-m', '--multiple', action='store_true',
@@ -102,16 +102,11 @@ def setup_parser(subparser):
    install.add_argument('-o', '--otherarch', action='store_true',
                         help="install specs from other architectures" +
                              " instead of default platform and OS")
-    # This argument is needed by the bootstrapping logic to verify checksums
-    install.add_argument('--sha256', help=argparse.SUPPRESS)
-    install.add_argument(
-        '--only-root', action='store_true', help=argparse.SUPPRESS
-    )

    arguments.add_common_arguments(install, ['specs'])
-    install.set_defaults(func=installtarball)
+    install.set_defaults(func=install_fn)

-    listcache = subparsers.add_parser('list', help=listspecs.__doc__)
+    listcache = subparsers.add_parser('list', help=list_fn.__doc__)
    arguments.add_common_arguments(listcache, ['long', 'very_long'])
    listcache.add_argument('-v', '--variants',
                           action='store_true',

@@ -121,29 +116,25 @@ def setup_parser(subparser):
                           help="list specs for all available architectures" +
                                " instead of default platform and OS")
    arguments.add_common_arguments(listcache, ['specs'])
-    listcache.set_defaults(func=listspecs)
+    listcache.set_defaults(func=list_fn)

-    dlkeys = subparsers.add_parser('keys', help=getkeys.__doc__)
-    dlkeys.add_argument(
+    keys = subparsers.add_parser('keys', help=keys_fn.__doc__)
+    keys.add_argument(
        '-i', '--install', action='store_true',
        help="install Keys pulled from mirror")
-    dlkeys.add_argument(
+    keys.add_argument(
        '-t', '--trust', action='store_true',
        help="trust all downloaded keys")
-    dlkeys.add_argument('-f', '--force', action='store_true',
-                        help="force new download of keys")
-    dlkeys.set_defaults(func=getkeys)
+    keys.add_argument('-f', '--force', action='store_true',
+                      help="force new download of keys")
+    keys.set_defaults(func=keys_fn)

-    preview_parser = subparsers.add_parser(
-        'preview',
-        help='analyzes an installed spec and reports whether '
-             'executables and libraries are relocatable'
-    )
-    arguments.add_common_arguments(preview_parser, ['installed_specs'])
-    preview_parser.set_defaults(func=preview)
+    preview = subparsers.add_parser('preview', help=preview_fn.__doc__)
+    arguments.add_common_arguments(preview, ['installed_specs'])
+    preview.set_defaults(func=preview_fn)

    # Check if binaries need to be rebuilt on remote mirror
-    check = subparsers.add_parser('check', help=check_binaries.__doc__)
+    check = subparsers.add_parser('check', help=check_fn.__doc__)
    check.add_argument(
        '-m', '--mirror-url', default=None,
        help='Override any configured mirrors with this mirror url')
@@ -175,28 +166,28 @@ def setup_parser(subparser):
        help="Default to rebuilding packages if errors are encountered " +
             "during the process of checking whether rebuilding is needed")

-    check.set_defaults(func=check_binaries)
+    check.set_defaults(func=check_fn)

    # Download tarball and specfile
-    dltarball = subparsers.add_parser('download', help=get_tarball.__doc__)
-    dltarball.add_argument(
+    download = subparsers.add_parser('download', help=download_fn.__doc__)
+    download.add_argument(
        '-s', '--spec', default=None,
        help="Download built tarball for spec from mirror")
-    dltarball.add_argument(
+    download.add_argument(
        '--spec-file', default=None,
        help=("Download built tarball for spec (from json or yaml file) " +
              "from mirror"))
-    dltarball.add_argument(
+    download.add_argument(
        '-p', '--path', default=None,
        help="Path to directory where tarball should be downloaded")
-    dltarball.add_argument(
+    download.add_argument(
        '-c', '--require-cdashid', action='store_true', default=False,
        help="Require .cdashid file to be downloaded with buildcache entry")
-    dltarball.set_defaults(func=get_tarball)
+    download.set_defaults(func=download_fn)

    # Get buildcache name
    getbuildcachename = subparsers.add_parser('get-buildcache-name',
-                                              help=get_buildcache_name.__doc__)
+                                              help=get_buildcache_name_fn.__doc__)
    getbuildcachename.add_argument(
        '-s', '--spec', default=None,
        help='Spec string for which buildcache name is desired')

@@ -204,11 +195,11 @@ def setup_parser(subparser):
        '--spec-file', default=None,
        help=('Path to spec json or yaml file for which buildcache name is ' +
              'desired'))
-    getbuildcachename.set_defaults(func=get_buildcache_name)
+    getbuildcachename.set_defaults(func=get_buildcache_name_fn)

    # Given the root spec, save the yaml of the dependent spec to a file
    savespecfile = subparsers.add_parser('save-specfile',
-                                         help=save_specfiles.__doc__)
+                                         help=save_specfile_fn.__doc__)
    savespecfile.add_argument(
        '--root-spec', default=None,
        help='Root spec of dependent spec')

@@ -221,10 +212,10 @@ def setup_parser(subparser):
    savespecfile.add_argument(
        '--specfile-dir', default=None,
        help='Path to directory where spec yamls should be saved')
-    savespecfile.set_defaults(func=save_specfiles)
+    savespecfile.set_defaults(func=save_specfile_fn)

    # Copy buildcache from some directory to another mirror url
-    copy = subparsers.add_parser('copy', help=buildcache_copy.__doc__)
+    copy = subparsers.add_parser('copy', help=copy_fn.__doc__)
    copy.add_argument(
        '--base-dir', default=None,
        help='Path to mirror directory (root of existing buildcache)')

@@ -235,10 +226,10 @@ def setup_parser(subparser):
    copy.add_argument(
        '--destination-url', default=None,
        help='Destination mirror url')
-    copy.set_defaults(func=buildcache_copy)
+    copy.set_defaults(func=copy_fn)

    # Sync buildcache entries from one mirror to another
-    sync = subparsers.add_parser('sync', help=buildcache_sync.__doc__)
+    sync = subparsers.add_parser('sync', help=sync_fn.__doc__)
    source = sync.add_mutually_exclusive_group(required=True)
    source.add_argument('--src-directory',
                        metavar='DIRECTORY',
@@ -265,311 +256,115 @@ def setup_parser(subparser):
                        metavar='MIRROR_URL',
                        type=str,
                        help="URL of the destination mirror")
-    sync.set_defaults(func=buildcache_sync)
+    sync.set_defaults(func=sync_fn)

    # Update buildcache index without copying any additional packages
    update_index = subparsers.add_parser(
-        'update-index', help=buildcache_update_index.__doc__)
+        'update-index', help=update_index_fn.__doc__)
    update_index.add_argument(
        '-d', '--mirror-url', default=None, help='Destination mirror url')
    update_index.add_argument(
        '-k', '--keys', default=False, action='store_true',
        help='If provided, key index will be updated as well as package index')
-    update_index.set_defaults(func=buildcache_update_index)
+    update_index.set_defaults(func=update_index_fn)

-def find_matching_specs(pkgs, allow_multiple_matches=False, env=None):
-    """Returns a list of specs matching the not necessarily
-       concretized specs given from cli
-
-    Args:
-        pkgs (str): spec to be matched against installed packages
-        allow_multiple_matches (bool): if True multiple matches are admitted
-        env (spack.environment.Environment or None): active environment, or ``None``
-            if there is not one
-
-    Return:
-        list: list of specs
+def _matching_specs(args):
+    """Return a list of matching specs read from either a spec file (JSON or YAML),
+    a query over the store or a query over the active environment.
    """
-    hashes = env.all_hashes() if env else None
-
-    # List of specs that match expressions given via command line
-    specs_from_cli = []
-    has_errors = False
-    tty.debug('find_matching_specs: about to parse specs for {0}'.format(pkgs))
-    specs = spack.cmd.parse_specs(pkgs)
-    for spec in specs:
-        matching = spack.store.db.query(spec, hashes=hashes)
-        # For each spec provided, make sure it refers to only one package.
-        # Fail and ask user to be unambiguous if it doesn't
-        if not allow_multiple_matches and len(matching) > 1:
-            tty.error('%s matches multiple installed packages:' % spec)
-            for match in matching:
-                tty.msg('"%s"' % match.format())
-            has_errors = True
-
-        # No installed package matches the query
-        if len(matching) == 0 and spec is not any:
-            tty.error('{0} does not match any installed packages.'.format(
-                spec))
-            has_errors = True
-
-        specs_from_cli.extend(matching)
-    if has_errors:
-        tty.die('use one of the matching specs above')
-
-    return specs_from_cli
-
-
-def match_downloaded_specs(pkgs, allow_multiple_matches=False, force=False,
-                           other_arch=False):
-    """Returns a list of specs matching the not necessarily
-       concretized specs given from cli
-
-    Args:
-        specs: list of specs to be matched against buildcaches on mirror
-        allow_multiple_matches : if True multiple matches are admitted
-
-    Return:
-        list of specs
-    """
-    # List of specs that match expressions given via command line
-    specs_from_cli = []
-    has_errors = False
-
-    specs = bindist.update_cache_and_get_specs()
-
-    if not other_arch:
-        arch = spack.spec.Spec.default_arch()
-        specs = [s for s in specs if s.satisfies(arch)]
-
-    for pkg in pkgs:
-        matches = []
-        tty.msg("buildcache spec(s) matching %s \n" % pkg)
-        for spec in sorted(specs):
-            if pkg.startswith('/'):
-                pkghash = pkg.replace('/', '')
-                if spec.dag_hash().startswith(pkghash):
-                    matches.append(spec)
-            else:
-                if spec.satisfies(pkg):
-                    matches.append(spec)
-        # For each pkg provided, make sure it refers to only one package.
-        # Fail and ask user to be unambiguous if it doesn't
-        if not allow_multiple_matches and len(matches) > 1:
-            tty.error('%s matches multiple downloaded packages:' % pkg)
-            for match in matches:
-                tty.msg('"%s"' % match.format())
-            has_errors = True
-
-        # No downloaded package matches the query
-        if len(matches) == 0:
-            tty.error('%s does not match any downloaded packages.' % pkg)
-            has_errors = True
-
-        specs_from_cli.extend(matches)
-    if has_errors:
-        tty.die('use one of the matching specs above')
-
-    return specs_from_cli
-
-
-def _createtarball(env, spec_file=None, packages=None, add_spec=True,
-                   add_deps=True, output_location=os.getcwd(),
-                   signing_key=None, force=False, make_relative=False,
-                   unsigned=False, allow_root=False, rebuild_index=False):
-    if spec_file:
-        with open(spec_file, 'r') as fd:
-            specfile_contents = fd.read()
-            tty.debug('createtarball read specfile contents:')
-            tty.debug(specfile_contents)
-            if spec_file.endswith('.json'):
-                s = Spec.from_json(specfile_contents)
-            else:
-                s = Spec.from_yaml(specfile_contents)
-            package = '/{0}'.format(s.dag_hash())
-            matches = find_matching_specs(package, env=env)
-
-    elif packages:
-        matches = find_matching_specs(packages, env=env)
-
-    elif env:
-        matches = [env.specs_by_hash[h] for h in env.concretized_order]
-
-    else:
-        tty.die("build cache file creation requires at least one" +
-                " installed package spec, an active environment," +
-                " or else a path to a json or yaml file containing a spec" +
-                " to install")
-    specs = set()
-
-    mirror = spack.mirror.MirrorCollection().lookup(output_location)
-    outdir = url_util.format(mirror.push_url)
-
-    msg = 'Buildcache files will be output to %s/build_cache' % outdir
-    tty.msg(msg)
-
-    if matches:
-        tty.debug('Found at least one matching spec')
-
-    for match in matches:
-        tty.debug('examining match {0}'.format(match.format()))
-        if match.external or match.virtual:
-            tty.debug('skipping external or virtual spec %s' %
-                      match.format())
-        else:
-            lookup = spack.store.db.query_one(match)
-
-            if not add_spec:
-                tty.debug('skipping matching root spec %s' % match.format())
-            elif lookup is None:
-                tty.debug('skipping uninstalled matching spec %s' %
-                          match.format())
-            else:
-                tty.debug('adding matching spec %s' % match.format())
-                specs.add(match)
-
-            if not add_deps:
-                continue
-
-            tty.debug('recursing dependencies')
-            for d, node in match.traverse(order='post',
-                                          depth=True,
-                                          deptype=('link', 'run')):
-                # skip root, since it's handled above
-                if d == 0:
-                    continue
-
-                lookup = spack.store.db.query_one(node)
-
-                if node.external or node.virtual:
-                    tty.debug('skipping external or virtual dependency %s' %
-                              node.format())
-                elif lookup is None:
-                    tty.debug('skipping uninstalled depenendency %s' %
-                              node.format())
-                else:
-                    tty.debug('adding dependency %s' % node.format())
-                    specs.add(node)
-
-    tty.debug('writing tarballs to %s/build_cache' % outdir)
-
-    for spec in specs:
-        tty.debug('creating binary cache file for package %s ' % spec.format())
-        try:
-            bindist.build_tarball(spec, outdir, force, make_relative,
-                                  unsigned, allow_root, signing_key,
-                                  rebuild_index)
-        except bindist.NoOverwriteException as e:
-            tty.warn(e)
-
-
-def createtarball(args):
-    """create a binary package from an existing install"""
-
    # restrict matching to current environment if one is active
    env = ev.active_environment()
+    hashes = env.all_hashes() if env else None
+    if args.spec_file:
+        return spack.store.specfile_matches(args.spec_file, hashes=hashes)

-    output_location = None
+    if args.specs:
+        constraints = spack.cmd.parse_specs(args.specs)
+        return spack.store.find(constraints, hashes=hashes)
+
+    if env:
+        return [env.specs_by_hash[h] for h in env.concretized_order]
+
+    tty.die("build cache file creation requires at least one" +
+            " installed package spec, an active environment," +
+            " or else a path to a json or yaml file containing a spec" +
+            " to install")
+
+
+def _concrete_spec_from_args(args):
+    spec_str, specfile_path = args.spec, args.spec_file
+
+    if not spec_str and not specfile_path:
+        tty.error('must provide either spec string or path to YAML or JSON specfile')
+        sys.exit(1)
+
+    if spec_str:
+        try:
+            constraints = spack.cmd.parse_specs(spec_str)
+            spec = spack.store.find(constraints)[0]
+            spec.concretize()
+        except SpecError as spec_error:
+            tty.error('Unable to concretize spec {0}'.format(spec_str))
+            tty.debug(spec_error)
+            sys.exit(1)
+
+        return spec
+
+    return Spec.from_specfile(specfile_path)
+
+
+def create_fn(args):
+    """create a binary package and push it to a mirror"""
    if args.directory:
-        output_location = args.directory
+        push_url = spack.mirror.push_url_from_directory(args.directory)

-        # User meant to provide a path to a local directory.
-        # Ensure that they did not accidentally pass a URL.
-        scheme = url_util.parse(output_location, scheme='<missing>').scheme
-        if scheme != '<missing>':
-            raise ValueError(
-                '"--directory" expected a local path; got a URL, instead')
+    if args.mirror_name:
+        push_url = spack.mirror.push_url_from_mirror_name(args.mirror_name)

-        # User meant to provide a path to a local directory.
-        # Ensure that the mirror lookup does not mistake it for a named mirror.
-        output_location = 'file://' + output_location
+    if args.mirror_url:
+        push_url = spack.mirror.push_url_from_mirror_url(args.mirror_url)

-    elif args.mirror_name:
-        output_location = args.mirror_name
+    matches = _matching_specs(args)

-        # User meant to provide the name of a preconfigured mirror.
-        # Ensure that the mirror lookup actually returns a named mirror.
-        result = spack.mirror.MirrorCollection().lookup(output_location)
-        if result.name == "<unnamed>":
-            raise ValueError(
-                'no configured mirror named "{name}"'.format(
-                    name=output_location))
-
-    elif args.mirror_url:
-        output_location = args.mirror_url
-
-        # User meant to provide a URL for an anonymous mirror.
-        # Ensure that they actually provided a URL.
-        scheme = url_util.parse(output_location, scheme='<missing>').scheme
-        if scheme == '<missing>':
-            raise ValueError(
-                '"{url}" is not a valid URL'.format(url=output_location))
-    add_spec = ('package' in args.things_to_install)
-    add_deps = ('dependencies' in args.things_to_install)
-
-    _createtarball(env, spec_file=args.spec_file, packages=args.specs,
-                   add_spec=add_spec, add_deps=add_deps,
-                   output_location=output_location, signing_key=args.key,
-                   force=args.force, make_relative=args.rel,
-                   unsigned=args.unsigned, allow_root=args.allow_root,
-                   rebuild_index=args.rebuild_index)
+    msg = 'Pushing binary packages to {0}/build_cache'.format(push_url)
+    tty.msg(msg)
+    specs_kwargs = {
+        'include_root': 'package' in args.things_to_install,
+        'include_dependencies': 'dependencies' in args.things_to_install
+    }
+    kwargs = {
+        'key': args.key,
+        'force': args.force,
+        'relative': args.rel,
+        'unsigned': args.unsigned,
+        'allow_root': args.allow_root,
+        'regenerate_index': args.rebuild_index
+    }
+    bindist.push(matches, push_url, specs_kwargs, **kwargs)


-def installtarball(args):
+def install_fn(args):
    """install from a binary package"""
    if not args.specs:
-        tty.die("build cache file installation requires" +
-                " at least one package spec argument")
-    pkgs = set(args.specs)
-    matches = match_downloaded_specs(pkgs, args.multiple, args.force,
-                                     args.otherarch)
+        tty.die("a spec argument is required to install from a buildcache")

+    query = bindist.BinaryCacheQuery(all_architectures=args.otherarch)
+    matches = spack.store.find(args.specs, multiple=args.multiple, query_fn=query)
    for match in matches:
-        install_tarball(match, args)
+        bindist.install_single_spec(
+            match,
+            allow_root=args.allow_root,
+            unsigned=args.unsigned,
+            force=args.force
+        )


-def install_tarball(spec, args):
-    s = Spec(spec)
-    if s.external or s.virtual:
-        tty.warn("Skipping external or virtual package %s" % spec.format())
-        return
-
-    # This argument is used only for bootstrapping specs without signatures,
-    # since we need to check the sha256 of each tarball
-    if not args.only_root:
-        for d in s.dependencies(deptype=('link', 'run')):
-            tty.msg("Installing buildcache for dependency spec %s" % d)
-            install_tarball(d, args)
-
-    package = spack.repo.get(spec)
-    if s.concrete and package.installed and not args.force:
-        tty.warn("Package for spec %s already installed." % spec.format())
-    else:
-        tarball = bindist.download_tarball(spec)
-        if tarball:
-            if args.sha256:
-                checker = spack.util.crypto.Checker(args.sha256)
-                msg = ('cannot verify checksum for "{0}"'
-                       ' [expected={1}]')
-                msg = msg.format(tarball, args.sha256)
-                if not checker.check(tarball):
-                    raise spack.binary_distribution.NoChecksumException(msg)
-                tty.debug('Verified SHA256 checksum of the build cache')
-
-            tty.msg('Installing buildcache for spec %s' % spec.format())
-            bindist.extract_tarball(spec, tarball, args.allow_root,
-                                    args.unsigned, args.force)
-            spack.hooks.post_install(spec)
-            spack.store.db.add(spec, spack.store.layout)
-        else:
-            tty.die('Download of binary cache file for spec %s failed.' %
-                    spec.format())
-
-
-def listspecs(args):
+def list_fn(args):
    """list binary packages available from mirrors"""
-    specs = bindist.update_cache_and_get_specs()
+    try:
+        specs = bindist.update_cache_and_get_specs()
+    except bindist.FetchCacheError as e:
+        tty.error(e)

    if not args.allarch:
        arch = spack.spec.Spec.default_arch()
@@ -587,19 +382,17 @@ def listspecs(args):
|
||||
display_specs(specs, args, all_headers=True)
|
||||
|
||||
|
||||
def getkeys(args):
|
||||
def keys_fn(args):
|
||||
"""get public keys available on mirrors"""
|
||||
bindist.get_keys(args.install, args.trust, args.force)
|
||||
|
||||
|
||||
def preview(args):
    """Print a status tree of the selected specs that shows which nodes are
    relocatable and which might not be.

    Args:
        args: command line arguments
def preview_fn(args):
    """analyze an installed spec and reports whether executables
    and libraries are relocatable
    """
    specs = find_matching_specs(args.specs, allow_multiple_matches=True)
    constraints = spack.cmd.parse_specs(args.specs)
    specs = spack.store.find(constraints, multiple=True)

    # Cycle over the specs that match
    for spec in specs:
@@ -608,7 +401,7 @@ def preview(args):
    print(spec.tree(status_fn=spack.relocate.is_relocatable))

def check_binaries(args):
def check_fn(args):
    """Check specs (either a single spec from --spec, or else the full set
    of release specs) against remote binary mirror(s) to see if any need
    to be rebuilt. This command uses the process exit code to indicate
@@ -616,7 +409,7 @@ def check_binaries(args):
    one of the indicated specs needs to be rebuilt.
    """
    if args.spec or args.spec_file:
        specs = [get_concrete_spec(args)]
        specs = [_concrete_spec_from_args(args)]
    else:
        env = spack.cmd.require_active_env(cmd_name='buildcache')
        env.concretize()
@@ -643,34 +436,7 @@ def check_binaries(args):
        configured_mirrors, specs, args.output_file, args.rebuild_on_error))

def download_buildcache_files(concrete_spec, local_dest, require_cdashid,
                              mirror_url=None):
    tarfile_name = bindist.tarball_name(concrete_spec, '.spack')
    tarball_dir_name = bindist.tarball_directory_name(concrete_spec)
    tarball_path_name = os.path.join(tarball_dir_name, tarfile_name)
    local_tarball_path = os.path.join(local_dest, tarball_dir_name)

    files_to_fetch = [
        {
            'url': [tarball_path_name],
            'path': local_tarball_path,
            'required': True,
        }, {
            'url': [bindist.tarball_name(concrete_spec, '.spec.json'),
                    bindist.tarball_name(concrete_spec, '.spec.yaml')],
            'path': local_dest,
            'required': True,
        }, {
            'url': [bindist.tarball_name(concrete_spec, '.cdashid')],
            'path': local_dest,
            'required': require_cdashid,
        },
    ]

    return bindist.download_buildcache_entry(files_to_fetch, mirror_url)

def get_tarball(args):
def download_fn(args):
    """Download buildcache entry from a remote mirror to local folder. This
    command uses the process exit code to indicate its result, specifically,
    a non-zero exit code indicates that the command failed to download at
@@ -685,54 +451,30 @@ def get_tarball(args):
        tty.msg('No download path provided, exiting')
        sys.exit(0)

    spec = get_concrete_spec(args)
    result = download_buildcache_files(spec, args.path, args.require_cdashid)
    spec = _concrete_spec_from_args(args)
    result = bindist.download_single_spec(
        spec, args.path, require_cdashid=args.require_cdashid
    )

    if not result:
        sys.exit(1)

def get_concrete_spec(args):
    spec_str = args.spec
    spec_yaml_path = args.spec_file

    if not spec_str and not spec_yaml_path:
        tty.msg('Must provide either spec string or path to ' +
                'yaml to concretize spec')
        sys.exit(1)

    if spec_str:
        try:
            spec = find_matching_specs(spec_str)[0]
            spec.concretize()
        except SpecError as spec_error:
            tty.error('Unable to concretize spec {0}'.format(args.spec))
            tty.debug(spec_error)
            sys.exit(1)

        return spec

    with open(spec_yaml_path, 'r') as fd:
        return Spec.from_yaml(fd.read())

def get_buildcache_name(args):
def get_buildcache_name_fn(args):
    """Get name (prefix) of buildcache entries for this spec"""
    spec = get_concrete_spec(args)
    spec = _concrete_spec_from_args(args)
    buildcache_name = bindist.tarball_name(spec, '')

    print('{0}'.format(buildcache_name))

    sys.exit(0)

def save_specfiles(args):
def save_specfile_fn(args):
    """Get full spec for dependencies, relative to root spec, and write them
    to files in the specified output directory. Uses exit code to signal
    success or failure. An exit code of zero means the command was likely
    successful. If any errors or exceptions are encountered, or if expected
    command-line arguments are not provided, then the exit code will be
    non-zero."""
    non-zero.
    """
    if not args.root_spec and not args.root_specfile:
        tty.msg('No root spec provided, exiting.')
        sys.exit(1)
@@ -759,12 +501,15 @@ def save_specfiles(args):
    sys.exit(0)

def buildcache_copy(args):
def copy_fn(args):
    """Copy a buildcache entry and all its files from one mirror, given as
    '--base-dir', to some other mirror, specified as '--destination-url'.
    The specific buildcache entry to be copied from one location to the
    other is identified using the '--spec-file' argument."""
    # TODO: This sub-command should go away once #11117 is merged
    # TODO: Remove after v0.18.0 release
    msg = ('"spack buildcache copy" is deprecated and will be removed from '
           'Spack starting in v0.19.0')
    warnings.warn(msg)

    if not args.spec_file:
        tty.msg('No spec yaml provided, exiting.')
@@ -839,7 +584,7 @@ def buildcache_copy(args):
    shutil.copyfile(cdashid_src_path, cdashid_dest_path)

def buildcache_sync(args):
def sync_fn(args):
    """ Syncs binaries (and associated metadata) from one mirror to another.
        Requires an active environment in order to know which specs to sync.

@@ -973,7 +718,7 @@ def update_index(mirror_url, update_keys=False):
        bindist.generate_key_index(keys_url)


def buildcache_update_index(args):
def update_index_fn(args):
    """Update a buildcache index."""
    outdir = '.'
    if args.mirror_url:

@@ -25,17 +25,17 @@

def setup_parser(subparser):
    subparser.add_argument(
        '--keep-stage', action='store_true',
        '--keep-stage', action='store_true', default=False,
        help="don't clean up staging area when command completes")
    sp = subparser.add_mutually_exclusive_group()
    sp.add_argument(
        '-b', '--batch', action='store_true',
        '-b', '--batch', action='store_true', default=False,
        help="don't ask which versions to checksum")
    sp.add_argument(
        '-l', '--latest', action='store_true',
        '-l', '--latest', action='store_true', default=False,
        help="checksum the latest available version only")
    sp.add_argument(
        '-p', '--preferred', action='store_true',
        '-p', '--preferred', action='store_true', default=False,
        help="checksum the preferred version only")
    arguments.add_common_arguments(subparser, ['package'])
    subparser.add_argument(

@@ -397,8 +397,12 @@ def ci_rebuild(args):
            tty.debug('Getting {0} buildcache from {1}'.format(
                job_spec_pkg_name, matching_mirror))
            tty.debug('Downloading to {0}'.format(build_cache_dir))
            buildcache.download_buildcache_files(
                job_spec, build_cache_dir, False, matching_mirror)
            bindist.download_single_spec(
                job_spec,
                build_cache_dir,
                require_cdashid=False,
                mirror_url=matching_mirror
            )

            # Now we are done and successful
            sys.exit(0)

@@ -433,17 +437,17 @@ def ci_rebuild(args):
            cdash_build_name, cdash_base_url, cdash_project,
            cdash_site, job_spec_buildgroup)

        cdash_upload_url = '{0}/submit.php?project={1}'.format(
            cdash_base_url, cdash_project_enc)

        install_args.extend([
            '--cdash-upload-url', cdash_upload_url,
            '--cdash-build', cdash_build_name,
            '--cdash-site', cdash_site,
            '--cdash-buildstamp', cdash_build_stamp,
        ])

        if cdash_build_id is not None:
            cdash_upload_url = '{0}/submit.php?project={1}'.format(
                cdash_base_url, cdash_project_enc)

            install_args.extend([
                '--cdash-upload-url', cdash_upload_url,
                '--cdash-build', cdash_build_name,
                '--cdash-site', cdash_site,
                '--cdash-buildstamp', cdash_build_stamp,
            ])

            tty.debug('CDash: Relating build with dependency builds')
            spack_ci.relate_cdash_builds(
                spec_map, cdash_base_url, cdash_build_id, cdash_project,
@@ -553,8 +557,8 @@ def ci_rebuild(args):
        # per-PR mirror, if this is a PR pipeline
        if buildcache_mirror_url:
            spack_ci.push_mirror_contents(
                env, job_spec, job_spec_yaml_path, buildcache_mirror_url,
                sign_binaries)
                env, job_spec_yaml_path, buildcache_mirror_url, sign_binaries
            )

        if cdash_build_id:
            tty.debug('Writing cdashid ({0}) to remote mirror: {1}'.format(
@@ -568,8 +572,8 @@ def ci_rebuild(args):
        # prefix is set)
        if pipeline_mirror_url:
            spack_ci.push_mirror_contents(
                env, job_spec, job_spec_yaml_path, pipeline_mirror_url,
                sign_binaries)
                env, job_spec_yaml_path, pipeline_mirror_url, sign_binaries
            )

        if cdash_build_id:
            tty.debug('Writing cdashid ({0}) to remote mirror: {1}'.format(

@@ -50,7 +50,6 @@ def containerize(parser, args):
    # If we have a monitor request, add monitor metadata to config
    if args.use_monitor:
        config['spack']['monitor'] = {
            "disable_auth": args.monitor_disable_auth,
            "host": args.monitor_host,
            "keep_going": args.monitor_keep_going,
            "prefix": args.monitor_prefix,

@@ -69,7 +69,7 @@
    hdf5 @B{+mpi}                   hdf5 with mpi enabled
    hdf5 @r{~mpi}                   hdf5 with mpi disabled
    hdf5 @B{+mpi} ^mpich            hdf5 with mpi, using mpich
    hdf5 @B{+mpi} ^openmpi@c{@1.7}  hdf5 wtih mpi, using openmpi 1.7
    hdf5 @B{+mpi} ^openmpi@c{@1.7}  hdf5 with mpi, using openmpi 1.7
    boxlib @B{dim=2}                boxlib built for 2 dimensions
    libdwarf @g{%intel} ^libelf@g{%gcc}
        libdwarf, built with intel compiler, linked to libelf built with gcc

@@ -305,7 +305,6 @@ def install(parser, args, **kwargs):
        monitor = spack.monitor.get_client(
            host=args.monitor_host,
            prefix=args.monitor_prefix,
            disable_auth=args.monitor_disable_auth,
            tags=args.monitor_tags,
            save_local=args.monitor_save_local,
        )
@@ -348,17 +347,22 @@ def get_tests(specs):
            env.write(regenerate=False)

            specs = env.all_specs()
            if not args.log_file and not reporter.filename:
                reporter.filename = default_log_file(specs[0])
            reporter.specs = specs
            if specs:
                if not args.log_file and not reporter.filename:
                    reporter.filename = default_log_file(specs[0])
                reporter.specs = specs

            # Tell the monitor about the specs
            if args.use_monitor and specs:
                monitor.new_configuration(specs)
                # Tell the monitor about the specs
                if args.use_monitor and specs:
                    monitor.new_configuration(specs)

            tty.msg("Installing environment {0}".format(env.name))
            with reporter('build'):
                env.install_all(**kwargs)
                tty.msg("Installing environment {0}".format(env.name))
                with reporter('build'):
                    env.install_all(**kwargs)

            else:
                msg = '{0} environment has no specs to install'.format(env.name)
                tty.msg(msg)

            tty.debug("Regenerating environment views for {0}"
                      .format(env.name))
@@ -399,6 +403,10 @@ def get_tests(specs):
        except SpackError as e:
            tty.debug(e)
            reporter.concretization_report(e.message)

            # Tell spack monitor about it
            if args.use_monitor and abstract_specs:
                monitor.failed_concretization(abstract_specs)
            raise

        # 2. Concrete specs from yaml files
@@ -462,7 +470,6 @@ def get_tests(specs):

        # Update install_args with the monitor args, needed for build task
        kwargs.update({
            "monitor_disable_auth": args.monitor_disable_auth,
            "monitor_keep_going": args.monitor_keep_going,
            "monitor_host": args.monitor_host,
            "use_monitor": args.use_monitor,

@@ -4,12 +4,11 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import functools
import os

import llnl.util.filesystem

import spack.cmd.common.arguments
import spack.cmd.modules
import spack.config
import spack.modules.lmod


def add_command(parser, command_dict):
@@ -41,12 +40,19 @@ def setdefault(module_type, specs, args):
    # https://lmod.readthedocs.io/en/latest/060_locating.html#marking-a-version-as-default
    #
    spack.cmd.modules.one_spec_or_raise(specs)
    writer = spack.modules.module_types['lmod'](
        specs[0], args.module_set_name)

    module_folder = os.path.dirname(writer.layout.filename)
    module_basename = os.path.basename(writer.layout.filename)
    with llnl.util.filesystem.working_dir(module_folder):
        if os.path.exists('default') and os.path.islink('default'):
            os.remove('default')
        os.symlink(module_basename, 'default')
    spec = specs[0]
    data = {
        'modules': {
            args.module_set_name: {
                'lmod': {
                    'defaults': [str(spec)]
                }
            }
        }
    }
    # Need to clear the cache if a SpackCommand is called during scripting
    spack.modules.lmod.configuration_registry = {}
    scope = spack.config.InternalConfigScope('lmod-setdefault', data)
    with spack.config.override(scope):
        writer = spack.modules.module_types['lmod'](spec, args.module_set_name)
        writer.update_module_defaults()

@@ -2,18 +2,52 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import functools

import spack.cmd.common.arguments
import spack.cmd.modules
import spack.config
import spack.modules.tcl


def add_command(parser, command_dict):
    tcl_parser = parser.add_parser(
        'tcl', help='manipulate non-hierarchical module files'
    )
    spack.cmd.modules.setup_parser(tcl_parser)
    sp = spack.cmd.modules.setup_parser(tcl_parser)

    # Set default module file for a package
    setdefault_parser = sp.add_parser(
        'setdefault', help='set the default module file for a package'
    )
    spack.cmd.common.arguments.add_common_arguments(
        setdefault_parser, ['constraint']
    )

    callbacks = dict(spack.cmd.modules.callbacks.items())
    callbacks['setdefault'] = setdefault

    command_dict['tcl'] = functools.partial(
        spack.cmd.modules.modules_cmd, module_type='tcl'
        spack.cmd.modules.modules_cmd, module_type='tcl', callbacks=callbacks
    )


def setdefault(module_type, specs, args):
    """Set the default module file, when multiple are present"""
    # Currently, accepts only a single matching spec
    spack.cmd.modules.one_spec_or_raise(specs)
    spec = specs[0]
    data = {
        'modules': {
            args.module_set_name: {
                'tcl': {
                    'defaults': [str(spec)]
                }
            }
        }
    }
    spack.modules.tcl.configuration_registry = {}
    scope = spack.config.InternalConfigScope('tcl-setdefault', data)
    with spack.config.override(scope):
        writer = spack.modules.module_types['tcl'](spec, args.module_set_name)
        writer.update_module_defaults()

@@ -27,7 +27,6 @@ def monitor(parser, args, **kwargs):
    monitor = spack.monitor.get_client(
        host=args.monitor_host,
        prefix=args.monitor_prefix,
        disable_auth=args.monitor_disable_auth,
    )

    # Upload the directory

@@ -3,6 +3,8 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from os.path import dirname

from spack.compiler import Compiler


@@ -105,3 +107,11 @@ def fc_pic_flag(self):
    @property
    def stdcxx_libs(self):
        return ('-cxxlib', )

    def setup_custom_environment(self, pkg, env):
        # workaround bug in icpx driver where it requires sycl-post-link is on the PATH
        # It is located in the same directory as the driver. Error message:
        #   clang++: error: unable to execute command:
        #   Executable "sycl-post-link" doesn't exist!
        if self.cxx:
            env.prepend_path('PATH', dirname(self.cxx))

@@ -38,7 +38,6 @@
import spack.spec
import spack.target
import spack.tengine
import spack.util.path
import spack.variant as vt
from spack.config import config
from spack.package_prefs import PackagePrefs, is_spec_buildable, spec_externals
@@ -90,7 +89,7 @@ def concretize_develop(self, spec):
        if not dev_info:
            return False

        path = spack.util.path.canonicalize_path(dev_info['path'])
        path = os.path.normpath(os.path.join(env.path, dev_info['path']))

        if 'dev_path' in spec.variants:
            assert spec.variants['dev_path'].value == path

@@ -28,6 +28,19 @@
      "develop": "latest"
    }
  },
  "opensuse/leap:15": {
    "bootstrap": {
      "template": "container/leap-15.dockerfile"
    },
    "os_package_manager": "zypper",
    "build": "spack/leap15",
    "build_tags": {
      "develop": "latest"
    },
    "final": {
      "image": "opensuse/leap:latest"
    }
  },
  "nvidia/cuda:11.2.1": {
    "bootstrap": {
      "template": "container/cuda_11_2_1.dockerfile",
@@ -85,6 +98,11 @@
      "update": "yum update -y && amazon-linux-extras install epel -y",
      "install": "yum install -y",
      "clean": "rm -rf /var/cache/yum && yum clean all"
    },
    "zypper": {
      "update": "zypper update -y",
      "install": "zypper install -y",
      "clean": "rm -rf /var/cache/zypp && zypper clean -a"
    }
  }
}

@@ -183,19 +183,18 @@ def paths(self):
    def monitor(self):
        """Enable using spack monitor during build."""
        Monitor = collections.namedtuple('Monitor', [
            'enabled', 'host', 'disable_auth', 'prefix', 'keep_going', 'tags'
            'enabled', 'host', 'prefix', 'keep_going', 'tags'
        ])
        monitor = self.config.get("monitor")

        # If we don't have a monitor group, cut out early.
        if not monitor:
            return Monitor(False, None, None, None, None, None)
            return Monitor(False, None, None, None, None)

        return Monitor(
            enabled=True,
            host=monitor.get('host'),
            prefix=monitor.get('prefix'),
            disable_auth=monitor.get("disable_auth"),
            keep_going=monitor.get("keep_going"),
            tags=monitor.get('tags')
        )

@@ -9,6 +9,7 @@
import os
import os.path
import re
import warnings

import llnl.util.filesystem
import llnl.util.tty
@@ -99,9 +100,14 @@ def by_executable(packages_to_check, path_hints=None):
            # for one prefix, but without additional details (e.g. about the
            # naming scheme which differentiates them), the spec won't be
            # usable.
            specs = _convert_to_iterable(
                pkg.determine_spec_details(prefix, exes_in_prefix)
            )
            try:
                specs = _convert_to_iterable(
                    pkg.determine_spec_details(prefix, exes_in_prefix)
                )
            except Exception as e:
                specs = []
                msg = 'error detecting "{0}" from prefix {1} [{2}]'
                warnings.warn(msg.format(pkg.name, prefix, str(e)))

            if not specs:
                llnl.util.tty.debug(
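The try/except introduced above is the usual guard around package-provided hook code: one package's broken determine_spec_details should degrade to a warning rather than abort detection for every other package. A generic sketch of the pattern, with hypothetical names (detect_all, candidates):

    import warnings

    def detect_all(candidates, prefix):
        # Collect results from every candidate; a failure in one produces a
        # warning instead of aborting the whole sweep.
        found = []
        for name, detect in candidates.items():
            try:
                found.extend(detect(prefix) or [])
            except Exception as e:
                warnings.warn('error detecting "{0}" from prefix {1} [{2}]'
                              .format(name, prefix, e))
        return found
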
@@ -466,7 +466,8 @@ def _execute_extends(pkg):
            return

        _depends_on(pkg, spec, when=when, type=type)
        pkg.extendees[spec] = (spack.spec.Spec(spec), kwargs)
        spec_obj = spack.spec.Spec(spec)
        pkg.extendees[spec_obj.name] = (spec_obj, kwargs)
    return _execute_extends

@@ -630,7 +630,8 @@ def _rewrite_relative_paths_on_relocation(self, init_file_dir):

        for name, entry in self.dev_specs.items():
            dev_path = entry['path']
            expanded_path = spack.util.path.canonicalize_path(entry['path'])
            expanded_path = os.path.normpath(os.path.join(
                init_file_dir, entry['path']))

            # Skip if the expanded path is the same (e.g. when absolute)
            if dev_path == expanded_path:
@@ -1514,10 +1515,8 @@ def install_specs(self, specs=None, **install_args):

        if not specs_to_install:
            tty.msg('All of the packages are already installed')
            return

        tty.debug('Processing {0} uninstalled specs'.format(
            len(specs_to_install)))
        else:
            tty.debug('Processing {0} uninstalled specs'.format(len(specs_to_install)))

        specs_to_overwrite = self._get_overwrite_specs()
        tty.debug('{0} specs need to be overwritten'.format(

@@ -39,7 +39,7 @@ def activate_header(env, shell, prompt=None):
    #
    else:
        if 'color' in os.getenv('TERM', '') and prompt:
            prompt = colorize('@G{%s} ' % prompt, color=True)
            prompt = colorize('@G{%s}' % prompt, color=True)

        cmds += 'export SPACK_ENV=%s;\n' % env.path
        cmds += "alias despacktivate='spack env deactivate';\n"

@@ -123,18 +123,11 @@ class UnsatisfiableSpecError(SpecError):
    For original concretizer, provide the requirement that was violated when
    raising.
    """
    def __init__(self, provided, required=None, constraint_type=None, conflicts=None):
        # required is only set by the original concretizer.
        # clingo concretizer handles error messages differently.
        if required is not None:
            assert not conflicts  # can't mix formats
            super(UnsatisfiableSpecError, self).__init__(
                "%s does not satisfy %s" % (provided, required))
        else:
            indented = ['  %s\n' % conflict for conflict in conflicts]
            conflict_msg = ''.join(indented)
            msg = '%s is unsatisfiable, conflicts are:\n%s' % (provided, conflict_msg)
            super(UnsatisfiableSpecError, self).__init__(msg)
    def __init__(self, provided, required, constraint_type):
        # This is only the entrypoint for old concretizer errors
        super(UnsatisfiableSpecError, self).__init__(
            "%s does not satisfy %s" % (provided, required))

        self.provided = provided
        self.required = required
        self.constraint_type = constraint_type

@@ -90,6 +90,7 @@ def __call__(self, *args, **kwargs):
on_install_start = _HookRunner('on_install_start')
on_install_success = _HookRunner('on_install_success')
on_install_failure = _HookRunner('on_install_failure')
on_install_cancel = _HookRunner('on_install_cancel')

# Analyzer hooks
on_analyzer_save = _HookRunner('on_analyzer_save')

@@ -41,6 +41,17 @@ def on_install_failure(spec):
    tty.verbose(result.get('message'))


def on_install_cancel(spec):
    """Triggered on cancel of an install
    """
    if not spack.monitor.cli:
        return

    tty.debug("Running on_install_cancel for %s" % spec)
    result = spack.monitor.cli.cancel_task(spec)
    tty.verbose(result.get('message'))


def on_phase_success(pkg, phase_name, log_file):
    """Triggered on a phase success
    """

@@ -4,6 +4,7 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import filecmp
import grp
import os
import re
import shutil
@@ -14,7 +15,9 @@
import llnl.util.filesystem as fs
import llnl.util.tty as tty

import spack.package_prefs
import spack.paths
import spack.spec
import spack.store

#: OS-imposed character limit for shebang line: 127 for Linux; 511 for Mac.
@@ -187,11 +190,47 @@ def install_sbang():
                    spack.paths.sbang_script, sbang_path):
        return

    # make $install_tree/bin and copy in a new version of sbang if needed
    # make $install_tree/bin
    sbang_bin_dir = os.path.dirname(sbang_path)
    fs.mkdirp(sbang_bin_dir)
    fs.install(spack.paths.sbang_script, sbang_path)
    fs.set_install_permissions(sbang_bin_dir)

    # get permissions for bin dir from configuration files
    group_name = spack.package_prefs.get_package_group(spack.spec.Spec("all"))
    config_mode = spack.package_prefs.get_package_dir_permissions(
        spack.spec.Spec("all")
    )

    if group_name:
        os.chmod(sbang_bin_dir, config_mode)  # Use package directory permissions
    else:
        fs.set_install_permissions(sbang_bin_dir)

    # set group on sbang_bin_dir if not already set (only if set in configuration)
    if group_name and grp.getgrgid(os.stat(sbang_bin_dir).st_gid).gr_name != group_name:
        os.chown(
            sbang_bin_dir,
            os.stat(sbang_bin_dir).st_uid,
            grp.getgrnam(group_name).gr_gid
        )

    # copy over the fresh copy of `sbang`
    sbang_tmp_path = os.path.join(
        os.path.dirname(sbang_path),
        ".%s.tmp" % os.path.basename(sbang_path),
    )
    shutil.copy(spack.paths.sbang_script, sbang_tmp_path)

    # set permissions on `sbang` (including group if set in configuration)
    os.chmod(sbang_tmp_path, config_mode)
    if group_name:
        os.chown(
            sbang_tmp_path,
            os.stat(sbang_tmp_path).st_uid,
            grp.getgrnam(group_name).gr_gid
        )

    # Finally, move the new `sbang` into place atomically
    os.rename(sbang_tmp_path, sbang_path)

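The rewritten install_sbang ends with the classic tmp-copy-then-rename recipe: os.rename is atomic within a single filesystem, so scripts whose shebang points at sbang never observe a half-written file. The core of that recipe, reduced to a sketch (paths and mode are placeholders):

    import os
    import shutil

    def replace_file_atomically(src, dest, mode=0o755):
        tmp = os.path.join(os.path.dirname(dest),
                           '.%s.tmp' % os.path.basename(dest))
        shutil.copy(src, tmp)   # stage the new content beside the target
        os.chmod(tmp, mode)     # permissions are final before the file is visible
        os.rename(tmp, dest)    # atomic swap on the same filesystem

Staging the temporary file in the same directory as the target matters: a rename across filesystems would fall back to copy-and-delete and lose atomicity.
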
def post_install(spec):

@@ -794,10 +794,10 @@ def _check_deps_status(self, request):
                  .format(dep_id, action)
            raise InstallError(err.format(request.pkg_id, msg))

        # Attempt to get a write lock to ensure another process does not
        # Attempt to get a read lock to ensure another process does not
        # uninstall the dependency while the requested spec is being
        # installed
        ltype, lock = self._ensure_locked('write', dep_pkg)
        ltype, lock = self._ensure_locked('read', dep_pkg)
        if lock is None:
            msg = '{0} is write locked by another process'.format(dep_id)
            raise InstallError(err.format(request.pkg_id, msg))
@@ -816,6 +816,8 @@ def _check_deps_status(self, request):
            tty.debug('Flagging {0} as installed per the database'
                      .format(dep_id))
            self._flag_installed(dep_pkg)
        else:
            lock.release_read()

    def _prepare_for_install(self, task):
        """
@@ -1027,7 +1029,7 @@ def _ensure_locked(self, lock_type, pkg):
        except (lk.LockDowngradeError, lk.LockTimeoutError) as exc:
            tty.debug(err.format(op, desc, pkg_id, exc.__class__.__name__,
                                 str(exc)))
            lock = None
            return (lock_type, None)

        except (Exception, KeyboardInterrupt, SystemExit) as exc:
            tty.error(err.format(op, desc, pkg_id, exc.__class__.__name__,
@@ -1198,6 +1200,7 @@ def _install_task(self, task):
        except spack.build_environment.StopPhase as e:
            # A StopPhase exception means that do_install was asked to
            # stop early from clients, and is not an error at this point
            spack.hooks.on_install_failure(task.request.pkg.spec)
            pid = '{0}: '.format(self.pid) if tty.show_pid() else ''
            tty.debug('{0}{1}'.format(pid, str(e)))
            tty.debug('Package stage directory: {0}'.format(pkg.stage.source_path))
@@ -1328,8 +1331,7 @@ def _setup_install_dir(self, pkg):
            pkg (spack.package.Package): the package to be built and installed
        """
        if not os.path.exists(pkg.spec.prefix):
            tty.verbose('Creating the installation directory {0}'
                        .format(pkg.spec.prefix))
            tty.debug('Creating the installation directory {0}'.format(pkg.spec.prefix))
            spack.store.layout.create_install_directory(pkg.spec)
        else:
            # Set the proper group for the prefix
@@ -1627,6 +1629,7 @@ def install(self):
            # established by the other process -- failed, installed, or
            # uninstalled -- on the next pass.
            if ltype == 'read':
                lock.release_read()
            self._requeue_task(task)
            continue

@@ -1655,7 +1658,7 @@ def install(self):
            err = 'Failed to install {0} due to {1}: {2}'
            tty.error(err.format(pkg.name, exc.__class__.__name__,
                                 str(exc)))
            spack.hooks.on_install_failure(task.request.pkg.spec)
            spack.hooks.on_install_cancel(task.request.pkg.spec)
            raise

        except (Exception, SystemExit) as exc:
@@ -1919,6 +1922,9 @@ def _real_install(self):
        except BaseException:
            combine_phase_logs(pkg.phase_log_files, pkg.log_path)
            spack.hooks.on_phase_error(pkg, phase_name, log_file)

            # phase error indicates install error
            spack.hooks.on_install_failure(pkg.spec)
            raise

        # We assume loggers share echo True/False

@@ -41,6 +41,7 @@
import spack.paths
import spack.platforms
import spack.repo
import spack.solver.asp
import spack.spec
import spack.store
import spack.util.debug
@@ -380,6 +381,13 @@ def make_argument_parser(**kwargs):
    # stat names in groups of 7, for nice wrapping.
    stat_lines = list(zip(*(iter(stat_names),) * 7))

    # help message for --show-cores
    show_cores_help = 'provide additional information on concretization failures\n'
    show_cores_help += 'off (default): show only the violated rule\n'
    show_cores_help += 'full: show raw unsat cores from clingo\n'
    show_cores_help += 'minimized: show subset-minimal unsat cores '
    show_cores_help += '(Warning: this may take hours for some specs)'

    parser.add_argument(
        '-h', '--help',
        dest='help', action='store_const', const='short', default=None,
@@ -403,6 +411,9 @@ def make_argument_parser(**kwargs):
        '-d', '--debug', action='count', default=0,
        help="write out debug messages "
             "(more d's for more verbosity: -d, -dd, -ddd, etc.)")
    parser.add_argument(
        '--show-cores', choices=["off", "full", "minimized"], default="off",
        help=show_cores_help)
    parser.add_argument(
        '--timestamp', action='store_true',
        help="Add a timestamp to tty output")
@@ -486,6 +497,13 @@ def setup_main_options(args):
        spack.config.set('config:debug', True, scope='command_line')
        spack.util.environment.tracing_enabled = True

    if args.show_cores != "off":
        # minimize_cores defaults to true, turn it off if we're showing full core
        # but don't want to wait to minimize it.
        spack.solver.asp.full_cores = True
        if args.show_cores == 'full':
            spack.solver.asp.minimize_cores = False

    if args.timestamp:
        tty.set_timestamp(True)

@@ -90,7 +90,9 @@ def from_json(stream, name=None):

    def to_dict(self):
        if self._push_url is None:
            return self._fetch_url
            return syaml_dict([
                ('fetch', self._fetch_url),
                ('push', self._fetch_url)])
        else:
            return syaml_dict([
                ('fetch', self._fetch_url),
@@ -105,12 +107,12 @@ def from_dict(d, name=None):

    def display(self, max_len=0):
        if self._push_url is None:
            _display_mirror_entry(max_len, self._name, self._fetch_url)
            _display_mirror_entry(max_len, self._name, self.fetch_url)
        else:
            _display_mirror_entry(
                max_len, self._name, self._fetch_url, "fetch")
                max_len, self._name, self.fetch_url, "fetch")
            _display_mirror_entry(
                max_len, self._name, self._push_url, "push")
                max_len, self._name, self.push_url, "push")

    def __str__(self):
        name = self._name
@@ -145,8 +147,8 @@ def name(self):
    def get_profile(self, url_type):
        if isinstance(self._fetch_url, dict):
            if url_type == "push":
                return self._push_url['profile']
            return self._fetch_url['profile']
                return self._push_url.get('profile', None)
            return self._fetch_url.get('profile', None)
        else:
            return None
@@ -159,8 +161,8 @@ def set_profile(self, url_type, profile):
    def get_access_pair(self, url_type):
        if isinstance(self._fetch_url, dict):
            if url_type == "push":
                return self._push_url['access_pair']
            return self._fetch_url['access_pair']
                return self._push_url.get('access_pair', None)
            return self._fetch_url.get('access_pair', None)
        else:
            return None
@@ -173,8 +175,8 @@ def set_access_pair(self, url_type, connection_tuple):
    def get_endpoint_url(self, url_type):
        if isinstance(self._fetch_url, dict):
            if url_type == "push":
                return self._push_url['endpoint_url']
            return self._fetch_url['endpoint_url']
                return self._push_url.get('endpoint_url', None)
            return self._fetch_url.get('endpoint_url', None)
        else:
            return None
@@ -187,8 +189,8 @@ def set_endpoint_url(self, url_type, url):
    def get_access_token(self, url_type):
        if isinstance(self._fetch_url, dict):
            if url_type == "push":
                return self._push_url['access_token']
            return self._fetch_url['access_token']
                return self._push_url.get('access_token', None)
            return self._fetch_url.get('access_token', None)
        else:
            return None

@@ -528,11 +530,16 @@ def add(name, url, scope, args={}):

    items = [(n, u) for n, u in mirrors.items()]
    mirror_data = url
    key_values = ["s3_access_key_id", "s3_access_token", "s3_profile"]
    key_values = ["s3_access_key_id",
                  "s3_access_token",
                  "s3_profile",
                  "s3_endpoint_url"]
    # On creation, assume connection data is set for both
    if any(value for value in key_values if value in args):
    # Check for value in each key, instead of presence of each key
    if any(vars(args)[value] for value in key_values if value in args):
        url_dict = {"url": url,
                    "access_pair": (args.s3_access_key_id, args.s3_access_key_secret),
                    "access_pair": (args.s3_access_key_id,
                                    args.s3_access_key_secret),
                    "access_token": args.s3_access_token,
                    "profile": args.s3_profile,
                    "endpoint_url": args.s3_endpoint_url}
@@ -644,6 +651,35 @@ def _add_single_spec(spec, mirror, mirror_stats):
    mirror_stats.error()

def push_url_from_directory(output_directory):
    """Given a directory in the local filesystem, return the URL on
    which to push binary packages.
    """
    scheme = url_util.parse(output_directory, scheme='<missing>').scheme
    if scheme != '<missing>':
        raise ValueError('expected a local path, but got a URL instead')
    mirror_url = 'file://' + output_directory
    mirror = spack.mirror.MirrorCollection().lookup(mirror_url)
    return url_util.format(mirror.push_url)


def push_url_from_mirror_name(mirror_name):
    """Given a mirror name, return the URL on which to push binary packages."""
    mirror = spack.mirror.MirrorCollection().lookup(mirror_name)
    if mirror.name == "<unnamed>":
        raise ValueError('no mirror named "{0}"'.format(mirror_name))
    return url_util.format(mirror.push_url)


def push_url_from_mirror_url(mirror_url):
    """Given a mirror URL, return the URL on which to push binary packages."""
    scheme = url_util.parse(mirror_url, scheme='<missing>').scheme
    if scheme == '<missing>':
        raise ValueError('"{0}" is not a valid URL'.format(mirror_url))
    mirror = spack.mirror.MirrorCollection().lookup(mirror_url)
    return url_util.format(mirror.push_url)

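All three push_url_from_* helpers hinge on the same probe: parse the argument and see whether it carries a URL scheme. Spack's url_util adds path normalization on top, but the bare check can be sketched with the standard library alone:

    from urllib.parse import urlparse

    def looks_like_url(s):
        # '/tmp/mirror' parses with an empty scheme; 's3://bucket' or
        # 'https://host/path' parse with a real one.
        return urlparse(s).scheme != ''

push_url_from_directory rejects inputs where this probe is True, while push_url_from_mirror_url rejects inputs where it is False.
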
class MirrorError(spack.error.SpackError):
    """Superclass of all mirror-creation related errors."""

@@ -906,6 +906,9 @@ def write(self, overwrite=False):
            fp.set_permissions_by_spec(self.layout.filename, self.spec)

        # Symlink defaults if needed
        self.update_module_defaults()

    def update_module_defaults(self):
        if any(self.spec.satisfies(default) for default in self.conf.defaults):
            # This spec matches a default, it needs to be symlinked to default
            # Symlink to a tmp location first and move, so that existing
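update_module_defaults (shown truncated above) ends up repointing a default symlink, and its comment notes the link is created under a temporary name first and then moved, so an existing default is replaced atomically. In isolation the trick looks like the following sketch; the names are illustrative, and as with sbang the final os.rename is the atomic step:

    import os

    def repoint_default(link_dir, module_basename):
        tmp = os.path.join(link_dir, '.default.tmp')
        if os.path.lexists(tmp):
            os.remove(tmp)
        os.symlink(module_basename, tmp)                   # build the new link aside
        os.rename(tmp, os.path.join(link_dir, 'default'))  # swap it into place
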
@@ -38,8 +38,7 @@
cli = None


def get_client(host, prefix="ms1", disable_auth=False, allow_fail=False, tags=None,
               save_local=False):
def get_client(host, prefix="ms1", allow_fail=False, tags=None, save_local=False):
    """
    Get a monitor client for a particular host and prefix.

@@ -57,8 +56,8 @@ def get_client(host, prefix="ms1", disable_auth=False, allow_fail=False, tags=No
    cli = SpackMonitorClient(host=host, prefix=prefix, allow_fail=allow_fail,
                             tags=tags, save_local=save_local)

    # If we don't disable auth, environment credentials are required
    if not disable_auth and not save_local:
    # Auth is always required unless we are saving locally
    if not save_local:
        cli.require_auth()

    # We will exit early if the monitoring service is not running, but
@@ -92,9 +91,6 @@ def get_monitor_group(subparser):
    monitor_group.add_argument(
        '--monitor-save-local', action='store_true', dest='monitor_save_local',
        default=False, help="save monitor results to .spack instead of server.")
    monitor_group.add_argument(
        '--monitor-no-auth', action='store_true', dest='monitor_disable_auth',
        default=False, help="the monitoring server does not require auth.")
    monitor_group.add_argument(
        '--monitor-tags', dest='monitor_tags', default=None,
        help="One or more (comma separated) tags for a build.")
@@ -122,13 +118,16 @@ class SpackMonitorClient:

    def __init__(self, host=None, prefix="ms1", allow_fail=False, tags=None,
                 save_local=False):
        # We can control setting an arbitrary version if needed
        sv = spack.main.get_version()
        self.spack_version = os.environ.get("SPACKMON_SPACK_VERSION") or sv

        self.host = host or "http://127.0.0.1"
        self.baseurl = "%s/%s" % (self.host, prefix.strip("/"))
        self.token = os.environ.get("SPACKMON_TOKEN")
        self.username = os.environ.get("SPACKMON_USER")
        self.headers = {}
        self.allow_fail = allow_fail
        self.spack_version = spack.main.get_version()
        self.capture_build_environment()
        self.tags = tags
        self.save_local = save_local
@@ -204,6 +203,14 @@ def capture_build_environment(self):
        """
        from spack.util.environment import get_host_environment_metadata
        self.build_environment = get_host_environment_metadata()
        keys = list(self.build_environment.keys())

        # Allow to customize any of these values via the environment
        for key in keys:
            envar_name = "SPACKMON_%s" % key.upper()
            envar = os.environ.get(envar_name)
            if envar:
                self.build_environment[key] = envar
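capture_build_environment above ends with a loop that lets any captured key be overridden through a SPACKMON_-prefixed environment variable, mirroring the SPACKMON_SPACK_VERSION override added to __init__. Factored out as a sketch (the metadata dict is a stand-in):

    import os

    def apply_env_overrides(metadata, prefix='SPACKMON_'):
        # e.g. SPACKMON_HOSTNAME or SPACKMON_PLATFORM replace captured values.
        for key in list(metadata):
            override = os.environ.get(prefix + key.upper())
            if override:
                metadata[key] = override
        return metadata
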
    def require_auth(self):
        """

@@ -417,6 +424,37 @@ def new_configuration(self, specs):

        return configs

    def failed_concretization(self, specs):
        """
        Given a list of abstract specs, tell spack monitor concretization failed.
        """
        configs = {}

        # There should only be one spec generally (what cases would have >1?)
        for spec in specs:

            # update the spec to have build hash indicating that cannot be built
            meta = spec.to_dict()['spec']
            nodes = []
            for node in meta.get("nodes", []):
                for hashtype in ["build_hash", "full_hash"]:
                    node[hashtype] = "FAILED_CONCRETIZATION"
                nodes.append(node)
            meta['nodes'] = nodes

            # We can't concretize / hash
            as_dict = {"spec": meta,
                       "spack_version": self.spack_version}

            if self.save_local:
                filename = "spec-%s-%s-config.json" % (spec.name, spec.version)
                self.save(as_dict, filename)
            else:
                response = self.do_request("specs/new/", data=sjson.dump(as_dict))
                configs[spec.package.name] = response.get('data', {})

        return configs

    def new_build(self, spec):
        """
        Create a new build.
@@ -507,6 +545,11 @@ def fail_task(self, spec):
        """
        return self.update_build(spec, status="FAILED")

    def cancel_task(self, spec):
        """Given a spec, mark it as cancelled.
        """
        return self.update_build(spec, status="CANCELLED")

    def send_analyze_metadata(self, pkg, metadata):
        """
        Send spack analyzer metadata to the spack monitor server.

@@ -2,11 +2,31 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import platform as py_platform
import re

from spack.version import Version

from ._operating_system import OperatingSystem


def kernel_version():
    """Return the kernel version as a Version object.
    Note that the kernel version is distinct from OS and/or
    distribution versions. For instance:
    >>> external.distro.id()
    'centos'
    >>> external.distro.version()
    '7'
    >>> platform.release()
    '5.10.84+'
    """
    # Strip '+' characters just in case we're running a
    # version built from git/etc
    clean_version = re.sub(r'\+', r'', py_platform.release())
    return Version(clean_version)


class LinuxDistro(OperatingSystem):
    """ This class will represent the autodetected operating system
    for a Linux System. Since there are many different flavors of
@@ -7,7 +7,6 @@
import inspect
import os
import os.path
import sys

import llnl.util.filesystem
import llnl.util.lang
@@ -26,12 +25,6 @@
def apply_patch(stage, patch_path, level=1, working_dir='.'):
    """Apply the patch at patch_path to code in the stage.

    Spack runs ``patch`` with ``-N`` so that it does not reject already-applied
    patches. This is useful for develop specs, so that the build does not fail
    due to repeated application of patches, and for easing requirements on patch
    specifications in packages -- packages won't stop working when patches we
    previously had to apply land in upstream.

    Args:
        stage (spack.stage.Stage): stage with code that will be patched
        patch_path (str): filesystem location for the patch to apply
@@ -41,31 +34,10 @@ def apply_patch(stage, patch_path, level=1, working_dir='.'):
    """
    patch = which("patch", required=True)
    with llnl.util.filesystem.working_dir(stage.source_path):
        output = patch(
            '-N',              # don't reject already-applied patches
            '-p', str(level),  # patch level (directory depth)
            '-i', patch_path,  # input source is the patch file
            '-d', working_dir, # patch chdir's to here before patching
            output=str,
            fail_on_error=False,
        )

        if patch.returncode != 0:
            # `patch` returns 1 both:
            #   a) when an error applying a patch, and
            #   b) when -N is supplied and the patch has already been applied
            #
            # It returns > 1 if there's something more serious wrong.
            #
            # So, the best we can do is to look for return code 1, look for output
            # indicating that the patch was already applied, and ignore the error
            # if we see it. Most implementations (BSD and GNU) seem to have the
            # same messages, so we expect these checks to be reliable.
            if patch.returncode > 1 or not any(
                s in output for s in ("Skipping patch", "ignored")
            ):
                sys.stderr.write(output)
                raise patch.error
        patch('-s',
              '-p', str(level),
              '-i', patch_path,
              '-d', working_dir)

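The longer variant of apply_patch above encodes the awkward exit-code contract of GNU/BSD patch: code 1 can mean either a real failure or, under -N, "already applied", while anything above 1 is always an error. The same dance, reproduced with plain subprocess as a hedged sketch (not Spack's executable wrapper):

    import subprocess
    import sys

    def apply_patch_idempotent(patch_file, level=1, cwd='.'):
        proc = subprocess.run(
            ['patch', '-N', '-p', str(level), '-i', patch_file, '-d', cwd],
            capture_output=True, text=True)
        output = proc.stdout + proc.stderr
        if proc.returncode == 0:
            return
        # Exit code 1 plus 'Skipping patch'/'ignored' in the output means the
        # patch was already applied; anything else is a genuine failure.
        if proc.returncode > 1 or not any(
                s in output for s in ('Skipping patch', 'ignored')):
            sys.stderr.write(output)
            raise RuntimeError('patch exited with code %d' % proc.returncode)
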
class Patch(object):

@@ -39,7 +39,7 @@

#: Full schema with metadata
schema = {
    '$schema': 'http://json-schema.org/schema#',
    '$schema': 'http://json-schema.org/draft-07/schema#',
    'title': 'Spack bootstrap configuration file schema',
    'type': 'object',
    'additionalProperties': False,

@@ -11,7 +11,7 @@
import spack.schema.spec

schema = {
    '$schema': 'http://json-schema.org/schema#',
    '$schema': 'http://json-schema.org/draft-07/schema#',
    'title': 'Spack buildcache specfile schema',
    'type': 'object',
    'additionalProperties': False,

@@ -28,7 +28,7 @@

#: Full schema with metadata
schema = {
    '$schema': 'http://json-schema.org/schema#',
    '$schema': 'http://json-schema.org/draft-07/schema#',
    'title': 'Spack cdash configuration file schema',
    'type': 'object',
    'additionalProperties': False,

@@ -84,7 +84,7 @@

#: Full schema with metadata
schema = {
    '$schema': 'http://json-schema.org/schema#',
    '$schema': 'http://json-schema.org/draft-07/schema#',
    'title': 'Spack compiler configuration file schema',
    'type': 'object',
    'additionalProperties': False,

@@ -110,7 +110,7 @@

#: Full schema with metadata
schema = {
    '$schema': 'http://json-schema.org/schema#',
    '$schema': 'http://json-schema.org/draft-07/schema#',
    'title': 'Spack core configuration file schema',
    'type': 'object',
    'additionalProperties': False,

@@ -14,7 +14,7 @@

#: Full schema with metadata
schema = {
    '$schema': 'http://json-schema.org/schema#',
    '$schema': 'http://json-schema.org/draft-07/schema#',
    'title': 'Spack spec schema',
    'type': 'object',
    'required': ['database'],
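The repeated one-line change above pins every schema's $schema to draft-07; validators select their dialect from that key, so an unversioned URI leaves dialect choice to the library. With the third-party jsonschema package, the pinned behavior can be exercised directly (the toy schema is illustrative):

    from jsonschema import Draft7Validator

    toy = {
        '$schema': 'http://json-schema.org/draft-07/schema#',
        'type': 'object',
        'additionalProperties': False,
        'properties': {'title': {'type': 'string'}},
    }

    Draft7Validator.check_schema(toy)                  # SchemaError if malformed
    Draft7Validator(toy).validate({'title': 'spack'})  # ValidationError on bad data
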
Some files were not shown because too many files have changed in this diff.