Merge branch 'develop' into psakiev/f/git-version-refactor
This commit is contained in:
commit
780b86f3f1
10
lib/spack/external/_vendoring/altgraph/Dot.py
vendored
10
lib/spack/external/_vendoring/altgraph/Dot.py
vendored
@ -1,8 +1,8 @@
|
||||
"""
|
||||
altgraph.Dot - Interface to the dot language
|
||||
_vendoring.altgraph.Dot - Interface to the dot language
|
||||
============================================
|
||||
|
||||
The :py:mod:`~altgraph.Dot` module provides a simple interface to the
|
||||
The :py:mod:`~_vendoring.altgraph.Dot` module provides a simple interface to the
|
||||
file format used in the
|
||||
`graphviz <http://www.research.att.com/sw/tools/graphviz/>`_
|
||||
program. The module is intended to offload the most tedious part of the process
|
||||
@ -20,7 +20,7 @@
|
||||
|
||||
Here is a typical usage::
|
||||
|
||||
from altgraph import Graph, Dot
|
||||
from _vendoring.altgraph import Graph, Dot
|
||||
|
||||
# create a graph
|
||||
edges = [ (1,2), (1,3), (3,4), (3,5), (4,5), (5,4) ]
|
||||
@ -77,7 +77,7 @@
|
||||
|
||||
.. note::
|
||||
|
||||
dotty (invoked via :py:func:`~altgraph.Dot.display`) may not be able to
|
||||
dotty (invoked via :py:func:`~_vendoring.altgraph.Dot.display`) may not be able to
|
||||
display all graphics styles. To verify the output save it to an image file
|
||||
and look at it that way.
|
||||
|
||||
@ -111,7 +111,7 @@
|
||||
import os
|
||||
import warnings
|
||||
|
||||
from altgraph import GraphError
|
||||
from _vendoring.altgraph import GraphError
|
||||
|
||||
|
||||
class Dot(object):
|
||||
|
@ -1,5 +1,5 @@
|
||||
"""
|
||||
altgraph.Graph - Base Graph class
|
||||
_vendoring.altgraph.Graph - Base Graph class
|
||||
=================================
|
||||
|
||||
..
|
||||
@ -15,7 +15,7 @@
|
||||
|
||||
from collections import deque
|
||||
|
||||
from altgraph import GraphError
|
||||
from _vendoring.altgraph import GraphError
|
||||
|
||||
|
||||
class Graph(object):
|
||||
|
@ -1,8 +1,8 @@
|
||||
"""
|
||||
altgraph.GraphAlgo - Graph algorithms
|
||||
_vendoring.altgraph.GraphAlgo - Graph algorithms
|
||||
=====================================
|
||||
"""
|
||||
from altgraph import GraphError
|
||||
from _vendoring.altgraph import GraphError
|
||||
|
||||
|
||||
def dijkstra(graph, start, end=None):
|
||||
@ -25,7 +25,7 @@ def dijkstra(graph, start, end=None):
|
||||
and will raise an exception if it discovers that a negative edge has
|
||||
caused it to make a mistake.
|
||||
|
||||
Adapted to altgraph by Istvan Albert, Pennsylvania State University -
|
||||
Adapted to _vendoring.altgraph by Istvan Albert, Pennsylvania State University -
|
||||
June, 9 2004
|
||||
"""
|
||||
D = {} # dictionary of final distances
|
||||
|
@ -1,5 +1,5 @@
|
||||
"""
|
||||
altgraph.GraphStat - Functions providing various graph statistics
|
||||
_vendoring.altgraph.GraphStat - Functions providing various graph statistics
|
||||
=================================================================
|
||||
"""
|
||||
|
||||
|
@ -1,17 +1,17 @@
|
||||
"""
|
||||
altgraph.GraphUtil - Utility classes and functions
|
||||
_vendoring.altgraph.GraphUtil - Utility classes and functions
|
||||
==================================================
|
||||
"""
|
||||
|
||||
import random
|
||||
from collections import deque
|
||||
|
||||
from altgraph import Graph, GraphError
|
||||
from _vendoring.altgraph import Graph, GraphError
|
||||
|
||||
|
||||
def generate_random_graph(node_num, edge_num, self_loops=False, multi_edges=False):
|
||||
"""
|
||||
Generates and returns a :py:class:`~altgraph.Graph.Graph` instance with
|
||||
Generates and returns a :py:class:`~_vendoring.altgraph.Graph.Graph` instance with
|
||||
*node_num* nodes randomly connected by *edge_num* edges.
|
||||
"""
|
||||
g = Graph.Graph()
|
||||
@ -52,7 +52,7 @@ def generate_random_graph(node_num, edge_num, self_loops=False, multi_edges=Fals
|
||||
|
||||
def generate_scale_free_graph(steps, growth_num, self_loops=False, multi_edges=False):
|
||||
"""
|
||||
Generates and returns a :py:class:`~altgraph.Graph.Graph` instance that
|
||||
Generates and returns a :py:class:`~_vendoring.altgraph.Graph.Graph` instance that
|
||||
will have *steps* \\* *growth_num* nodes and a scale free (powerlaw)
|
||||
connectivity. Starting with a fully connected graph with *growth_num*
|
||||
nodes at every step *growth_num* nodes are added to the graph and are
|
||||
|
@ -1,14 +1,14 @@
|
||||
"""
|
||||
altgraph.ObjectGraph - Graph of objects with an identifier
|
||||
_vendoring.altgraph.ObjectGraph - Graph of objects with an identifier
|
||||
==========================================================
|
||||
|
||||
A graph of objects that have a "graphident" attribute.
|
||||
graphident is the key for the object in the graph
|
||||
"""
|
||||
|
||||
from altgraph import GraphError
|
||||
from altgraph.Graph import Graph
|
||||
from altgraph.GraphUtil import filter_stack
|
||||
from _vendoring.altgraph import GraphError
|
||||
from _vendoring.altgraph.Graph import Graph
|
||||
from _vendoring.altgraph.GraphUtil import filter_stack
|
||||
|
||||
|
||||
class ObjectGraph(object):
|
||||
|
@ -1,18 +1,18 @@
|
||||
"""
|
||||
altgraph - a python graph library
|
||||
_vendoring.altgraph - a python graph library
|
||||
=================================
|
||||
|
||||
altgraph is a fork of `graphlib <http://pygraphlib.sourceforge.net>`_ tailored
|
||||
_vendoring.altgraph is a fork of `graphlib <http://pygraphlib.sourceforge.net>`_ tailored
|
||||
to use newer Python 2.3+ features, including additional support used by the
|
||||
py2app suite (modulegraph and macholib, specifically).
|
||||
py2app suite (modulegraph and _vendoring.macholib, specifically).
|
||||
|
||||
altgraph is a python based graph (network) representation and manipulation
|
||||
_vendoring.altgraph is a python based graph (network) representation and manipulation
|
||||
package. It has started out as an extension to the
|
||||
`graph_lib module
|
||||
<http://www.ece.arizona.edu/~denny/python_nest/graph_lib_1.0.1.html>`_
|
||||
written by Nathan Denny it has been significantly optimized and expanded.
|
||||
|
||||
The :class:`altgraph.Graph.Graph` class is loosely modeled after the
|
||||
The :class:`_vendoring.altgraph.Graph.Graph` class is loosely modeled after the
|
||||
`LEDA <http://www.algorithmic-solutions.com/enleda.htm>`_
|
||||
(Library of Efficient Datatypes) representation. The library
|
||||
includes methods for constructing graphs, BFS and DFS traversals,
|
||||
@ -22,22 +22,22 @@
|
||||
|
||||
The package contains the following modules:
|
||||
|
||||
- the :py:mod:`altgraph.Graph` module contains the
|
||||
:class:`~altgraph.Graph.Graph` class that stores the graph data
|
||||
- the :py:mod:`_vendoring.altgraph.Graph` module contains the
|
||||
:class:`~_vendoring.altgraph.Graph.Graph` class that stores the graph data
|
||||
|
||||
- the :py:mod:`altgraph.GraphAlgo` module implements graph algorithms
|
||||
operating on graphs (:py:class:`~altgraph.Graph.Graph`} instances)
|
||||
- the :py:mod:`_vendoring.altgraph.GraphAlgo` module implements graph algorithms
|
||||
operating on graphs (:py:class:`~_vendoring.altgraph.Graph.Graph`} instances)
|
||||
|
||||
- the :py:mod:`altgraph.GraphStat` module contains functions for
|
||||
- the :py:mod:`_vendoring.altgraph.GraphStat` module contains functions for
|
||||
computing statistical measures on graphs
|
||||
|
||||
- the :py:mod:`altgraph.GraphUtil` module contains functions for
|
||||
- the :py:mod:`_vendoring.altgraph.GraphUtil` module contains functions for
|
||||
generating, reading and saving graphs
|
||||
|
||||
- the :py:mod:`altgraph.Dot` module contains functions for displaying
|
||||
- the :py:mod:`_vendoring.altgraph.Dot` module contains functions for displaying
|
||||
graphs via `graphviz <http://www.research.att.com/sw/tools/graphviz/>`_
|
||||
|
||||
- the :py:mod:`altgraph.ObjectGraph` module implements a graph of
|
||||
- the :py:mod:`_vendoring.altgraph.ObjectGraph` module implements a graph of
|
||||
objects with a unique identifier
|
||||
|
||||
Installation
|
||||
@ -62,7 +62,7 @@
|
||||
Lets assume that we want to analyze the graph below (links to the full picture)
|
||||
GRAPH_IMG. Our script then might look the following way::
|
||||
|
||||
from altgraph import Graph, GraphAlgo, Dot
|
||||
from _vendoring.altgraph import Graph, GraphAlgo, Dot
|
||||
|
||||
# these are the edges
|
||||
edges = [ (1,2), (2,4), (1,3), (2,4), (3,4), (4,5), (6,5),
|
||||
@ -141,7 +141,7 @@
|
||||
"""
|
||||
import pkg_resources
|
||||
|
||||
__version__ = pkg_resources.require("altgraph")[0].version
|
||||
__version__ = pkg_resources.require("_vendoring.altgraph")[0].version
|
||||
|
||||
|
||||
class GraphError(ValueError):
|
||||
|
2
lib/spack/external/_vendoring/attr/_make.py
vendored
2
lib/spack/external/_vendoring/attr/_make.py
vendored
@ -38,7 +38,7 @@
|
||||
"typing.ClassVar",
|
||||
"t.ClassVar",
|
||||
"ClassVar",
|
||||
"typing_extensions.ClassVar",
|
||||
"_vendoring.typing_extensions.ClassVar",
|
||||
)
|
||||
# we don't use a double-underscore prefix because that triggers
|
||||
# name mangling when trying to create a slot for the field
|
||||
|
@ -1,6 +1,6 @@
|
||||
# SPDX-License-Identifier: MIT
|
||||
|
||||
from attr import (
|
||||
from _vendoring.attr import (
|
||||
NOTHING,
|
||||
Attribute,
|
||||
Factory,
|
||||
@ -28,7 +28,7 @@
|
||||
resolve_types,
|
||||
validate,
|
||||
)
|
||||
from attr._next_gen import asdict, astuple
|
||||
from _vendoring.attr._next_gen import asdict, astuple
|
||||
|
||||
from . import converters, exceptions, filters, setters, validators
|
||||
|
||||
|
@ -1,3 +1,3 @@
|
||||
# SPDX-License-Identifier: MIT
|
||||
|
||||
from attr.converters import * # noqa
|
||||
from _vendoring.attr.converters import * # noqa
|
||||
|
@ -1,3 +1,3 @@
|
||||
# SPDX-License-Identifier: MIT
|
||||
|
||||
from attr.exceptions import * # noqa
|
||||
from _vendoring.attr.exceptions import * # noqa
|
||||
|
@ -1,3 +1,3 @@
|
||||
# SPDX-License-Identifier: MIT
|
||||
|
||||
from attr.filters import * # noqa
|
||||
from _vendoring.attr.filters import * # noqa
|
||||
|
@ -1,3 +1,3 @@
|
||||
# SPDX-License-Identifier: MIT
|
||||
|
||||
from attr.setters import * # noqa
|
||||
from _vendoring.attr.setters import * # noqa
|
||||
|
@ -1,3 +1,3 @@
|
||||
# SPDX-License-Identifier: MIT
|
||||
|
||||
from attr.validators import * # noqa
|
||||
from _vendoring.attr.validators import * # noqa
|
||||
|
12
lib/spack/external/_vendoring/jinja2/bccache.py
vendored
12
lib/spack/external/_vendoring/jinja2/bccache.py
vendored
@ -19,7 +19,7 @@
|
||||
from types import CodeType
|
||||
|
||||
if t.TYPE_CHECKING:
|
||||
import typing_extensions as te
|
||||
import _vendoring.typing_extensions as te
|
||||
from .environment import Environment
|
||||
|
||||
class _MemcachedClient(te.Protocol):
|
||||
@ -101,7 +101,7 @@ def bytecode_to_string(self) -> bytes:
|
||||
class BytecodeCache:
|
||||
"""To implement your own bytecode cache you have to subclass this class
|
||||
and override :meth:`load_bytecode` and :meth:`dump_bytecode`. Both of
|
||||
these methods are passed a :class:`~jinja2.bccache.Bucket`.
|
||||
these methods are passed a :class:`~_vendoring.jinja2.bccache.Bucket`.
|
||||
|
||||
A very basic bytecode cache that saves the bytecode on the file system::
|
||||
|
||||
@ -193,7 +193,7 @@ class FileSystemBytecodeCache(BytecodeCache):
|
||||
is created for the user in the system temp directory.
|
||||
|
||||
The pattern can be used to have multiple separate caches operate on the
|
||||
same directory. The default pattern is ``'__jinja2_%s.cache'``. ``%s``
|
||||
same directory. The default pattern is ``'___vendoring.jinja2_%s.cache'``. ``%s``
|
||||
is replaced with the cache key.
|
||||
|
||||
>>> bcc = FileSystemBytecodeCache('/tmp/jinja_cache', '%s.cache')
|
||||
@ -202,7 +202,7 @@ class FileSystemBytecodeCache(BytecodeCache):
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self, directory: t.Optional[str] = None, pattern: str = "__jinja2_%s.cache"
|
||||
self, directory: t.Optional[str] = None, pattern: str = "___vendoring.jinja2_%s.cache"
|
||||
) -> None:
|
||||
if directory is None:
|
||||
directory = self._get_default_cache_dir()
|
||||
@ -225,7 +225,7 @@ def _unsafe_dir() -> "te.NoReturn":
|
||||
if not hasattr(os, "getuid"):
|
||||
_unsafe_dir()
|
||||
|
||||
dirname = f"_jinja2-cache-{os.getuid()}"
|
||||
dirname = f"__vendoring.jinja2-cache-{os.getuid()}"
|
||||
actual_dir = os.path.join(tmpdir, dirname)
|
||||
|
||||
try:
|
||||
@ -332,7 +332,7 @@ class MemcachedBytecodeCache(BytecodeCache):
|
||||
def __init__(
|
||||
self,
|
||||
client: "_MemcachedClient",
|
||||
prefix: str = "jinja2/bytecode/",
|
||||
prefix: str = "_vendoring.jinja2/bytecode/",
|
||||
timeout: t.Optional[int] = None,
|
||||
ignore_memcache_errors: bool = True,
|
||||
):
|
||||
|
@ -6,8 +6,8 @@
|
||||
from itertools import chain
|
||||
from keyword import iskeyword as is_python_keyword
|
||||
|
||||
from markupsafe import escape
|
||||
from markupsafe import Markup
|
||||
from _vendoring.markupsafe import escape
|
||||
from _vendoring.markupsafe import Markup
|
||||
|
||||
from . import nodes
|
||||
from .exceptions import TemplateAssertionError
|
||||
@ -23,7 +23,7 @@
|
||||
from .visitor import NodeVisitor
|
||||
|
||||
if t.TYPE_CHECKING:
|
||||
import typing_extensions as te
|
||||
import _vendoring.typing_extensions as te
|
||||
from .environment import Environment
|
||||
|
||||
F = t.TypeVar("F", bound=t.Callable[..., t.Any])
|
||||
@ -836,7 +836,7 @@ def visit_Template(
|
||||
exported_names = sorted(exported)
|
||||
|
||||
self.writeline("from __future__ import generator_stop") # Python < 3.7
|
||||
self.writeline("from jinja2.runtime import " + ", ".join(exported_names))
|
||||
self.writeline("from _vendoring.jinja2.runtime import " + ", ".join(exported_names))
|
||||
|
||||
# if we want a deferred initialization we cannot move the
|
||||
# environment into a local name
|
||||
|
@ -8,7 +8,7 @@
|
||||
from .utils import Namespace
|
||||
|
||||
if t.TYPE_CHECKING:
|
||||
import typing_extensions as te
|
||||
import _vendoring.typing_extensions as te
|
||||
|
||||
# defaults for the parser / lexer
|
||||
BLOCK_START_STRING = "{%"
|
||||
|
@ -12,7 +12,7 @@
|
||||
from functools import reduce
|
||||
from types import CodeType
|
||||
|
||||
from markupsafe import Markup
|
||||
from _vendoring.markupsafe import Markup
|
||||
|
||||
from . import nodes
|
||||
from .compiler import CodeGenerator
|
||||
@ -55,7 +55,7 @@
|
||||
from .utils import missing
|
||||
|
||||
if t.TYPE_CHECKING:
|
||||
import typing_extensions as te
|
||||
import _vendoring.typing_extensions as te
|
||||
from .bccache import BytecodeCache
|
||||
from .ext import Extension
|
||||
from .loaders import BaseLoader
|
||||
@ -126,7 +126,7 @@ def _environment_config_check(environment: "Environment") -> "Environment":
|
||||
"""Perform a sanity check on the environment."""
|
||||
assert issubclass(
|
||||
environment.undefined, Undefined
|
||||
), "'undefined' must be a subclass of 'jinja2.Undefined'."
|
||||
), "'undefined' must be a subclass of '_vendoring.jinja2.Undefined'."
|
||||
assert (
|
||||
environment.block_start_string
|
||||
!= environment.variable_start_string
|
||||
@ -221,7 +221,7 @@ class Environment:
|
||||
`autoescape`
|
||||
If set to ``True`` the XML/HTML autoescaping feature is enabled by
|
||||
default. For more details about autoescaping see
|
||||
:class:`~markupsafe.Markup`. As of Jinja 2.4 this can also
|
||||
:class:`~_vendoring.markupsafe.Markup`. As of Jinja 2.4 this can also
|
||||
be a callable that is passed the template name and has to
|
||||
return ``True`` or ``False`` depending on autoescape should be
|
||||
enabled by default.
|
||||
@ -264,7 +264,7 @@ class Environment:
|
||||
|
||||
#: if this environment is sandboxed. Modifying this variable won't make
|
||||
#: the environment sandboxed though. For a real sandboxed environment
|
||||
#: have a look at jinja2.sandbox. This flag alone controls the code
|
||||
#: have a look at _vendoring.jinja2.sandbox. This flag alone controls the code
|
||||
#: generation by the compiler.
|
||||
sandboxed = False
|
||||
|
||||
@ -279,11 +279,11 @@ class Environment:
|
||||
shared = False
|
||||
|
||||
#: the class that is used for code generation. See
|
||||
#: :class:`~jinja2.compiler.CodeGenerator` for more information.
|
||||
#: :class:`~_vendoring.jinja2.compiler.CodeGenerator` for more information.
|
||||
code_generator_class: t.Type["CodeGenerator"] = CodeGenerator
|
||||
|
||||
#: the context class that is used for templates. See
|
||||
#: :class:`~jinja2.runtime.Context` for more information.
|
||||
#: :class:`~_vendoring.jinja2.runtime.Context` for more information.
|
||||
context_class: t.Type[Context] = Context
|
||||
|
||||
template_class: t.Type["Template"]
|
||||
@ -650,7 +650,7 @@ def _tokenize(
|
||||
state: t.Optional[str] = None,
|
||||
) -> TokenStream:
|
||||
"""Called by the parser to do the preprocessing and filtering
|
||||
for all the extensions. Returns a :class:`~jinja2.lexer.TokenStream`.
|
||||
for all the extensions. Returns a :class:`~_vendoring.jinja2.lexer.TokenStream`.
|
||||
"""
|
||||
source = self.preprocess(source, name, filename)
|
||||
stream = self.lexer.tokenize(source, name, filename, state)
|
||||
@ -1547,7 +1547,7 @@ def __repr__(self) -> str:
|
||||
|
||||
|
||||
class TemplateExpression:
|
||||
"""The :meth:`jinja2.Environment.compile_expression` method returns an
|
||||
"""The :meth:`_vendoring.jinja2.Environment.compile_expression` method returns an
|
||||
instance of this object. It encapsulates the expression-like access
|
||||
to the template with an expression it wraps.
|
||||
"""
|
||||
|
18
lib/spack/external/_vendoring/jinja2/ext.py
vendored
18
lib/spack/external/_vendoring/jinja2/ext.py
vendored
@ -4,7 +4,7 @@
|
||||
import typing as t
|
||||
import warnings
|
||||
|
||||
from markupsafe import Markup
|
||||
from _vendoring.markupsafe import Markup
|
||||
|
||||
from . import defaults
|
||||
from . import nodes
|
||||
@ -18,7 +18,7 @@
|
||||
from .utils import pass_context
|
||||
|
||||
if t.TYPE_CHECKING:
|
||||
import typing_extensions as te
|
||||
import _vendoring.typing_extensions as te
|
||||
from .lexer import Token
|
||||
from .lexer import TokenStream
|
||||
from .parser import Parser
|
||||
@ -108,10 +108,10 @@ def preprocess(
|
||||
def filter_stream(
|
||||
self, stream: "TokenStream"
|
||||
) -> t.Union["TokenStream", t.Iterable["Token"]]:
|
||||
"""It's passed a :class:`~jinja2.lexer.TokenStream` that can be used
|
||||
"""It's passed a :class:`~_vendoring.jinja2.lexer.TokenStream` that can be used
|
||||
to filter tokens returned. This method has to return an iterable of
|
||||
:class:`~jinja2.lexer.Token`\\s, but it doesn't have to return a
|
||||
:class:`~jinja2.lexer.TokenStream`.
|
||||
:class:`~_vendoring.jinja2.lexer.Token`\\s, but it doesn't have to return a
|
||||
:class:`~_vendoring.jinja2.lexer.TokenStream`.
|
||||
"""
|
||||
return stream
|
||||
|
||||
@ -145,7 +145,7 @@ def call_method(
|
||||
lineno: t.Optional[int] = None,
|
||||
) -> nodes.Call:
|
||||
"""Call a method of the extension. This is a shortcut for
|
||||
:meth:`attr` + :class:`jinja2.nodes.Call`.
|
||||
:meth:`attr` + :class:`_vendoring.jinja2.nodes.Call`.
|
||||
"""
|
||||
if args is None:
|
||||
args = []
|
||||
@ -629,9 +629,9 @@ class DebugExtension(Extension):
|
||||
|
||||
.. code-block:: text
|
||||
|
||||
{'context': {'cycler': <class 'jinja2.utils.Cycler'>,
|
||||
{'context': {'cycler': <class '_vendoring.jinja2.utils.Cycler'>,
|
||||
...,
|
||||
'namespace': <class 'jinja2.utils.Namespace'>},
|
||||
'namespace': <class '_vendoring.jinja2.utils.Namespace'>},
|
||||
'filters': ['abs', 'attr', 'batch', 'capitalize', 'center', 'count', 'd',
|
||||
..., 'urlencode', 'urlize', 'wordcount', 'wordwrap', 'xmlattr'],
|
||||
'tests': ['!=', '<', '<=', '==', '>', '>=', 'callable', 'defined',
|
||||
@ -679,7 +679,7 @@ def extract_from_ast(
|
||||
|
||||
This example explains the behavior:
|
||||
|
||||
>>> from jinja2 import Environment
|
||||
>>> from _vendoring.jinja2 import Environment
|
||||
>>> env = Environment()
|
||||
>>> node = env.parse('{{ (_("foo"), _(), ngettext("foo", "bar", 42)) }}')
|
||||
>>> list(extract_from_ast(node))
|
||||
|
20
lib/spack/external/_vendoring/jinja2/filters.py
vendored
20
lib/spack/external/_vendoring/jinja2/filters.py
vendored
@ -9,9 +9,9 @@
|
||||
from itertools import chain
|
||||
from itertools import groupby
|
||||
|
||||
from markupsafe import escape
|
||||
from markupsafe import Markup
|
||||
from markupsafe import soft_str
|
||||
from _vendoring.markupsafe import escape
|
||||
from _vendoring.markupsafe import Markup
|
||||
from _vendoring.markupsafe import soft_str
|
||||
|
||||
from .async_utils import async_variant
|
||||
from .async_utils import auto_aiter
|
||||
@ -28,7 +28,7 @@
|
||||
from .utils import urlize
|
||||
|
||||
if t.TYPE_CHECKING:
|
||||
import typing_extensions as te
|
||||
import _vendoring.typing_extensions as te
|
||||
from .environment import Environment
|
||||
from .nodes import EvalContext
|
||||
from .runtime import Context
|
||||
@ -48,7 +48,7 @@ def contextfilter(f: F) -> F:
|
||||
"""Pass the context as the first argument to the decorated function.
|
||||
|
||||
.. deprecated:: 3.0
|
||||
Will be removed in Jinja 3.1. Use :func:`~jinja2.pass_context`
|
||||
Will be removed in Jinja 3.1. Use :func:`~_vendoring.jinja2.pass_context`
|
||||
instead.
|
||||
"""
|
||||
warnings.warn(
|
||||
@ -66,7 +66,7 @@ def evalcontextfilter(f: F) -> F:
|
||||
|
||||
.. deprecated:: 3.0
|
||||
Will be removed in Jinja 3.1. Use
|
||||
:func:`~jinja2.pass_eval_context` instead.
|
||||
:func:`~_vendoring.jinja2.pass_eval_context` instead.
|
||||
|
||||
.. versionadded:: 2.4
|
||||
"""
|
||||
@ -85,7 +85,7 @@ def environmentfilter(f: F) -> F:
|
||||
|
||||
.. deprecated:: 3.0
|
||||
Will be removed in Jinja 3.1. Use
|
||||
:func:`~jinja2.pass_environment` instead.
|
||||
:func:`~_vendoring.jinja2.pass_environment` instead.
|
||||
"""
|
||||
warnings.warn(
|
||||
"'environmentfilter' is renamed to 'pass_environment', the old"
|
||||
@ -547,10 +547,10 @@ def do_default(
|
||||
{{ ''|default('the string was empty', true) }}
|
||||
|
||||
.. versionchanged:: 2.11
|
||||
It's now possible to configure the :class:`~jinja2.Environment` with
|
||||
:class:`~jinja2.ChainableUndefined` to make the `default` filter work
|
||||
It's now possible to configure the :class:`~_vendoring.jinja2.Environment` with
|
||||
:class:`~_vendoring.jinja2.ChainableUndefined` to make the `default` filter work
|
||||
on nested elements and attributes that may contain undefined values
|
||||
in the chain without getting an :exc:`~jinja2.UndefinedError`.
|
||||
in the chain without getting an :exc:`~_vendoring.jinja2.UndefinedError`.
|
||||
"""
|
||||
if isinstance(value, Undefined) or (boolean and not value):
|
||||
return default_value
|
||||
|
@ -14,7 +14,7 @@
|
||||
from .utils import LRUCache
|
||||
|
||||
if t.TYPE_CHECKING:
|
||||
import typing_extensions as te
|
||||
import _vendoring.typing_extensions as te
|
||||
from .environment import Environment
|
||||
|
||||
# cache for the lexers. Exists in order to be able to have multiple
|
||||
@ -400,7 +400,7 @@ def close(self) -> None:
|
||||
|
||||
def expect(self, expr: str) -> Token:
|
||||
"""Expect a given token type and return it. This accepts the same
|
||||
argument as :meth:`jinja2.lexer.Token.test`.
|
||||
argument as :meth:`_vendoring.jinja2.lexer.Token.test`.
|
||||
"""
|
||||
if not self.current.test(expr):
|
||||
expr = describe_token_expr(expr)
|
||||
|
@ -47,7 +47,7 @@ class BaseLoader:
|
||||
A very basic example for a loader that looks up templates on the file
|
||||
system could look like this::
|
||||
|
||||
from jinja2 import BaseLoader, TemplateNotFound
|
||||
from _vendoring.jinja2 import BaseLoader, TemplateNotFound
|
||||
from os.path import join, exists, getmtime
|
||||
|
||||
class MyLoader(BaseLoader):
|
||||
@ -594,7 +594,7 @@ class ModuleLoader(BaseLoader):
|
||||
def __init__(
|
||||
self, path: t.Union[str, os.PathLike, t.Sequence[t.Union[str, os.PathLike]]]
|
||||
) -> None:
|
||||
package_name = f"_jinja2_module_templates_{id(self):x}"
|
||||
package_name = f"__vendoring.jinja2_module_templates_{id(self):x}"
|
||||
|
||||
# create a fake module that looks for the templates in the
|
||||
# path given.
|
||||
|
4
lib/spack/external/_vendoring/jinja2/meta.py
vendored
4
lib/spack/external/_vendoring/jinja2/meta.py
vendored
@ -36,7 +36,7 @@ def find_undeclared_variables(ast: nodes.Template) -> t.Set[str]:
|
||||
variables will be used depending on the path the execution takes at
|
||||
runtime, all variables are returned.
|
||||
|
||||
>>> from jinja2 import Environment, meta
|
||||
>>> from _vendoring.jinja2 import Environment, meta
|
||||
>>> env = Environment()
|
||||
>>> ast = env.parse('{% set foo = 42 %}{{ bar + foo }}')
|
||||
>>> meta.find_undeclared_variables(ast) == {'bar'}
|
||||
@ -64,7 +64,7 @@ def find_referenced_templates(ast: nodes.Template) -> t.Iterator[t.Optional[str]
|
||||
imports. If dynamic inheritance or inclusion is used, `None` will be
|
||||
yielded.
|
||||
|
||||
>>> from jinja2 import Environment, meta
|
||||
>>> from _vendoring.jinja2 import Environment, meta
|
||||
>>> env = Environment()
|
||||
>>> ast = env.parse('{% extends "layout.html" %}{% include helper %}')
|
||||
>>> list(meta.find_referenced_templates(ast))
|
||||
|
14
lib/spack/external/_vendoring/jinja2/nodes.py
vendored
14
lib/spack/external/_vendoring/jinja2/nodes.py
vendored
@ -7,12 +7,12 @@
|
||||
import typing as t
|
||||
from collections import deque
|
||||
|
||||
from markupsafe import Markup
|
||||
from _vendoring.markupsafe import Markup
|
||||
|
||||
from .utils import _PassArg
|
||||
|
||||
if t.TYPE_CHECKING:
|
||||
import typing_extensions as te
|
||||
import _vendoring.typing_extensions as te
|
||||
from .environment import Environment
|
||||
|
||||
_NodeBound = t.TypeVar("_NodeBound", bound="Node")
|
||||
@ -1041,7 +1041,7 @@ class ExtensionAttribute(Expr):
|
||||
The identifier is the identifier of the :class:`Extension`.
|
||||
|
||||
This node is usually constructed by calling the
|
||||
:meth:`~jinja2.ext.Extension.attr` method on an extension.
|
||||
:meth:`~_vendoring.jinja2.ext.Extension.attr` method on an extension.
|
||||
"""
|
||||
|
||||
fields = ("identifier", "name")
|
||||
@ -1063,7 +1063,7 @@ class ImportedName(Expr):
|
||||
class InternalName(Expr):
|
||||
"""An internal name in the compiler. You cannot create these nodes
|
||||
yourself but the parser provides a
|
||||
:meth:`~jinja2.parser.Parser.free_identifier` method that creates
|
||||
:meth:`~_vendoring.jinja2.parser.Parser.free_identifier` method that creates
|
||||
a new identifier for you. This identifier is not available from the
|
||||
template and is not treated specially by the compiler.
|
||||
"""
|
||||
@ -1114,7 +1114,7 @@ def as_const(
|
||||
class ContextReference(Expr):
|
||||
"""Returns the current template context. It can be used like a
|
||||
:class:`Name` node, with a ``'load'`` ctx and will return the
|
||||
current :class:`~jinja2.runtime.Context` object.
|
||||
current :class:`~_vendoring.jinja2.runtime.Context` object.
|
||||
|
||||
Here an example that assigns the current template name to a
|
||||
variable named `foo`::
|
||||
@ -1123,7 +1123,7 @@ class ContextReference(Expr):
|
||||
Getattr(ContextReference(), 'name'))
|
||||
|
||||
This is basically equivalent to using the
|
||||
:func:`~jinja2.pass_context` decorator when using the high-level
|
||||
:func:`~_vendoring.jinja2.pass_context` decorator when using the high-level
|
||||
API, which causes a reference to the context to be passed as the
|
||||
first argument to a function.
|
||||
"""
|
||||
@ -1188,7 +1188,7 @@ class EvalContextModifier(Stmt):
|
||||
class ScopedEvalContextModifier(EvalContextModifier):
|
||||
"""Modifies the eval context and reverts it later. Works exactly like
|
||||
:class:`EvalContextModifier` but will only modify the
|
||||
:class:`~jinja2.nodes.EvalContext` for nodes in the :attr:`body`.
|
||||
:class:`~_vendoring.jinja2.nodes.EvalContext` for nodes in the :attr:`body`.
|
||||
"""
|
||||
|
||||
fields = ("body",)
|
||||
|
@ -9,7 +9,7 @@
|
||||
from .lexer import describe_token_expr
|
||||
|
||||
if t.TYPE_CHECKING:
|
||||
import typing_extensions as te
|
||||
import _vendoring.typing_extensions as te
|
||||
from .environment import Environment
|
||||
|
||||
_ImportInclude = t.TypeVar("_ImportInclude", nodes.Import, nodes.Include)
|
||||
@ -156,7 +156,7 @@ def is_tuple_end(
|
||||
return False
|
||||
|
||||
def free_identifier(self, lineno: t.Optional[int] = None) -> nodes.InternalName:
|
||||
"""Return a new free identifier as :class:`~jinja2.nodes.InternalName`."""
|
||||
"""Return a new free identifier as :class:`~_vendoring.jinja2.nodes.InternalName`."""
|
||||
self._last_identifier += 1
|
||||
rv = object.__new__(nodes.InternalName)
|
||||
nodes.Node.__init__(rv, f"fi{self._last_identifier}", lineno=lineno)
|
||||
@ -687,7 +687,7 @@ def parse_tuple(
|
||||
explicit_parentheses: bool = False,
|
||||
) -> t.Union[nodes.Tuple, nodes.Expr]:
|
||||
"""Works like `parse_expression` but if multiple expressions are
|
||||
delimited by a comma a :class:`~jinja2.nodes.Tuple` node is created.
|
||||
delimited by a comma a :class:`~_vendoring.jinja2.nodes.Tuple` node is created.
|
||||
This method could also return a regular expression instead of a tuple
|
||||
if no commas where found.
|
||||
|
||||
|
20
lib/spack/external/_vendoring/jinja2/runtime.py
vendored
20
lib/spack/external/_vendoring/jinja2/runtime.py
vendored
@ -5,9 +5,9 @@
|
||||
from collections import abc
|
||||
from itertools import chain
|
||||
|
||||
from markupsafe import escape # noqa: F401
|
||||
from markupsafe import Markup
|
||||
from markupsafe import soft_str
|
||||
from _vendoring.markupsafe import escape # noqa: F401
|
||||
from _vendoring.markupsafe import Markup
|
||||
from _vendoring.markupsafe import soft_str
|
||||
|
||||
from .async_utils import auto_aiter
|
||||
from .async_utils import auto_await # noqa: F401
|
||||
@ -28,7 +28,7 @@
|
||||
|
||||
if t.TYPE_CHECKING:
|
||||
import logging
|
||||
import typing_extensions as te
|
||||
import _vendoring.typing_extensions as te
|
||||
from .environment import Environment
|
||||
|
||||
class LoopRenderFunc(te.Protocol):
|
||||
@ -849,7 +849,7 @@ class Undefined:
|
||||
>>> foo + 42
|
||||
Traceback (most recent call last):
|
||||
...
|
||||
jinja2.exceptions.UndefinedError: 'foo' is undefined
|
||||
_vendoring.jinja2.exceptions.UndefinedError: 'foo' is undefined
|
||||
"""
|
||||
|
||||
__slots__ = (
|
||||
@ -1020,7 +1020,7 @@ class ChainableUndefined(Undefined):
|
||||
>>> foo.bar['baz'] + 42
|
||||
Traceback (most recent call last):
|
||||
...
|
||||
jinja2.exceptions.UndefinedError: 'foo' is undefined
|
||||
_vendoring.jinja2.exceptions.UndefinedError: 'foo' is undefined
|
||||
|
||||
.. versionadded:: 2.11.0
|
||||
"""
|
||||
@ -1047,7 +1047,7 @@ class DebugUndefined(Undefined):
|
||||
>>> foo + 42
|
||||
Traceback (most recent call last):
|
||||
...
|
||||
jinja2.exceptions.UndefinedError: 'foo' is undefined
|
||||
_vendoring.jinja2.exceptions.UndefinedError: 'foo' is undefined
|
||||
"""
|
||||
|
||||
__slots__ = ()
|
||||
@ -1077,15 +1077,15 @@ class StrictUndefined(Undefined):
|
||||
>>> str(foo)
|
||||
Traceback (most recent call last):
|
||||
...
|
||||
jinja2.exceptions.UndefinedError: 'foo' is undefined
|
||||
_vendoring.jinja2.exceptions.UndefinedError: 'foo' is undefined
|
||||
>>> not foo
|
||||
Traceback (most recent call last):
|
||||
...
|
||||
jinja2.exceptions.UndefinedError: 'foo' is undefined
|
||||
_vendoring.jinja2.exceptions.UndefinedError: 'foo' is undefined
|
||||
>>> foo + 42
|
||||
Traceback (most recent call last):
|
||||
...
|
||||
jinja2.exceptions.UndefinedError: 'foo' is undefined
|
||||
_vendoring.jinja2.exceptions.UndefinedError: 'foo' is undefined
|
||||
"""
|
||||
|
||||
__slots__ = ()
|
||||
|
@ -9,8 +9,8 @@
|
||||
from collections import deque
|
||||
from string import Formatter
|
||||
|
||||
from markupsafe import EscapeFormatter
|
||||
from markupsafe import Markup
|
||||
from _vendoring.markupsafe import EscapeFormatter
|
||||
from _vendoring.markupsafe import Markup
|
||||
|
||||
from .environment import Environment
|
||||
from .exceptions import SecurityError
|
||||
@ -128,7 +128,7 @@ def is_internal_attribute(obj: t.Any, attr: str) -> bool:
|
||||
python objects. This is useful if the environment method
|
||||
:meth:`~SandboxedEnvironment.is_safe_attribute` is overridden.
|
||||
|
||||
>>> from jinja2.sandbox import is_internal_attribute
|
||||
>>> from _vendoring.jinja2.sandbox import is_internal_attribute
|
||||
>>> is_internal_attribute(str, "mro")
|
||||
True
|
||||
>>> is_internal_attribute(str, "upper")
|
||||
|
48
lib/spack/external/_vendoring/jinja2/utils.py
vendored
48
lib/spack/external/_vendoring/jinja2/utils.py
vendored
@ -12,10 +12,10 @@
|
||||
from types import CodeType
|
||||
from urllib.parse import quote_from_bytes
|
||||
|
||||
import markupsafe
|
||||
import _vendoring.markupsafe
|
||||
|
||||
if t.TYPE_CHECKING:
|
||||
import typing_extensions as te
|
||||
import _vendoring.typing_extensions as te
|
||||
|
||||
F = t.TypeVar("F", bound=t.Callable[..., t.Any])
|
||||
|
||||
@ -28,7 +28,7 @@
|
||||
|
||||
|
||||
def pass_context(f: F) -> F:
|
||||
"""Pass the :class:`~jinja2.runtime.Context` as the first argument
|
||||
"""Pass the :class:`~_vendoring.jinja2.runtime.Context` as the first argument
|
||||
to the decorated function when called while rendering a template.
|
||||
|
||||
Can be used on functions, filters, and tests.
|
||||
@ -45,7 +45,7 @@ def pass_context(f: F) -> F:
|
||||
|
||||
|
||||
def pass_eval_context(f: F) -> F:
|
||||
"""Pass the :class:`~jinja2.nodes.EvalContext` as the first argument
|
||||
"""Pass the :class:`~_vendoring.jinja2.nodes.EvalContext` as the first argument
|
||||
to the decorated function when called while rendering a template.
|
||||
See :ref:`eval-context`.
|
||||
|
||||
@ -62,7 +62,7 @@ def pass_eval_context(f: F) -> F:
|
||||
|
||||
|
||||
def pass_environment(f: F) -> F:
|
||||
"""Pass the :class:`~jinja2.Environment` as the first argument to
|
||||
"""Pass the :class:`~_vendoring.jinja2.Environment` as the first argument to
|
||||
the decorated function when called while rendering a template.
|
||||
|
||||
Can be used on functions, filters, and tests.
|
||||
@ -104,7 +104,7 @@ def contextfunction(f: F) -> F:
|
||||
"""Pass the context as the first argument to the decorated function.
|
||||
|
||||
.. deprecated:: 3.0
|
||||
Will be removed in Jinja 3.1. Use :func:`~jinja2.pass_context`
|
||||
Will be removed in Jinja 3.1. Use :func:`~_vendoring.jinja2.pass_context`
|
||||
instead.
|
||||
"""
|
||||
warnings.warn(
|
||||
@ -122,7 +122,7 @@ def evalcontextfunction(f: F) -> F:
|
||||
|
||||
.. deprecated:: 3.0
|
||||
Will be removed in Jinja 3.1. Use
|
||||
:func:`~jinja2.pass_eval_context` instead.
|
||||
:func:`~_vendoring.jinja2.pass_eval_context` instead.
|
||||
|
||||
.. versionadded:: 2.4
|
||||
"""
|
||||
@ -141,7 +141,7 @@ def environmentfunction(f: F) -> F:
|
||||
|
||||
.. deprecated:: 3.0
|
||||
Will be removed in Jinja 3.1. Use
|
||||
:func:`~jinja2.pass_environment` instead.
|
||||
:func:`~_vendoring.jinja2.pass_environment` instead.
|
||||
"""
|
||||
warnings.warn(
|
||||
"'environmentfunction' is renamed to 'pass_environment', the"
|
||||
@ -335,9 +335,9 @@ def trim_url(x: str) -> str:
|
||||
def trim_url(x: str) -> str:
|
||||
return x
|
||||
|
||||
words = re.split(r"(\s+)", str(markupsafe.escape(text)))
|
||||
rel_attr = f' rel="{markupsafe.escape(rel)}"' if rel else ""
|
||||
target_attr = f' target="{markupsafe.escape(target)}"' if target else ""
|
||||
words = re.split(r"(\s+)", str(_vendoring.markupsafe.escape(text)))
|
||||
rel_attr = f' rel="{_vendoring.markupsafe.escape(rel)}"' if rel else ""
|
||||
target_attr = f' target="{_vendoring.markupsafe.escape(target)}"' if target else ""
|
||||
|
||||
for i, word in enumerate(words):
|
||||
head, middle, tail = "", word, ""
|
||||
@ -455,8 +455,8 @@ def generate_lorem_ipsum(
|
||||
|
||||
if not html:
|
||||
return "\n\n".join(result)
|
||||
return markupsafe.Markup(
|
||||
"\n".join(f"<p>{markupsafe.escape(x)}</p>" for x in result)
|
||||
return _vendoring.markupsafe.Markup(
|
||||
"\n".join(f"<p>{_vendoring.markupsafe.escape(x)}</p>" for x in result)
|
||||
)
|
||||
|
||||
|
||||
@ -658,7 +658,7 @@ def select_autoescape(
|
||||
If you want to enable it for all templates created from strings or
|
||||
for all templates with `.html` and `.xml` extensions::
|
||||
|
||||
from jinja2 import Environment, select_autoescape
|
||||
from _vendoring.jinja2 import Environment, select_autoescape
|
||||
env = Environment(autoescape=select_autoescape(
|
||||
enabled_extensions=('html', 'xml'),
|
||||
default_for_string=True,
|
||||
@ -667,7 +667,7 @@ def select_autoescape(
|
||||
Example configuration to turn it on at all times except if the template
|
||||
ends with `.txt`::
|
||||
|
||||
from jinja2 import Environment, select_autoescape
|
||||
from _vendoring.jinja2 import Environment, select_autoescape
|
||||
env = Environment(autoescape=select_autoescape(
|
||||
disabled_extensions=('txt',),
|
||||
default_for_string=True,
|
||||
@ -703,10 +703,10 @@ def autoescape(template_name: t.Optional[str]) -> bool:
|
||||
|
||||
def htmlsafe_json_dumps(
|
||||
obj: t.Any, dumps: t.Optional[t.Callable[..., str]] = None, **kwargs: t.Any
|
||||
) -> markupsafe.Markup:
|
||||
) -> _vendoring.markupsafe.Markup:
|
||||
"""Serialize an object to a string of JSON with :func:`json.dumps`,
|
||||
then replace HTML-unsafe characters with Unicode escapes and mark
|
||||
the result safe with :class:`~markupsafe.Markup`.
|
||||
the result safe with :class:`~_vendoring.markupsafe.Markup`.
|
||||
|
||||
This is available in templates as the ``|tojson`` filter.
|
||||
|
||||
@ -732,7 +732,7 @@ def htmlsafe_json_dumps(
|
||||
if dumps is None:
|
||||
dumps = json.dumps
|
||||
|
||||
return markupsafe.Markup(
|
||||
return _vendoring.markupsafe.Markup(
|
||||
dumps(obj, **kwargs)
|
||||
.replace("<", "\\u003c")
|
||||
.replace(">", "\\u003e")
|
||||
@ -833,11 +833,11 @@ def __repr__(self) -> str:
|
||||
return f"<Namespace {self.__attrs!r}>"
|
||||
|
||||
|
||||
class Markup(markupsafe.Markup):
|
||||
class Markup(_vendoring.markupsafe.Markup):
|
||||
def __new__(cls, base="", encoding=None, errors="strict"): # type: ignore
|
||||
warnings.warn(
|
||||
"'jinja2.Markup' is deprecated and will be removed in Jinja"
|
||||
" 3.1. Import 'markupsafe.Markup' instead.",
|
||||
"'_vendoring.jinja2.Markup' is deprecated and will be removed in Jinja"
|
||||
" 3.1. Import '_vendoring.markupsafe.Markup' instead.",
|
||||
DeprecationWarning,
|
||||
stacklevel=2,
|
||||
)
|
||||
@ -846,9 +846,9 @@ def __new__(cls, base="", encoding=None, errors="strict"): # type: ignore
|
||||
|
||||
def escape(s: t.Any) -> str:
|
||||
warnings.warn(
|
||||
"'jinja2.escape' is deprecated and will be removed in Jinja"
|
||||
" 3.1. Import 'markupsafe.escape' instead.",
|
||||
"'_vendoring.jinja2.escape' is deprecated and will be removed in Jinja"
|
||||
" 3.1. Import '_vendoring.markupsafe.escape' instead.",
|
||||
DeprecationWarning,
|
||||
stacklevel=2,
|
||||
)
|
||||
return markupsafe.escape(s)
|
||||
return _vendoring.markupsafe.escape(s)
|
||||
|
@ -6,7 +6,7 @@
|
||||
from .nodes import Node
|
||||
|
||||
if t.TYPE_CHECKING:
|
||||
import typing_extensions as te
|
||||
import _vendoring.typing_extensions as te
|
||||
|
||||
class VisitCallable(te.Protocol):
|
||||
def __call__(self, node: Node, *args: t.Any, **kwargs: t.Any) -> t.Any:
|
||||
|
@ -8,18 +8,18 @@
|
||||
instance under a schema, and will create a validator for you.
|
||||
"""
|
||||
|
||||
from jsonschema.exceptions import (
|
||||
from _vendoring.jsonschema.exceptions import (
|
||||
ErrorTree, FormatError, RefResolutionError, SchemaError, ValidationError
|
||||
)
|
||||
from jsonschema._format import (
|
||||
from _vendoring.jsonschema._format import (
|
||||
FormatChecker,
|
||||
draft3_format_checker,
|
||||
draft4_format_checker,
|
||||
draft6_format_checker,
|
||||
draft7_format_checker,
|
||||
)
|
||||
from jsonschema._types import TypeChecker
|
||||
from jsonschema.validators import (
|
||||
from _vendoring.jsonschema._types import TypeChecker
|
||||
from _vendoring.jsonschema.validators import (
|
||||
Draft3Validator,
|
||||
Draft4Validator,
|
||||
Draft6Validator,
|
||||
|
@ -1,2 +1,2 @@
|
||||
from jsonschema.cli import main
|
||||
from _vendoring.jsonschema.cli import main
|
||||
main()
|
||||
|
@ -3,8 +3,8 @@
|
||||
import socket
|
||||
import struct
|
||||
|
||||
from jsonschema.compat import str_types
|
||||
from jsonschema.exceptions import FormatError
|
||||
from _vendoring.jsonschema.compat import str_types
|
||||
from _vendoring.jsonschema.exceptions import FormatError
|
||||
|
||||
|
||||
class FormatChecker(object):
|
||||
|
@ -1,6 +1,6 @@
|
||||
from jsonschema import _utils
|
||||
from jsonschema.compat import iteritems
|
||||
from jsonschema.exceptions import ValidationError
|
||||
from _vendoring.jsonschema import _utils
|
||||
from _vendoring.jsonschema.compat import iteritems
|
||||
from _vendoring.jsonschema.exceptions import ValidationError
|
||||
|
||||
|
||||
def dependencies_draft3(validator, dependencies, instance, schema):
|
||||
|
@ -9,7 +9,7 @@
|
||||
|
||||
import sys
|
||||
|
||||
from jsonschema.compat import PY3
|
||||
from _vendoring.jsonschema.compat import PY3
|
||||
|
||||
|
||||
class _NoModuleFound(Exception):
|
||||
|
@ -1,10 +1,10 @@
|
||||
import numbers
|
||||
|
||||
from pyrsistent import pmap
|
||||
import attr
|
||||
from _vendoring.pyrsistent import pmap
|
||||
import _vendoring.attr
|
||||
|
||||
from jsonschema.compat import int_types, str_types
|
||||
from jsonschema.exceptions import UndefinedTypeCheck
|
||||
from _vendoring.jsonschema.compat import int_types, str_types
|
||||
from _vendoring.jsonschema.exceptions import UndefinedTypeCheck
|
||||
|
||||
|
||||
def is_array(checker, instance):
|
||||
@ -45,7 +45,7 @@ def is_any(checker, instance):
|
||||
return True
|
||||
|
||||
|
||||
@attr.s(frozen=True)
|
||||
@_vendoring.attr.s(frozen=True)
|
||||
class TypeChecker(object):
|
||||
"""
|
||||
A ``type`` property checker.
|
||||
@ -61,7 +61,7 @@ class TypeChecker(object):
|
||||
|
||||
The initial mapping of types to their checking functions.
|
||||
"""
|
||||
_type_checkers = attr.ib(default=pmap(), converter=pmap)
|
||||
_type_checkers = _vendoring.attr.ib(default=pmap(), converter=pmap)
|
||||
|
||||
def is_type(self, instance, type):
|
||||
"""
|
||||
@ -131,7 +131,7 @@ def redefine_many(self, definitions=()):
|
||||
|
||||
A new `TypeChecker` instance.
|
||||
"""
|
||||
return attr.evolve(
|
||||
return _vendoring.attr.evolve(
|
||||
self, type_checkers=self._type_checkers.update(definitions),
|
||||
)
|
||||
|
||||
@ -162,7 +162,7 @@ def remove(self, *types):
|
||||
checkers = checkers.remove(each)
|
||||
except KeyError:
|
||||
raise UndefinedTypeCheck(each)
|
||||
return attr.evolve(self, type_checkers=checkers)
|
||||
return _vendoring.attr.evolve(self, type_checkers=checkers)
|
||||
|
||||
|
||||
draft3_type_checker = TypeChecker(
|
||||
|
@ -3,7 +3,7 @@
|
||||
import pkgutil
|
||||
import re
|
||||
|
||||
from jsonschema.compat import MutableMapping, str_types, urlsplit
|
||||
from _vendoring.jsonschema.compat import MutableMapping, str_types, urlsplit
|
||||
|
||||
|
||||
class URIDict(MutableMapping):
|
||||
@ -51,7 +51,7 @@ def load_schema(name):
|
||||
Load a schema from ./schemas/``name``.json and return it.
|
||||
"""
|
||||
|
||||
data = pkgutil.get_data("jsonschema", "schemas/{0}.json".format(name))
|
||||
data = pkgutil.get_data("_vendoring.jsonschema", "schemas/{0}.json".format(name))
|
||||
return json.loads(data.decode("utf-8"))
|
||||
|
||||
|
||||
|
@ -1,6 +1,6 @@
|
||||
import re
|
||||
|
||||
from jsonschema._utils import (
|
||||
from _vendoring.jsonschema._utils import (
|
||||
ensure_list,
|
||||
equal,
|
||||
extras_msg,
|
||||
@ -9,8 +9,8 @@
|
||||
unbool,
|
||||
uniq,
|
||||
)
|
||||
from jsonschema.exceptions import FormatError, ValidationError
|
||||
from jsonschema.compat import iteritems
|
||||
from _vendoring.jsonschema.exceptions import FormatError, ValidationError
|
||||
from _vendoring.jsonschema.compat import iteritems
|
||||
|
||||
|
||||
def patternProperties(validator, patternProperties, instance, schema):
|
||||
|
@ -6,10 +6,10 @@
|
||||
"""
|
||||
from twisted.python.filepath import FilePath
|
||||
from pyperf import Runner
|
||||
from pyrsistent import m
|
||||
from _vendoring.pyrsistent import m
|
||||
|
||||
from jsonschema.tests._suite import Version
|
||||
import jsonschema
|
||||
from _vendoring.jsonschema.tests._suite import Version
|
||||
import _vendoring.jsonschema
|
||||
|
||||
|
||||
issue232 = Version(
|
||||
|
@ -7,7 +7,7 @@
|
||||
"""
|
||||
from pyperf import Runner
|
||||
|
||||
from jsonschema.tests._suite import Suite
|
||||
from _vendoring.jsonschema.tests._suite import Suite
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
|
@ -6,9 +6,9 @@
|
||||
import json
|
||||
import sys
|
||||
|
||||
from jsonschema import __version__
|
||||
from jsonschema._reflect import namedAny
|
||||
from jsonschema.validators import validator_for
|
||||
from _vendoring.jsonschema import __version__
|
||||
from _vendoring.jsonschema._reflect import namedAny
|
||||
from _vendoring.jsonschema.validators import validator_for
|
||||
|
||||
|
||||
def _namedAnyWithDefault(name):
|
||||
|
@ -6,10 +6,10 @@
|
||||
import pprint
|
||||
import textwrap
|
||||
|
||||
import attr
|
||||
import _vendoring.attr
|
||||
|
||||
from jsonschema import _utils
|
||||
from jsonschema.compat import PY3, iteritems
|
||||
from _vendoring.jsonschema import _utils
|
||||
from _vendoring.jsonschema.compat import PY3, iteritems
|
||||
|
||||
|
||||
WEAK_MATCHES = frozenset(["anyOf", "oneOf"])
|
||||
@ -149,13 +149,13 @@ class SchemaError(_Error):
|
||||
_word_for_instance_in_error_message = "schema"
|
||||
|
||||
|
||||
@attr.s(hash=True)
|
||||
@_vendoring.attr.s(hash=True)
|
||||
class RefResolutionError(Exception):
|
||||
"""
|
||||
A ref could not be resolved.
|
||||
"""
|
||||
|
||||
_cause = attr.ib()
|
||||
_cause = _vendoring.attr.ib()
|
||||
|
||||
def __str__(self):
|
||||
return str(self._cause)
|
||||
|
@ -1,5 +0,0 @@
|
||||
def bug(issue=None):
|
||||
message = "A known bug."
|
||||
if issue is not None:
|
||||
message += " See issue #{issue}.".format(issue=issue)
|
||||
return message
|
@ -1,239 +0,0 @@
|
||||
"""
|
||||
Python representations of the JSON Schema Test Suite tests.
|
||||
"""
|
||||
|
||||
from functools import partial
|
||||
import json
|
||||
import os
|
||||
import re
|
||||
import subprocess
|
||||
import sys
|
||||
import unittest
|
||||
|
||||
from twisted.python.filepath import FilePath
|
||||
import attr
|
||||
|
||||
from jsonschema.compat import PY3
|
||||
from jsonschema.validators import validators
|
||||
import jsonschema
|
||||
|
||||
|
||||
def _find_suite():
|
||||
root = os.environ.get("JSON_SCHEMA_TEST_SUITE")
|
||||
if root is not None:
|
||||
return FilePath(root)
|
||||
|
||||
root = FilePath(jsonschema.__file__).parent().sibling("json")
|
||||
if not root.isdir(): # pragma: no cover
|
||||
raise ValueError(
|
||||
(
|
||||
"Can't find the JSON-Schema-Test-Suite directory. "
|
||||
"Set the 'JSON_SCHEMA_TEST_SUITE' environment "
|
||||
"variable or run the tests from alongside a checkout "
|
||||
"of the suite."
|
||||
),
|
||||
)
|
||||
return root
|
||||
|
||||
|
||||
@attr.s(hash=True)
|
||||
class Suite(object):
|
||||
|
||||
_root = attr.ib(default=attr.Factory(_find_suite))
|
||||
|
||||
def _remotes(self):
|
||||
jsonschema_suite = self._root.descendant(["bin", "jsonschema_suite"])
|
||||
remotes = subprocess.check_output(
|
||||
[sys.executable, jsonschema_suite.path, "remotes"],
|
||||
)
|
||||
return {
|
||||
"http://localhost:1234/" + name: schema
|
||||
for name, schema in json.loads(remotes.decode("utf-8")).items()
|
||||
}
|
||||
|
||||
def benchmark(self, runner): # pragma: no cover
|
||||
for name in validators:
|
||||
self.version(name=name).benchmark(runner=runner)
|
||||
|
||||
def version(self, name):
|
||||
return Version(
|
||||
name=name,
|
||||
path=self._root.descendant(["tests", name]),
|
||||
remotes=self._remotes(),
|
||||
)
|
||||
|
||||
|
||||
@attr.s(hash=True)
|
||||
class Version(object):
|
||||
|
||||
_path = attr.ib()
|
||||
_remotes = attr.ib()
|
||||
|
||||
name = attr.ib()
|
||||
|
||||
def benchmark(self, runner, **kwargs): # pragma: no cover
|
||||
for suite in self.tests():
|
||||
for test in suite:
|
||||
runner.bench_func(
|
||||
test.fully_qualified_name,
|
||||
partial(test.validate_ignoring_errors, **kwargs),
|
||||
)
|
||||
|
||||
def tests(self):
|
||||
return (
|
||||
test
|
||||
for child in self._path.globChildren("*.json")
|
||||
for test in self._tests_in(
|
||||
subject=child.basename()[:-5],
|
||||
path=child,
|
||||
)
|
||||
)
|
||||
|
||||
def format_tests(self):
|
||||
path = self._path.descendant(["optional", "format"])
|
||||
return (
|
||||
test
|
||||
for child in path.globChildren("*.json")
|
||||
for test in self._tests_in(
|
||||
subject=child.basename()[:-5],
|
||||
path=child,
|
||||
)
|
||||
)
|
||||
|
||||
def tests_of(self, name):
|
||||
return self._tests_in(
|
||||
subject=name,
|
||||
path=self._path.child(name + ".json"),
|
||||
)
|
||||
|
||||
def optional_tests_of(self, name):
|
||||
return self._tests_in(
|
||||
subject=name,
|
||||
path=self._path.descendant(["optional", name + ".json"]),
|
||||
)
|
||||
|
||||
def to_unittest_testcase(self, *suites, **kwargs):
|
||||
name = kwargs.pop("name", "Test" + self.name.title())
|
||||
methods = {
|
||||
test.method_name: test.to_unittest_method(**kwargs)
|
||||
for suite in suites
|
||||
for tests in suite
|
||||
for test in tests
|
||||
}
|
||||
cls = type(name, (unittest.TestCase,), methods)
|
||||
|
||||
try:
|
||||
cls.__module__ = _someone_save_us_the_module_of_the_caller()
|
||||
except Exception: # pragma: no cover
|
||||
# We're doing crazy things, so if they go wrong, like a function
|
||||
# behaving differently on some other interpreter, just make them
|
||||
# not happen.
|
||||
pass
|
||||
|
||||
return cls
|
||||
|
||||
def _tests_in(self, subject, path):
|
||||
for each in json.loads(path.getContent().decode("utf-8")):
|
||||
yield (
|
||||
_Test(
|
||||
version=self,
|
||||
subject=subject,
|
||||
case_description=each["description"],
|
||||
schema=each["schema"],
|
||||
remotes=self._remotes,
|
||||
**test
|
||||
) for test in each["tests"]
|
||||
)
|
||||
|
||||
|
||||
@attr.s(hash=True, repr=False)
|
||||
class _Test(object):
|
||||
|
||||
version = attr.ib()
|
||||
|
||||
subject = attr.ib()
|
||||
case_description = attr.ib()
|
||||
description = attr.ib()
|
||||
|
||||
data = attr.ib()
|
||||
schema = attr.ib(repr=False)
|
||||
|
||||
valid = attr.ib()
|
||||
|
||||
_remotes = attr.ib()
|
||||
|
||||
def __repr__(self): # pragma: no cover
|
||||
return "<Test {}>".format(self.fully_qualified_name)
|
||||
|
||||
@property
|
||||
def fully_qualified_name(self): # pragma: no cover
|
||||
return " > ".join(
|
||||
[
|
||||
self.version.name,
|
||||
self.subject,
|
||||
self.case_description,
|
||||
self.description,
|
||||
]
|
||||
)
|
||||
|
||||
@property
|
||||
def method_name(self):
|
||||
delimiters = r"[\W\- ]+"
|
||||
name = "test_%s_%s_%s" % (
|
||||
re.sub(delimiters, "_", self.subject),
|
||||
re.sub(delimiters, "_", self.case_description),
|
||||
re.sub(delimiters, "_", self.description),
|
||||
)
|
||||
|
||||
if not PY3: # pragma: no cover
|
||||
name = name.encode("utf-8")
|
||||
return name
|
||||
|
||||
def to_unittest_method(self, skip=lambda test: None, **kwargs):
|
||||
if self.valid:
|
||||
def fn(this):
|
||||
self.validate(**kwargs)
|
||||
else:
|
||||
def fn(this):
|
||||
with this.assertRaises(jsonschema.ValidationError):
|
||||
self.validate(**kwargs)
|
||||
|
||||
fn.__name__ = self.method_name
|
||||
reason = skip(self)
|
||||
return unittest.skipIf(reason is not None, reason)(fn)
|
||||
|
||||
def validate(self, Validator, **kwargs):
|
||||
resolver = jsonschema.RefResolver.from_schema(
|
||||
schema=self.schema,
|
||||
store=self._remotes,
|
||||
id_of=Validator.ID_OF,
|
||||
)
|
||||
jsonschema.validate(
|
||||
instance=self.data,
|
||||
schema=self.schema,
|
||||
cls=Validator,
|
||||
resolver=resolver,
|
||||
**kwargs
|
||||
)
|
||||
|
||||
def validate_ignoring_errors(self, Validator): # pragma: no cover
|
||||
try:
|
||||
self.validate(Validator=Validator)
|
||||
except jsonschema.ValidationError:
|
||||
pass
|
||||
|
||||
|
||||
def _someone_save_us_the_module_of_the_caller():
|
||||
"""
|
||||
The FQON of the module 2nd stack frames up from here.
|
||||
|
||||
This is intended to allow us to dynamicallly return test case classes that
|
||||
are indistinguishable from being defined in the module that wants them.
|
||||
|
||||
Otherwise, trial will mis-print the FQON, and copy pasting it won't re-run
|
||||
the class that really is running.
|
||||
|
||||
Save us all, this is all so so so so so terrible.
|
||||
"""
|
||||
|
||||
return sys._getframe(2).f_globals["__name__"]
|
@ -1,151 +0,0 @@
|
||||
from unittest import TestCase
|
||||
import json
|
||||
import subprocess
|
||||
import sys
|
||||
|
||||
from jsonschema import Draft4Validator, ValidationError, cli, __version__
|
||||
from jsonschema.compat import NativeIO
|
||||
from jsonschema.exceptions import SchemaError
|
||||
|
||||
|
||||
def fake_validator(*errors):
|
||||
errors = list(reversed(errors))
|
||||
|
||||
class FakeValidator(object):
|
||||
def __init__(self, *args, **kwargs):
|
||||
pass
|
||||
|
||||
def iter_errors(self, instance):
|
||||
if errors:
|
||||
return errors.pop()
|
||||
return []
|
||||
|
||||
def check_schema(self, schema):
|
||||
pass
|
||||
|
||||
return FakeValidator
|
||||
|
||||
|
||||
class TestParser(TestCase):
|
||||
|
||||
FakeValidator = fake_validator()
|
||||
instance_file = "foo.json"
|
||||
schema_file = "schema.json"
|
||||
|
||||
def setUp(self):
|
||||
cli.open = self.fake_open
|
||||
self.addCleanup(delattr, cli, "open")
|
||||
|
||||
def fake_open(self, path):
|
||||
if path == self.instance_file:
|
||||
contents = ""
|
||||
elif path == self.schema_file:
|
||||
contents = {}
|
||||
else: # pragma: no cover
|
||||
self.fail("What is {!r}".format(path))
|
||||
return NativeIO(json.dumps(contents))
|
||||
|
||||
def test_find_validator_by_fully_qualified_object_name(self):
|
||||
arguments = cli.parse_args(
|
||||
[
|
||||
"--validator",
|
||||
"jsonschema.tests.test_cli.TestParser.FakeValidator",
|
||||
"--instance", self.instance_file,
|
||||
self.schema_file,
|
||||
]
|
||||
)
|
||||
self.assertIs(arguments["validator"], self.FakeValidator)
|
||||
|
||||
def test_find_validator_in_jsonschema(self):
|
||||
arguments = cli.parse_args(
|
||||
[
|
||||
"--validator", "Draft4Validator",
|
||||
"--instance", self.instance_file,
|
||||
self.schema_file,
|
||||
]
|
||||
)
|
||||
self.assertIs(arguments["validator"], Draft4Validator)
|
||||
|
||||
|
||||
class TestCLI(TestCase):
|
||||
def test_draft3_schema_draft4_validator(self):
|
||||
stdout, stderr = NativeIO(), NativeIO()
|
||||
with self.assertRaises(SchemaError):
|
||||
cli.run(
|
||||
{
|
||||
"validator": Draft4Validator,
|
||||
"schema": {
|
||||
"anyOf": [
|
||||
{"minimum": 20},
|
||||
{"type": "string"},
|
||||
{"required": True},
|
||||
],
|
||||
},
|
||||
"instances": [1],
|
||||
"error_format": "{error.message}",
|
||||
},
|
||||
stdout=stdout,
|
||||
stderr=stderr,
|
||||
)
|
||||
|
||||
def test_successful_validation(self):
|
||||
stdout, stderr = NativeIO(), NativeIO()
|
||||
exit_code = cli.run(
|
||||
{
|
||||
"validator": fake_validator(),
|
||||
"schema": {},
|
||||
"instances": [1],
|
||||
"error_format": "{error.message}",
|
||||
},
|
||||
stdout=stdout,
|
||||
stderr=stderr,
|
||||
)
|
||||
self.assertFalse(stdout.getvalue())
|
||||
self.assertFalse(stderr.getvalue())
|
||||
self.assertEqual(exit_code, 0)
|
||||
|
||||
def test_unsuccessful_validation(self):
|
||||
error = ValidationError("I am an error!", instance=1)
|
||||
stdout, stderr = NativeIO(), NativeIO()
|
||||
exit_code = cli.run(
|
||||
{
|
||||
"validator": fake_validator([error]),
|
||||
"schema": {},
|
||||
"instances": [1],
|
||||
"error_format": "{error.instance} - {error.message}",
|
||||
},
|
||||
stdout=stdout,
|
||||
stderr=stderr,
|
||||
)
|
||||
self.assertFalse(stdout.getvalue())
|
||||
self.assertEqual(stderr.getvalue(), "1 - I am an error!")
|
||||
self.assertEqual(exit_code, 1)
|
||||
|
||||
def test_unsuccessful_validation_multiple_instances(self):
|
||||
first_errors = [
|
||||
ValidationError("9", instance=1),
|
||||
ValidationError("8", instance=1),
|
||||
]
|
||||
second_errors = [ValidationError("7", instance=2)]
|
||||
stdout, stderr = NativeIO(), NativeIO()
|
||||
exit_code = cli.run(
|
||||
{
|
||||
"validator": fake_validator(first_errors, second_errors),
|
||||
"schema": {},
|
||||
"instances": [1, 2],
|
||||
"error_format": "{error.instance} - {error.message}\t",
|
||||
},
|
||||
stdout=stdout,
|
||||
stderr=stderr,
|
||||
)
|
||||
self.assertFalse(stdout.getvalue())
|
||||
self.assertEqual(stderr.getvalue(), "1 - 9\t1 - 8\t2 - 7\t")
|
||||
self.assertEqual(exit_code, 1)
|
||||
|
||||
def test_version(self):
|
||||
version = subprocess.check_output(
|
||||
[sys.executable, "-m", "jsonschema", "--version"],
|
||||
stderr=subprocess.STDOUT,
|
||||
)
|
||||
version = version.decode("utf-8").strip()
|
||||
self.assertEqual(version, __version__)
|
@ -1,462 +0,0 @@
|
||||
from unittest import TestCase
|
||||
import textwrap
|
||||
|
||||
from jsonschema import Draft4Validator, exceptions
|
||||
from jsonschema.compat import PY3
|
||||
|
||||
|
||||
class TestBestMatch(TestCase):
|
||||
def best_match(self, errors):
|
||||
errors = list(errors)
|
||||
best = exceptions.best_match(errors)
|
||||
reversed_best = exceptions.best_match(reversed(errors))
|
||||
msg = "Didn't return a consistent best match!\nGot: {0}\n\nThen: {1}"
|
||||
self.assertEqual(
|
||||
best._contents(), reversed_best._contents(),
|
||||
msg=msg.format(best, reversed_best),
|
||||
)
|
||||
return best
|
||||
|
||||
def test_shallower_errors_are_better_matches(self):
|
||||
validator = Draft4Validator(
|
||||
{
|
||||
"properties": {
|
||||
"foo": {
|
||||
"minProperties": 2,
|
||||
"properties": {"bar": {"type": "object"}},
|
||||
},
|
||||
},
|
||||
},
|
||||
)
|
||||
best = self.best_match(validator.iter_errors({"foo": {"bar": []}}))
|
||||
self.assertEqual(best.validator, "minProperties")
|
||||
|
||||
    def test_oneOf_and_anyOf_are_weak_matches(self):
        """
        A property you *must* match is probably better than one you have to
        match a part of.
        """

        validator = Draft4Validator(
            {
                "minProperties": 2,
                "anyOf": [{"type": "string"}, {"type": "number"}],
                "oneOf": [{"type": "string"}, {"type": "number"}],
            }
        )
        best = self.best_match(validator.iter_errors({}))
        self.assertEqual(best.validator, "minProperties")
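Outside the test harness, the same heuristic can be exercised directly. A minimal sketch, assuming a standalone jsonschema install; the schema and instance are illustrative, not taken from the diff:

    from jsonschema import Draft4Validator
    from jsonschema.exceptions import best_match

    validator = Draft4Validator(
        {"minProperties": 2, "anyOf": [{"type": "string"}]}
    )
    # Both keywords fail for an empty object, but anyOf counts as a "weak"
    # match, so the minProperties error is reported as most relevant.
    error = best_match(validator.iter_errors({}))
    print(error.validator)  # minProperties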
|
||||
|
||||
def test_if_the_most_relevant_error_is_anyOf_it_is_traversed(self):
|
||||
"""
|
||||
If the most relevant error is an anyOf, then we traverse its context
|
||||
and select the otherwise *least* relevant error, since in this case
|
||||
that means the most specific, deep, error inside the instance.
|
||||
|
||||
I.e. since only one of the schemas must match, we look for the most
|
||||
relevant one.
|
||||
"""
|
||||
|
||||
validator = Draft4Validator(
|
||||
{
|
||||
"properties": {
|
||||
"foo": {
|
||||
"anyOf": [
|
||||
{"type": "string"},
|
||||
{"properties": {"bar": {"type": "array"}}},
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
)
|
||||
best = self.best_match(validator.iter_errors({"foo": {"bar": 12}}))
|
||||
self.assertEqual(best.validator_value, "array")
|
||||
|
||||
def test_if_the_most_relevant_error_is_oneOf_it_is_traversed(self):
|
||||
"""
|
||||
If the most relevant error is an oneOf, then we traverse its context
|
||||
and select the otherwise *least* relevant error, since in this case
|
||||
that means the most specific, deep, error inside the instance.
|
||||
|
||||
I.e. since only one of the schemas must match, we look for the most
|
||||
relevant one.
|
||||
"""
|
||||
|
||||
validator = Draft4Validator(
|
||||
{
|
||||
"properties": {
|
||||
"foo": {
|
||||
"oneOf": [
|
||||
{"type": "string"},
|
||||
{"properties": {"bar": {"type": "array"}}},
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
)
|
||||
best = self.best_match(validator.iter_errors({"foo": {"bar": 12}}))
|
||||
self.assertEqual(best.validator_value, "array")
|
||||
|
||||
def test_if_the_most_relevant_error_is_allOf_it_is_traversed(self):
|
||||
"""
|
||||
Now, if the error is allOf, we traverse but select the *most* relevant
|
||||
error from the context, because all schemas here must match anyways.
|
||||
"""
|
||||
|
||||
validator = Draft4Validator(
|
||||
{
|
||||
"properties": {
|
||||
"foo": {
|
||||
"allOf": [
|
||||
{"type": "string"},
|
||||
{"properties": {"bar": {"type": "array"}}},
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
)
|
||||
best = self.best_match(validator.iter_errors({"foo": {"bar": 12}}))
|
||||
self.assertEqual(best.validator_value, "string")
|
||||
|
||||
def test_nested_context_for_oneOf(self):
|
||||
validator = Draft4Validator(
|
||||
{
|
||||
"properties": {
|
||||
"foo": {
|
||||
"oneOf": [
|
||||
{"type": "string"},
|
||||
{
|
||||
"oneOf": [
|
||||
{"type": "string"},
|
||||
{
|
||||
"properties": {
|
||||
"bar": {"type": "array"},
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
)
|
||||
best = self.best_match(validator.iter_errors({"foo": {"bar": 12}}))
|
||||
self.assertEqual(best.validator_value, "array")
|
||||
|
||||
def test_one_error(self):
|
||||
validator = Draft4Validator({"minProperties": 2})
|
||||
error, = validator.iter_errors({})
|
||||
self.assertEqual(
|
||||
exceptions.best_match(validator.iter_errors({})).validator,
|
||||
"minProperties",
|
||||
)
|
||||
|
||||
def test_no_errors(self):
|
||||
validator = Draft4Validator({})
|
||||
self.assertIsNone(exceptions.best_match(validator.iter_errors({})))
|
||||
|
||||
|
||||
class TestByRelevance(TestCase):
|
||||
def test_short_paths_are_better_matches(self):
|
||||
shallow = exceptions.ValidationError("Oh no!", path=["baz"])
|
||||
deep = exceptions.ValidationError("Oh yes!", path=["foo", "bar"])
|
||||
match = max([shallow, deep], key=exceptions.relevance)
|
||||
self.assertIs(match, shallow)
|
||||
|
||||
match = max([deep, shallow], key=exceptions.relevance)
|
||||
self.assertIs(match, shallow)
|
||||
|
||||
def test_global_errors_are_even_better_matches(self):
|
||||
shallow = exceptions.ValidationError("Oh no!", path=[])
|
||||
deep = exceptions.ValidationError("Oh yes!", path=["foo"])
|
||||
|
||||
errors = sorted([shallow, deep], key=exceptions.relevance)
|
||||
self.assertEqual(
|
||||
[list(error.path) for error in errors],
|
||||
[["foo"], []],
|
||||
)
|
||||
|
||||
errors = sorted([deep, shallow], key=exceptions.relevance)
|
||||
self.assertEqual(
|
||||
[list(error.path) for error in errors],
|
||||
[["foo"], []],
|
||||
)
|
||||
|
||||
def test_weak_validators_are_lower_priority(self):
|
||||
weak = exceptions.ValidationError("Oh no!", path=[], validator="a")
|
||||
normal = exceptions.ValidationError("Oh yes!", path=[], validator="b")
|
||||
|
||||
best_match = exceptions.by_relevance(weak="a")
|
||||
|
||||
match = max([weak, normal], key=best_match)
|
||||
self.assertIs(match, normal)
|
||||
|
||||
match = max([normal, weak], key=best_match)
|
||||
self.assertIs(match, normal)
|
||||
|
||||
def test_strong_validators_are_higher_priority(self):
|
||||
weak = exceptions.ValidationError("Oh no!", path=[], validator="a")
|
||||
normal = exceptions.ValidationError("Oh yes!", path=[], validator="b")
|
||||
strong = exceptions.ValidationError("Oh fine!", path=[], validator="c")
|
||||
|
||||
best_match = exceptions.by_relevance(weak="a", strong="c")
|
||||
|
||||
match = max([weak, normal, strong], key=best_match)
|
||||
self.assertIs(match, strong)
|
||||
|
||||
match = max([strong, normal, weak], key=best_match)
|
||||
self.assertIs(match, strong)
|
||||
|
||||
|
||||
class TestErrorTree(TestCase):
|
||||
def test_it_knows_how_many_total_errors_it_contains(self):
|
||||
# FIXME: https://github.com/Julian/jsonschema/issues/442
|
||||
errors = [
|
||||
exceptions.ValidationError("Something", validator=i)
|
||||
for i in range(8)
|
||||
]
|
||||
tree = exceptions.ErrorTree(errors)
|
||||
self.assertEqual(tree.total_errors, 8)
|
||||
|
||||
def test_it_contains_an_item_if_the_item_had_an_error(self):
|
||||
errors = [exceptions.ValidationError("a message", path=["bar"])]
|
||||
tree = exceptions.ErrorTree(errors)
|
||||
self.assertIn("bar", tree)
|
||||
|
||||
def test_it_does_not_contain_an_item_if_the_item_had_no_error(self):
|
||||
errors = [exceptions.ValidationError("a message", path=["bar"])]
|
||||
tree = exceptions.ErrorTree(errors)
|
||||
self.assertNotIn("foo", tree)
|
||||
|
||||
def test_validators_that_failed_appear_in_errors_dict(self):
|
||||
error = exceptions.ValidationError("a message", validator="foo")
|
||||
tree = exceptions.ErrorTree([error])
|
||||
self.assertEqual(tree.errors, {"foo": error})
|
||||
|
||||
def test_it_creates_a_child_tree_for_each_nested_path(self):
|
||||
errors = [
|
||||
exceptions.ValidationError("a bar message", path=["bar"]),
|
||||
exceptions.ValidationError("a bar -> 0 message", path=["bar", 0]),
|
||||
]
|
||||
tree = exceptions.ErrorTree(errors)
|
||||
self.assertIn(0, tree["bar"])
|
||||
self.assertNotIn(1, tree["bar"])
|
||||
|
||||
def test_children_have_their_errors_dicts_built(self):
|
||||
e1, e2 = (
|
||||
exceptions.ValidationError("1", validator="foo", path=["bar", 0]),
|
||||
exceptions.ValidationError("2", validator="quux", path=["bar", 0]),
|
||||
)
|
||||
tree = exceptions.ErrorTree([e1, e2])
|
||||
self.assertEqual(tree["bar"][0].errors, {"foo": e1, "quux": e2})
|
||||
|
||||
def test_multiple_errors_with_instance(self):
|
||||
e1, e2 = (
|
||||
exceptions.ValidationError(
|
||||
"1",
|
||||
validator="foo",
|
||||
path=["bar", "bar2"],
|
||||
instance="i1"),
|
||||
exceptions.ValidationError(
|
||||
"2",
|
||||
validator="quux",
|
||||
path=["foobar", 2],
|
||||
instance="i2"),
|
||||
)
|
||||
exceptions.ErrorTree([e1, e2])
|
||||
|
||||
def test_it_does_not_contain_subtrees_that_are_not_in_the_instance(self):
|
||||
error = exceptions.ValidationError("123", validator="foo", instance=[])
|
||||
tree = exceptions.ErrorTree([error])
|
||||
|
||||
with self.assertRaises(IndexError):
|
||||
tree[0]
|
||||
|
||||
def test_if_its_in_the_tree_anyhow_it_does_not_raise_an_error(self):
|
||||
"""
|
||||
If a validator is dumb (like :validator:`required` in draft 3) and
|
||||
refers to a path that isn't in the instance, the tree still properly
|
||||
returns a subtree for that path.
|
||||
"""
|
||||
|
||||
error = exceptions.ValidationError(
|
||||
"a message", validator="foo", instance={}, path=["foo"],
|
||||
)
|
||||
tree = exceptions.ErrorTree([error])
|
||||
self.assertIsInstance(tree["foo"], exceptions.ErrorTree)
|
||||
|
||||
|
||||
class TestErrorInitReprStr(TestCase):
|
||||
def make_error(self, **kwargs):
|
||||
defaults = dict(
|
||||
message=u"hello",
|
||||
validator=u"type",
|
||||
validator_value=u"string",
|
||||
instance=5,
|
||||
schema={u"type": u"string"},
|
||||
)
|
||||
defaults.update(kwargs)
|
||||
return exceptions.ValidationError(**defaults)
|
||||
|
||||
def assertShows(self, expected, **kwargs):
|
||||
if PY3: # pragma: no cover
|
||||
expected = expected.replace("u'", "'")
|
||||
expected = textwrap.dedent(expected).rstrip("\n")
|
||||
|
||||
error = self.make_error(**kwargs)
|
||||
message_line, _, rest = str(error).partition("\n")
|
||||
self.assertEqual(message_line, error.message)
|
||||
self.assertEqual(rest, expected)
|
||||
|
||||
def test_it_calls_super_and_sets_args(self):
|
||||
error = self.make_error()
|
||||
self.assertGreater(len(error.args), 1)
|
||||
|
||||
def test_repr(self):
|
||||
self.assertEqual(
|
||||
repr(exceptions.ValidationError(message="Hello!")),
|
||||
"<ValidationError: %r>" % "Hello!",
|
||||
)
|
||||
|
||||
def test_unset_error(self):
|
||||
error = exceptions.ValidationError("message")
|
||||
self.assertEqual(str(error), "message")
|
||||
|
||||
kwargs = {
|
||||
"validator": "type",
|
||||
"validator_value": "string",
|
||||
"instance": 5,
|
||||
"schema": {"type": "string"},
|
||||
}
|
||||
# Just the message should show if any of the attributes are unset
|
||||
for attr in kwargs:
|
||||
k = dict(kwargs)
|
||||
del k[attr]
|
||||
error = exceptions.ValidationError("message", **k)
|
||||
self.assertEqual(str(error), "message")
|
||||
|
||||
def test_empty_paths(self):
|
||||
self.assertShows(
|
||||
"""
|
||||
Failed validating u'type' in schema:
|
||||
{u'type': u'string'}
|
||||
|
||||
On instance:
|
||||
5
|
||||
""",
|
||||
path=[],
|
||||
schema_path=[],
|
||||
)
|
||||
|
||||
def test_one_item_paths(self):
|
||||
self.assertShows(
|
||||
"""
|
||||
Failed validating u'type' in schema:
|
||||
{u'type': u'string'}
|
||||
|
||||
On instance[0]:
|
||||
5
|
||||
""",
|
||||
path=[0],
|
||||
schema_path=["items"],
|
||||
)
|
||||
|
||||
def test_multiple_item_paths(self):
|
||||
self.assertShows(
|
||||
"""
|
||||
Failed validating u'type' in schema[u'items'][0]:
|
||||
{u'type': u'string'}
|
||||
|
||||
On instance[0][u'a']:
|
||||
5
|
||||
""",
|
||||
path=[0, u"a"],
|
||||
schema_path=[u"items", 0, 1],
|
||||
)
|
||||
|
||||
def test_uses_pprint(self):
|
||||
self.assertShows(
|
||||
"""
|
||||
Failed validating u'maxLength' in schema:
|
||||
{0: 0,
|
||||
1: 1,
|
||||
2: 2,
|
||||
3: 3,
|
||||
4: 4,
|
||||
5: 5,
|
||||
6: 6,
|
||||
7: 7,
|
||||
8: 8,
|
||||
9: 9,
|
||||
10: 10,
|
||||
11: 11,
|
||||
12: 12,
|
||||
13: 13,
|
||||
14: 14,
|
||||
15: 15,
|
||||
16: 16,
|
||||
17: 17,
|
||||
18: 18,
|
||||
19: 19}
|
||||
|
||||
On instance:
|
||||
[0,
|
||||
1,
|
||||
2,
|
||||
3,
|
||||
4,
|
||||
5,
|
||||
6,
|
||||
7,
|
||||
8,
|
||||
9,
|
||||
10,
|
||||
11,
|
||||
12,
|
||||
13,
|
||||
14,
|
||||
15,
|
||||
16,
|
||||
17,
|
||||
18,
|
||||
19,
|
||||
20,
|
||||
21,
|
||||
22,
|
||||
23,
|
||||
24]
|
||||
""",
|
||||
instance=list(range(25)),
|
||||
schema=dict(zip(range(20), range(20))),
|
||||
validator=u"maxLength",
|
||||
)
|
||||
|
||||
def test_str_works_with_instances_having_overriden_eq_operator(self):
|
||||
"""
|
||||
Check for https://github.com/Julian/jsonschema/issues/164 which
|
||||
rendered exceptions unusable when a `ValidationError` involved
|
||||
instances with an `__eq__` method that returned truthy values.
|
||||
"""
|
||||
|
||||
class DontEQMeBro(object):
|
||||
def __eq__(this, other): # pragma: no cover
|
||||
self.fail("Don't!")
|
||||
|
||||
def __ne__(this, other): # pragma: no cover
|
||||
self.fail("Don't!")
|
||||
|
||||
instance = DontEQMeBro()
|
||||
error = exceptions.ValidationError(
|
||||
"a message",
|
||||
validator="foo",
|
||||
instance=instance,
|
||||
validator_value="some",
|
||||
schema="schema",
|
||||
)
|
||||
self.assertIn(repr(instance), str(error))
|
||||
|
||||
|
||||
class TestHashable(TestCase):
|
||||
def test_hashable(self):
|
||||
set([exceptions.ValidationError("")])
|
||||
set([exceptions.SchemaError("")])
|
@ -1,89 +0,0 @@
|
||||
"""
|
||||
Tests for the parts of jsonschema related to the :validator:`format` property.
|
||||
"""
|
||||
|
||||
from unittest import TestCase
|
||||
|
||||
from jsonschema import FormatError, ValidationError, FormatChecker
|
||||
from jsonschema.validators import Draft4Validator
|
||||
|
||||
|
||||
BOOM = ValueError("Boom!")
|
||||
BANG = ZeroDivisionError("Bang!")
|
||||
|
||||
|
||||
def boom(thing):
|
||||
if thing == "bang":
|
||||
raise BANG
|
||||
raise BOOM
|
||||
|
||||
|
||||
class TestFormatChecker(TestCase):
|
||||
def test_it_can_validate_no_formats(self):
|
||||
checker = FormatChecker(formats=())
|
||||
self.assertFalse(checker.checkers)
|
||||
|
||||
def test_it_raises_a_key_error_for_unknown_formats(self):
|
||||
with self.assertRaises(KeyError):
|
||||
FormatChecker(formats=["o noes"])
|
||||
|
||||
def test_it_can_register_cls_checkers(self):
|
||||
original = dict(FormatChecker.checkers)
|
||||
self.addCleanup(FormatChecker.checkers.pop, "boom")
|
||||
FormatChecker.cls_checks("boom")(boom)
|
||||
self.assertEqual(
|
||||
FormatChecker.checkers,
|
||||
dict(original, boom=(boom, ())),
|
||||
)
|
||||
|
||||
def test_it_can_register_checkers(self):
|
||||
checker = FormatChecker()
|
||||
checker.checks("boom")(boom)
|
||||
self.assertEqual(
|
||||
checker.checkers,
|
||||
dict(FormatChecker.checkers, boom=(boom, ()))
|
||||
)
|
||||
|
||||
def test_it_catches_registered_errors(self):
|
||||
checker = FormatChecker()
|
||||
checker.checks("boom", raises=type(BOOM))(boom)
|
||||
|
||||
with self.assertRaises(FormatError) as cm:
|
||||
checker.check(instance=12, format="boom")
|
||||
|
||||
self.assertIs(cm.exception.cause, BOOM)
|
||||
self.assertIs(cm.exception.__cause__, BOOM)
|
||||
|
||||
# Unregistered errors should not be caught
|
||||
with self.assertRaises(type(BANG)):
|
||||
checker.check(instance="bang", format="boom")
|
||||
|
||||
def test_format_error_causes_become_validation_error_causes(self):
|
||||
checker = FormatChecker()
|
||||
checker.checks("boom", raises=ValueError)(boom)
|
||||
validator = Draft4Validator({"format": "boom"}, format_checker=checker)
|
||||
|
||||
with self.assertRaises(ValidationError) as cm:
|
||||
validator.validate("BOOM")
|
||||
|
||||
self.assertIs(cm.exception.cause, BOOM)
|
||||
self.assertIs(cm.exception.__cause__, BOOM)
|
||||
|
||||
def test_format_checkers_come_with_defaults(self):
|
||||
# This is bad :/ but relied upon.
|
||||
# The docs for quite awhile recommended people do things like
|
||||
# validate(..., format_checker=FormatChecker())
|
||||
# We should change that, but we can't without deprecation...
|
||||
checker = FormatChecker()
|
||||
with self.assertRaises(FormatError):
|
||||
checker.check(instance="not-an-ipv4", format="ipv4")
|
||||
|
||||
def test_repr(self):
|
||||
checker = FormatChecker(formats=())
|
||||
checker.checks("foo")(lambda thing: True)
|
||||
checker.checks("bar")(lambda thing: True)
|
||||
checker.checks("baz")(lambda thing: True)
|
||||
self.assertEqual(
|
||||
repr(checker),
|
||||
"<FormatChecker checkers=['bar', 'baz', 'foo']>",
|
||||
)
|
@ -1,277 +0,0 @@
|
||||
"""
|
||||
Test runner for the JSON Schema official test suite
|
||||
|
||||
Tests comprehensive correctness of each draft's validator.
|
||||
|
||||
See https://github.com/json-schema-org/JSON-Schema-Test-Suite for details.
|
||||
"""
|
||||
|
||||
import sys
|
||||
import warnings
|
||||
|
||||
from jsonschema import (
|
||||
Draft3Validator,
|
||||
Draft4Validator,
|
||||
Draft6Validator,
|
||||
Draft7Validator,
|
||||
draft3_format_checker,
|
||||
draft4_format_checker,
|
||||
draft6_format_checker,
|
||||
draft7_format_checker,
|
||||
)
|
||||
from jsonschema.tests._helpers import bug
|
||||
from jsonschema.tests._suite import Suite
|
||||
from jsonschema.validators import _DEPRECATED_DEFAULT_TYPES, create
|
||||
|
||||
|
||||
SUITE = Suite()
|
||||
DRAFT3 = SUITE.version(name="draft3")
|
||||
DRAFT4 = SUITE.version(name="draft4")
|
||||
DRAFT6 = SUITE.version(name="draft6")
|
||||
DRAFT7 = SUITE.version(name="draft7")
|
||||
|
||||
|
||||
def skip(message, **kwargs):
|
||||
def skipper(test):
|
||||
if all(value == getattr(test, attr) for attr, value in kwargs.items()):
|
||||
return message
|
||||
return skipper
|
||||
|
||||
|
||||
def missing_format(checker):
|
||||
def missing_format(test):
|
||||
schema = test.schema
|
||||
if schema is True or schema is False or "format" not in schema:
|
||||
return
|
||||
|
||||
if schema["format"] not in checker.checkers:
|
||||
return "Format checker {0!r} not found.".format(schema["format"])
|
||||
return missing_format
|
||||
|
||||
|
||||
is_narrow_build = sys.maxunicode == 2 ** 16 - 1
|
||||
if is_narrow_build: # pragma: no cover
|
||||
message = "Not running surrogate Unicode case, this Python is narrow."
|
||||
|
||||
def narrow_unicode_build(test): # pragma: no cover
|
||||
return skip(
|
||||
message=message,
|
||||
description="one supplementary Unicode code point is not long enough",
|
||||
)(test) or skip(
|
||||
message=message,
|
||||
description="two supplementary Unicode code points is long enough",
|
||||
)(test)
|
||||
else:
|
||||
def narrow_unicode_build(test): # pragma: no cover
|
||||
return
|
||||
|
||||
|
||||
TestDraft3 = DRAFT3.to_unittest_testcase(
|
||||
DRAFT3.tests(),
|
||||
DRAFT3.optional_tests_of(name="bignum"),
|
||||
DRAFT3.optional_tests_of(name="format"),
|
||||
DRAFT3.optional_tests_of(name="zeroTerminatedFloats"),
|
||||
Validator=Draft3Validator,
|
||||
format_checker=draft3_format_checker,
|
||||
skip=lambda test: (
|
||||
narrow_unicode_build(test)
|
||||
or missing_format(draft3_format_checker)(test)
|
||||
or skip(
|
||||
message="Upstream bug in strict_rfc3339",
|
||||
subject="format",
|
||||
description="case-insensitive T and Z",
|
||||
)(test)
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
TestDraft4 = DRAFT4.to_unittest_testcase(
|
||||
DRAFT4.tests(),
|
||||
DRAFT4.optional_tests_of(name="bignum"),
|
||||
DRAFT4.optional_tests_of(name="format"),
|
||||
DRAFT4.optional_tests_of(name="zeroTerminatedFloats"),
|
||||
Validator=Draft4Validator,
|
||||
format_checker=draft4_format_checker,
|
||||
skip=lambda test: (
|
||||
narrow_unicode_build(test)
|
||||
or missing_format(draft4_format_checker)(test)
|
||||
or skip(
|
||||
message=bug(),
|
||||
subject="ref",
|
||||
case_description="Recursive references between schemas",
|
||||
)(test)
|
||||
or skip(
|
||||
message=bug(371),
|
||||
subject="ref",
|
||||
case_description="Location-independent identifier",
|
||||
)(test)
|
||||
or skip(
|
||||
message=bug(371),
|
||||
subject="ref",
|
||||
case_description=(
|
||||
"Location-independent identifier with absolute URI"
|
||||
),
|
||||
)(test)
|
||||
or skip(
|
||||
message=bug(371),
|
||||
subject="ref",
|
||||
case_description=(
|
||||
"Location-independent identifier with base URI change in subschema"
|
||||
),
|
||||
)(test)
|
||||
or skip(
|
||||
message=bug(),
|
||||
subject="refRemote",
|
||||
case_description="base URI change - change folder in subschema",
|
||||
)(test)
|
||||
or skip(
|
||||
message="Upstream bug in strict_rfc3339",
|
||||
subject="format",
|
||||
description="case-insensitive T and Z",
|
||||
)(test)
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
TestDraft6 = DRAFT6.to_unittest_testcase(
|
||||
DRAFT6.tests(),
|
||||
DRAFT6.optional_tests_of(name="bignum"),
|
||||
DRAFT6.optional_tests_of(name="format"),
|
||||
DRAFT6.optional_tests_of(name="zeroTerminatedFloats"),
|
||||
Validator=Draft6Validator,
|
||||
format_checker=draft6_format_checker,
|
||||
skip=lambda test: (
|
||||
narrow_unicode_build(test)
|
||||
or missing_format(draft6_format_checker)(test)
|
||||
or skip(
|
||||
message=bug(),
|
||||
subject="ref",
|
||||
case_description="Recursive references between schemas",
|
||||
)(test)
|
||||
or skip(
|
||||
message=bug(371),
|
||||
subject="ref",
|
||||
case_description="Location-independent identifier",
|
||||
)(test)
|
||||
or skip(
|
||||
message=bug(371),
|
||||
subject="ref",
|
||||
case_description=(
|
||||
"Location-independent identifier with absolute URI"
|
||||
),
|
||||
)(test)
|
||||
or skip(
|
||||
message=bug(371),
|
||||
subject="ref",
|
||||
case_description=(
|
||||
"Location-independent identifier with base URI change in subschema"
|
||||
),
|
||||
)(test)
|
||||
or skip(
|
||||
message=bug(),
|
||||
subject="refRemote",
|
||||
case_description="base URI change - change folder in subschema",
|
||||
)(test)
|
||||
or skip(
|
||||
message="Upstream bug in strict_rfc3339",
|
||||
subject="format",
|
||||
description="case-insensitive T and Z",
|
||||
)(test)
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
TestDraft7 = DRAFT7.to_unittest_testcase(
|
||||
DRAFT7.tests(),
|
||||
DRAFT7.format_tests(),
|
||||
DRAFT7.optional_tests_of(name="bignum"),
|
||||
DRAFT7.optional_tests_of(name="content"),
|
||||
DRAFT7.optional_tests_of(name="zeroTerminatedFloats"),
|
||||
Validator=Draft7Validator,
|
||||
format_checker=draft7_format_checker,
|
||||
skip=lambda test: (
|
||||
narrow_unicode_build(test)
|
||||
or missing_format(draft7_format_checker)(test)
|
||||
or skip(
|
||||
message=bug(),
|
||||
subject="ref",
|
||||
case_description="Recursive references between schemas",
|
||||
)(test)
|
||||
or skip(
|
||||
message=bug(371),
|
||||
subject="ref",
|
||||
case_description="Location-independent identifier",
|
||||
)(test)
|
||||
or skip(
|
||||
message=bug(371),
|
||||
subject="ref",
|
||||
case_description=(
|
||||
"Location-independent identifier with absolute URI"
|
||||
),
|
||||
)(test)
|
||||
or skip(
|
||||
message=bug(371),
|
||||
subject="ref",
|
||||
case_description=(
|
||||
"Location-independent identifier with base URI change in subschema"
|
||||
),
|
||||
)(test)
|
||||
or skip(
|
||||
message=bug(),
|
||||
subject="refRemote",
|
||||
case_description="base URI change - change folder in subschema",
|
||||
)(test)
|
||||
or skip(
|
||||
message="Upstream bug in strict_rfc3339",
|
||||
subject="date-time",
|
||||
description="case-insensitive T and Z",
|
||||
)(test)
|
||||
or skip(
|
||||
message=bug(593),
|
||||
subject="content",
|
||||
case_description=(
|
||||
"validation of string-encoded content based on media type"
|
||||
),
|
||||
)(test)
|
||||
or skip(
|
||||
message=bug(593),
|
||||
subject="content",
|
||||
case_description="validation of binary string-encoding",
|
||||
)(test)
|
||||
or skip(
|
||||
message=bug(593),
|
||||
subject="content",
|
||||
case_description=(
|
||||
"validation of binary-encoded media type documents"
|
||||
),
|
||||
)(test)
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
with warnings.catch_warnings():
|
||||
warnings.simplefilter("ignore", DeprecationWarning)
|
||||
|
||||
TestDraft3LegacyTypeCheck = DRAFT3.to_unittest_testcase(
|
||||
# Interestingly the any part couldn't really be done w/the old API.
|
||||
(
|
||||
(test for test in each if test.schema != {"type": "any"})
|
||||
for each in DRAFT3.tests_of(name="type")
|
||||
),
|
||||
name="TestDraft3LegacyTypeCheck",
|
||||
Validator=create(
|
||||
meta_schema=Draft3Validator.META_SCHEMA,
|
||||
validators=Draft3Validator.VALIDATORS,
|
||||
default_types=_DEPRECATED_DEFAULT_TYPES,
|
||||
),
|
||||
)
|
||||
|
||||
TestDraft4LegacyTypeCheck = DRAFT4.to_unittest_testcase(
|
||||
DRAFT4.tests_of(name="type"),
|
||||
name="TestDraft4LegacyTypeCheck",
|
||||
Validator=create(
|
||||
meta_schema=Draft4Validator.META_SCHEMA,
|
||||
validators=Draft4Validator.VALIDATORS,
|
||||
default_types=_DEPRECATED_DEFAULT_TYPES,
|
||||
),
|
||||
)
|
@ -1,190 +0,0 @@
|
||||
"""
|
||||
Tests on the new type interface. The actual correctness of the type checking
|
||||
is handled in test_jsonschema_test_suite; these tests check that TypeChecker
|
||||
functions correctly and can facilitate extensions to type checking
|
||||
"""
|
||||
from collections import namedtuple
|
||||
from unittest import TestCase
|
||||
|
||||
from jsonschema import ValidationError, _validators
|
||||
from jsonschema._types import TypeChecker
|
||||
from jsonschema.exceptions import UndefinedTypeCheck
|
||||
from jsonschema.validators import Draft4Validator, extend
|
||||
|
||||
|
||||
def equals_2(checker, instance):
|
||||
return instance == 2
|
||||
|
||||
|
||||
def is_namedtuple(instance):
|
||||
return isinstance(instance, tuple) and getattr(instance, "_fields", None)
|
||||
|
||||
|
||||
def is_object_or_named_tuple(checker, instance):
|
||||
if Draft4Validator.TYPE_CHECKER.is_type(instance, "object"):
|
||||
return True
|
||||
return is_namedtuple(instance)
|
||||
|
||||
|
||||
def coerce_named_tuple(fn):
|
||||
def coerced(validator, value, instance, schema):
|
||||
if is_namedtuple(instance):
|
||||
instance = instance._asdict()
|
||||
return fn(validator, value, instance, schema)
|
||||
return coerced
|
||||
|
||||
|
||||
required = coerce_named_tuple(_validators.required)
|
||||
properties = coerce_named_tuple(_validators.properties)
|
||||
|
||||
|
||||
class TestTypeChecker(TestCase):
|
||||
def test_is_type(self):
|
||||
checker = TypeChecker({"two": equals_2})
|
||||
self.assertEqual(
|
||||
(
|
||||
checker.is_type(instance=2, type="two"),
|
||||
checker.is_type(instance="bar", type="two"),
|
||||
),
|
||||
(True, False),
|
||||
)
|
||||
|
||||
def test_is_unknown_type(self):
|
||||
with self.assertRaises(UndefinedTypeCheck) as context:
|
||||
TypeChecker().is_type(4, "foobar")
|
||||
self.assertIn("foobar", str(context.exception))
|
||||
|
||||
def test_checks_can_be_added_at_init(self):
|
||||
checker = TypeChecker({"two": equals_2})
|
||||
self.assertEqual(checker, TypeChecker().redefine("two", equals_2))
|
||||
|
||||
def test_redefine_existing_type(self):
|
||||
self.assertEqual(
|
||||
TypeChecker().redefine("two", object()).redefine("two", equals_2),
|
||||
TypeChecker().redefine("two", equals_2),
|
||||
)
|
||||
|
||||
def test_remove(self):
|
||||
self.assertEqual(
|
||||
TypeChecker({"two": equals_2}).remove("two"),
|
||||
TypeChecker(),
|
||||
)
|
||||
|
||||
def test_remove_unknown_type(self):
|
||||
with self.assertRaises(UndefinedTypeCheck) as context:
|
||||
TypeChecker().remove("foobar")
|
||||
self.assertIn("foobar", str(context.exception))
|
||||
|
||||
def test_redefine_many(self):
|
||||
self.assertEqual(
|
||||
TypeChecker().redefine_many({"foo": int, "bar": str}),
|
||||
TypeChecker().redefine("foo", int).redefine("bar", str),
|
||||
)
|
||||
|
||||
def test_remove_multiple(self):
|
||||
self.assertEqual(
|
||||
TypeChecker({"foo": int, "bar": str}).remove("foo", "bar"),
|
||||
TypeChecker(),
|
||||
)
|
||||
|
||||
    def test_type_check_can_raise_key_error(self):
        """
        Make sure no one writes:

            try:
                self._type_checkers[type](...)
            except KeyError:

        ignoring the fact that the function itself can raise that.
        """

        error = KeyError("Stuff")

        def raises_keyerror(checker, instance):
            raise error

        with self.assertRaises(KeyError) as context:
            TypeChecker({"foo": raises_keyerror}).is_type(4, "foo")

        self.assertIs(context.exception, error)
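The safe pattern the docstring warns about can be sketched as follows (hypothetical helper name, not the library's actual implementation): only the lookup failure is translated, and an exception raised by the checker itself propagates unchanged.

    from jsonschema.exceptions import UndefinedTypeCheck

    def checked_is_type(checkers, instance, type_name):
        try:
            checker = checkers[type_name]   # only *this* KeyError is ours
        except KeyError:
            raise UndefinedTypeCheck(type_name)
        return checker(instance)            # checker exceptions pass through

    print(checked_is_type({"integer": lambda v: isinstance(v, int)}, 4, "integer"))  # True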
|
||||
|
||||
|
||||
class TestCustomTypes(TestCase):
|
||||
def test_simple_type_can_be_extended(self):
|
||||
def int_or_str_int(checker, instance):
|
||||
if not isinstance(instance, (int, str)):
|
||||
return False
|
||||
try:
|
||||
int(instance)
|
||||
except ValueError:
|
||||
return False
|
||||
return True
|
||||
|
||||
CustomValidator = extend(
|
||||
Draft4Validator,
|
||||
type_checker=Draft4Validator.TYPE_CHECKER.redefine(
|
||||
"integer", int_or_str_int,
|
||||
),
|
||||
)
|
||||
validator = CustomValidator({"type": "integer"})
|
||||
|
||||
validator.validate(4)
|
||||
validator.validate("4")
|
||||
|
||||
with self.assertRaises(ValidationError):
|
||||
validator.validate(4.4)
|
||||
|
||||
def test_object_can_be_extended(self):
|
||||
schema = {"type": "object"}
|
||||
|
||||
Point = namedtuple("Point", ["x", "y"])
|
||||
|
||||
type_checker = Draft4Validator.TYPE_CHECKER.redefine(
|
||||
u"object", is_object_or_named_tuple,
|
||||
)
|
||||
|
||||
CustomValidator = extend(Draft4Validator, type_checker=type_checker)
|
||||
validator = CustomValidator(schema)
|
||||
|
||||
validator.validate(Point(x=4, y=5))
|
||||
|
||||
def test_object_extensions_require_custom_validators(self):
|
||||
schema = {"type": "object", "required": ["x"]}
|
||||
|
||||
type_checker = Draft4Validator.TYPE_CHECKER.redefine(
|
||||
u"object", is_object_or_named_tuple,
|
||||
)
|
||||
|
||||
CustomValidator = extend(Draft4Validator, type_checker=type_checker)
|
||||
validator = CustomValidator(schema)
|
||||
|
||||
Point = namedtuple("Point", ["x", "y"])
|
||||
# Cannot handle required
|
||||
with self.assertRaises(ValidationError):
|
||||
validator.validate(Point(x=4, y=5))
|
||||
|
||||
def test_object_extensions_can_handle_custom_validators(self):
|
||||
schema = {
|
||||
"type": "object",
|
||||
"required": ["x"],
|
||||
"properties": {"x": {"type": "integer"}},
|
||||
}
|
||||
|
||||
type_checker = Draft4Validator.TYPE_CHECKER.redefine(
|
||||
u"object", is_object_or_named_tuple,
|
||||
)
|
||||
|
||||
CustomValidator = extend(
|
||||
Draft4Validator,
|
||||
type_checker=type_checker,
|
||||
validators={"required": required, "properties": properties},
|
||||
)
|
||||
|
||||
validator = CustomValidator(schema)
|
||||
|
||||
Point = namedtuple("Point", ["x", "y"])
|
||||
# Can now process required and properties
|
||||
validator.validate(Point(x=4, y=5))
|
||||
|
||||
with self.assertRaises(ValidationError):
|
||||
validator.validate(Point(x="not an integer", y=5))
|
File diff suppressed because it is too large
@ -8,16 +8,16 @@
import json
import numbers

from six import add_metaclass
from _vendoring.six import add_metaclass

from jsonschema import (
from _vendoring.jsonschema import (
    _legacy_validators,
    _types,
    _utils,
    _validators,
    exceptions,
)
from jsonschema.compat import (
from _vendoring.jsonschema.compat import (
    Sequence,
    int_types,
    iteritems,
@ -33,7 +33,7 @@
# Sigh. https://gitlab.com/pycqa/flake8/issues/280
# https://github.com/pyga/ebb-lint/issues/7
# Imported for backwards compatibility.
from jsonschema.exceptions import ErrorTree
from _vendoring.jsonschema.exceptions import ErrorTree
ErrorTree
|
||||
|
||||
|
||||
|
@ -7,7 +7,7 @@
|
||||
import struct
|
||||
import sys
|
||||
|
||||
from macholib.util import fileview
|
||||
from _vendoring.macholib.util import fileview
|
||||
|
||||
from .mach_o import (
|
||||
FAT_MAGIC,
|
||||
@ -41,7 +41,7 @@
|
||||
from .ptypes import sizeof
|
||||
|
||||
try:
|
||||
from macholib.compat import bytes
|
||||
from _vendoring.macholib.compat import bytes
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
|
@ -5,11 +5,11 @@
|
||||
import os
|
||||
import sys
|
||||
|
||||
from altgraph.ObjectGraph import ObjectGraph
|
||||
from _vendoring.altgraph.ObjectGraph import ObjectGraph
|
||||
|
||||
from macholib.dyld import dyld_find
|
||||
from macholib.itergraphreport import itergraphreport
|
||||
from macholib.MachO import MachO
|
||||
from _vendoring.macholib.dyld import dyld_find
|
||||
from _vendoring.macholib.itergraphreport import itergraphreport
|
||||
from _vendoring.macholib.MachO import MachO
|
||||
|
||||
__all__ = ["MachOGraph"]
|
||||
|
||||
|
@ -1,9 +1,9 @@
|
||||
import os
|
||||
from collections import deque
|
||||
|
||||
from macholib.dyld import framework_info
|
||||
from macholib.MachOGraph import MachOGraph, MissingMachO
|
||||
from macholib.util import (
|
||||
from _vendoring.macholib.dyld import framework_info
|
||||
from _vendoring.macholib.MachOGraph import MachOGraph, MissingMachO
|
||||
from _vendoring.macholib.util import (
|
||||
flipwritable,
|
||||
has_filename_filter,
|
||||
in_system_path,
|
||||
|
@ -5,7 +5,7 @@
|
||||
|
||||
import sys
|
||||
|
||||
from macholib.mach_o import (
|
||||
from _vendoring.macholib.mach_o import (
|
||||
MH_CIGAM_64,
|
||||
MH_MAGIC_64,
|
||||
dylib_module,
|
||||
|
@ -3,8 +3,8 @@
|
||||
import os
|
||||
import sys
|
||||
|
||||
from macholib import macho_dump, macho_standalone
|
||||
from macholib.util import is_platform_file
|
||||
from _vendoring.macholib import macho_dump, macho_standalone
|
||||
from _vendoring.macholib.util import is_platform_file
|
||||
|
||||
gCommand = None
|
||||
|
||||
@ -43,10 +43,10 @@ def walk_tree(callback, paths):
|
||||
|
||||
def print_usage(fp):
|
||||
print("Usage:", file=fp)
|
||||
print(" python -mmacholib [help|--help]", file=fp)
|
||||
print(" python -mmacholib dump FILE ...", file=fp)
|
||||
print(" python -mmacholib find DIR ...", file=fp)
|
||||
print(" python -mmacholib standalone DIR ...", file=fp)
|
||||
print(" python -m_vendoring.macholib [help|--help]", file=fp)
|
||||
print(" python -m_vendoring.macholib dump FILE ...", file=fp)
|
||||
print(" python -m_vendoring.macholib find DIR ...", file=fp)
|
||||
print(" python -m_vendoring.macholib standalone DIR ...", file=fp)
|
||||
|
||||
|
||||
def main():
|
||||
|
@ -6,7 +6,7 @@
|
||||
import os
|
||||
import sys
|
||||
|
||||
from macholib.util import is_platform_file
|
||||
from _vendoring.macholib.util import is_platform_file
|
||||
|
||||
|
||||
def check_file(fp, path, callback):
|
||||
|
@ -8,8 +8,8 @@
|
||||
import sys
|
||||
from itertools import chain
|
||||
|
||||
from macholib.dylib import dylib_info
|
||||
from macholib.framework import framework_info
|
||||
from _vendoring.macholib.dylib import dylib_info
|
||||
from _vendoring.macholib.framework import framework_info
|
||||
|
||||
__all__ = ["dyld_find", "framework_find", "framework_info", "dylib_info"]
|
||||
|
||||
|
@ -13,7 +13,7 @@
|
||||
|
||||
import time
|
||||
|
||||
from macholib.ptypes import (
|
||||
from _vendoring.macholib.ptypes import (
|
||||
Structure,
|
||||
p_int32,
|
||||
p_int64,
|
||||
|
@ -4,9 +4,9 @@
|
||||
|
||||
import sys
|
||||
|
||||
from macholib._cmdline import main as _main
|
||||
from macholib.mach_o import CPU_TYPE_NAMES, MH_CIGAM_64, MH_MAGIC_64, get_cpu_subtype
|
||||
from macholib.MachO import MachO
|
||||
from _vendoring.macholib._cmdline import main as _main
|
||||
from _vendoring.macholib.mach_o import CPU_TYPE_NAMES, MH_CIGAM_64, MH_MAGIC_64, get_cpu_subtype
|
||||
from _vendoring.macholib.MachO import MachO
|
||||
|
||||
ARCH_MAP = {
|
||||
("<", "64-bit"): "x86_64",
|
||||
@ -45,7 +45,7 @@ def print_file(fp, path):
|
||||
|
||||
def main():
|
||||
print(
|
||||
"WARNING: 'macho_dump' is deprecated, use 'python -mmacholib dump' " "instead"
|
||||
"WARNING: 'macho_dump' is deprecated, use 'python -m_vendoring.macholib dump' " "instead"
|
||||
)
|
||||
_main(print_file)
|
||||
|
||||
|
@ -1,7 +1,7 @@
|
||||
#!/usr/bin/env python
|
||||
from __future__ import print_function
|
||||
|
||||
from macholib._cmdline import main as _main
|
||||
from _vendoring.macholib._cmdline import main as _main
|
||||
|
||||
|
||||
def print_file(fp, path):
|
||||
@ -10,7 +10,7 @@ def print_file(fp, path):
|
||||
|
||||
def main():
|
||||
print(
|
||||
"WARNING: 'macho_find' is deprecated, " "use 'python -mmacholib dump' instead"
|
||||
"WARNING: 'macho_find' is deprecated, " "use 'python -m_vendoring.macholib dump' instead"
|
||||
)
|
||||
_main(print_file)
|
||||
|
||||
|
@ -3,8 +3,8 @@
|
||||
import os
|
||||
import sys
|
||||
|
||||
from macholib.MachOStandalone import MachOStandalone
|
||||
from macholib.util import strip_files
|
||||
from _vendoring.macholib.MachOStandalone import MachOStandalone
|
||||
from _vendoring.macholib.util import strip_files
|
||||
|
||||
|
||||
def standaloneApp(path):
|
||||
@ -18,7 +18,7 @@ def standaloneApp(path):
|
||||
def main():
|
||||
print(
|
||||
"WARNING: 'macho_standalone' is deprecated, use "
|
||||
"'python -mmacholib standalone' instead"
|
||||
"'python -m_vendoring.macholib standalone' instead"
|
||||
)
|
||||
if not sys.argv[1:]:
|
||||
raise SystemExit("usage: %s [appbundle ...]" % (sys.argv[0],))
|
||||
|
@ -4,7 +4,7 @@
|
||||
import struct
|
||||
import sys
|
||||
|
||||
from macholib import mach_o
|
||||
from _vendoring.macholib import mach_o
|
||||
|
||||
MAGIC = [
|
||||
struct.pack("!L", getattr(mach_o, "MH_" + _))
|
||||
|
@ -4,7 +4,7 @@
|
||||
import typing as t
|
||||
|
||||
if t.TYPE_CHECKING:
|
||||
import typing_extensions as te
|
||||
import _vendoring.typing_extensions as te
|
||||
|
||||
class HasHTML(te.Protocol):
|
||||
def __html__(self) -> str:
|
||||
|
@ -1,35 +1,35 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
from pyrsistent._pmap import pmap, m, PMap
|
||||
from _vendoring.pyrsistent._pmap import pmap, m, PMap
|
||||
|
||||
from pyrsistent._pvector import pvector, v, PVector
|
||||
from _vendoring.pyrsistent._pvector import pvector, v, PVector
|
||||
|
||||
from pyrsistent._pset import pset, s, PSet
|
||||
from _vendoring.pyrsistent._pset import pset, s, PSet
|
||||
|
||||
from pyrsistent._pbag import pbag, b, PBag
|
||||
from _vendoring.pyrsistent._pbag import pbag, b, PBag
|
||||
|
||||
from pyrsistent._plist import plist, l, PList
|
||||
from _vendoring.pyrsistent._plist import plist, l, PList
|
||||
|
||||
from pyrsistent._pdeque import pdeque, dq, PDeque
|
||||
from _vendoring.pyrsistent._pdeque import pdeque, dq, PDeque
|
||||
|
||||
from pyrsistent._checked_types import (
|
||||
from _vendoring.pyrsistent._checked_types import (
|
||||
CheckedPMap, CheckedPVector, CheckedPSet, InvariantException, CheckedKeyTypeError,
|
||||
CheckedValueTypeError, CheckedType, optional)
|
||||
|
||||
from pyrsistent._field_common import (
|
||||
from _vendoring.pyrsistent._field_common import (
|
||||
field, PTypeError, pset_field, pmap_field, pvector_field)
|
||||
|
||||
from pyrsistent._precord import PRecord
|
||||
from _vendoring.pyrsistent._precord import PRecord
|
||||
|
||||
from pyrsistent._pclass import PClass, PClassMeta
|
||||
from _vendoring.pyrsistent._pclass import PClass, PClassMeta
|
||||
|
||||
from pyrsistent._immutable import immutable
|
||||
from _vendoring.pyrsistent._immutable import immutable
|
||||
|
||||
from pyrsistent._helpers import freeze, thaw, mutant
|
||||
from _vendoring.pyrsistent._helpers import freeze, thaw, mutant
|
||||
|
||||
from pyrsistent._transformations import inc, discard, rex, ny
|
||||
from _vendoring.pyrsistent._transformations import inc, discard, rex, ny
|
||||
|
||||
from pyrsistent._toolz import get_in
|
||||
from _vendoring.pyrsistent._toolz import get_in
|
||||
|
||||
|
||||
__all__ = ('pmap', 'm', 'PMap',
|
||||
|
@ -3,9 +3,9 @@
|
||||
from abc import abstractmethod, ABCMeta
|
||||
from collections.abc import Iterable
|
||||
|
||||
from pyrsistent._pmap import PMap, pmap
|
||||
from pyrsistent._pset import PSet, pset
|
||||
from pyrsistent._pvector import PythonPVector, python_pvector
|
||||
from _vendoring.pyrsistent._pmap import PMap, pmap
|
||||
from _vendoring.pyrsistent._pset import PSet, pset
|
||||
from _vendoring.pyrsistent._pvector import PythonPVector, python_pvector
|
||||
|
||||
|
||||
class CheckedType(object):
|
||||
|
@ -1,6 +1,6 @@
|
||||
import sys
|
||||
|
||||
from pyrsistent._checked_types import (
|
||||
from _vendoring.pyrsistent._checked_types import (
|
||||
CheckedPMap,
|
||||
CheckedPSet,
|
||||
CheckedPVector,
|
||||
@ -11,8 +11,8 @@
|
||||
maybe_parse_user_type,
|
||||
maybe_parse_many_user_types,
|
||||
)
|
||||
from pyrsistent._checked_types import optional as optional_type
|
||||
from pyrsistent._checked_types import wrap_invariant
|
||||
from _vendoring.pyrsistent._checked_types import optional as optional_type
|
||||
from _vendoring.pyrsistent._checked_types import wrap_invariant
|
||||
import inspect
|
||||
|
||||
PY2 = sys.version_info[0] < 3
|
||||
|
@ -1,11 +1,11 @@
|
||||
from functools import wraps
|
||||
from pyrsistent._pmap import PMap, pmap
|
||||
from pyrsistent._pset import PSet, pset
|
||||
from pyrsistent._pvector import PVector, pvector
|
||||
from _vendoring.pyrsistent._pmap import PMap, pmap
|
||||
from _vendoring.pyrsistent._pset import PSet, pset
|
||||
from _vendoring.pyrsistent._pvector import PVector, pvector
|
||||
|
||||
def freeze(o, strict=True):
|
||||
"""
|
||||
Recursively convert simple Python containers into pyrsistent versions
|
||||
Recursively convert simple Python containers into _vendoring.pyrsistent versions
|
||||
of those containers.
|
||||
|
||||
- list is converted to pvector, recursively
|
||||
@ -47,7 +47,7 @@ def freeze(o, strict=True):
|
||||
|
||||
def thaw(o, strict=True):
|
||||
"""
|
||||
Recursively convert pyrsistent containers into simple Python containers.
|
||||
Recursively convert _vendoring.pyrsistent containers into simple Python containers.
|
||||
|
||||
- pvector is converted to list, recursively
|
||||
- pmap is converted to dict, recursively on values (but not keys)
|
||||
@ -59,7 +59,7 @@ def thaw(o, strict=True):
|
||||
- thaw is called on elements of lists
|
||||
- thaw is called on values in dicts
|
||||
|
||||
>>> from pyrsistent import s, m, v
|
||||
>>> from _vendoring.pyrsistent import s, m, v
|
||||
>>> thaw(s(1, 2))
|
||||
{1, 2}
|
||||
>>> thaw(v(1, m(a=3)))
|
||||
|
@ -94,7 +94,7 @@ def set(self, **kwargs):
|
||||
print(template)
|
||||
|
||||
from collections import namedtuple
|
||||
namespace = dict(namedtuple=namedtuple, __name__='pyrsistent_immutable')
|
||||
namespace = dict(namedtuple=namedtuple, __name__='_vendoring.pyrsistent_immutable')
|
||||
try:
|
||||
exec(template, namespace)
|
||||
except SyntaxError as e:
|
||||
|
@ -1,6 +1,6 @@
|
||||
from collections.abc import Container, Iterable, Sized, Hashable
|
||||
from functools import reduce
|
||||
from pyrsistent._pmap import pmap
|
||||
from _vendoring.pyrsistent._pmap import pmap
|
||||
|
||||
|
||||
def _add_to_counters(counters, element):
|
||||
|
@ -1,8 +1,8 @@
|
||||
from pyrsistent._checked_types import (InvariantException, CheckedType, _restore_pickle, store_invariants)
|
||||
from pyrsistent._field_common import (
|
||||
from _vendoring.pyrsistent._checked_types import (InvariantException, CheckedType, _restore_pickle, store_invariants)
|
||||
from _vendoring.pyrsistent._field_common import (
|
||||
set_fields, check_type, is_field_ignore_extra_complaint, PFIELD_NO_INITIAL, serialize, check_global_invariants
|
||||
)
|
||||
from pyrsistent._transformations import transform
|
||||
from _vendoring.pyrsistent._transformations import transform
|
||||
|
||||
|
||||
def _is_pclass(bases):
|
||||
@ -41,7 +41,7 @@ class PClass(CheckedType, metaclass=PClassMeta):
|
||||
is not a PMap and hence not a collection but rather a plain Python object.
|
||||
|
||||
|
||||
More documentation and examples of PClass usage is available at https://github.com/tobgu/pyrsistent
|
||||
More documentation and examples of PClass usage is available at https://github.com/tobgu/_vendoring.pyrsistent
|
||||
"""
|
||||
def __new__(cls, **kwargs): # Support *args?
|
||||
result = super(PClass, cls).__new__(cls)
|
||||
@ -84,7 +84,7 @@ def set(self, *args, **kwargs):
|
||||
Set a field in the instance. Returns a new instance with the updated value. The original instance remains
|
||||
unmodified. Accepts key-value pairs or single string representing the field name and a value.
|
||||
|
||||
>>> from pyrsistent import PClass, field
|
||||
>>> from _vendoring.pyrsistent import PClass, field
|
||||
>>> class AClass(PClass):
|
||||
... x = field()
|
||||
...
|
||||
|
@ -1,7 +1,7 @@
|
||||
from collections.abc import Sequence, Hashable
|
||||
from itertools import islice, chain
|
||||
from numbers import Integral
|
||||
from pyrsistent._plist import plist
|
||||
from _vendoring.pyrsistent._plist import plist
|
||||
|
||||
|
||||
class PDeque(object):
|
||||
|
@ -1,7 +1,7 @@
|
||||
from collections.abc import Mapping, Hashable
|
||||
from itertools import chain
|
||||
from pyrsistent._pvector import pvector
|
||||
from pyrsistent._transformations import transform
|
||||
from _vendoring.pyrsistent._pvector import pvector
|
||||
from _vendoring.pyrsistent._transformations import transform
|
||||
|
||||
|
||||
class PMap(object):
|
||||
@ -256,7 +256,7 @@ def transform(self, *transformations):
|
||||
consists of two parts. One match expression that specifies which elements to transform
|
||||
and one transformation function that performs the actual transformation.
|
||||
|
||||
>>> from pyrsistent import freeze, ny
|
||||
>>> from _vendoring.pyrsistent import freeze, ny
|
||||
>>> news_paper = freeze({'articles': [{'author': 'Sara', 'content': 'A short article'},
|
||||
... {'author': 'Steve', 'content': 'A slightly longer article'}],
|
||||
... 'weather': {'temperature': '11C', 'wind': '5m/s'}})
|
||||
|
@ -1,8 +1,8 @@
|
||||
from pyrsistent._checked_types import CheckedType, _restore_pickle, InvariantException, store_invariants
|
||||
from pyrsistent._field_common import (
|
||||
from _vendoring.pyrsistent._checked_types import CheckedType, _restore_pickle, InvariantException, store_invariants
|
||||
from _vendoring.pyrsistent._field_common import (
|
||||
set_fields, check_type, is_field_ignore_extra_complaint, PFIELD_NO_INITIAL, serialize, check_global_invariants
|
||||
)
|
||||
from pyrsistent._pmap import PMap, pmap
|
||||
from _vendoring.pyrsistent._pmap import PMap, pmap
|
||||
|
||||
|
||||
class _PRecordMeta(type):
|
||||
@ -28,7 +28,7 @@ class PRecord(PMap, CheckedType, metaclass=_PRecordMeta):
|
||||
from PRecord. Because it is a PMap it has full support for all Mapping methods such as iteration and element
|
||||
access using subscript notation.
|
||||
|
||||
More documentation and examples of PRecord usage is available at https://github.com/tobgu/pyrsistent
|
||||
More documentation and examples of PRecord usage is available at https://github.com/tobgu/_vendoring.pyrsistent
|
||||
"""
|
||||
def __new__(cls, **kwargs):
|
||||
# Hack total! If these two special attributes exist that means we can create
|
||||
|
@ -1,6 +1,6 @@
|
||||
from collections.abc import Set, Hashable
|
||||
import sys
|
||||
from pyrsistent._pmap import pmap
|
||||
from _vendoring.pyrsistent._pmap import pmap
|
||||
|
||||
|
||||
class PSet(object):
|
||||
|
@ -2,7 +2,7 @@
|
||||
from collections.abc import Sequence, Hashable
|
||||
from numbers import Integral
|
||||
import operator
|
||||
from pyrsistent._transformations import transform
|
||||
from _vendoring.pyrsistent._transformations import transform
|
||||
|
||||
|
||||
def _bitcount(val):
|
||||
@ -626,7 +626,7 @@ def transform(self, *transformations):
|
||||
consists of two parts. One match expression that specifies which elements to transform
|
||||
and one transformation function that performs the actual transformation.
|
||||
|
||||
>>> from pyrsistent import freeze, ny
|
||||
>>> from _vendoring.pyrsistent import freeze, ny
|
||||
>>> news_paper = freeze({'articles': [{'author': 'Sara', 'content': 'A short article'},
|
||||
... {'author': 'Steve', 'content': 'A slightly longer article'}],
|
||||
... 'weather': {'temperature': '11C', 'wind': '5m/s'}})
|
||||
|
@ -56,7 +56,7 @@ def get_in(keys, coll, default=None, no_default=False):
|
||||
|
||||
``get_in`` is a generalization of ``operator.getitem`` for nested data
|
||||
structures such as dictionaries and lists.
|
||||
>>> from pyrsistent import freeze
|
||||
>>> from _vendoring.pyrsistent import freeze
|
||||
>>> transaction = freeze({'name': 'Alice',
|
||||
... 'purchase': {'items': ['Apple', 'Orange'],
|
||||
... 'costs': [0.50, 1.25]},
|
||||
|
@ -117,7 +117,7 @@ def _get_arity(f):
|
||||
|
||||
|
||||
def _update_structure(structure, kvs, path, command):
|
||||
from pyrsistent._pmap import pmap
|
||||
from _vendoring.pyrsistent._pmap import pmap
|
||||
e = structure.evolver()
|
||||
if not path and command is discard:
|
||||
# Do this in reverse to avoid index problems with vectors. See #92.
|
||||
|
@ -5,8 +5,8 @@
|
||||
|
||||
For example,
|
||||
|
||||
from pyrsistent import pvector
|
||||
from pyrsistent.typing import PVector
|
||||
from _vendoring.pyrsistent import pvector
|
||||
from _vendoring.pyrsistent.typing import PVector
|
||||
|
||||
myvector: PVector[str] = pvector(['a', 'b', 'c'])
|
||||
|
||||
|
@ -4,18 +4,18 @@
|
||||
from typing import Dict, Any # NOQA
|
||||
|
||||
_package_data = dict(
|
||||
full_package_name='ruamel.yaml',
|
||||
full_package_name='_vendoring.ruamel.yaml',
|
||||
version_info=(0, 17, 21),
|
||||
__version__='0.17.21',
|
||||
version_timestamp='2022-02-12 09:49:22',
|
||||
author='Anthon van der Neut',
|
||||
author_email='a.van.der.neut@ruamel.eu',
|
||||
description='ruamel.yaml is a YAML parser/emitter that supports roundtrip preservation of comments, seq/map flow style, and map key order', # NOQA
|
||||
description='_vendoring.ruamel.yaml is a YAML parser/emitter that supports roundtrip preservation of comments, seq/map flow style, and map key order', # NOQA
|
||||
entry_points=None,
|
||||
since=2014,
|
||||
extras_require={
|
||||
':platform_python_implementation=="CPython" and python_version<"3.11"': ['ruamel.yaml.clib>=0.2.6'], # NOQA
|
||||
'jinja2': ['ruamel.yaml.jinja2>=0.2'],
|
||||
':platform_python_implementation=="CPython" and python_version<"3.11"': ['_vendoring.ruamel.yaml.clib>=0.2.6'], # NOQA
|
||||
'_vendoring.jinja2': ['_vendoring.ruamel.yaml._vendoring.jinja2>=0.2'],
|
||||
'docs': ['ryd'],
|
||||
},
|
||||
classifiers=[
|
||||
@ -54,4 +54,4 @@
|
||||
except (ImportError, ValueError): # for Jython
|
||||
__with_libyaml__ = False
|
||||
|
||||
from ruamel.yaml.main import * # NOQA
|
||||
from _vendoring.ruamel.yaml.main import * # NOQA
|
||||
|
@ -10,10 +10,10 @@
|
||||
import copy
|
||||
|
||||
|
||||
from ruamel.yaml.compat import ordereddict
|
||||
from ruamel.yaml.compat import MutableSliceableSequence, _F, nprintf # NOQA
|
||||
from ruamel.yaml.scalarstring import ScalarString
|
||||
from ruamel.yaml.anchor import Anchor
|
||||
from _vendoring.ruamel.yaml.compat import ordereddict
|
||||
from _vendoring.ruamel.yaml.compat import MutableSliceableSequence, _F, nprintf # NOQA
|
||||
from _vendoring.ruamel.yaml.scalarstring import ScalarString
|
||||
from _vendoring.ruamel.yaml.anchor import Anchor
|
||||
|
||||
from collections.abc import MutableSet, Sized, Set, Mapping
|
||||
|
||||
@ -375,8 +375,8 @@ def yaml_set_comment_before_after_key(
        """
        expects comment (before/after) to be without `#` and possible have multiple lines
        """
        from ruamel.yaml.error import CommentMark
        from ruamel.yaml.tokens import CommentToken
        from _vendoring.ruamel.yaml.error import CommentMark
        from _vendoring.ruamel.yaml.tokens import CommentToken

        def comment_token(s, mark):
            # type: (Any, Any) -> Any
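For context, a small usage sketch of the method this hunk touches, assuming a plain ruamel.yaml install (inside Spack the import prefix is _vendoring.ruamel.yaml, as the diff shows):

    import sys
    from ruamel.yaml import YAML
    from ruamel.yaml.comments import CommentedMap

    data = CommentedMap()
    data["compilers"] = ["gcc"]
    # The comment text is given without a leading '#'; ruamel.yaml adds it.
    data.yaml_set_comment_before_after_key(
        "compilers", before="toolchains available on this host", indent=0
    )
    YAML().dump(data, sys.stdout)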
|
||||
|
@ -169,7 +169,7 @@ def fp(self, mode='a'):
|
||||
|
||||
|
||||
nprint = Nprint()
|
||||
nprintf = Nprint('/var/tmp/ruamel.yaml.log')
|
||||
nprintf = Nprint('/var/tmp/_vendoring.ruamel.yaml.log')
|
||||
|
||||
# char checkers following production rules
|
||||
|
||||
@ -197,9 +197,9 @@ def check_anchorname_char(ch):
|
||||
def version_tnf(t1, t2=None):
|
||||
# type: (Any, Any) -> Any
|
||||
"""
|
||||
return True if ruamel.yaml version_info < t1, None if t2 is specified and bigger else False
|
||||
return True if _vendoring.ruamel.yaml version_info < t1, None if t2 is specified and bigger else False
|
||||
"""
|
||||
from ruamel.yaml import version_info # NOQA
|
||||
from _vendoring.ruamel.yaml import version_info # NOQA
|
||||
|
||||
if version_info < t1:
|
||||
return True
|
||||
|
@ -2,10 +2,10 @@
|
||||
|
||||
import warnings
|
||||
|
||||
from ruamel.yaml.error import MarkedYAMLError, ReusedAnchorWarning
|
||||
from ruamel.yaml.compat import _F, nprint, nprintf # NOQA
|
||||
from _vendoring.ruamel.yaml.error import MarkedYAMLError, ReusedAnchorWarning
|
||||
from _vendoring.ruamel.yaml.compat import _F, nprint, nprintf # NOQA
|
||||
|
||||
from ruamel.yaml.events import (
|
||||
from _vendoring.ruamel.yaml.events import (
|
||||
StreamStartEvent,
|
||||
StreamEndEvent,
|
||||
MappingStartEvent,
|
||||
@ -15,7 +15,7 @@
|
||||
AliasEvent,
|
||||
ScalarEvent,
|
||||
)
|
||||
from ruamel.yaml.nodes import MappingNode, ScalarNode, SequenceNode
|
||||
from _vendoring.ruamel.yaml.nodes import MappingNode, ScalarNode, SequenceNode
|
||||
|
||||
if False: # MYPY
|
||||
from typing import Any, Dict, Optional, List # NOQA
|
||||
|
@ -2,7 +2,7 @@
|
||||
|
||||
import warnings
|
||||
|
||||
from ruamel.yaml.util import configobj_walker as new_configobj_walker
|
||||
from _vendoring.ruamel.yaml.util import configobj_walker as new_configobj_walker
|
||||
|
||||
if False: # MYPY
|
||||
from typing import Any # NOQA
|
||||
@ -10,5 +10,5 @@
|
||||
|
||||
def configobj_walker(cfg):
|
||||
# type: (Any) -> Any
|
||||
warnings.warn('configobj_walker has moved to ruamel.yaml.util, please update your code')
|
||||
warnings.warn('configobj_walker has moved to _vendoring.ruamel.yaml.util, please update your code')
|
||||
return new_configobj_walker(cfg)
|
||||
|
@ -9,29 +9,29 @@
from collections.abc import Hashable, MutableSequence, MutableMapping

# fmt: off
from ruamel.yaml.error import (MarkedYAMLError, MarkedYAMLFutureWarning,
from _vendoring.ruamel.yaml.error import (MarkedYAMLError, MarkedYAMLFutureWarning,
MantissaNoDotYAML1_1Warning)
from ruamel.yaml.nodes import * # NOQA
from ruamel.yaml.nodes import (SequenceNode, MappingNode, ScalarNode)
from ruamel.yaml.compat import (_F, builtins_module, # NOQA
from _vendoring.ruamel.yaml.nodes import * # NOQA
from _vendoring.ruamel.yaml.nodes import (SequenceNode, MappingNode, ScalarNode)
from _vendoring.ruamel.yaml.compat import (_F, builtins_module, # NOQA
nprint, nprintf, version_tnf)
from ruamel.yaml.compat import ordereddict
from _vendoring.ruamel.yaml.compat import ordereddict

from ruamel.yaml.comments import * # NOQA
from ruamel.yaml.comments import (CommentedMap, CommentedOrderedMap, CommentedSet,
from _vendoring.ruamel.yaml.comments import * # NOQA
from _vendoring.ruamel.yaml.comments import (CommentedMap, CommentedOrderedMap, CommentedSet,
CommentedKeySeq, CommentedSeq, TaggedScalar,
CommentedKeyMap,
C_KEY_PRE, C_KEY_EOL, C_KEY_POST,
C_VALUE_PRE, C_VALUE_EOL, C_VALUE_POST,
)
from ruamel.yaml.scalarstring import (SingleQuotedScalarString, DoubleQuotedScalarString,
from _vendoring.ruamel.yaml.scalarstring import (SingleQuotedScalarString, DoubleQuotedScalarString,
LiteralScalarString, FoldedScalarString,
PlainScalarString, ScalarString,)
from ruamel.yaml.scalarint import ScalarInt, BinaryInt, OctalInt, HexInt, HexCapsInt
from ruamel.yaml.scalarfloat import ScalarFloat
from ruamel.yaml.scalarbool import ScalarBoolean
from ruamel.yaml.timestamp import TimeStamp
from ruamel.yaml.util import timestamp_regexp, create_timestamp
from _vendoring.ruamel.yaml.scalarint import ScalarInt, BinaryInt, OctalInt, HexInt, HexCapsInt
from _vendoring.ruamel.yaml.scalarfloat import ScalarFloat
from _vendoring.ruamel.yaml.scalarbool import ScalarBoolean
from _vendoring.ruamel.yaml.timestamp import TimeStamp
from _vendoring.ruamel.yaml.util import timestamp_regexp, create_timestamp

if False: # MYPY
from typing import Any, Dict, List, Set, Generator, Union, Optional # NOQA
@ -1282,7 +1282,7 @@ def construct_rt_sequence(self, node, seqtyp, deep=False):
if node.comment:
nprintf('nc3', node.comment)
if node.anchor:
from ruamel.yaml.serializer import templated_id
from _vendoring.ruamel.yaml.serializer import templated_id

if not templated_id(node.anchor):
seqtyp.yaml_set_anchor(node.anchor)
@ -1418,7 +1418,7 @@ def construct_mapping(self, node, maptyp, deep=False): # type: ignore
for cmnt in self.comments(node.comment, 0):
maptyp.ca.pre.append(cmnt)
if node.anchor:
from ruamel.yaml.serializer import templated_id
from _vendoring.ruamel.yaml.serializer import templated_id

if not templated_id(node.anchor):
maptyp.yaml_set_anchor(node.anchor)
@ -1517,7 +1517,7 @@ def construct_setting(self, node, typ, deep=False):
if node.comment:
nprintf('nc6', node.comment)
if node.anchor:
from ruamel.yaml.serializer import templated_id
from _vendoring.ruamel.yaml.serializer import templated_id

if not templated_id(node.anchor):
typ.yaml_set_anchor(node.anchor)
@ -1592,8 +1592,8 @@ def construct_yaml_object(self, node, cls):
else:
data.__dict__.update(state)
if node.anchor:
from ruamel.yaml.serializer import templated_id
from ruamel.yaml.anchor import Anchor
from _vendoring.ruamel.yaml.serializer import templated_id
from _vendoring.ruamel.yaml.anchor import Anchor

if not templated_id(node.anchor):
if not hasattr(data, Anchor.attrib):
@ -1691,7 +1691,7 @@ def construct_undefined(self, node):
data.yaml_set_tag(node.tag)
yield data
if node.anchor:
from ruamel.yaml.serializer import templated_id
from _vendoring.ruamel.yaml.serializer import templated_id

if not templated_id(node.anchor):
data.yaml_set_anchor(node.anchor)
@ -1704,7 +1704,7 @@ def construct_undefined(self, node):
data2.yaml_set_tag(node.tag)
yield data2
if node.anchor:
from ruamel.yaml.serializer import templated_id
from _vendoring.ruamel.yaml.serializer import templated_id

if not templated_id(node.anchor):
data2.yaml_set_anchor(node.anchor, always_dump=True)
@ -1719,7 +1719,7 @@ def construct_undefined(self, node):
data3.yaml_set_tag(node.tag)
yield data3
if node.anchor:
from ruamel.yaml.serializer import templated_id
from _vendoring.ruamel.yaml.serializer import templated_id

if not templated_id(node.anchor):
data3.yaml_set_anchor(node.anchor)
@ -1,14 +1,14 @@
# coding: utf-8

from _ruamel_yaml import CParser, CEmitter # type: ignore
from __vendoring.ruamel.yaml import CParser, CEmitter # type: ignore

from ruamel.yaml.constructor import Constructor, BaseConstructor, SafeConstructor
from ruamel.yaml.representer import Representer, SafeRepresenter, BaseRepresenter
from ruamel.yaml.resolver import Resolver, BaseResolver
from _vendoring.ruamel.yaml.constructor import Constructor, BaseConstructor, SafeConstructor
from _vendoring.ruamel.yaml.representer import Representer, SafeRepresenter, BaseRepresenter
from _vendoring.ruamel.yaml.resolver import Resolver, BaseResolver

if False: # MYPY
from typing import Any, Union, Optional # NOQA
from ruamel.yaml.compat import StreamTextType, StreamType, VersionType # NOQA
from _vendoring.ruamel.yaml.compat import StreamTextType, StreamType, VersionType # NOQA

__all__ = ['CBaseLoader', 'CSafeLoader', 'CLoader', 'CBaseDumper', 'CSafeDumper', 'CDumper']
@ -1,18 +1,18 @@
# coding: utf-8

from ruamel.yaml.emitter import Emitter
from ruamel.yaml.serializer import Serializer
from ruamel.yaml.representer import (
from _vendoring.ruamel.yaml.emitter import Emitter
from _vendoring.ruamel.yaml.serializer import Serializer
from _vendoring.ruamel.yaml.representer import (
Representer,
SafeRepresenter,
BaseRepresenter,
RoundTripRepresenter,
)
from ruamel.yaml.resolver import Resolver, BaseResolver, VersionedResolver
from _vendoring.ruamel.yaml.resolver import Resolver, BaseResolver, VersionedResolver

if False: # MYPY
from typing import Any, Dict, List, Union, Optional # NOQA
from ruamel.yaml.compat import StreamType, VersionType # NOQA
from _vendoring.ruamel.yaml.compat import StreamType, VersionType # NOQA

__all__ = ['BaseDumper', 'SafeDumper', 'Dumper', 'RoundTripDumper']
@ -8,17 +8,17 @@
# mapping ::= MAPPING-START (node node)* MAPPING-END

import sys
from ruamel.yaml.error import YAMLError, YAMLStreamError
from ruamel.yaml.events import * # NOQA
from _vendoring.ruamel.yaml.error import YAMLError, YAMLStreamError
from _vendoring.ruamel.yaml.events import * # NOQA

# fmt: off
from ruamel.yaml.compat import _F, nprint, dbg, DBG_EVENT, \
from _vendoring.ruamel.yaml.compat import _F, nprint, dbg, DBG_EVENT, \
check_anchorname_char, nprintf # NOQA
# fmt: on

if False: # MYPY
from typing import Any, Dict, List, Union, Text, Tuple, Optional # NOQA
from ruamel.yaml.compat import StreamType # NOQA
from _vendoring.ruamel.yaml.compat import StreamType # NOQA

__all__ = ['Emitter', 'EmitterError']
@ -3,7 +3,7 @@
import warnings
import textwrap

from ruamel.yaml.compat import _F
from _vendoring.ruamel.yaml.compat import _F

if False: # MYPY
from typing import Any, Dict, Optional, List, Text # NOQA
@ -240,11 +240,11 @@ class ReusedAnchorWarning(YAMLWarning):
class UnsafeLoaderWarning(YAMLWarning):
text = """
The default 'Loader' for 'load(stream)' without further arguments can be unsafe.
Use 'load(stream, Loader=ruamel.yaml.Loader)' explicitly if that is OK.
Use 'load(stream, Loader=_vendoring.ruamel.yaml.Loader)' explicitly if that is OK.
Alternatively include the following in your code:

import warnings
warnings.simplefilter('ignore', ruamel.yaml.error.UnsafeLoaderWarning)
warnings.simplefilter('ignore', _vendoring.ruamel.yaml.error.UnsafeLoaderWarning)

In most other cases you should consider using 'safe_load(stream)'"""
pass
@ -273,7 +273,7 @@ def __str__(self):
or alternatively include the following in your code:

import warnings
warnings.simplefilter('ignore', ruamel.yaml.error.MantissaNoDotYAML1_1Warning)
warnings.simplefilter('ignore', _vendoring.ruamel.yaml.error.MantissaNoDotYAML1_1Warning)

""".format(
self.flt, line, col
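
The relocated warning classes keep their upstream behaviour; only the module path changes, so silencing `UnsafeLoaderWarning` now means filtering on the vendored module, exactly as the warning text above suggests. A small sketch using the legacy PyYAML-style `load` (assumes the `_vendoring` package is importable)::

    import warnings
    import _vendoring.ruamel.yaml as vyaml

    warnings.simplefilter('ignore', vyaml.error.UnsafeLoaderWarning)
    data = vyaml.load('a: 1')  # would emit UnsafeLoaderWarning without the filter
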
@ -1,6 +1,6 @@
# coding: utf-8

from ruamel.yaml.compat import _F
from _vendoring.ruamel.yaml.compat import _F

# Abstract classes.
@ -1,20 +1,20 @@
# coding: utf-8

from ruamel.yaml.reader import Reader
from ruamel.yaml.scanner import Scanner, RoundTripScanner
from ruamel.yaml.parser import Parser, RoundTripParser
from ruamel.yaml.composer import Composer
from ruamel.yaml.constructor import (
from _vendoring.ruamel.yaml.reader import Reader
from _vendoring.ruamel.yaml.scanner import Scanner, RoundTripScanner
from _vendoring.ruamel.yaml.parser import Parser, RoundTripParser
from _vendoring.ruamel.yaml.composer import Composer
from _vendoring.ruamel.yaml.constructor import (
BaseConstructor,
SafeConstructor,
Constructor,
RoundTripConstructor,
)
from ruamel.yaml.resolver import VersionedResolver
from _vendoring.ruamel.yaml.resolver import VersionedResolver

if False: # MYPY
from typing import Any, Dict, List, Union, Optional # NOQA
from ruamel.yaml.compat import StreamTextType, VersionType # NOQA
from _vendoring.ruamel.yaml.compat import StreamTextType, VersionType # NOQA

__all__ = ['BaseLoader', 'SafeLoader', 'Loader', 'RoundTripLoader']
128
lib/spack/external/_vendoring/ruamel/yaml/main.py
vendored
@ -7,39 +7,39 @@
from importlib import import_module


import ruamel.yaml
from ruamel.yaml.error import UnsafeLoaderWarning, YAMLError # NOQA
import _vendoring.ruamel.yaml
from _vendoring.ruamel.yaml.error import UnsafeLoaderWarning, YAMLError # NOQA

from ruamel.yaml.tokens import * # NOQA
from ruamel.yaml.events import * # NOQA
from ruamel.yaml.nodes import * # NOQA
from _vendoring.ruamel.yaml.tokens import * # NOQA
from _vendoring.ruamel.yaml.events import * # NOQA
from _vendoring.ruamel.yaml.nodes import * # NOQA

from ruamel.yaml.loader import BaseLoader, SafeLoader, Loader, RoundTripLoader # NOQA
from ruamel.yaml.dumper import BaseDumper, SafeDumper, Dumper, RoundTripDumper # NOQA
from ruamel.yaml.compat import StringIO, BytesIO, with_metaclass, nprint, nprintf # NOQA
from ruamel.yaml.resolver import VersionedResolver, Resolver # NOQA
from ruamel.yaml.representer import (
from _vendoring.ruamel.yaml.loader import BaseLoader, SafeLoader, Loader, RoundTripLoader # NOQA
from _vendoring.ruamel.yaml.dumper import BaseDumper, SafeDumper, Dumper, RoundTripDumper # NOQA
from _vendoring.ruamel.yaml.compat import StringIO, BytesIO, with_metaclass, nprint, nprintf # NOQA
from _vendoring.ruamel.yaml.resolver import VersionedResolver, Resolver # NOQA
from _vendoring.ruamel.yaml.representer import (
BaseRepresenter,
SafeRepresenter,
Representer,
RoundTripRepresenter,
)
from ruamel.yaml.constructor import (
from _vendoring.ruamel.yaml.constructor import (
BaseConstructor,
SafeConstructor,
Constructor,
RoundTripConstructor,
)
from ruamel.yaml.loader import Loader as UnsafeLoader
from ruamel.yaml.comments import CommentedMap, CommentedSeq, C_PRE
from _vendoring.ruamel.yaml.loader import Loader as UnsafeLoader
from _vendoring.ruamel.yaml.comments import CommentedMap, CommentedSeq, C_PRE

if False: # MYPY
from typing import List, Set, Dict, Union, Any, Callable, Optional, Text # NOQA
from ruamel.yaml.compat import StreamType, StreamTextType, VersionType # NOQA
from _vendoring.ruamel.yaml.compat import StreamType, StreamTextType, VersionType # NOQA
from pathlib import Path

try:
from _ruamel_yaml import CParser, CEmitter # type: ignore
from __vendoring.ruamel.yaml import CParser, CEmitter # type: ignore
except: # NOQA
CParser = CEmitter = None

@ -74,7 +74,7 @@ def __init__(self, *, typ=None, pure=False, output=None, plug_ins=None): # inpu
for pu in ([] if plug_ins is None else plug_ins) + self.official_plug_ins():
file_name = pu.replace(os.sep, '.')
self.plug_ins.append(import_module(file_name))
self.Resolver = ruamel.yaml.resolver.VersionedResolver # type: Any
self.Resolver = _vendoring.ruamel.yaml.resolver.VersionedResolver # type: Any
self.allow_unicode = True
self.Reader = None # type: Any
self.Representer = None # type: Any
@ -89,37 +89,37 @@ def __init__(self, *, typ=None, pure=False, output=None, plug_ins=None): # inpu
setup_rt = True
elif 'safe' in self.typ:
self.Emitter = (
ruamel.yaml.emitter.Emitter if pure or CEmitter is None else CEmitter
_vendoring.ruamel.yaml.emitter.Emitter if pure or CEmitter is None else CEmitter
)
self.Representer = ruamel.yaml.representer.SafeRepresenter
self.Parser = ruamel.yaml.parser.Parser if pure or CParser is None else CParser
self.Composer = ruamel.yaml.composer.Composer
self.Constructor = ruamel.yaml.constructor.SafeConstructor
self.Representer = _vendoring.ruamel.yaml.representer.SafeRepresenter
self.Parser = _vendoring.ruamel.yaml.parser.Parser if pure or CParser is None else CParser
self.Composer = _vendoring.ruamel.yaml.composer.Composer
self.Constructor = _vendoring.ruamel.yaml.constructor.SafeConstructor
elif 'base' in self.typ:
self.Emitter = ruamel.yaml.emitter.Emitter
self.Representer = ruamel.yaml.representer.BaseRepresenter
self.Parser = ruamel.yaml.parser.Parser if pure or CParser is None else CParser
self.Composer = ruamel.yaml.composer.Composer
self.Constructor = ruamel.yaml.constructor.BaseConstructor
self.Emitter = _vendoring.ruamel.yaml.emitter.Emitter
self.Representer = _vendoring.ruamel.yaml.representer.BaseRepresenter
self.Parser = _vendoring.ruamel.yaml.parser.Parser if pure or CParser is None else CParser
self.Composer = _vendoring.ruamel.yaml.composer.Composer
self.Constructor = _vendoring.ruamel.yaml.constructor.BaseConstructor
elif 'unsafe' in self.typ:
self.Emitter = (
ruamel.yaml.emitter.Emitter if pure or CEmitter is None else CEmitter
_vendoring.ruamel.yaml.emitter.Emitter if pure or CEmitter is None else CEmitter
)
self.Representer = ruamel.yaml.representer.Representer
self.Parser = ruamel.yaml.parser.Parser if pure or CParser is None else CParser
self.Composer = ruamel.yaml.composer.Composer
self.Constructor = ruamel.yaml.constructor.Constructor
self.Representer = _vendoring.ruamel.yaml.representer.Representer
self.Parser = _vendoring.ruamel.yaml.parser.Parser if pure or CParser is None else CParser
self.Composer = _vendoring.ruamel.yaml.composer.Composer
self.Constructor = _vendoring.ruamel.yaml.constructor.Constructor
elif 'rtsc' in self.typ:
self.default_flow_style = False
# no optimized rt-dumper yet
self.Emitter = ruamel.yaml.emitter.Emitter
self.Serializer = ruamel.yaml.serializer.Serializer
self.Representer = ruamel.yaml.representer.RoundTripRepresenter
self.Scanner = ruamel.yaml.scanner.RoundTripScannerSC
self.Emitter = _vendoring.ruamel.yaml.emitter.Emitter
self.Serializer = _vendoring.ruamel.yaml.serializer.Serializer
self.Representer = _vendoring.ruamel.yaml.representer.RoundTripRepresenter
self.Scanner = _vendoring.ruamel.yaml.scanner.RoundTripScannerSC
# no optimized rt-parser yet
self.Parser = ruamel.yaml.parser.RoundTripParserSC
self.Composer = ruamel.yaml.composer.Composer
self.Constructor = ruamel.yaml.constructor.RoundTripConstructor
self.Parser = _vendoring.ruamel.yaml.parser.RoundTripParserSC
self.Composer = _vendoring.ruamel.yaml.composer.Composer
self.Constructor = _vendoring.ruamel.yaml.constructor.RoundTripConstructor
self.comment_handling = C_PRE
else:
setup_rt = True
@ -127,14 +127,14 @@ def __init__(self, *, typ=None, pure=False, output=None, plug_ins=None): # inpu
if setup_rt:
self.default_flow_style = False
# no optimized rt-dumper yet
self.Emitter = ruamel.yaml.emitter.Emitter
self.Serializer = ruamel.yaml.serializer.Serializer
self.Representer = ruamel.yaml.representer.RoundTripRepresenter
self.Scanner = ruamel.yaml.scanner.RoundTripScanner
self.Emitter = _vendoring.ruamel.yaml.emitter.Emitter
self.Serializer = _vendoring.ruamel.yaml.serializer.Serializer
self.Representer = _vendoring.ruamel.yaml.representer.RoundTripRepresenter
self.Scanner = _vendoring.ruamel.yaml.scanner.RoundTripScanner
# no optimized rt-parser yet
self.Parser = ruamel.yaml.parser.RoundTripParser
self.Composer = ruamel.yaml.composer.Composer
self.Constructor = ruamel.yaml.constructor.RoundTripConstructor
self.Parser = _vendoring.ruamel.yaml.parser.RoundTripParser
self.Composer = _vendoring.ruamel.yaml.composer.Composer
self.Constructor = _vendoring.ruamel.yaml.constructor.RoundTripConstructor
del setup_rt
self.stream = None
self.canonical = None
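
The `__init__` branches above only swap the dotted prefix: each `typ` still selects the same emitter, parser and constructor classes, now resolved through `_vendoring.ruamel.yaml.*`. A minimal usage sketch for the pure-Python 'safe' branch (assumes `lib/spack/external` is on `sys.path`; the YAML snippet is illustrative)::

    import sys
    from _vendoring.ruamel.yaml import YAML

    yaml = YAML(typ='safe', pure=True)  # takes the SafeRepresenter/SafeConstructor branch above
    data = yaml.load('packages:\n  all:\n    compiler: [gcc]\n')
    yaml.dump(data, sys.stdout)
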
@ -477,20 +477,20 @@ def get_constructor_parser(self, stream):
"""
if self.Parser is not CParser:
if self.Reader is None:
self.Reader = ruamel.yaml.reader.Reader
self.Reader = _vendoring.ruamel.yaml.reader.Reader
if self.Scanner is None:
self.Scanner = ruamel.yaml.scanner.Scanner
self.Scanner = _vendoring.ruamel.yaml.scanner.Scanner
self.reader.stream = stream
else:
if self.Reader is not None:
if self.Scanner is None:
self.Scanner = ruamel.yaml.scanner.Scanner
self.Parser = ruamel.yaml.parser.Parser
self.Scanner = _vendoring.ruamel.yaml.scanner.Scanner
self.Parser = _vendoring.ruamel.yaml.parser.Parser
self.reader.stream = stream
elif self.Scanner is not None:
if self.Reader is None:
self.Reader = ruamel.yaml.reader.Reader
self.Parser = ruamel.yaml.parser.Parser
self.Reader = _vendoring.ruamel.yaml.reader.Reader
self.Parser = _vendoring.ruamel.yaml.parser.Parser
self.reader.stream = stream
else:
# combined C level reader>scanner>parser
@ -498,8 +498,8 @@ def get_constructor_parser(self, stream):
# if you just initialise the CParser, to much of resolver.py
# is actually used
rslvr = self.Resolver
# if rslvr is ruamel.yaml.resolver.VersionedResolver:
# rslvr = ruamel.yaml.resolver.Resolver
# if rslvr is _vendoring.ruamel.yaml.resolver.VersionedResolver:
# rslvr = _vendoring.ruamel.yaml.resolver.Resolver

class XLoader(self.Parser, self.Constructor, rslvr): # type: ignore
def __init__(selfx, stream, version=self.version, preserve_quotes=None):
@ -640,7 +640,7 @@ def get_serializer_representer_emitter(self, stream, tlca):
# we have only .Serializer to deal with (vs .Reader & .Scanner), much simpler
if self.Emitter is not CEmitter:
if self.Serializer is None:
self.Serializer = ruamel.yaml.serializer.Serializer
self.Serializer = _vendoring.ruamel.yaml.serializer.Serializer
self.emitter.stream = stream
self.emitter.top_level_colon_align = tlca
if self.scalar_after_indicator is not None:
@ -648,7 +648,7 @@ def get_serializer_representer_emitter(self, stream, tlca):
return self.serializer, self.representer, self.emitter
if self.Serializer is not None:
# cannot set serializer with CEmitter
self.Emitter = ruamel.yaml.emitter.Emitter
self.Emitter = _vendoring.ruamel.yaml.emitter.Emitter
self.emitter.stream = stream
self.emitter.top_level_colon_align = tlca
if self.scalar_after_indicator is not None:
@ -657,9 +657,9 @@ def get_serializer_representer_emitter(self, stream, tlca):
# C routines

rslvr = (
ruamel.yaml.resolver.BaseResolver
_vendoring.ruamel.yaml.resolver.BaseResolver
if 'base' in self.typ
else ruamel.yaml.resolver.Resolver
else _vendoring.ruamel.yaml.resolver.Resolver
)

class XDumper(CEmitter, self.Representer, rslvr): # type: ignore
@ -743,7 +743,7 @@ def official_plug_ins(self):
single file installers that are not properly emulating a file-system (issue 324)
no plug-ins will be found. If any are packaged, you know which file that are
and you can explicitly provide it during instantiation:
yaml = ruamel.yaml.YAML(plug_ins=['ruamel/yaml/jinja2/__plug_in__'])
yaml = _vendoring.ruamel.yaml.YAML(plug_ins=['_vendoring.ruamel.yaml/_vendoring.jinja2/__plug_in__'])
"""
try:
bd = os.path.dirname(__file__)
@ -982,7 +982,7 @@ def f_y(constructor, node):
########################################################################################
def warn_deprecation(fun, method, arg=''):
# type: (Any, Any, str) -> None
from ruamel.yaml.compat import _F
from _vendoring.ruamel.yaml.compat import _F

warnings.warn(
_F(
@ -1469,7 +1469,7 @@ def add_implicit_resolver(
if hasattr(Loader, 'add_implicit_resolver'):
Loader.add_implicit_resolver(tag, regexp, first)
elif issubclass(
Loader, (BaseLoader, SafeLoader, ruamel.yaml.loader.Loader, RoundTripLoader)
Loader, (BaseLoader, SafeLoader, _vendoring.ruamel.yaml.loader.Loader, RoundTripLoader)
):
Resolver.add_implicit_resolver(tag, regexp, first)
else:
@ -1478,7 +1478,7 @@ def add_implicit_resolver(
if hasattr(Dumper, 'add_implicit_resolver'):
Dumper.add_implicit_resolver(tag, regexp, first)
elif issubclass(
Dumper, (BaseDumper, SafeDumper, ruamel.yaml.dumper.Dumper, RoundTripDumper)
Dumper, (BaseDumper, SafeDumper, _vendoring.ruamel.yaml.dumper.Dumper, RoundTripDumper)
):
Resolver.add_implicit_resolver(tag, regexp, first)
else:
@ -1501,7 +1501,7 @@ def add_path_resolver(tag, path, kind=None, Loader=None, Dumper=None, resolver=R
if hasattr(Loader, 'add_path_resolver'):
Loader.add_path_resolver(tag, path, kind)
elif issubclass(
Loader, (BaseLoader, SafeLoader, ruamel.yaml.loader.Loader, RoundTripLoader)
Loader, (BaseLoader, SafeLoader, _vendoring.ruamel.yaml.loader.Loader, RoundTripLoader)
):
Resolver.add_path_resolver(tag, path, kind)
else:
@ -1510,7 +1510,7 @@ def add_path_resolver(tag, path, kind=None, Loader=None, Dumper=None, resolver=R
if hasattr(Dumper, 'add_path_resolver'):
Dumper.add_path_resolver(tag, path, kind)
elif issubclass(
Dumper, (BaseDumper, SafeDumper, ruamel.yaml.dumper.Dumper, RoundTripDumper)
Dumper, (BaseDumper, SafeDumper, _vendoring.ruamel.yaml.dumper.Dumper, RoundTripDumper)
):
Resolver.add_path_resolver(tag, path, kind)
else:
@ -1560,7 +1560,7 @@ def add_multi_constructor(tag_prefix, multi_constructor, Loader=None, constructo
BaseConstructor.add_multi_constructor(tag_prefix, multi_constructor)
elif issubclass(Loader, SafeLoader):
SafeConstructor.add_multi_constructor(tag_prefix, multi_constructor)
elif issubclass(Loader, ruamel.yaml.loader.Loader):
elif issubclass(Loader, _vendoring.ruamel.yaml.loader.Loader):
Constructor.add_multi_constructor(tag_prefix, multi_constructor)
elif issubclass(Loader, RoundTripLoader):
RoundTripConstructor.add_multi_constructor(tag_prefix, multi_constructor)
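
These module-level helpers keep the PyYAML-compatible registration API; only the Loader and Dumper classes they test against now come from the vendored package. A sketch of registering an implicit resolver through them (the `!semver` tag and the regular expression are hypothetical, not part of the diff)::

    import re
    import _vendoring.ruamel.yaml as vyaml

    pattern = re.compile(r'^\d+\.\d+\.\d+$')
    vyaml.add_implicit_resolver('!semver', pattern, first=list('0123456789'),
                                Loader=vyaml.SafeLoader, Dumper=vyaml.SafeDumper)
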
@ -2,7 +2,7 @@

import sys

from ruamel.yaml.compat import _F
from _vendoring.ruamel.yaml.compat import _F

if False: # MYPY
from typing import Dict, Any, Text # NOQA
@ -74,13 +74,13 @@
# and for Jython too


from ruamel.yaml.error import MarkedYAMLError
from ruamel.yaml.tokens import * # NOQA
from ruamel.yaml.events import * # NOQA
from ruamel.yaml.scanner import Scanner, RoundTripScanner, ScannerError # NOQA
from ruamel.yaml.scanner import BlankLineComment
from ruamel.yaml.comments import C_PRE, C_POST, C_SPLIT_ON_FIRST_BLANK
from ruamel.yaml.compat import _F, nprint, nprintf # NOQA
from _vendoring.ruamel.yaml.error import MarkedYAMLError
from _vendoring.ruamel.yaml.tokens import * # NOQA
from _vendoring.ruamel.yaml.events import * # NOQA
from _vendoring.ruamel.yaml.scanner import Scanner, RoundTripScanner, ScannerError # NOQA
from _vendoring.ruamel.yaml.scanner import BlankLineComment
from _vendoring.ruamel.yaml.comments import C_PRE, C_POST, C_SPLIT_ON_FIRST_BLANK
from _vendoring.ruamel.yaml.compat import _F, nprint, nprintf # NOQA

if False: # MYPY
from typing import Any, Dict, Optional, List, Optional # NOQA
@ -21,13 +21,13 @@

import codecs

from ruamel.yaml.error import YAMLError, FileMark, StringMark, YAMLStreamError
from ruamel.yaml.compat import _F # NOQA
from ruamel.yaml.util import RegExp
from _vendoring.ruamel.yaml.error import YAMLError, FileMark, StringMark, YAMLStreamError
from _vendoring.ruamel.yaml.compat import _F # NOQA
from _vendoring.ruamel.yaml.util import RegExp

if False: # MYPY
from typing import Any, Dict, Optional, List, Union, Text, Tuple, Optional # NOQA
# from ruamel.yaml.compat import StreamTextType # NOQA
# from _vendoring.ruamel.yaml.compat import StreamTextType # NOQA

__all__ = ['Reader', 'ReaderError']
@ -1,17 +1,17 @@
# coding: utf-8

from ruamel.yaml.error import * # NOQA
from ruamel.yaml.nodes import * # NOQA
from ruamel.yaml.compat import ordereddict
from ruamel.yaml.compat import _F, nprint, nprintf # NOQA
from ruamel.yaml.scalarstring import (
from _vendoring.ruamel.yaml.error import * # NOQA
from _vendoring.ruamel.yaml.nodes import * # NOQA
from _vendoring.ruamel.yaml.compat import ordereddict
from _vendoring.ruamel.yaml.compat import _F, nprint, nprintf # NOQA
from _vendoring.ruamel.yaml.scalarstring import (
LiteralScalarString,
FoldedScalarString,
SingleQuotedScalarString,
DoubleQuotedScalarString,
PlainScalarString,
)
from ruamel.yaml.comments import (
from _vendoring.ruamel.yaml.comments import (
CommentedMap,
CommentedOrderedMap,
CommentedSeq,
@ -22,11 +22,11 @@
merge_attrib,
TaggedScalar,
)
from ruamel.yaml.scalarint import ScalarInt, BinaryInt, OctalInt, HexInt, HexCapsInt
from ruamel.yaml.scalarfloat import ScalarFloat
from ruamel.yaml.scalarbool import ScalarBoolean
from ruamel.yaml.timestamp import TimeStamp
from ruamel.yaml.anchor import Anchor
from _vendoring.ruamel.yaml.scalarint import ScalarInt, BinaryInt, OctalInt, HexInt, HexCapsInt
from _vendoring.ruamel.yaml.scalarfloat import ScalarFloat
from _vendoring.ruamel.yaml.scalarbool import ScalarBoolean
from _vendoring.ruamel.yaml.timestamp import TimeStamp
from _vendoring.ruamel.yaml.anchor import Anchor

import datetime
import sys
@ -4,12 +4,12 @@

if False: # MYPY
from typing import Any, Dict, List, Union, Text, Optional # NOQA
from ruamel.yaml.compat import VersionType # NOQA
from _vendoring.ruamel.yaml.compat import VersionType # NOQA

from ruamel.yaml.compat import _DEFAULT_YAML_VERSION, _F # NOQA
from ruamel.yaml.error import * # NOQA
from ruamel.yaml.nodes import MappingNode, ScalarNode, SequenceNode # NOQA
from ruamel.yaml.util import RegExp # NOQA
from _vendoring.ruamel.yaml.compat import _DEFAULT_YAML_VERSION, _F # NOQA
from _vendoring.ruamel.yaml.error import * # NOQA
from _vendoring.ruamel.yaml.nodes import MappingNode, ScalarNode, SequenceNode # NOQA
from _vendoring.ruamel.yaml.util import RegExp # NOQA

__all__ = ['BaseResolver', 'Resolver', 'VersionedResolver']
@ -9,7 +9,7 @@
You can use these in an if statement, but not when testing equivalence
"""

from ruamel.yaml.anchor import Anchor
from _vendoring.ruamel.yaml.anchor import Anchor

if False: # MYPY
from typing import Text, Any, Dict, List # NOQA
@ -1,7 +1,7 @@
# coding: utf-8

import sys
from ruamel.yaml.anchor import Anchor
from _vendoring.ruamel.yaml.anchor import Anchor

if False: # MYPY
from typing import Text, Any, Dict, List # NOQA
@ -1,6 +1,6 @@
# coding: utf-8

from ruamel.yaml.anchor import Anchor
from _vendoring.ruamel.yaml.anchor import Anchor

if False: # MYPY
from typing import Text, Any, Dict, List # NOQA
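
Taken together, the hunks above relocate the entire ruamel.yaml package under `_vendoring` without touching behaviour, so round-trip loading still preserves user anchors, merge keys and comments through the renamed constructor/representer pair. A closing sketch (assumes the `_vendoring` package is importable; the YAML text is illustrative)::

    import sys
    from _vendoring.ruamel.yaml import YAML

    yaml = YAML()  # round-trip mode by default: RoundTripConstructor/RoundTripRepresenter
    doc = yaml.load('defaults: &defaults {verbose: true}\nbuild:\n  <<: *defaults\n')
    doc['build']['jobs'] = 4
    yaml.dump(doc, sys.stdout)  # the &defaults anchor and the merge key survive the dump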