8th day of Python challenges 111-117

This commit is contained in:
abd.shallal
2019-08-04 15:26:35 +03:00
parent b04c1b055f
commit 627802c383
3215 changed files with 760227 additions and 491 deletions

View File

@@ -0,0 +1,337 @@
# Copyright (c) 2016-2018 Claudiu Popa <pcmanticore@gmail.com>
# Copyright (c) 2016 Derek Gustafson <degustaf@gmail.com>
# Copyright (c) 2017 Chris Philip <chrisp533@gmail.com>
# Copyright (c) 2017 Hugo <hugovk@users.noreply.github.com>
# Copyright (c) 2017 ioanatia <ioanatia@users.noreply.github.com>
# Copyright (c) 2017 Calen Pennington <cale@edx.org>
# Copyright (c) 2018 Nick Drozd <nicholasdrozd@gmail.com>
import abc
import collections
import enum
import imp
import os
import sys
import zipimport
try:
import importlib.machinery
_HAS_MACHINERY = True
except ImportError:
_HAS_MACHINERY = False
try:
from functools import lru_cache
except ImportError:
from backports.functools_lru_cache import lru_cache
from . import util
# Enumeration of every module kind the finders below can report. It mirrors
# the legacy ``imp`` constants and adds PY_ZIPMODULE / PY_NAMESPACE, which
# have no ``imp`` equivalent.
ModuleType = enum.Enum(
    "ModuleType",
    "C_BUILTIN C_EXTENSION PKG_DIRECTORY "
    "PY_CODERESOURCE PY_COMPILED PY_FROZEN PY_RESOURCE "
    "PY_SOURCE PY_ZIPMODULE PY_NAMESPACE",
)
# Mapping from the (deprecated) ``imp`` module's integer type constants to
# the ModuleType members above.
_ImpTypes = {
    imp.C_BUILTIN: ModuleType.C_BUILTIN,
    imp.C_EXTENSION: ModuleType.C_EXTENSION,
    imp.PKG_DIRECTORY: ModuleType.PKG_DIRECTORY,
    imp.PY_COMPILED: ModuleType.PY_COMPILED,
    imp.PY_FROZEN: ModuleType.PY_FROZEN,
    imp.PY_SOURCE: ModuleType.PY_SOURCE,
}
# PY_RESOURCE / PY_CODERESOURCE are only defined on some platforms, so they
# are registered conditionally.
if hasattr(imp, "PY_RESOURCE"):
    _ImpTypes[imp.PY_RESOURCE] = ModuleType.PY_RESOURCE
if hasattr(imp, "PY_CODERESOURCE"):
    _ImpTypes[imp.PY_CODERESOURCE] = ModuleType.PY_CODERESOURCE
def _imp_type_to_module_type(imp_type):
    """Translate an ``imp`` type constant into the matching ModuleType member."""
    return _ImpTypes[imp_type]
# Plain namedtuple backing the ModuleSpec class below; the two adjacent
# string literals are concatenated by the parser into one field list.
_ModuleSpec = collections.namedtuple(
    "_ModuleSpec", "name type location " "origin submodule_search_locations"
)
class ModuleSpec(_ModuleSpec):
    """Lightweight analogue of PEP 420's ModuleSpec.

    Records a module's name, its kind (a ModuleType member), where it was
    found and, for packages, where its submodules may be searched.
    """

    def __new__(
        cls,
        name,
        module_type,
        location=None,
        origin=None,
        submodule_search_locations=None,
    ):
        # Delegate to the namedtuple constructor, mapping the public
        # ``module_type`` parameter onto the underlying ``type`` field.
        return super(ModuleSpec, cls).__new__(
            cls,
            name=name,
            type=module_type,
            location=location,
            origin=origin,
            submodule_search_locations=submodule_search_locations,
        )
class Finder:
    """Base class for module finders; each subclass implements one protocol."""

    def __init__(self, path=None):
        # Fall back to sys.path when no (non-empty) search path is given.
        if path:
            self._path = path
        else:
            self._path = sys.path

    @abc.abstractmethod
    def find_module(self, modname, module_parts, processed, submodule_path):
        """Find the given module.

        :param str modname: The module which needs to be searched.
        :param list module_parts: Strings that, together, make up the
            module's dotted namespace.
        :param list processed: The parts of the module path resolved so far.
        :param list submodule_path: Paths in which the module may be found.
        :returns: A ModuleSpec describing how and where the module was
            found, or None when it was not.
        """

    def contribute_to_path(self, spec, processed):
        """Get a list of extra paths where this finder can search."""
class ImpFinder(Finder):
    """A finder based on the (deprecated) ``imp`` module."""
    def find_module(self, modname, module_parts, processed, submodule_path):
        # Normalize to a plain list, which is what imp.find_module expects.
        if submodule_path is not None:
            submodule_path = list(submodule_path)
        try:
            stream, mp_filename, mp_desc = imp.find_module(modname, submodule_path)
        except ImportError:
            return None
        # Close resources.
        if stream:
            stream.close()
        # mp_desc is the (suffix, mode, type) triple; index 2 is the type.
        return ModuleSpec(
            name=modname,
            location=mp_filename,
            module_type=_imp_type_to_module_type(mp_desc[2]),
        )
    def contribute_to_path(self, spec, processed):
        if spec.location is None:
            # Builtin.
            return None
        if _is_setuptools_namespace(spec.location):
            # extend_path is called, search sys.path for module/packages
            # of this name see pkgutil.extend_path documentation
            path = [
                os.path.join(p, *processed)
                for p in sys.path
                if os.path.isdir(os.path.join(p, *processed))
            ]
        else:
            path = [spec.location]
        return path
class ExplicitNamespacePackageFinder(ImpFinder):
    """A finder for the explicit namespace packages, generated through pkg_resources."""
    def find_module(self, modname, module_parts, processed, submodule_path):
        # Rebuild the fully qualified name from the parts resolved so far.
        if processed:
            modname = ".".join(processed + [modname])
        # Only modules registered as namespaces AND already imported qualify.
        if util.is_namespace(modname) and modname in sys.modules:
            submodule_path = sys.modules[modname].__path__
            return ModuleSpec(
                name=modname,
                location="",
                origin="namespace",
                module_type=ModuleType.PY_NAMESPACE,
                submodule_search_locations=submodule_path,
            )
        return None
    def contribute_to_path(self, spec, processed):
        # Submodules live on the namespace package's __path__ entries.
        return spec.submodule_search_locations
class ZipFinder(Finder):
    """Finder that knows how to find a module inside zip files."""
    def __init__(self, path):
        super(ZipFinder, self).__init__(path)
        # Pre-populate sys.path_importer_cache with zipimporters for *path*.
        self._zipimporters = _precache_zipimporters(path)
    def find_module(self, modname, module_parts, processed, submodule_path):
        try:
            file_type, filename, path = _search_zip(module_parts, self._zipimporters)
        except ImportError:
            return None
        # origin "egg" marks the module as coming from a zip archive.
        return ModuleSpec(
            name=modname,
            location=filename,
            origin="egg",
            module_type=file_type,
            submodule_search_locations=path,
        )
class PathSpecFinder(Finder):
    """Finder based on importlib.machinery.PathFinder."""
    def find_module(self, modname, module_parts, processed, submodule_path):
        spec = importlib.machinery.PathFinder.find_spec(modname, path=submodule_path)
        if spec:
            # origin can be either a string on older Python versions
            # or None in case it is a namespace package:
            # https://github.com/python/cpython/pull/5481
            is_namespace_pkg = spec.origin in ("namespace", None)
            location = spec.origin if not is_namespace_pkg else None
            module_type = ModuleType.PY_NAMESPACE if is_namespace_pkg else None
            # Re-wrap importlib's spec into this module's ModuleSpec tuple.
            spec = ModuleSpec(
                name=spec.name,
                location=location,
                origin=spec.origin,
                module_type=module_type,
                submodule_search_locations=list(spec.submodule_search_locations or []),
            )
        return spec
    def contribute_to_path(self, spec, processed):
        # Only namespace packages contribute extra search locations here.
        if spec.type == ModuleType.PY_NAMESPACE:
            return spec.submodule_search_locations
        return None
# Finder classes tried, in order, by _find_spec_with_path. PathSpecFinder
# needs importlib.machinery (Python 3.4+); the explicit-namespace finder
# always runs last as a fallback.
_SPEC_FINDERS = (ImpFinder, ZipFinder)
if _HAS_MACHINERY and sys.version_info[:2] >= (3, 4):
    _SPEC_FINDERS += (PathSpecFinder,)
_SPEC_FINDERS += (ExplicitNamespacePackageFinder,)
def _is_setuptools_namespace(location):
try:
with open(os.path.join(location, "__init__.py"), "rb") as stream:
data = stream.read(4096)
except IOError:
pass
else:
extend_path = b"pkgutil" in data and b"extend_path" in data
declare_namespace = (
b"pkg_resources" in data and b"declare_namespace(__name__)" in data
)
return extend_path or declare_namespace
@lru_cache()
def _cached_set_diff(left, right):
result = set(left)
result.difference_update(right)
return result
def _precache_zipimporters(path=None):
    """Ensure sys.path_importer_cache holds a zipimporter for every entry
    of *path* (defaulting to sys.path) that is a zip archive, then return
    the cache mapping.
    """
    importer_cache = sys.path_importer_cache
    # Tuples serve as hashable keys for _cached_set_diff. When measured,
    # despite identical O(n) complexity, caching the tuple->set conversion
    # together with the difference beat caching only the set difference.
    requested = tuple(path or sys.path)
    already_cached = tuple(importer_cache)
    for entry in _cached_set_diff(requested, already_cached):
        try:
            importer_cache[entry] = zipimport.zipimporter(entry)
        except zipimport.ZipImportError:
            # Not a zip archive; leave the entry to other importers.
            pass
    return importer_cache
def _search_zip(modpath, pic):
    """Search the cached zipimporters for the module described by *modpath*.

    :param modpath: List of module name parts, e.g. ["pkg", "mod"].
    :param pic: Path importer cache mapping archive paths to zipimporters.
    :returns: (ModuleType.PY_ZIPMODULE, absolute module path, archive path).
    :raises ImportError: When no archive contains the module.
    """
    for filepath, importer in list(pic.items()):
        if importer is not None:
            # The archive holds the top-level package; the full dotted path
            # must also resolve inside it, otherwise the module is missing.
            found = importer.find_module(modpath[0])
            if found:
                if not importer.find_module(os.path.sep.join(modpath)):
                    raise ImportError(
                        "No module named %s in %s/%s"
                        % (".".join(modpath[1:]), filepath, modpath)
                    )
                return (
                    ModuleType.PY_ZIPMODULE,
                    os.path.abspath(filepath) + os.path.sep + os.path.sep.join(modpath),
                    filepath,
                )
    raise ImportError("No module named %s" % ".".join(modpath))
def _find_spec_with_path(search_path, modname, module_parts, processed, submodule_path):
    """Ask every registered finder for *modname* in order.

    Returns a ``(finder, spec)`` pair for the first finder that locates the
    module; raises ImportError when none of them does.
    """
    # All finders are instantiated up front; some (e.g. ZipFinder) perform
    # cache-priming work in their constructors.
    candidates = [factory(search_path) for factory in _SPEC_FINDERS]
    for finder in candidates:
        spec = finder.find_module(modname, module_parts, processed, submodule_path)
        if spec is not None:
            return finder, spec
    raise ImportError("No module named %s" % ".".join(module_parts))
def find_spec(modpath, path=None):
    """Find a spec for the given module.
    :type modpath: list or tuple
    :param modpath:
        split module's name (i.e name of a module or package split
        on '.'), with leading empty strings for explicit relative import
    :type path: list or None
    :param path:
        optional list of path where the module or package should be
        searched (use sys.path if nothing or None is given)
    :rtype: ModuleSpec
    :return: A module spec, which describes how the module was
        found and where.
    """
    _path = path or sys.path
    # Need a copy for not mutating the argument.
    modpath = modpath[:]
    submodule_path = None
    module_parts = modpath[:]
    processed = []
    # Resolve the dotted path one component at a time, threading the search
    # locations contributed by each resolved parent package into the next
    # lookup.
    while modpath:
        modname = modpath.pop(0)
        finder, spec = _find_spec_with_path(
            _path, modname, module_parts, processed, submodule_path or path
        )
        processed.append(modname)
        if modpath:
            # More components remain: ask the finder where to search next.
            submodule_path = finder.contribute_to_path(spec, processed)
    if spec.type == ModuleType.PKG_DIRECTORY:
        # Expose the package's submodule search locations on the result.
        spec = spec._replace(submodule_search_locations=submodule_path)
    return spec

View File

@@ -0,0 +1,10 @@
# Copyright (c) 2016, 2018 Claudiu Popa <pcmanticore@gmail.com>
# pkg_resources is optional; without it no module counts as a namespace.
try:
    import pkg_resources
except ImportError:
    pkg_resources = None


def is_namespace(modname):
    """Return whether *modname* is registered as a pkg_resources namespace."""
    if pkg_resources is None:
        return False
    return modname in pkg_resources._namespace_packages

View File

@@ -0,0 +1,66 @@
# Copyright (c) 2016-2018 Claudiu Popa <pcmanticore@gmail.com>
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
"""Contains logic for retrieving special methods.
This implementation does not rely on the dot attribute access
logic, found in ``.getattr()``. The difference between these two
is that the dunder methods are looked with the type slots
(you can find more about these here
http://lucumr.pocoo.org/2014/8/16/the-python-i-would-like-to-see/)
As such, the lookup for the special methods is actually simpler than
the dot attribute access.
"""
import itertools
import astroid
from astroid import exceptions
def _lookup_in_mro(node, name):
    """Collect every local definition of *name* on *node* and its ancestors.

    :raises exceptions.AttributeInferenceError: when no definition of
        *name* exists anywhere along the MRO.
    """
    found = list(node.locals.get(name, []))
    for ancestor in node.ancestors(recurs=True):
        found.extend(ancestor.locals.get(name, []))
    if not found:
        raise exceptions.AttributeInferenceError(attribute=name, target=node)
    return found
def lookup(node, name):
    """Look up the special method *name* on *node*.

    Returns a list of matching attribute nodes when found; raises
    `astroid.AttributeInferenceError` otherwise.
    """
    builtin_containers = (
        astroid.List,
        astroid.Tuple,
        astroid.Const,
        astroid.Dict,
        astroid.Set,
    )
    if isinstance(node, builtin_containers):
        return _builtin_lookup(node, name)
    if isinstance(node, astroid.Instance):
        return _lookup_in_mro(node, name)
    if isinstance(node, astroid.ClassDef):
        return _class_lookup(node, name)
    raise exceptions.AttributeInferenceError(attribute=name, target=node)
def _class_lookup(node, name):
    """Resolve *name* through a class's metaclass, mirroring type-slot lookup."""
    metaclass = node.metaclass()
    if metaclass is not None:
        return _lookup_in_mro(metaclass, name)
    raise exceptions.AttributeInferenceError(attribute=name, target=node)
def _builtin_lookup(node, name):
    """Return the local definitions of *name* on a builtin container node."""
    values = node.locals.get(name)
    if values:
        return values
    raise exceptions.AttributeInferenceError(attribute=name, target=node)

View File

@@ -0,0 +1,728 @@
# Copyright (c) 2016-2018 Claudiu Popa <pcmanticore@gmail.com>
# Copyright (c) 2016 Derek Gustafson <degustaf@gmail.com>
# Copyright (c) 2017-2018 Bryce Guinta <bryce.paul.guinta@gmail.com>
# Copyright (c) 2017 Ceridwen <ceridwenv@gmail.com>
# Copyright (c) 2017 Calen Pennington <cale@edx.org>
# Copyright (c) 2018 Nick Drozd <nicholasdrozd@gmail.com>
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
"""
Data object model, as per https://docs.python.org/3/reference/datamodel.html.
This module describes, at least partially, a data object model for some
of astroid's nodes. The model contains special attributes that nodes such
as functions, classes, modules etc have, such as __doc__, __class__,
__module__ etc, being used when doing attribute lookups over nodes.
For instance, inferring `obj.__class__` will first trigger an inference
of the `obj` variable. If it was successfully inferred, then an attribute
`__class__ will be looked for in the inferred object. This is the part
where the data model occurs. The model is attached to those nodes
and the lookup mechanism will try to see if attributes such as
`__class__` are defined by the model or not. If they are defined,
the model will be requested to return the corresponding value of that
attribute. Thus the model can be viewed as a special part of the lookup
mechanism.
"""
import itertools
import pprint
import os
import types
from functools import lru_cache
import astroid
from astroid import context as contextmod
from astroid import exceptions
from astroid import node_classes
# Prefix used by model classes to mark the properties that implement
# a node's special attributes.
IMPL_PREFIX = "attr_"


def _dunder_dict(instance, attributes):
    """Build a Dict node mirroring *attributes*, keyed by Const strings.

    Each entry of *attributes* maps a name to a list of nodes; only the
    most recent (last) node of each list is kept as the dictionary value,
    since earlier assignments are not useful for attribute retrieval.
    """
    dict_node = node_classes.Dict(parent=instance)
    items = [
        (node_classes.Const(value=name, parent=dict_node), nodes[-1])
        for name, nodes in attributes.items()
    ]
    dict_node.postinit(items)
    return dict_node
class ObjectModel:
    """Base class for node object models.

    A model describes the special attributes a node type exposes
    (``__doc__``, ``__dict__``, ...). Subclasses declare each attribute as
    a property whose name carries the ``attr_`` prefix; :meth:`lookup`
    resolves a requested name against those properties.
    """

    def __init__(self):
        # The node through which the model was accessed; set by __get__/__call__.
        self._instance = None

    def __repr__(self):
        cname = type(self).__name__
        alignment = len(cname) + 1
        # NOTE: an earlier version also pprint-formatted every field here,
        # but never used the result; that dead code has been removed.
        fields = sorted(self.attributes())
        return "%(cname)s(%(fields)s)" % {
            "cname": cname,
            "fields": (",\n" + " " * alignment).join(fields),
        }

    def __call__(self, instance):
        self._instance = instance
        return self

    def __get__(self, instance, cls=None):
        # ObjectModel needs to be a descriptor so that just doing
        # `special_attributes = SomeObjectModel` should be enough in the body of a node.
        # But at the same time, node.special_attributes should return an object
        # which can be used for manipulating the special attributes. That's the reason
        # we pass the instance through which it got accessed to ObjectModel.__call__,
        # returning itself afterwards, so we can still have access to the
        # underlying data model and to the instance for which it got accessed.
        return self(instance)

    def __contains__(self, name):
        return name in self.attributes()

    # NOTE(review): lru_cache on an instance method keys on `self` and keeps
    # every model instance alive for the cache's lifetime (flake8-bugbear
    # B019); kept as-is for behavioral compatibility.
    @lru_cache(maxsize=None)
    def attributes(self):
        """Get the attributes which are exported by this object model."""
        return [
            obj[len(IMPL_PREFIX) :] for obj in dir(self) if obj.startswith(IMPL_PREFIX)
        ]

    def lookup(self, name):
        """Look up the given *name* in the current model.

        It should return an AST or an interpreter object,
        but if the name is not found, then an AttributeInferenceError
        will be raised.
        """
        if name in self.attributes():
            return getattr(self, IMPL_PREFIX + name)
        raise exceptions.AttributeInferenceError(target=self._instance, attribute=name)
class ModuleModel(ObjectModel):
    """Model of the special attributes carried by a module node."""
    def _builtins(self):
        # Reuse the builtins module's own __dict__ special attribute.
        builtins_ast_module = astroid.MANAGER.builtins_module
        return builtins_ast_module.special_attributes.lookup("__dict__")
    @property
    def attr_builtins(self):
        return self._builtins()
    @property
    def attr___path__(self):
        # Only packages carry a __path__.
        if not self._instance.package:
            raise exceptions.AttributeInferenceError(
                target=self._instance, attribute="__path__"
            )
        # For an __init__.py entry, expose the containing directory instead
        # of the file itself.
        path_objs = [
            node_classes.Const(
                value=path
                if not path.endswith("__init__.py")
                else os.path.dirname(path),
                parent=self._instance,
            )
            for path in self._instance.path
        ]
        container = node_classes.List(parent=self._instance)
        container.postinit(path_objs)
        return container
    @property
    def attr___name__(self):
        return node_classes.Const(value=self._instance.name, parent=self._instance)
    @property
    def attr___doc__(self):
        return node_classes.Const(value=self._instance.doc, parent=self._instance)
    @property
    def attr___file__(self):
        return node_classes.Const(value=self._instance.file, parent=self._instance)
    @property
    def attr___dict__(self):
        return _dunder_dict(self._instance, self._instance.globals)
    @property
    def attr___package__(self):
        # Non-packages get an empty __package__; packages use their own name.
        if not self._instance.package:
            value = ""
        else:
            value = self._instance.name
        return node_classes.Const(value=value, parent=self._instance)
    # These are related to the Python 3 implementation of the
    # import system,
    # https://docs.python.org/3/reference/import.html#import-related-module-attributes
    @property
    def attr___spec__(self):
        # No handling for now.
        return node_classes.Unknown()
    @property
    def attr___loader__(self):
        # No handling for now.
        return node_classes.Unknown()
    @property
    def attr___cached__(self):
        # No handling for now.
        return node_classes.Unknown()
class FunctionModel(ObjectModel):
    """Model of the special attributes carried by a function node."""
    @property
    def attr___name__(self):
        return node_classes.Const(value=self._instance.name, parent=self._instance)
    @property
    def attr___doc__(self):
        return node_classes.Const(value=self._instance.doc, parent=self._instance)
    @property
    def attr___qualname__(self):
        return node_classes.Const(value=self._instance.qname(), parent=self._instance)
    @property
    def attr___defaults__(self):
        # __defaults__ is None when the function has no defaults, otherwise
        # a tuple of the default nodes.
        func = self._instance
        if not func.args.defaults:
            return node_classes.Const(value=None, parent=func)
        defaults_obj = node_classes.Tuple(parent=func)
        defaults_obj.postinit(func.args.defaults)
        return defaults_obj
    @property
    def attr___annotations__(self):
        # Build a Dict mapping parameter names (and "return") to their
        # annotation nodes, skipping unannotated parameters.
        obj = node_classes.Dict(parent=self._instance)
        if not self._instance.returns:
            returns = None
        else:
            returns = self._instance.returns
        args = self._instance.args
        pair_annotations = itertools.chain(
            zip(args.args or [], args.annotations),
            zip(args.kwonlyargs, args.kwonlyargs_annotations),
        )
        annotations = {
            arg.name: annotation for (arg, annotation) in pair_annotations if annotation
        }
        if args.varargannotation:
            annotations[args.vararg] = args.varargannotation
        if args.kwargannotation:
            annotations[args.kwarg] = args.kwargannotation
        if returns:
            annotations["return"] = returns
        items = [
            (node_classes.Const(key, parent=obj), value)
            for (key, value) in annotations.items()
        ]
        obj.postinit(items)
        return obj
    @property
    def attr___dict__(self):
        return node_classes.Dict(parent=self._instance)
    attr___globals__ = attr___dict__
    @property
    def attr___kwdefaults__(self):
        def _default_args(args, parent):
            # Yield (Const(name), default) pairs for keyword-only parameters
            # that actually have a default value.
            for arg in args.kwonlyargs:
                try:
                    default = args.default_value(arg.name)
                except exceptions.NoDefault:
                    continue
                name = node_classes.Const(arg.name, parent=parent)
                yield name, default
        args = self._instance.args
        obj = node_classes.Dict(parent=self._instance)
        defaults = dict(_default_args(args, obj))
        obj.postinit(list(defaults.items()))
        return obj
    @property
    def attr___module__(self):
        return node_classes.Const(self._instance.root().qname())
    @property
    def attr___get__(self):
        # Deferred import to avoid a circular dependency.
        from astroid import bases
        func = self._instance
        class DescriptorBoundMethod(bases.BoundMethod):
            """Bound method which knows how to understand calling descriptor binding."""
            def implicit_parameters(self):
                # Different than BoundMethod since the signature
                # is different.
                return 0
            def infer_call_result(self, caller, context=None):
                # __get__ is called as descriptor.__get__(instance, owner).
                if len(caller.args) != 2:
                    raise exceptions.InferenceError(
                        "Invalid arguments for descriptor binding",
                        target=self,
                        context=context,
                    )
                context = contextmod.copy_context(context)
                cls = next(caller.args[0].infer(context=context))
                if cls is astroid.Uninferable:
                    raise exceptions.InferenceError(
                        "Invalid class inferred", target=self, context=context
                    )
                # For some reason func is a Node that the below
                # code is not expecting
                if isinstance(func, bases.BoundMethod):
                    yield func
                    return
                # Rebuild the original value, but with the parent set as the
                # class where it will be bound.
                new_func = func.__class__(
                    name=func.name,
                    doc=func.doc,
                    lineno=func.lineno,
                    col_offset=func.col_offset,
                    parent=cls,
                )
                # pylint: disable=no-member
                new_func.postinit(func.args, func.body, func.decorators, func.returns)
                # Build a proper bound method that points to our newly built function.
                proxy = bases.UnboundMethod(new_func)
                yield bases.BoundMethod(proxy=proxy, bound=cls)
            @property
            def args(self):
                """Overwrite the underlying args to match those of the underlying func
                Usually the underlying *func* is a function/method, as in:
                def test(self):
                    pass
                This has only the *self* parameter but when we access test.__get__
                we get a new object which has two parameters, *self* and *type*.
                """
                nonlocal func
                params = func.args.args.copy()
                params.append(astroid.AssignName(name="type"))
                arguments = astroid.Arguments(parent=func.args.parent)
                arguments.postinit(
                    args=params,
                    defaults=[],
                    kwonlyargs=[],
                    kw_defaults=[],
                    annotations=[],
                )
                return arguments
        return DescriptorBoundMethod(proxy=self._instance, bound=self._instance)
    # These are here just for completion.
    @property
    def attr___ne__(self):
        return node_classes.Unknown()
    attr___subclasshook__ = attr___ne__
    attr___str__ = attr___ne__
    attr___sizeof__ = attr___ne__
    attr___setattr___ = attr___ne__
    attr___repr__ = attr___ne__
    attr___reduce__ = attr___ne__
    attr___reduce_ex__ = attr___ne__
    attr___new__ = attr___ne__
    attr___lt__ = attr___ne__
    attr___eq__ = attr___ne__
    attr___gt__ = attr___ne__
    attr___format__ = attr___ne__
    attr___delattr___ = attr___ne__
    attr___getattribute__ = attr___ne__
    attr___hash__ = attr___ne__
    attr___init__ = attr___ne__
    attr___dir__ = attr___ne__
    attr___call__ = attr___ne__
    attr___class__ = attr___ne__
    attr___closure__ = attr___ne__
    attr___code__ = attr___ne__
class ClassModel(ObjectModel):
    """Model of the special attributes carried by a class node."""
    @property
    def attr___module__(self):
        return node_classes.Const(self._instance.root().qname())
    @property
    def attr___name__(self):
        return node_classes.Const(self._instance.name)
    @property
    def attr___qualname__(self):
        return node_classes.Const(self._instance.qname())
    @property
    def attr___doc__(self):
        return node_classes.Const(self._instance.doc)
    @property
    def attr___mro__(self):
        # Only new-style classes have an MRO.
        if not self._instance.newstyle:
            raise exceptions.AttributeInferenceError(
                target=self._instance, attribute="__mro__"
            )
        mro = self._instance.mro()
        obj = node_classes.Tuple(parent=self._instance)
        obj.postinit(mro)
        return obj
    @property
    def attr_mro(self):
        if not self._instance.newstyle:
            raise exceptions.AttributeInferenceError(
                target=self._instance, attribute="mro"
            )
        # Deferred import to avoid a circular dependency.
        from astroid import bases
        other_self = self
        # Cls.mro is a method and we need to return one in order to have a proper inference.
        # The method we're returning is capable of inferring the underlying MRO though.
        class MroBoundMethod(bases.BoundMethod):
            def infer_call_result(self, caller, context=None):
                yield other_self.attr___mro__
        implicit_metaclass = self._instance.implicit_metaclass()
        mro_method = implicit_metaclass.locals["mro"][0]
        return MroBoundMethod(proxy=mro_method, bound=implicit_metaclass)
    @property
    def attr___bases__(self):
        obj = node_classes.Tuple()
        context = contextmod.InferenceContext()
        elts = list(self._instance._inferred_bases(context))
        obj.postinit(elts=elts)
        return obj
    @property
    def attr___class__(self):
        # Deferred import to avoid a circular dependency.
        from astroid import helpers
        return helpers.object_type(self._instance)
    @property
    def attr___subclasses__(self):
        """Get the subclasses of the underlying class
        This looks only in the current module for retrieving the subclasses,
        thus it might miss a couple of them.
        """
        from astroid import bases
        from astroid import scoped_nodes
        if not self._instance.newstyle:
            raise exceptions.AttributeInferenceError(
                target=self._instance, attribute="__subclasses__"
            )
        qname = self._instance.qname()
        root = self._instance.root()
        classes = [
            cls
            for cls in root.nodes_of_class(scoped_nodes.ClassDef)
            if cls != self._instance and cls.is_subtype_of(qname)
        ]
        obj = node_classes.List(parent=self._instance)
        obj.postinit(classes)
        class SubclassesBoundMethod(bases.BoundMethod):
            def infer_call_result(self, caller, context=None):
                yield obj
        implicit_metaclass = self._instance.implicit_metaclass()
        subclasses_method = implicit_metaclass.locals["__subclasses__"][0]
        return SubclassesBoundMethod(proxy=subclasses_method, bound=implicit_metaclass)
    @property
    def attr___dict__(self):
        return node_classes.Dict(parent=self._instance)
class SuperModel(ObjectModel):
    """Model of the special attributes of a super() proxy object."""
    @property
    def attr___thisclass__(self):
        # The class from which the MRO search starts.
        return self._instance.mro_pointer
    @property
    def attr___self_class__(self):
        return self._instance._self_class
    @property
    def attr___self__(self):
        return self._instance.type
    @property
    def attr___class__(self):
        return self._instance._proxied
class UnboundMethodModel(ObjectModel):
    """Model of the special attributes of an unbound method."""
    @property
    def attr___class__(self):
        # Deferred import to avoid a circular dependency.
        from astroid import helpers
        return helpers.object_type(self._instance)
    @property
    def attr___func__(self):
        return self._instance._proxied
    @property
    def attr___self__(self):
        # Unbound methods have no bound instance.
        return node_classes.Const(value=None, parent=self._instance)
    # Python 2 era aliases (im_func/im_class/im_self).
    attr_im_func = attr___func__
    attr_im_class = attr___class__
    attr_im_self = attr___self__
class BoundMethodModel(FunctionModel):
    """Model of the special attributes of a bound method."""
    @property
    def attr___func__(self):
        # Unwrap twice: BoundMethod proxies an UnboundMethod, which in turn
        # proxies the underlying function node.
        return self._instance._proxied._proxied
    @property
    def attr___self__(self):
        return self._instance.bound
class GeneratorModel(FunctionModel):
    """Model of the attributes of a generator object."""
    def __new__(cls, *args, **kwargs):
        # Append the values from the GeneratorType unto this object.
        ret = super(GeneratorModel, cls).__new__(cls, *args, **kwargs)
        generator = astroid.MANAGER.builtins_module["generator"]
        for name, values in generator.locals.items():
            method = values[0]
            # Bind `method` as a default argument so each generated
            # property captures its own value.
            patched = lambda cls, meth=method: meth
            setattr(type(ret), IMPL_PREFIX + name, property(patched))
        return ret
    @property
    def attr___name__(self):
        # Generators report the name/doc of the function that created them.
        return node_classes.Const(
            value=self._instance.parent.name, parent=self._instance
        )
    @property
    def attr___doc__(self):
        return node_classes.Const(
            value=self._instance.parent.doc, parent=self._instance
        )
class AsyncGeneratorModel(GeneratorModel):
    """Model of the attributes of an async generator object."""
    def __new__(cls, *args, **kwargs):
        # Append the values from the AGeneratorType unto this object.
        ret = super().__new__(cls, *args, **kwargs)
        astroid_builtins = astroid.MANAGER.builtins_module
        generator = astroid_builtins.get("async_generator")
        if generator is None:
            # Make it backward compatible.
            generator = astroid_builtins.get("generator")
        for name, values in generator.locals.items():
            method = values[0]
            # Same default-argument binding trick as in GeneratorModel.
            patched = lambda cls, meth=method: meth
            setattr(type(ret), IMPL_PREFIX + name, property(patched))
        return ret
class InstanceModel(ObjectModel):
    """Model of the special attributes of a class instance."""
    @property
    def attr___class__(self):
        return self._instance._proxied
    @property
    def attr___module__(self):
        return node_classes.Const(self._instance.root().qname())
    @property
    def attr___doc__(self):
        return node_classes.Const(self._instance.doc)
    @property
    def attr___dict__(self):
        return _dunder_dict(self._instance, self._instance.instance_attrs)
# Exception instances
class ExceptionInstanceModel(InstanceModel):
    """Model of the attributes shared by all exception instances."""
    @property
    def attr_args(self):
        # args is modeled as a one-element tuple holding an empty message.
        message = node_classes.Const("")
        args = node_classes.Tuple(parent=self._instance)
        args.postinit((message,))
        return args
    @property
    def attr___traceback__(self):
        builtins_ast_module = astroid.MANAGER.builtins_module
        traceback_type = builtins_ast_module[types.TracebackType.__name__]
        return traceback_type.instantiate_class()
class SyntaxErrorInstanceModel(ExceptionInstanceModel):
    """Model of the extra attributes of SyntaxError instances."""
    @property
    def attr_text(self):
        return node_classes.Const("")
class OSErrorInstanceModel(ExceptionInstanceModel):
    """Model of the extra attributes of OSError (and subclasses) instances."""
    @property
    def attr_filename(self):
        return node_classes.Const("")
    @property
    def attr_errno(self):
        return node_classes.Const(0)
    @property
    def attr_strerror(self):
        return node_classes.Const("")
    # filename2 mirrors filename (set by two-path OS calls such as rename).
    attr_filename2 = attr_filename
class ImportErrorInstanceModel(ExceptionInstanceModel):
    """Model of the extra attributes of ImportError instances."""
    @property
    def attr_name(self):
        return node_classes.Const("")
    @property
    def attr_path(self):
        return node_classes.Const("")
# Maps qualified builtin exception names to the model class that describes
# their instance attributes.
BUILTIN_EXCEPTIONS = {
    "builtins.SyntaxError": SyntaxErrorInstanceModel,
    "builtins.ImportError": ImportErrorInstanceModel,
    # These are all similar to OSError in terms of attributes
    "builtins.OSError": OSErrorInstanceModel,
    "builtins.BlockingIOError": OSErrorInstanceModel,
    "builtins.BrokenPipeError": OSErrorInstanceModel,
    "builtins.ChildProcessError": OSErrorInstanceModel,
    "builtins.ConnectionAbortedError": OSErrorInstanceModel,
    "builtins.ConnectionError": OSErrorInstanceModel,
    "builtins.ConnectionRefusedError": OSErrorInstanceModel,
    "builtins.ConnectionResetError": OSErrorInstanceModel,
    "builtins.FileExistsError": OSErrorInstanceModel,
    "builtins.FileNotFoundError": OSErrorInstanceModel,
    "builtins.InterruptedError": OSErrorInstanceModel,
    "builtins.IsADirectoryError": OSErrorInstanceModel,
    "builtins.NotADirectoryError": OSErrorInstanceModel,
    "builtins.PermissionError": OSErrorInstanceModel,
    "builtins.ProcessLookupError": OSErrorInstanceModel,
    "builtins.TimeoutError": OSErrorInstanceModel,
}
class DictModel(ObjectModel):
    """Model of the special attributes of a dict instance."""
    @property
    def attr___class__(self):
        return self._instance._proxied
    def _generic_dict_attribute(self, obj, name):
        """Generate a bound method that can infer the given *obj*."""
        class DictMethodBoundMethod(astroid.BoundMethod):
            def infer_call_result(self, caller, context=None):
                yield obj
        meth = next(self._instance._proxied.igetattr(name))
        return DictMethodBoundMethod(proxy=meth, bound=self._instance)
    @property
    def attr_items(self):
        # Build a list of (key, value) tuple nodes, then wrap it in a
        # DictItems view object.
        elems = []
        obj = node_classes.List(parent=self._instance)
        for key, value in self._instance.items:
            elem = node_classes.Tuple(parent=obj)
            elem.postinit((key, value))
            elems.append(elem)
        obj.postinit(elts=elems)
        from astroid import objects
        obj = objects.DictItems(obj)
        return self._generic_dict_attribute(obj, "items")
    @property
    def attr_keys(self):
        keys = [key for (key, _) in self._instance.items]
        obj = node_classes.List(parent=self._instance)
        obj.postinit(elts=keys)
        from astroid import objects
        obj = objects.DictKeys(obj)
        return self._generic_dict_attribute(obj, "keys")
    @property
    def attr_values(self):
        values = [value for (_, value) in self._instance.items]
        obj = node_classes.List(parent=self._instance)
        obj.postinit(values)
        from astroid import objects
        obj = objects.DictValues(obj)
        return self._generic_dict_attribute(obj, "values")