8th day of python challenges 111-117
This commit is contained in:
134
venv/lib/python3.6/site-packages/pylint/checkers/__init__.py
Normal file
134
venv/lib/python3.6/site-packages/pylint/checkers/__init__.py
Normal file
@@ -0,0 +1,134 @@
|
||||
# Copyright (c) 2006-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
|
||||
# Copyright (c) 2013-2014 Google, Inc.
|
||||
# Copyright (c) 2013 buck@yelp.com <buck@yelp.com>
|
||||
# Copyright (c) 2014-2017 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2014 Brett Cannon <brett@python.org>
|
||||
# Copyright (c) 2014 Arun Persaud <arun@nubati.net>
|
||||
# Copyright (c) 2015 Ionel Cristian Maries <contact@ionelmc.ro>
|
||||
# Copyright (c) 2016 Moises Lopez <moylop260@vauxoo.com>
|
||||
# Copyright (c) 2017-2018 Bryce Guinta <bryce.paul.guinta@gmail.com>
|
||||
|
||||
# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
|
||||
# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
|
||||
|
||||
"""utilities methods and classes for checkers
|
||||
|
||||
Base id of standard checkers (used in msg and report ids):
|
||||
01: base
|
||||
02: classes
|
||||
03: format
|
||||
04: import
|
||||
05: misc
|
||||
06: variables
|
||||
07: exceptions
|
||||
08: similar
|
||||
09: design_analysis
|
||||
10: newstyle
|
||||
11: typecheck
|
||||
12: logging
|
||||
13: string_format
|
||||
14: string_constant
|
||||
15: stdlib
|
||||
16: python3
|
||||
17: refactoring
|
||||
18-50: not yet used: reserved for future internal checkers.
|
||||
51-99: perhaps used: reserved for external checkers
|
||||
|
||||
The raw_metrics checker has no number associated since it doesn't emit any
|
||||
messages nor reports. XXX not true, emit a 07 report !
|
||||
|
||||
"""
|
||||
|
||||
import sys
|
||||
import tokenize
|
||||
import warnings
|
||||
from typing import Any
|
||||
|
||||
from pylint.config import OptionsProviderMixIn
|
||||
from pylint.reporters import diff_string
|
||||
from pylint.utils import register_plugins
|
||||
from pylint.interfaces import UNDEFINED
|
||||
|
||||
|
||||
def table_lines_from_stats(stats, old_stats, columns):
    """get values listed in <columns> from <stats> and <old_stats>,
    and return a formatted list of values, designed to be given to a
    ureport.Table object

    :param stats: mapping of metric name -> current value
    :param old_stats: mapping of metric name -> previous value; keys may be
        missing, in which case "NC" (not computed) is reported
    :param columns: iterable of metric names to extract
    :returns: flat list of table cells: title, new value, old value, diff,
        one group of four per requested column
    """
    lines = []
    for m_type in columns:
        new = stats[m_type]
        # Floats are rendered with three decimals; everything else via str().
        # Using a dedicated name avoids shadowing the `format` builtin.
        fmt = (lambda num: "%.3f" % num) if isinstance(new, float) else str
        old = old_stats.get(m_type)
        if old is not None:
            diff_str = diff_string(old, new)
            old = fmt(old)
        else:
            # "NC" == not computed: there is no previous run to compare with.
            old, diff_str = "NC", "NC"
        lines += (m_type.replace("_", " "), fmt(new), old, diff_str)
    return lines
|
||||
|
||||
|
||||
class BaseChecker(OptionsProviderMixIn):
    """base class for checkers

    Subclasses declare their identity and configuration through the class
    attributes below and receive the active linter in the constructor.
    """

    # checker name (you may reuse an existing one); lower-cased in __init__
    name = None  # type: str
    # options level (0 will be displayed in --help, 1 in --long-help)
    level = 1
    # ordered list of options to control the checker behaviour
    options = ()  # type: Any
    # messages issued by this checker (msg id -> definition tuple)
    msgs = {}  # type: Any
    # reports issued by this checker
    reports = ()  # type: Any
    # mark this checker as enabled or not.
    enabled = True

    def __init__(self, linter=None):
        """checker instances should have the linter as argument

        linter is an object implementing ILinter
        """
        # Normalize the declared name so lookups are case-insensitive.
        if self.name is not None:
            self.name = self.name.lower()
        OptionsProviderMixIn.__init__(self)
        self.linter = linter

    def add_message(
        self,
        msg_id,
        line=None,
        node=None,
        args=None,
        confidence=UNDEFINED,
        col_offset=None,
    ):
        """add a message of a given type

        Thin forwarding wrapper around the linter's own add_message.
        """
        self.linter.add_message(msg_id, line, node, args, confidence, col_offset)

    # dummy methods implementing the IChecker interface

    def open(self):
        """called before visiting project (i.e set of modules)"""

    def close(self):
        """called after visiting project (i.e set of modules)"""
|
||||
|
||||
|
||||
class BaseTokenChecker(BaseChecker):
    """Base class for checkers that want to have access to the token stream."""

    def process_tokens(self, tokens):
        """Process the module's token stream; subclasses must override this."""
        raise NotImplementedError()
|
||||
|
||||
|
||||
def initialize(linter):
    """initialize linter with checkers in this package """
    # Register every checker module found in this package's directory.
    register_plugins(linter, __path__[0])


# Public API of this package.
__all__ = ("BaseChecker", "BaseTokenChecker", "initialize")
|
92
venv/lib/python3.6/site-packages/pylint/checkers/async.py
Normal file
92
venv/lib/python3.6/site-packages/pylint/checkers/async.py
Normal file
@@ -0,0 +1,92 @@
|
||||
# Copyright (c) 2015-2018 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2017 Derek Gustafson <degustaf@gmail.com>
|
||||
|
||||
# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
|
||||
# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
|
||||
|
||||
"""Checker for anything related to the async protocol (PEP 492)."""
|
||||
|
||||
import sys
|
||||
|
||||
import astroid
|
||||
from astroid import bases
|
||||
from astroid import exceptions
|
||||
|
||||
from pylint import checkers
|
||||
from pylint.checkers import utils as checker_utils
|
||||
from pylint import interfaces
|
||||
from pylint import utils
|
||||
from pylint.checkers.utils import decorated_with
|
||||
|
||||
|
||||
class AsyncChecker(checkers.BaseChecker):
    """Checker for anything related to the async protocol (PEP 492)."""

    __implements__ = interfaces.IAstroidChecker
    name = "async"
    msgs = {
        "E1700": (
            "Yield inside async function",
            "yield-inside-async-function",
            "Used when an `yield` or `yield from` statement is "
            "found inside an async function.",
            {"minversion": (3, 5)},
        ),
        "E1701": (
            "Async context manager '%s' doesn't implement __aenter__ and __aexit__.",
            "not-async-context-manager",
            "Used when an async context manager is used with an object "
            "that does not implement the async context management protocol.",
            {"minversion": (3, 5)},
        ),
    }

    def open(self):
        """Cache configuration before the project is visited."""
        # NOTE(review): reuses the global "ignore-mixin-members" option
        # (shared with the typecheck checker) — confirm this coupling is wanted.
        self._ignore_mixin_members = utils.get_global_option(
            self, "ignore-mixin-members"
        )
        # Decorators whose result is a valid async context manager even
        # though the decorated function is an async generator.
        self._async_generators = ["contextlib.asynccontextmanager"]

    @checker_utils.check_messages("yield-inside-async-function")
    def visit_asyncfunctiondef(self, node):
        # Flag yields belonging directly to this async function: any plain
        # `yield` on 3.5 (no async generators there), and `yield from`
        # on every version.
        for child in node.nodes_of_class(astroid.Yield):
            if child.scope() is node and (
                sys.version_info[:2] == (3, 5) or isinstance(child, astroid.YieldFrom)
            ):
                self.add_message("yield-inside-async-function", node=child)

    @checker_utils.check_messages("not-async-context-manager")
    def visit_asyncwith(self, node):
        # Verify each `async with` context manager supports the async
        # context management protocol (__aenter__/__aexit__).
        for ctx_mgr, _ in node.items:
            inferred = checker_utils.safe_infer(ctx_mgr)
            if inferred is None or inferred is astroid.Uninferable:
                continue

            if isinstance(inferred, bases.AsyncGenerator):
                # Check if we are dealing with a function decorated
                # with contextlib.asynccontextmanager.
                if decorated_with(inferred.parent, self._async_generators):
                    continue
            else:
                try:
                    inferred.getattr("__aenter__")
                    inferred.getattr("__aexit__")
                except exceptions.NotFoundError:
                    if isinstance(inferred, astroid.Instance):
                        # If we do not know the bases of this class,
                        # just skip it.
                        if not checker_utils.has_known_bases(inferred):
                            continue
                        # Just ignore mixin classes.
                        if self._ignore_mixin_members:
                            if inferred.name[-5:].lower() == "mixin":
                                continue
                    else:
                        continue

            self.add_message(
                "not-async-context-manager", node=node, args=(inferred.name,)
            )
|
||||
|
||||
|
||||
def register(linter):
    """required method to auto register this checker"""
    linter.register_checker(AsyncChecker(linter))
|
2222
venv/lib/python3.6/site-packages/pylint/checkers/base.py
Normal file
2222
venv/lib/python3.6/site-packages/pylint/checkers/base.py
Normal file
File diff suppressed because it is too large
Load Diff
1750
venv/lib/python3.6/site-packages/pylint/checkers/classes.py
Normal file
1750
venv/lib/python3.6/site-packages/pylint/checkers/classes.py
Normal file
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,530 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright (c) 2006, 2009-2010, 2012-2015 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
|
||||
# Copyright (c) 2012, 2014 Google, Inc.
|
||||
# Copyright (c) 2014-2018 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2014 Arun Persaud <arun@nubati.net>
|
||||
# Copyright (c) 2015 Ionel Cristian Maries <contact@ionelmc.ro>
|
||||
# Copyright (c) 2016 Łukasz Rogalski <rogalski.91@gmail.com>
|
||||
# Copyright (c) 2017 ahirnish <ahirnish@gmail.com>
|
||||
# Copyright (c) 2018 Mike Frysinger <vapier@gmail.com>
|
||||
# Copyright (c) 2018 Mark Miller <725mrm@gmail.com>
|
||||
# Copyright (c) 2018 Ashley Whetter <ashley@awhetter.co.uk>
|
||||
# Copyright (c) 2018 Ville Skyttä <ville.skytta@upcloud.com>
|
||||
# Copyright (c) 2018 Jakub Wilk <jwilk@jwilk.net>
|
||||
|
||||
# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
|
||||
# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
|
||||
|
||||
"""check for signs of poor design"""
|
||||
|
||||
from collections import defaultdict
|
||||
import re
|
||||
|
||||
import astroid
|
||||
from astroid import If, BoolOp
|
||||
from astroid import decorators
|
||||
|
||||
from pylint.interfaces import IAstroidChecker
|
||||
from pylint.checkers import BaseChecker
|
||||
from pylint.checkers.utils import check_messages
|
||||
from pylint import utils
|
||||
|
||||
|
||||
# Message definitions for the design checker: message id ->
# (format template, symbolic name, description).
MSGS = {
    "R0901": (
        "Too many ancestors (%s/%s)",
        "too-many-ancestors",
        "Used when class has too many parent classes, try to reduce "
        "this to get a simpler (and so easier to use) class.",
    ),
    "R0902": (
        "Too many instance attributes (%s/%s)",
        "too-many-instance-attributes",
        "Used when class has too many instance attributes, try to reduce "
        "this to get a simpler (and so easier to use) class.",
    ),
    "R0903": (
        "Too few public methods (%s/%s)",
        "too-few-public-methods",
        "Used when class has too few public methods, so be sure it's "
        "really worth it.",
    ),
    "R0904": (
        "Too many public methods (%s/%s)",
        "too-many-public-methods",
        "Used when class has too many public methods, try to reduce "
        "this to get a simpler (and so easier to use) class.",
    ),
    "R0911": (
        "Too many return statements (%s/%s)",
        "too-many-return-statements",
        "Used when a function or method has too many return statement, "
        "making it hard to follow.",
    ),
    "R0912": (
        "Too many branches (%s/%s)",
        "too-many-branches",
        "Used when a function or method has too many branches, "
        "making it hard to follow.",
    ),
    "R0913": (
        "Too many arguments (%s/%s)",
        "too-many-arguments",
        "Used when a function or method takes too many arguments.",
    ),
    "R0914": (
        "Too many local variables (%s/%s)",
        "too-many-locals",
        "Used when a function or method has too many local variables.",
    ),
    "R0915": (
        "Too many statements (%s/%s)",
        "too-many-statements",
        "Used when a function or method has too many statements. You "
        "should then split it in smaller functions / methods.",
    ),
    "R0916": (
        "Too many boolean expressions in if statement (%s/%s)",
        "too-many-boolean-expressions",
        "Used when an if statement contains too many boolean expressions.",
    ),
}
# Matches dunder names such as __init__ / __repr__ (lowercase letters only).
SPECIAL_OBJ = re.compile("^_{2}[a-z]+_{2}$")
# Names used to recognize 3.7+ dataclasses by their decorator/import.
DATACLASS_DECORATOR = "dataclass"
DATACLASS_IMPORT = "dataclasses"
# Qualified name of the typing.NamedTuple base class.
TYPING_NAMEDTUPLE = "typing.NamedTuple"
|
||||
|
||||
|
||||
def _is_typing_namedtuple(node: astroid.ClassDef) -> bool:
    """Check if a class node is a typing.NamedTuple class"""
    # A NamedTuple subclass has typing.NamedTuple somewhere in its MRO.
    return any(
        ancestor.qname() == TYPING_NAMEDTUPLE for ancestor in node.ancestors()
    )
|
||||
|
||||
|
||||
def _is_enum_class(node: astroid.ClassDef) -> bool:
    """Check if a class definition defines an Enum class.

    :param node: The class node to check.
    :type node: astroid.ClassDef

    :returns: True if the given node represents an Enum class. False otherwise.
    :rtype: bool
    """
    for base in node.bases:
        try:
            inferred_bases = base.inferred()
        except astroid.InferenceError:
            continue

        # Only class definitions can make this an Enum; skip anything else
        # the inference produced (instances, modules, Uninferable, ...).
        class_ancestors = (
            ancestor
            for ancestor in inferred_bases
            if isinstance(ancestor, astroid.ClassDef)
        )
        if any(
            ancestor.name == "Enum" and ancestor.root().name == "enum"
            for ancestor in class_ancestors
        ):
            return True

    return False
|
||||
|
||||
|
||||
def _is_dataclass(node: astroid.ClassDef) -> bool:
    """Check if a class definition defines a Python 3.7+ dataclass

    :param node: The class node to check.
    :type node: astroid.ClassDef

    :returns: True if the given node represents a dataclass class. False otherwise.
    :rtype: bool
    """
    if not node.decorators:
        return False

    module_locals = node.root().locals
    for decorator in node.decorators.nodes:
        # `@dataclass(...)` with arguments is a Call; unwrap to the callee.
        target = decorator.func if isinstance(decorator, astroid.Call) else decorator
        if isinstance(target, astroid.Name):
            name = target.name
        elif isinstance(target, astroid.Attribute):
            name = target.attrname
        else:
            continue
        # The decorator must be named `dataclass` AND that name must be
        # bound at module level (i.e. actually imported).
        if name == DATACLASS_DECORATOR and DATACLASS_DECORATOR in module_locals:
            return True
    return False
|
||||
|
||||
|
||||
def _count_boolean_expressions(bool_op):
    """Counts the number of boolean expressions in BoolOp `bool_op` (recursive)

    example: a and (b or c or (d and e)) ==> 5 boolean expressions
    """
    # A nested BoolOp contributes its own leaf count; every other child is
    # a single boolean expression.
    return sum(
        _count_boolean_expressions(child) if isinstance(child, BoolOp) else 1
        for child in bool_op.get_children()
    )
|
||||
|
||||
|
||||
def _count_methods_in_class(node):
    """Return the number of "public" methods visible on *node*.

    Methods without a leading underscore (including inherited ones) all
    count.  Special methods count towards the number of public methods,
    but don't count towards there being too many methods — except
    ``__init__``, which is excluded.
    """
    plain_public = sum(
        1 for method in node.methods() if not method.name.startswith("_")
    )
    own_dunders = sum(
        1
        for method in node.mymethods()
        if SPECIAL_OBJ.search(method.name) and method.name != "__init__"
    )
    return plain_public + own_dunders
|
||||
|
||||
|
||||
class MisdesignChecker(BaseChecker):
    """checks for sign of poor/misdesign:
    * number of methods, attributes, local variables...
    * size, complexity of functions, methods
    """

    __implements__ = (IAstroidChecker,)

    # configuration section name
    name = "design"
    # messages
    msgs = MSGS
    priority = -2
    # configuration options
    options = (
        (
            "max-args",
            {
                "default": 5,
                "type": "int",
                "metavar": "<int>",
                "help": "Maximum number of arguments for function / method.",
            },
        ),
        (
            "max-locals",
            {
                "default": 15,
                "type": "int",
                "metavar": "<int>",
                "help": "Maximum number of locals for function / method body.",
            },
        ),
        (
            "max-returns",
            {
                "default": 6,
                "type": "int",
                "metavar": "<int>",
                "help": "Maximum number of return / yield for function / "
                "method body.",
            },
        ),
        (
            "max-branches",
            {
                "default": 12,
                "type": "int",
                "metavar": "<int>",
                "help": "Maximum number of branch for function / method body.",
            },
        ),
        (
            "max-statements",
            {
                "default": 50,
                "type": "int",
                "metavar": "<int>",
                "help": "Maximum number of statements in function / method " "body.",
            },
        ),
        (
            "max-parents",
            {
                "default": 7,
                "type": "int",
                "metavar": "<num>",
                "help": "Maximum number of parents for a class (see R0901).",
            },
        ),
        (
            "max-attributes",
            {
                "default": 7,
                "type": "int",
                "metavar": "<num>",
                "help": "Maximum number of attributes for a class \
(see R0902).",
            },
        ),
        (
            "min-public-methods",
            {
                "default": 2,
                "type": "int",
                "metavar": "<num>",
                "help": "Minimum number of public methods for a class \
(see R0903).",
            },
        ),
        (
            "max-public-methods",
            {
                "default": 20,
                "type": "int",
                "metavar": "<num>",
                "help": "Maximum number of public methods for a class \
(see R0904).",
            },
        ),
        (
            "max-bool-expr",
            {
                "default": 5,
                "type": "int",
                "metavar": "<num>",
                "help": "Maximum number of boolean expressions in an if " "statement.",
            },
        ),
    )

    def __init__(self, linter=None):
        BaseChecker.__init__(self, linter)
        # Statistics dict supplied by the linter (populated in open()).
        self.stats = None
        # Stack of return counts, one entry per function being visited.
        self._returns = None
        # Mapping: scope node -> number of branches counted in that scope.
        self._branches = None
        # Stack of statement counts, parallel to the function nesting.
        self._stmts = None

    def open(self):
        """initialize visit variables"""
        self.stats = self.linter.add_stats()
        self._returns = []
        self._branches = defaultdict(int)
        self._stmts = []

    def _inc_all_stmts(self, amount):
        # A statement inside a nested function counts for every enclosing
        # function currently on the stack.
        for i in range(len(self._stmts)):
            self._stmts[i] += amount

    @decorators.cachedproperty
    def _ignored_argument_names(self):
        # Compiled pattern (or None) of argument names excluded from counts.
        return utils.get_global_option(self, "ignored-argument-names", default=None)

    @check_messages(
        "too-many-ancestors",
        "too-many-instance-attributes",
        "too-few-public-methods",
        "too-many-public-methods",
    )
    def visit_classdef(self, node):
        """check size of inheritance hierarchy and number of instance attributes
        """
        nb_parents = len(list(node.ancestors()))
        if nb_parents > self.config.max_parents:
            self.add_message(
                "too-many-ancestors",
                node=node,
                args=(nb_parents, self.config.max_parents),
            )

        if len(node.instance_attrs) > self.config.max_attributes:
            self.add_message(
                "too-many-instance-attributes",
                node=node,
                args=(len(node.instance_attrs), self.config.max_attributes),
            )

    @check_messages("too-few-public-methods", "too-many-public-methods")
    def leave_classdef(self, node):
        """check number of public methods"""
        my_methods = sum(
            1 for method in node.mymethods() if not method.name.startswith("_")
        )

        # Does the class contain less than n public methods ?
        # This checks only the methods defined in the current class,
        # since the user might not have control over the classes
        # from the ancestors. It avoids some false positives
        # for classes such as unittest.TestCase, which provides
        # a lot of assert methods. It doesn't make sense to warn
        # when the user subclasses TestCase to add his own tests.
        if my_methods > self.config.max_public_methods:
            self.add_message(
                "too-many-public-methods",
                node=node,
                args=(my_methods, self.config.max_public_methods),
            )

        # Stop here for exception, metaclass, interface classes and other
        # classes for which we don't need to count the methods.
        if (
            node.type != "class"
            or _is_enum_class(node)
            or _is_dataclass(node)
            or _is_typing_namedtuple(node)
        ):
            return

        # Does the class contain more than n public methods ?
        # This checks all the methods defined by ancestors and
        # by the current class.
        all_methods = _count_methods_in_class(node)
        if all_methods < self.config.min_public_methods:
            self.add_message(
                "too-few-public-methods",
                node=node,
                args=(all_methods, self.config.min_public_methods),
            )

    @check_messages(
        "too-many-return-statements",
        "too-many-branches",
        "too-many-arguments",
        "too-many-locals",
        "too-many-statements",
        "keyword-arg-before-vararg",
    )
    def visit_functiondef(self, node):
        """check function name, docstring, arguments, redefinition,
        variable names, max locals
        """
        # init branch and returns counters
        self._returns.append(0)
        # check number of arguments
        args = node.args.args
        ignored_argument_names = self._ignored_argument_names
        if args is not None:
            ignored_args_num = 0
            if ignored_argument_names:
                ignored_args_num = sum(
                    1 for arg in args if ignored_argument_names.match(arg.name)
                )

            argnum = len(args) - ignored_args_num
            if argnum > self.config.max_args:
                # NOTE(review): the threshold test uses argnum (ignored args
                # excluded) but the reported count is len(args) — looks
                # inconsistent; confirm which figure is intended.
                self.add_message(
                    "too-many-arguments",
                    node=node,
                    args=(len(args), self.config.max_args),
                )
        else:
            ignored_args_num = 0
        # check number of local variables
        locnum = len(node.locals) - ignored_args_num
        if locnum > self.config.max_locals:
            self.add_message(
                "too-many-locals", node=node, args=(locnum, self.config.max_locals)
            )
        # init new statements counter
        self._stmts.append(1)

    visit_asyncfunctiondef = visit_functiondef

    @check_messages(
        "too-many-return-statements",
        "too-many-branches",
        "too-many-arguments",
        "too-many-locals",
        "too-many-statements",
    )
    def leave_functiondef(self, node):
        """most of the work is done here on close:
        checks for max returns, branch, return in __init__
        """
        returns = self._returns.pop()
        if returns > self.config.max_returns:
            self.add_message(
                "too-many-return-statements",
                node=node,
                args=(returns, self.config.max_returns),
            )
        branches = self._branches[node]
        if branches > self.config.max_branches:
            self.add_message(
                "too-many-branches",
                node=node,
                args=(branches, self.config.max_branches),
            )
        # check number of statements
        stmts = self._stmts.pop()
        if stmts > self.config.max_statements:
            self.add_message(
                "too-many-statements",
                node=node,
                args=(stmts, self.config.max_statements),
            )

    leave_asyncfunctiondef = leave_functiondef

    def visit_return(self, _):
        """count number of returns"""
        if not self._returns:
            return  # return outside function, reported by the base checker
        self._returns[-1] += 1

    def visit_default(self, node):
        """default visit method -> increments the statements counter if
        necessary
        """
        if node.is_statement:
            self._inc_all_stmts(1)

    def visit_tryexcept(self, node):
        """increments the branches counter"""
        # One branch per handler, plus one for an `else` clause.
        branches = len(node.handlers)
        if node.orelse:
            branches += 1
        self._inc_branch(node, branches)
        self._inc_all_stmts(branches)

    def visit_tryfinally(self, node):
        """increments the branches counter"""
        self._inc_branch(node, 2)
        self._inc_all_stmts(2)

    @check_messages("too-many-boolean-expressions")
    def visit_if(self, node):
        """increments the branches counter and checks boolean expressions"""
        self._check_boolean_expressions(node)
        branches = 1
        # don't double count If nodes coming from some 'elif'
        if node.orelse and (len(node.orelse) > 1 or not isinstance(node.orelse[0], If)):
            branches += 1
        self._inc_branch(node, branches)
        self._inc_all_stmts(branches)

    def _check_boolean_expressions(self, node):
        """Go through "if" node `node` and counts its boolean expressions

        if the "if" node test is a BoolOp node
        """
        condition = node.test
        if not isinstance(condition, BoolOp):
            return
        nb_bool_expr = _count_boolean_expressions(condition)
        if nb_bool_expr > self.config.max_bool_expr:
            self.add_message(
                "too-many-boolean-expressions",
                node=condition,
                args=(nb_bool_expr, self.config.max_bool_expr),
            )

    def visit_while(self, node):
        """increments the branches counter"""
        branches = 1
        if node.orelse:
            branches += 1
        self._inc_branch(node, branches)

    visit_for = visit_while

    def _inc_branch(self, node, branchesnum=1):
        """increments the branches counter"""
        self._branches[node.scope()] += branchesnum
|
||||
|
||||
|
||||
def register(linter):
    """required method to auto register this checker """
    linter.register_checker(MisdesignChecker(linter))
|
561
venv/lib/python3.6/site-packages/pylint/checkers/exceptions.py
Normal file
561
venv/lib/python3.6/site-packages/pylint/checkers/exceptions.py
Normal file
@@ -0,0 +1,561 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright (c) 2006-2011, 2013-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
|
||||
# Copyright (c) 2011-2014 Google, Inc.
|
||||
# Copyright (c) 2012 Tim Hatch <tim@timhatch.com>
|
||||
# Copyright (c) 2013-2018 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2014 Brett Cannon <brett@python.org>
|
||||
# Copyright (c) 2014 Arun Persaud <arun@nubati.net>
|
||||
# Copyright (c) 2015 Rene Zhang <rz99@cornell.edu>
|
||||
# Copyright (c) 2015 Florian Bruhin <me@the-compiler.org>
|
||||
# Copyright (c) 2015 Steven Myint <hg@stevenmyint.com>
|
||||
# Copyright (c) 2015 Ionel Cristian Maries <contact@ionelmc.ro>
|
||||
# Copyright (c) 2016 Erik <erik.eriksson@yahoo.com>
|
||||
# Copyright (c) 2016 Jakub Wilk <jwilk@jwilk.net>
|
||||
# Copyright (c) 2017 Łukasz Rogalski <rogalski.91@gmail.com>
|
||||
# Copyright (c) 2017 Martin von Gagern <gagern@google.com>
|
||||
# Copyright (c) 2018 Mike Frysinger <vapier@gmail.com>
|
||||
# Copyright (c) 2018 ssolanki <sushobhitsolanki@gmail.com>
|
||||
# Copyright (c) 2018 Alexander Todorov <atodorov@otb.bg>
|
||||
# Copyright (c) 2018 Ville Skyttä <ville.skytta@upcloud.com>
|
||||
|
||||
# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
|
||||
# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
|
||||
|
||||
"""Checks for various exception related errors."""
|
||||
import builtins
|
||||
import inspect
|
||||
import sys
|
||||
import typing
|
||||
|
||||
import astroid
|
||||
|
||||
from pylint import checkers
|
||||
from pylint.checkers import utils
|
||||
from pylint import interfaces
|
||||
|
||||
|
||||
def _builtin_exceptions():
|
||||
def predicate(obj):
|
||||
return isinstance(obj, type) and issubclass(obj, BaseException)
|
||||
|
||||
members = inspect.getmembers(builtins, predicate)
|
||||
return {exc.__name__ for (_, exc) in members}
|
||||
|
||||
|
||||
def _annotated_unpack_infer(stmt, context=None):
    """
    Recursively generate nodes inferred by the given statement.
    If the inferred value is a list or a tuple, recurse on the elements.
    Returns an iterator which yields tuples in the format
    ('original node', 'infered node').
    """
    if isinstance(stmt, (astroid.List, astroid.Tuple)):
        # Yield each element together with its inferred value; elements
        # whose inference failed or is Uninferable are silently dropped.
        for elt in stmt.elts:
            inferred = utils.safe_infer(elt)
            if inferred and inferred is not astroid.Uninferable:
                yield elt, inferred
        # Do not also infer the container itself once unpacked.
        return
    for infered in stmt.infer(context):
        if infered is astroid.Uninferable:
            continue
        yield stmt, infered
|
||||
|
||||
|
||||
def _is_raising(body: typing.List) -> bool:
|
||||
"""Return true if the given statement node raise an exception"""
|
||||
for node in body:
|
||||
if isinstance(node, astroid.Raise):
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
# True on any Python 3 interpreter; used for version-dependent checks.
PY3K = sys.version_info >= (3, 0)
# Exception names considered too generic to catch deliberately.
OVERGENERAL_EXCEPTIONS = ("BaseException", "Exception")
# Name of the builtins module ("builtins" on Python 3).
BUILTINS_NAME = builtins.__name__

# Message definitions for the exceptions checker: message id ->
# (format template, symbolic name, description).
MSGS = {
    "E0701": (
        "Bad except clauses order (%s)",
        "bad-except-order",
        "Used when except clauses are not in the correct order (from the "
        "more specific to the more generic). If you don't fix the order, "
        "some exceptions may not be caught by the most specific handler.",
    ),
    "E0702": (
        "Raising %s while only classes or instances are allowed",
        "raising-bad-type",
        "Used when something which is neither a class, an instance or a "
        "string is raised (i.e. a `TypeError` will be raised).",
    ),
    "E0703": (
        "Exception context set to something which is not an exception, nor None",
        "bad-exception-context",
        'Used when using the syntax "raise ... from ...", '
        "where the exception context is not an exception, "
        "nor None.",
    ),
    "E0704": (
        "The raise statement is not inside an except clause",
        "misplaced-bare-raise",
        "Used when a bare raise is not used inside an except clause. "
        "This generates an error, since there are no active exceptions "
        "to be reraised. An exception to this rule is represented by "
        "a bare raise inside a finally clause, which might work, as long "
        "as an exception is raised inside the try block, but it is "
        "nevertheless a code smell that must not be relied upon.",
    ),
    "E0710": (
        "Raising a new style class which doesn't inherit from BaseException",
        "raising-non-exception",
        "Used when a new style class which doesn't inherit from "
        "BaseException is raised.",
    ),
    "E0711": (
        "NotImplemented raised - should raise NotImplementedError",
        "notimplemented-raised",
        "Used when NotImplemented is raised instead of NotImplementedError",
    ),
    "E0712": (
        "Catching an exception which doesn't inherit from Exception: %s",
        "catching-non-exception",
        "Used when a class which doesn't inherit from "
        "Exception is used as an exception in an except clause.",
    ),
    "W0702": (
        "No exception type(s) specified",
        "bare-except",
        "Used when an except clause doesn't specify exceptions type to catch.",
    ),
    "W0703": (
        "Catching too general exception %s",
        "broad-except",
        "Used when an except catches a too general exception, "
        "possibly burying unrelated errors.",
    ),
    "W0705": (
        "Catching previously caught exception type %s",
        "duplicate-except",
        "Used when an except catches a type that was already caught by "
        "a previous handler.",
    ),
    "W0706": (
        "The except handler raises immediately",
        "try-except-raise",
        "Used when an except handler uses raise as its first or only "
        "operator. This is useless because it raises back the exception "
        "immediately. Remove the raise operator or the entire "
        "try-except-raise block!",
    ),
    "W0711": (
        'Exception to catch is the result of a binary "%s" operation',
        "binary-op-exception",
        "Used when the exception to catch is of the form "
        '"except A or B:". If intending to catch multiple, '
        'rewrite as "except (A, B):"',
    ),
    "W0715": (
        "Exception arguments suggest string formatting might be intended",
        "raising-format-tuple",
        "Used when passing multiple arguments to an exception "
        "constructor, the first of them a string literal containing what "
        "appears to be placeholders intended for formatting",
    ),
    "W0716": (
        "Invalid exception operation. %s",
        "wrong-exception-operation",
        "Used when an operation is done against an exception, but the operation "
        "is not valid for the exception in question. Usually emitted when having "
        "binary operations between exceptions in except handlers.",
    ),
}
|
||||
|
||||
|
||||
class BaseVisitor:
    """Base class for visitors defined in this module.

    Concrete subclasses add ``visit_<nodetype>`` methods; :meth:`visit`
    dispatches on the lowered class name of the visited node and falls
    back to :meth:`visit_default` when no specific handler exists.
    """

    def __init__(self, checker, node):
        # The checker used to emit messages, and the node messages are
        # attached to.
        self._checker = checker
        self._node = node

    def visit(self, node):
        """Dispatch *node* to ``visit_<classname>`` or ``visit_default``."""
        handler_name = "visit_" + node.__class__.__name__.lower()
        handler = getattr(self, handler_name, self.visit_default)
        handler(node)

    def visit_default(self, node):  # pylint: disable=unused-argument
        """Default implementation for all the nodes."""
|
||||
|
||||
|
||||
class ExceptionRaiseRefVisitor(BaseVisitor):
    """Visit references (anything that is not an AST leaf)."""

    def visit_name(self, name):
        """Flag ``raise NotImplemented`` (the constant, not NotImplementedError)."""
        if name.name == "NotImplemented":
            self._checker.add_message("notimplemented-raised", node=self._node)

    def visit_call(self, call):
        """Inspect a ``raise SomeCallable(...)`` expression."""
        if isinstance(call.func, astroid.Name):
            # The callee itself might be the NotImplemented constant.
            self.visit_name(call.func)

        if len(call.args) <= 1:
            return
        first_arg = call.args[0]
        if not isinstance(first_arg, astroid.Const):
            return
        if not isinstance(first_arg.value, str):
            return
        # Multiple constructor arguments whose first is a string containing
        # %-style or {}-style placeholders suggest the author meant to format
        # the message rather than pass extra arguments.
        text = first_arg.value
        if "%" in text or ("{" in text and "}" in text):
            self._checker.add_message("raising-format-tuple", node=self._node)
|
||||
|
||||
|
||||
class ExceptionRaiseLeafVisitor(BaseVisitor):
    """Visitor for handling leaf kinds of a raise value."""

    def visit_const(self, const):
        """Raising a non-string constant is always a bad type."""
        if not isinstance(const.value, str):
            # raising-string will be emitted from python3 porting checker.
            self._checker.add_message(
                "raising-bad-type", node=self._node, args=const.value.__class__.__name__
            )

    def visit_instance(self, instance):
        """Check an instance by checking the class it was instantiated from."""
        # pylint: disable=protected-access
        cls = instance._proxied
        self.visit_classdef(cls)

    # Exception instances have a particular class type
    visit_exceptioninstance = visit_instance

    def visit_classdef(self, cls):
        """Emit when the raised class does not derive from a standard exception.

        Only emitted when the class hierarchy is fully known, to avoid
        false positives on classes with uninferable bases.
        """
        if not utils.inherit_from_std_ex(cls) and utils.has_known_bases(cls):
            if cls.newstyle:
                self._checker.add_message("raising-non-exception", node=self._node)
            else:
                # Old-style classes (Python 2 only) get a distinct message.
                self._checker.add_message("nonstandard-exception", node=self._node)

    def visit_tuple(self, tuple_node):
        """Raising a tuple: an error on Python 3, legal (with caveats) on 2."""
        if PY3K or not tuple_node.elts:
            self._checker.add_message("raising-bad-type", node=self._node, args="tuple")
            return

        # On Python 2, using the following is not an error:
        #    raise (ZeroDivisionError, None)
        #    raise (ZeroDivisionError, )
        # What's left to do is to check that the first
        # argument is indeed an exception. Verifying the other arguments
        # is not the scope of this check.
        first = tuple_node.elts[0]
        inferred = utils.safe_infer(first)
        if not inferred or inferred is astroid.Uninferable:
            return

        if (
            isinstance(inferred, astroid.Instance)
            and inferred.__class__.__name__ != "Instance"
        ):
            # TODO: explain why
            # NOTE(review): this branch matches Instance *subclasses*
            # (e.g. ExceptionInstance) but not plain Instance objects —
            # presumably so specific instance kinds fall through to the
            # generic bad-type report; confirm against the test suite.
            self.visit_default(tuple_node)
        else:
            self.visit(inferred)

    def visit_default(self, node):
        """Anything else that ends up here is not a valid raise target."""
        name = getattr(node, "name", node.__class__.__name__)
        self._checker.add_message("raising-bad-type", node=self._node, args=name)
|
||||
|
||||
|
||||
class ExceptionsChecker(checkers.BaseChecker):
    """Exception related checks.

    Covers both ``raise`` statements (via the two visitor classes above)
    and ``try``/``except`` structure (handler order, bare/broad excepts,
    duplicate handlers, catching non-exceptions).
    """

    __implements__ = interfaces.IAstroidChecker

    name = "exceptions"
    msgs = MSGS
    priority = -4
    options = (
        (
            "overgeneral-exceptions",
            {
                "default": OVERGENERAL_EXCEPTIONS,
                "type": "csv",
                "metavar": "<comma-separated class names>",
                "help": "Exceptions that will emit a warning "
                'when being caught. Defaults to "%s".'
                % (", ".join(OVERGENERAL_EXCEPTIONS),),
            },
        ),
    )

    def open(self):
        """Initialize per-run state: the set of builtin exception names."""
        self._builtin_exceptions = _builtin_exceptions()
        super(ExceptionsChecker, self).open()

    @utils.check_messages(
        "nonstandard-exception",
        "misplaced-bare-raise",
        "raising-bad-type",
        "raising-non-exception",
        "notimplemented-raised",
        "bad-exception-context",
        "raising-format-tuple",
    )
    def visit_raise(self, node):
        """Run all raise-related checks on a ``raise`` statement."""
        if node.exc is None:
            # Bare ``raise``: only valid inside an except handler.
            self._check_misplaced_bare_raise(node)
            return

        if PY3K and node.cause:
            # ``raise X from Y`` exists only on Python 3.
            self._check_bad_exception_context(node)

        expr = node.exc
        # First check the raw expression (names, calls)...
        ExceptionRaiseRefVisitor(self, node).visit(expr)

        try:
            inferred_value = expr.inferred()[-1]
        except astroid.InferenceError:
            pass
        else:
            # ...then check what it actually evaluates to.
            if inferred_value:
                ExceptionRaiseLeafVisitor(self, node).visit(inferred_value)

    def _check_misplaced_bare_raise(self, node):
        """Emit misplaced-bare-raise unless the bare raise is in a handler.

        A bare ``raise`` re-raises the active exception, so it only makes
        sense inside an except handler (or ``__exit__``, which may be
        invoked with an in-flight exception).
        """
        # Filter out if it's present in __exit__.
        scope = node.scope()
        if (
            isinstance(scope, astroid.FunctionDef)
            and scope.is_method()
            and scope.name == "__exit__"
        ):
            return

        current = node
        # Stop when a new scope is generated or when the raise
        # statement is found inside a TryFinally.
        ignores = (astroid.ExceptHandler, astroid.FunctionDef)
        while current and not isinstance(current.parent, ignores):
            current = current.parent

        expected = (astroid.ExceptHandler,)
        if not current or not isinstance(current.parent, expected):
            self.add_message("misplaced-bare-raise", node=node)

    def _check_bad_exception_context(self, node):
        """Verify that the exception context is properly set.

        An exception context can be only `None` or an exception.
        """
        cause = utils.safe_infer(node.cause)
        if cause in (astroid.Uninferable, None):
            # Could not infer the cause; give it the benefit of the doubt.
            return

        if isinstance(cause, astroid.Const):
            # ``raise X from None`` is the only legal constant form.
            if cause.value is not None:
                self.add_message("bad-exception-context", node=node)
        elif not isinstance(cause, astroid.ClassDef) and not utils.inherit_from_std_ex(
            cause
        ):
            self.add_message("bad-exception-context", node=node)

    def _check_catching_non_exception(self, handler, exc, part):
        """Emit catching-non-exception when a handler catches a non-exception.

        ``exc`` is the inferred value of one component of the handler's
        type expression; ``part`` is the corresponding AST node, used for
        the message argument.
        """
        if isinstance(exc, astroid.Tuple):
            # Check if it is a tuple of exceptions.
            inferred = [utils.safe_infer(elt) for elt in exc.elts]
            if any(node is astroid.Uninferable for node in inferred):
                # Don't emit if we don't know every component.
                return
            if all(
                node
                and (utils.inherit_from_std_ex(node) or not utils.has_known_bases(node))
                for node in inferred
            ):
                return

        if not isinstance(exc, astroid.ClassDef):
            # Don't emit the warning if the infered stmt
            # is None, but the exception handler is something else,
            # maybe it was redefined.
            if isinstance(exc, astroid.Const) and exc.value is None:
                if (
                    isinstance(handler.type, astroid.Const)
                    and handler.type.value is None
                ) or handler.type.parent_of(exc):
                    # If the exception handler catches None or
                    # the exception component, which is None, is
                    # defined by the entire exception handler, then
                    # emit a warning.
                    self.add_message(
                        "catching-non-exception",
                        node=handler.type,
                        args=(part.as_string(),),
                    )
            else:
                # Any other non-class value (instance, module, ...) is not
                # catchable: report it.
                self.add_message(
                    "catching-non-exception",
                    node=handler.type,
                    args=(part.as_string(),),
                )
            return

        if (
            not utils.inherit_from_std_ex(exc)
            and exc.name not in self._builtin_exceptions
        ):
            # Only emit when the hierarchy is fully known; uninferable
            # bases might legitimately lead back to Exception.
            if utils.has_known_bases(exc):
                self.add_message(
                    "catching-non-exception", node=handler.type, args=(exc.name,)
                )

    def _check_try_except_raise(self, node):
        """Emit try-except-raise for handlers that immediately re-raise.

        A bare ``raise`` as the first statement of a handler is useless —
        unless a *later* handler would have caught a subclass of what the
        bare handler caught, in which case the re-raise changes behavior.
        """

        def gather_exceptions_from_handler(handler):
            # Returns the exception nodes a handler catches: a set of Name
            # nodes for a tuple type, a one-element list otherwise, or an
            # empty list when nothing could be determined.
            exceptions = []
            if handler.type:
                exceptions_in_handler = utils.safe_infer(handler.type)
                if isinstance(exceptions_in_handler, astroid.Tuple):
                    exceptions = {
                        exception
                        for exception in exceptions_in_handler.elts
                        if isinstance(exception, astroid.Name)
                    }
                elif exceptions_in_handler:
                    exceptions = [exceptions_in_handler]
            return exceptions

        bare_raise = False
        handler_having_bare_raise = None
        excs_in_bare_handler = []
        for handler in node.handlers:
            if bare_raise:
                # check that subsequent handler is not parent of handler which had bare raise.
                # since utils.safe_infer can fail for bare except, check it before.
                # also break early if bare except is followed by bare except.

                excs_in_current_handler = gather_exceptions_from_handler(handler)
                if not excs_in_current_handler:
                    bare_raise = False
                    break

                for exc_in_current_handler in excs_in_current_handler:
                    inferred_current = utils.safe_infer(exc_in_current_handler)
                    if any(
                        utils.is_subclass_of(
                            utils.safe_infer(exc_in_bare_handler), inferred_current
                        )
                        for exc_in_bare_handler in excs_in_bare_handler
                    ):
                        # A later handler catches a superclass: the earlier
                        # re-raise is meaningful, so don't flag it.
                        bare_raise = False
                        break

            # `raise` as the first operator inside the except handler
            if _is_raising([handler.body[0]]):
                # flags when there is a bare raise
                if handler.body[0].exc is None:
                    bare_raise = True
                    handler_having_bare_raise = handler
                    excs_in_bare_handler = gather_exceptions_from_handler(handler)
        if bare_raise:
            self.add_message("try-except-raise", node=handler_having_bare_raise)

    @utils.check_messages("wrong-exception-operation")
    def visit_binop(self, node):
        """Flag binary operators used where an exception tuple was meant."""
        if isinstance(node.parent, astroid.ExceptHandler):
            # except (V | A)
            suggestion = "Did you mean '(%s, %s)' instead?" % (
                node.left.as_string(),
                node.right.as_string(),
            )
            self.add_message("wrong-exception-operation", node=node, args=(suggestion,))

    @utils.check_messages("wrong-exception-operation")
    def visit_compare(self, node):
        """Flag comparison operators used where an exception tuple was meant."""
        if isinstance(node.parent, astroid.ExceptHandler):
            # except (V < A)
            suggestion = "Did you mean '(%s, %s)' instead?" % (
                node.left.as_string(),
                ", ".join(operand.as_string() for _, operand in node.ops),
            )
            self.add_message("wrong-exception-operation", node=node, args=(suggestion,))

    @utils.check_messages(
        "bare-except",
        "broad-except",
        "try-except-raise",
        "binary-op-exception",
        "bad-except-order",
        "catching-non-exception",
        "duplicate-except",
    )
    def visit_tryexcept(self, node):
        """check for empty except"""
        self._check_try_except_raise(node)
        exceptions_classes = []
        nb_handlers = len(node.handlers)
        for index, handler in enumerate(node.handlers):
            if handler.type is None:
                # ``except:`` with no type.
                if not _is_raising(handler.body):
                    self.add_message("bare-except", node=handler)

                # check if an "except:" is followed by some other
                # except
                if index < (nb_handlers - 1):
                    msg = "empty except clause should always appear last"
                    self.add_message("bad-except-order", node=node, args=msg)

            elif isinstance(handler.type, astroid.BoolOp):
                # ``except A or B:`` — a boolean expression, not a tuple.
                self.add_message(
                    "binary-op-exception", node=handler, args=handler.type.op
                )
            else:
                try:
                    excs = list(_annotated_unpack_infer(handler.type))
                except astroid.InferenceError:
                    continue

                for part, exc in excs:
                    if exc is astroid.Uninferable:
                        continue
                    if isinstance(exc, astroid.Instance) and utils.inherit_from_std_ex(
                        exc
                    ):
                        # Treat an exception *instance* as its class.
                        # pylint: disable=protected-access
                        exc = exc._proxied

                    self._check_catching_non_exception(handler, exc, part)

                    if not isinstance(exc, astroid.ClassDef):
                        continue

                    exc_ancestors = [
                        anc
                        for anc in exc.ancestors()
                        if isinstance(anc, astroid.ClassDef)
                    ]

                    # A handler for a subclass must come before one for its
                    # ancestor, otherwise it is dead code.
                    for previous_exc in exceptions_classes:
                        if previous_exc in exc_ancestors:
                            msg = "%s is an ancestor class of %s" % (
                                previous_exc.name,
                                exc.name,
                            )
                            self.add_message(
                                "bad-except-order", node=handler.type, args=msg
                            )
                    if (
                        exc.name in self.config.overgeneral_exceptions
                        and exc.root().name == utils.EXCEPTIONS_MODULE
                        and not _is_raising(handler.body)
                    ):
                        # Catching Exception/BaseException without re-raising.
                        self.add_message(
                            "broad-except", args=exc.name, node=handler.type
                        )

                    if exc in exceptions_classes:
                        self.add_message(
                            "duplicate-except", args=exc.name, node=handler.type
                        )

                exceptions_classes += [exc for _, exc in excs]
|
||||
|
||||
|
||||
def register(linter):
    """Required entry point so pylint auto-registers this checker."""
    checker = ExceptionsChecker(linter)
    linter.register_checker(checker)
|
1343
venv/lib/python3.6/site-packages/pylint/checkers/format.py
Normal file
1343
venv/lib/python3.6/site-packages/pylint/checkers/format.py
Normal file
File diff suppressed because it is too large
Load Diff
934
venv/lib/python3.6/site-packages/pylint/checkers/imports.py
Normal file
934
venv/lib/python3.6/site-packages/pylint/checkers/imports.py
Normal file
@@ -0,0 +1,934 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright (c) 2006-2015 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
|
||||
# Copyright (c) 2012-2014 Google, Inc.
|
||||
# Copyright (c) 2013 buck@yelp.com <buck@yelp.com>
|
||||
# Copyright (c) 2014-2018 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2014 Brett Cannon <brett@python.org>
|
||||
# Copyright (c) 2014 Arun Persaud <arun@nubati.net>
|
||||
# Copyright (c) 2015-2016 Moises Lopez <moylop260@vauxoo.com>
|
||||
# Copyright (c) 2015 Dmitry Pribysh <dmand@yandex.ru>
|
||||
# Copyright (c) 2015 Cezar <celnazli@bitdefender.com>
|
||||
# Copyright (c) 2015 Florian Bruhin <me@the-compiler.org>
|
||||
# Copyright (c) 2015 Noam Yorav-Raphael <noamraph@gmail.com>
|
||||
# Copyright (c) 2015 James Morgensen <james.morgensen@gmail.com>
|
||||
# Copyright (c) 2015 Ionel Cristian Maries <contact@ionelmc.ro>
|
||||
# Copyright (c) 2016 Jared Garst <cultofjared@gmail.com>
|
||||
# Copyright (c) 2016 Maik Röder <maikroeder@gmail.com>
|
||||
# Copyright (c) 2016 Glenn Matthews <glenn@e-dad.net>
|
||||
# Copyright (c) 2016 Ashley Whetter <ashley@awhetter.co.uk>
|
||||
# Copyright (c) 2017 hippo91 <guillaume.peillex@gmail.com>
|
||||
# Copyright (c) 2017 Michka Popoff <michkapopoff@gmail.com>
|
||||
# Copyright (c) 2017 Łukasz Rogalski <rogalski.91@gmail.com>
|
||||
# Copyright (c) 2017 Erik Wright <erik.wright@shopify.com>
|
||||
# Copyright (c) 2018 Mike Frysinger <vapier@gmail.com>
|
||||
# Copyright (c) 2018 Sushobhit <31987769+sushobhit27@users.noreply.github.com>
|
||||
# Copyright (c) 2018 Marianna Polatoglou <mpolatoglou@bloomberg.net>
|
||||
|
||||
# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
|
||||
# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
|
||||
|
||||
"""imports checkers for Python code"""
|
||||
|
||||
import collections
|
||||
from distutils import sysconfig
|
||||
import os
|
||||
import sys
|
||||
import copy
|
||||
|
||||
import astroid
|
||||
from astroid import are_exclusive, decorators
|
||||
from astroid.modutils import get_module_part, is_standard_module
|
||||
import isort
|
||||
|
||||
from pylint.interfaces import IAstroidChecker
|
||||
from pylint.utils import get_global_option
|
||||
from pylint.exceptions import EmptyReportError
|
||||
from pylint.checkers import BaseChecker
|
||||
from pylint.checkers.utils import (
|
||||
check_messages,
|
||||
node_ignores_exception,
|
||||
is_from_fallback_block,
|
||||
)
|
||||
from pylint.graph import get_cycles, DotBackend
|
||||
from pylint.reporters.ureports.nodes import VerbatimText, Paragraph
|
||||
|
||||
|
||||
def _qualified_names(modname):
|
||||
"""Split the names of the given module into subparts
|
||||
|
||||
For example,
|
||||
_qualified_names('pylint.checkers.ImportsChecker')
|
||||
returns
|
||||
['pylint', 'pylint.checkers', 'pylint.checkers.ImportsChecker']
|
||||
"""
|
||||
names = modname.split(".")
|
||||
return [".".join(names[0 : i + 1]) for i in range(len(names))]
|
||||
|
||||
|
||||
def _get_import_name(importnode, modname):
    """Get a prepared module name from the given import node.

    In the case of relative imports, this will return the
    absolute qualified module name, which might be useful
    for debugging. Otherwise, the initial module name
    is returned unchanged.
    """
    if not isinstance(importnode, astroid.ImportFrom) or not importnode.level:
        return modname
    # Relative import: resolve it against the module it lives in.
    root = importnode.root()
    if not isinstance(root, astroid.Module):
        return modname
    return root.relative_to_absolute_name(modname, level=importnode.level)
|
||||
|
||||
|
||||
def _get_first_import(node, context, name, base, level, alias):
    """return the node where [base.]<name> is imported or None if not found

    Scans the statements of *context* for an import (other than *node*
    itself, and not after *node* in the same scope) that already brings
    the same name into scope; used to detect re-imports.
    """
    fullname = "%s.%s" % (base, name) if base else name

    first = None
    found = False
    for first in context.body:
        if first is node:
            continue
        if first.scope() is node.scope() and first.fromlineno > node.fromlineno:
            # Only statements textually before *node* in the same scope count.
            continue
        if isinstance(first, astroid.Import):
            if any(fullname == iname[0] for iname in first.names):
                found = True
                break
        elif isinstance(first, astroid.ImportFrom):
            if level == first.level:
                for imported_name, imported_alias in first.names:
                    if fullname == "%s.%s" % (first.modname, imported_name):
                        found = True
                        break
                    if (
                        name != "*"
                        and name == imported_name
                        # An alias on either side means the bound names differ.
                        and not (alias or imported_alias)
                    ):
                        found = True
                        break
                if found:
                    break
    if found and not are_exclusive(first, node):
        # are_exclusive: imports on mutually exclusive branches (if/else,
        # try/except) are not duplicates of each other.
        return first
    return None
|
||||
|
||||
|
||||
def _ignore_import_failure(node, modname, ignored_modules):
    """Return True when a failed import of *modname* should not be reported.

    That is the case when the module (or any dotted prefix of it) is in
    *ignored_modules*, or when the import is guarded by a handler for
    ImportError.
    """
    if any(prefix in ignored_modules for prefix in _qualified_names(modname)):
        return True
    return node_ignores_exception(node, ImportError)
|
||||
|
||||
|
||||
# utilities to represents import dependencies as tree and dot graph ###########
|
||||
|
||||
|
||||
def _make_tree_defs(mod_files_list):
|
||||
"""get a list of 2-uple (module, list_of_files_which_import_this_module),
|
||||
it will return a dictionary to represent this as a tree
|
||||
"""
|
||||
tree_defs = {}
|
||||
for mod, files in mod_files_list:
|
||||
node = (tree_defs, ())
|
||||
for prefix in mod.split("."):
|
||||
node = node[0].setdefault(prefix, [{}, []])
|
||||
node[1] += files
|
||||
return tree_defs
|
||||
|
||||
|
||||
def _repr_tree_defs(data, indent_str=None):
|
||||
"""return a string which represents imports as a tree"""
|
||||
lines = []
|
||||
nodes = data.items()
|
||||
for i, (mod, (sub, files)) in enumerate(sorted(nodes, key=lambda x: x[0])):
|
||||
if not files:
|
||||
files = ""
|
||||
else:
|
||||
files = "(%s)" % ",".join(sorted(files))
|
||||
if indent_str is None:
|
||||
lines.append("%s %s" % (mod, files))
|
||||
sub_indent_str = " "
|
||||
else:
|
||||
lines.append(r"%s\-%s %s" % (indent_str, mod, files))
|
||||
if i == len(nodes) - 1:
|
||||
sub_indent_str = "%s " % indent_str
|
||||
else:
|
||||
sub_indent_str = "%s| " % indent_str
|
||||
if sub:
|
||||
lines.append(_repr_tree_defs(sub, sub_indent_str))
|
||||
return "\n".join(lines)
|
||||
|
||||
|
||||
def _dependencies_graph(filename, dep_info):
    """write dependencies as a dot (graphviz) file"""
    seen = set()
    printer = DotBackend(filename[:-4], rankdir="LR")
    printer.emit('URL="." node[shape="box"]')
    # Emit every node exactly once, then all the edges.
    for modname, dependencies in sorted(dep_info.items()):
        seen.add(modname)
        printer.emit_node(modname)
        for depmodname in dependencies:
            if depmodname not in seen:
                seen.add(depmodname)
                printer.emit_node(depmodname)
    for depmodname, dependencies in sorted(dep_info.items()):
        for modname in dependencies:
            printer.emit_edge(modname, depmodname)
    printer.generate(filename)
|
||||
|
||||
|
||||
def _make_graph(filename, dep_info, sect, gtype):
    """Generate a dependencies graph and record where it was written.

    Writes the dot file via ``_dependencies_graph`` and appends a note to
    the report section *sect*.
    """
    _dependencies_graph(filename, dep_info)
    message = "%simports graph has been written to %s" % (gtype, filename)
    sect.append(Paragraph(message))
|
||||
|
||||
|
||||
# the import checker itself ###################################################
|
||||
|
||||
# Message definitions for the imports checker: id -> (template, symbol,
# description[, extra options]).
# Fix: W0402's description was ungrammatical ("Used a module marked as
# deprecated is imported."); reworded to match the "Used when ..."
# convention of every other entry.
MSGS = {
    "E0401": (
        "Unable to import %s",
        "import-error",
        "Used when pylint has been unable to import a module.",
        {"old_names": [("F0401", "import-error")]},
    ),
    "E0402": (
        "Attempted relative import beyond top-level package",
        "relative-beyond-top-level",
        "Used when a relative import tries to access too many levels "
        "in the current package.",
    ),
    "R0401": (
        "Cyclic import (%s)",
        "cyclic-import",
        "Used when a cyclic import between two or more modules is detected.",
    ),
    "W0401": (
        "Wildcard import %s",
        "wildcard-import",
        "Used when `from module import *` is detected.",
    ),
    "W0402": (
        "Uses of a deprecated module %r",
        "deprecated-module",
        "Used when a module marked as deprecated is imported.",
    ),
    "W0403": (
        "Relative import %r, should be %r",
        "relative-import",
        "Used when an import relative to the package directory is detected.",
        {"maxversion": (3, 0)},
    ),
    "W0404": (
        "Reimport %r (imported line %s)",
        "reimported",
        "Used when a module is reimported multiple times.",
    ),
    "W0406": (
        "Module import itself",
        "import-self",
        "Used when a module is importing itself.",
    ),
    "W0410": (
        "__future__ import is not the first non docstring statement",
        "misplaced-future",
        "Python 2.5 and greater require __future__ import to be the "
        "first non docstring statement in the module.",
    ),
    "C0410": (
        "Multiple imports on one line (%s)",
        "multiple-imports",
        "Used when import statement importing multiple modules is detected.",
    ),
    "C0411": (
        "%s should be placed before %s",
        "wrong-import-order",
        "Used when PEP8 import order is not respected (standard imports "
        "first, then third-party libraries, then local imports)",
    ),
    "C0412": (
        "Imports from package %s are not grouped",
        "ungrouped-imports",
        "Used when imports are not grouped by packages",
    ),
    "C0413": (
        'Import "%s" should be placed at the top of the module',
        "wrong-import-position",
        "Used when code and imports are mixed",
    ),
    "C0414": (
        "Import alias does not rename original package",
        "useless-import-alias",
        "Used when an import alias is same as original package."
        "e.g using import numpy as numpy instead of import numpy as np",
    ),
}
|
||||
|
||||
|
||||
# Defaults for the import-order categorization options declared below
# ("known-standard-library" / "known-third-party"): modules listed there are
# forced into the standard-library or third-party group respectively.
DEFAULT_STANDARD_LIBRARY = ()
DEFAULT_KNOWN_THIRD_PARTY = ("enchant",)
|
||||
|
||||
|
||||
class ImportsChecker(BaseChecker):
|
||||
"""checks for
|
||||
* external modules dependencies
|
||||
* relative / wildcard imports
|
||||
* cyclic imports
|
||||
* uses of deprecated modules
|
||||
"""
|
||||
|
||||
__implements__ = IAstroidChecker
|
||||
|
||||
name = "imports"
|
||||
msgs = MSGS
|
||||
priority = -2
|
||||
|
||||
if sys.version_info < (3, 5):
|
||||
deprecated_modules = ("optparse",)
|
||||
else:
|
||||
deprecated_modules = ("optparse", "tkinter.tix")
|
||||
options = (
|
||||
(
|
||||
"deprecated-modules",
|
||||
{
|
||||
"default": deprecated_modules,
|
||||
"type": "csv",
|
||||
"metavar": "<modules>",
|
||||
"help": "Deprecated modules which should not be used,"
|
||||
" separated by a comma.",
|
||||
},
|
||||
),
|
||||
(
|
||||
"import-graph",
|
||||
{
|
||||
"default": "",
|
||||
"type": "string",
|
||||
"metavar": "<file.dot>",
|
||||
"help": "Create a graph of every (i.e. internal and"
|
||||
" external) dependencies in the given file"
|
||||
" (report RP0402 must not be disabled).",
|
||||
},
|
||||
),
|
||||
(
|
||||
"ext-import-graph",
|
||||
{
|
||||
"default": "",
|
||||
"type": "string",
|
||||
"metavar": "<file.dot>",
|
||||
"help": "Create a graph of external dependencies in the"
|
||||
" given file (report RP0402 must not be disabled).",
|
||||
},
|
||||
),
|
||||
(
|
||||
"int-import-graph",
|
||||
{
|
||||
"default": "",
|
||||
"type": "string",
|
||||
"metavar": "<file.dot>",
|
||||
"help": "Create a graph of internal dependencies in the"
|
||||
" given file (report RP0402 must not be disabled).",
|
||||
},
|
||||
),
|
||||
(
|
||||
"known-standard-library",
|
||||
{
|
||||
"default": DEFAULT_STANDARD_LIBRARY,
|
||||
"type": "csv",
|
||||
"metavar": "<modules>",
|
||||
"help": "Force import order to recognize a module as part of "
|
||||
"the standard compatibility libraries.",
|
||||
},
|
||||
),
|
||||
(
|
||||
"known-third-party",
|
||||
{
|
||||
"default": DEFAULT_KNOWN_THIRD_PARTY,
|
||||
"type": "csv",
|
||||
"metavar": "<modules>",
|
||||
"help": "Force import order to recognize a module as part of "
|
||||
"a third party library.",
|
||||
},
|
||||
),
|
||||
(
|
||||
"analyse-fallback-blocks",
|
||||
{
|
||||
"default": False,
|
||||
"type": "yn",
|
||||
"metavar": "<y_or_n>",
|
||||
"help": "Analyse import fallback blocks. This can be used to "
|
||||
"support both Python 2 and 3 compatible code, which "
|
||||
"means that the block might have code that exists "
|
||||
"only in one or another interpreter, leading to false "
|
||||
"positives when analysed.",
|
||||
},
|
||||
),
|
||||
(
|
||||
"allow-wildcard-with-all",
|
||||
{
|
||||
"default": False,
|
||||
"type": "yn",
|
||||
"metavar": "<y_or_n>",
|
||||
"help": "Allow wildcard imports from modules that define __all__.",
|
||||
},
|
||||
),
|
||||
)
|
||||
|
||||
def __init__(self, linter=None):
|
||||
BaseChecker.__init__(self, linter)
|
||||
self.stats = None
|
||||
self.import_graph = None
|
||||
self._imports_stack = []
|
||||
self._first_non_import_node = None
|
||||
self._module_pkg = {} # mapping of modules to the pkg they belong in
|
||||
self.reports = (
|
||||
("RP0401", "External dependencies", self._report_external_dependencies),
|
||||
("RP0402", "Modules dependencies graph", self._report_dependencies_graph),
|
||||
)
|
||||
|
||||
self._site_packages = self._compute_site_packages()
|
||||
|
||||
@staticmethod
|
||||
def _compute_site_packages():
|
||||
def _normalized_path(path):
|
||||
return os.path.normcase(os.path.abspath(path))
|
||||
|
||||
paths = set()
|
||||
real_prefix = getattr(sys, "real_prefix", None)
|
||||
for prefix in filter(None, (real_prefix, sys.prefix)):
|
||||
path = sysconfig.get_python_lib(prefix=prefix)
|
||||
path = _normalized_path(path)
|
||||
paths.add(path)
|
||||
|
||||
# Handle Debian's derivatives /usr/local.
|
||||
if os.path.isfile("/etc/debian_version"):
|
||||
for prefix in filter(None, (real_prefix, sys.prefix)):
|
||||
libpython = os.path.join(
|
||||
prefix,
|
||||
"local",
|
||||
"lib",
|
||||
"python" + sysconfig.get_python_version(),
|
||||
"dist-packages",
|
||||
)
|
||||
paths.add(libpython)
|
||||
return paths
|
||||
|
||||
def open(self):
|
||||
"""called before visiting project (i.e set of modules)"""
|
||||
self.linter.add_stats(dependencies={})
|
||||
self.linter.add_stats(cycles=[])
|
||||
self.stats = self.linter.stats
|
||||
self.import_graph = collections.defaultdict(set)
|
||||
self._module_pkg = {} # mapping of modules to the pkg they belong in
|
||||
self._excluded_edges = collections.defaultdict(set)
|
||||
self._ignored_modules = get_global_option(self, "ignored-modules", default=[])
|
||||
|
||||
def _import_graph_without_ignored_edges(self):
|
||||
filtered_graph = copy.deepcopy(self.import_graph)
|
||||
for node in filtered_graph:
|
||||
filtered_graph[node].difference_update(self._excluded_edges[node])
|
||||
return filtered_graph
|
||||
|
||||
def close(self):
|
||||
"""called before visiting project (i.e set of modules)"""
|
||||
if self.linter.is_message_enabled("cyclic-import"):
|
||||
graph = self._import_graph_without_ignored_edges()
|
||||
vertices = list(graph)
|
||||
for cycle in get_cycles(graph, vertices=vertices):
|
||||
self.add_message("cyclic-import", args=" -> ".join(cycle))
|
||||
|
||||
@check_messages(*MSGS)
|
||||
def visit_import(self, node):
|
||||
"""triggered when an import statement is seen"""
|
||||
self._check_reimport(node)
|
||||
self._check_import_as_rename(node)
|
||||
|
||||
modnode = node.root()
|
||||
names = [name for name, _ in node.names]
|
||||
if len(names) >= 2:
|
||||
self.add_message("multiple-imports", args=", ".join(names), node=node)
|
||||
|
||||
for name in names:
|
||||
self._check_deprecated_module(node, name)
|
||||
imported_module = self._get_imported_module(node, name)
|
||||
if isinstance(node.parent, astroid.Module):
|
||||
# Allow imports nested
|
||||
self._check_position(node)
|
||||
if isinstance(node.scope(), astroid.Module):
|
||||
self._record_import(node, imported_module)
|
||||
|
||||
if imported_module is None:
|
||||
continue
|
||||
|
||||
self._check_relative_import(modnode, node, imported_module, name)
|
||||
self._add_imported_module(node, imported_module.name)
|
||||
|
||||
@check_messages(*MSGS)
|
||||
def visit_importfrom(self, node):
|
||||
"""triggered when a from statement is seen"""
|
||||
basename = node.modname
|
||||
imported_module = self._get_imported_module(node, basename)
|
||||
|
||||
self._check_import_as_rename(node)
|
||||
self._check_misplaced_future(node)
|
||||
self._check_deprecated_module(node, basename)
|
||||
self._check_wildcard_imports(node, imported_module)
|
||||
self._check_same_line_imports(node)
|
||||
self._check_reimport(node, basename=basename, level=node.level)
|
||||
|
||||
if isinstance(node.parent, astroid.Module):
|
||||
# Allow imports nested
|
||||
self._check_position(node)
|
||||
if isinstance(node.scope(), astroid.Module):
|
||||
self._record_import(node, imported_module)
|
||||
if imported_module is None:
|
||||
return
|
||||
modnode = node.root()
|
||||
self._check_relative_import(modnode, node, imported_module, basename)
|
||||
|
||||
for name, _ in node.names:
|
||||
if name != "*":
|
||||
self._add_imported_module(node, "%s.%s" % (imported_module.name, name))
|
||||
else:
|
||||
self._add_imported_module(node, imported_module.name)
|
||||
|
||||
@check_messages(*MSGS)
|
||||
def leave_module(self, node):
|
||||
# Check imports are grouped by category (standard, 3rd party, local)
|
||||
std_imports, ext_imports, loc_imports = self._check_imports_order(node)
|
||||
|
||||
# Check imports are grouped by package within a given category
|
||||
met = set()
|
||||
current_package = None
|
||||
for import_node, import_name in std_imports + ext_imports + loc_imports:
|
||||
if not self.linter.is_message_enabled(
|
||||
"ungrouped-imports", import_node.fromlineno
|
||||
):
|
||||
continue
|
||||
package, _, _ = import_name.partition(".")
|
||||
if current_package and current_package != package and package in met:
|
||||
self.add_message("ungrouped-imports", node=import_node, args=package)
|
||||
current_package = package
|
||||
met.add(package)
|
||||
|
||||
self._imports_stack = []
|
||||
self._first_non_import_node = None
|
||||
|
||||
def compute_first_non_import_node(self, node):
    """Remember the first module-level statement that is not an import.

    Subsequent import nodes are compared against this position to emit
    wrong-import-position.
    """
    if not self.linter.is_message_enabled("wrong-import-position", node.fromlineno):
        return
    # Only the very first such statement matters, and only at module level.
    if self._first_non_import_node:
        return
    if not isinstance(node.parent, astroid.Module):
        return
    # try/except and try/finally blocks that themselves contain imports are
    # tolerated (the conditional/fallback import idiom).
    if isinstance(node, (astroid.TryExcept, astroid.TryFinally)) and any(
        node.nodes_of_class((astroid.Import, astroid.ImportFrom))
    ):
        return
    if isinstance(node, astroid.Assign):
        # Module-level dunder assignments may legitimately precede imports.
        # https://www.python.org/dev/peps/pep-0008/#module-level-dunder-names
        def _is_dunder_target(target):
            return (
                isinstance(target, astroid.AssignName)
                and target.name.startswith("__")
                and target.name.endswith("__")
            )

        if all(_is_dunder_target(target) for target in node.targets):
            return
    self._first_non_import_node = node
|
||||
|
||||
# Any of these statements encountered at module level may mark the end of
# the import section; they all delegate to compute_first_non_import_node.
visit_tryfinally = visit_tryexcept = compute_first_non_import_node
visit_assignattr = visit_assign = compute_first_non_import_node
visit_ifexp = visit_comprehension = compute_first_non_import_node
visit_expr = visit_if = compute_first_non_import_node
|
||||
|
||||
def visit_functiondef(self, node):
    """Treat a module-level function definition as a non-import statement."""
    if not self.linter.is_message_enabled("wrong-import-position", node.fromlineno):
        return
    # Only the first non-import statement of the module needs recording.
    if self._first_non_import_node:
        return

    # Definitions nested inside another scope are irrelevant here.
    if not isinstance(node.parent.scope(), astroid.Module):
        return

    # Walk up to the outermost statement containing this definition.
    outer = node
    while not isinstance(outer.parent, astroid.Module):
        outer = outer.parent

    # An `If` or `Try` block wrapping imports is the conditional-import
    # idiom: do not record it as the first non-import node.
    if isinstance(outer, (astroid.If, astroid.TryFinally, astroid.TryExcept)) and any(
        outer.nodes_of_class((astroid.Import, astroid.ImportFrom))
    ):
        return

    self._first_non_import_node = node
|
||||
|
||||
# Class, for and while statements also end the import section.
visit_classdef = visit_for = visit_while = visit_functiondef
|
||||
|
||||
def _check_misplaced_future(self, node):
    """Emit misplaced-future when a __future__ import is not at the top."""
    if node.modname != "__future__":
        return
    # A __future__ import must be the first non-docstring statement of the
    # module; only other __future__ imports may precede it.
    preceding = node.previous_sibling()
    if not preceding:
        return
    preceding_is_future = (
        isinstance(preceding, astroid.ImportFrom)
        and preceding.modname == "__future__"
    )
    if not preceding_is_future:
        self.add_message("misplaced-future", node=node)
|
||||
|
||||
def _check_same_line_imports(self, node):
    """Emit reimported for names listed twice in a single import statement."""
    occurrences = collections.Counter(name for name, _ in node.names)
    for name, count in occurrences.items():
        if count > 1:
            self.add_message("reimported", node=node, args=(name, node.fromlineno))
|
||||
|
||||
def _check_position(self, node):
    """Check `node` import or importfrom node position is correct

    Send a message if `node` comes before another instruction
    """
    # if a first non-import instruction has already been encountered,
    # it means the import comes after it and therefore is not well placed
    if self._first_non_import_node:
        self.add_message("wrong-import-position", node=node, args=node.as_string())
|
||||
|
||||
def _record_import(self, node, importedmodnode):
    """Record the package `node` imports from on the per-module stack."""
    is_import_from = isinstance(node, astroid.ImportFrom)
    if is_import_from:
        importedname = node.modname
    elif importedmodnode:
        importedname = importedmodnode.name
    else:
        importedname = None
    if not importedname:
        # Fall back on the first imported name's leading component.
        importedname = node.names[0][0].split(".")[0]

    if is_import_from and (node.level or 0) >= 1:
        # Keep a leading dot so relative imports are recognised as local
        # packages later, e.g.:
        #   'from .my_package1 import MyClass1' -> '.my_package1'
        #   'from . import my_package2'        -> '.my_package2'
        importedname = "." + importedname

    self._imports_stack.append((node, importedname))
|
||||
|
||||
@staticmethod
def _is_fallback_import(node, imports):
    """True when `node` is mutually exclusive with any earlier import,
    i.e. it lives in the other branch of a try/except fallback."""
    return any(
        astroid.are_exclusive(previous, node) for previous, _ in imports
    )
|
||||
|
||||
def _check_imports_order(self, _module_node):
    """Checks imports of module `node` are grouped by category

    Imports must follow this order: standard, 3rd party, local

    Returns (std_imports, external_imports, local_imports), each a list of
    (node, package) pairs, for the grouping check in leave_module.
    """
    std_imports = []
    third_party_imports = []
    first_party_imports = []
    # need of a list that holds third or first party ordered import
    external_imports = []
    local_imports = []
    # The *_not_ignored lists track imports eligible for wrong-import-order
    # (message enabled at that line, import not nested).
    third_party_not_ignored = []
    first_party_not_ignored = []
    local_not_ignored = []
    # isort classifies each package name: FUTURE/STDLIB/THIRDPARTY/
    # FIRSTPARTY/LOCALFOLDER.
    isort_obj = isort.SortImports(
        file_contents="",
        known_third_party=self.config.known_third_party,
        known_standard_library=self.config.known_standard_library,
    )
    for node, modname in self._imports_stack:
        if modname.startswith("."):
            # relative import: keep the dot so it classifies as local
            package = "." + modname.split(".")[1]
        else:
            package = modname.split(".")[0]
        nested = not isinstance(node.parent, astroid.Module)
        ignore_for_import_order = not self.linter.is_message_enabled(
            "wrong-import-order", node.fromlineno
        )
        import_category = isort_obj.place_module(package)
        node_and_package_import = (node, package)
        if import_category in ("FUTURE", "STDLIB"):
            std_imports.append(node_and_package_import)
            # A standard import is misplaced if any later-category import
            # was already recorded before it.
            wrong_import = (
                third_party_not_ignored
                or first_party_not_ignored
                or local_not_ignored
            )
            if self._is_fallback_import(node, wrong_import):
                # import in the other branch of a try/except fallback
                continue
            if wrong_import and not nested:
                self.add_message(
                    "wrong-import-order",
                    node=node,
                    args=(
                        'standard import "%s"' % node.as_string(),
                        '"%s"' % wrong_import[0][0].as_string(),
                    ),
                )
        elif import_category == "THIRDPARTY":
            third_party_imports.append(node_and_package_import)
            external_imports.append(node_and_package_import)
            if not nested and not ignore_for_import_order:
                third_party_not_ignored.append(node_and_package_import)
            # Misplaced if a first-party or local import precedes it.
            wrong_import = first_party_not_ignored or local_not_ignored
            if wrong_import and not nested:
                self.add_message(
                    "wrong-import-order",
                    node=node,
                    args=(
                        'third party import "%s"' % node.as_string(),
                        '"%s"' % wrong_import[0][0].as_string(),
                    ),
                )
        elif import_category == "FIRSTPARTY":
            first_party_imports.append(node_and_package_import)
            external_imports.append(node_and_package_import)
            if not nested and not ignore_for_import_order:
                first_party_not_ignored.append(node_and_package_import)
            # Misplaced only if a local import precedes it.
            wrong_import = local_not_ignored
            if wrong_import and not nested:
                self.add_message(
                    "wrong-import-order",
                    node=node,
                    args=(
                        'first party import "%s"' % node.as_string(),
                        '"%s"' % wrong_import[0][0].as_string(),
                    ),
                )
        elif import_category == "LOCALFOLDER":
            local_imports.append((node, package))
            if not nested and not ignore_for_import_order:
                local_not_ignored.append((node, package))
    return std_imports, external_imports, local_imports
|
||||
|
||||
def _get_imported_module(self, importnode, modname):
    """Resolve `modname` from `importnode`'s context.

    Returns the astroid module on success, otherwise emits the relevant
    message (relative-beyond-top-level, syntax-error, import-error) and
    returns None.
    """
    try:
        return importnode.do_import_module(modname)
    except astroid.TooManyLevelsError:
        # More leading dots than there are parent packages.
        if _ignore_import_failure(importnode, modname, self._ignored_modules):
            return None

        self.add_message("relative-beyond-top-level", node=importnode)
    except astroid.AstroidSyntaxError as exc:
        message = "Cannot import {!r} due to syntax error {!r}".format(
            modname, str(exc.error)  # pylint: disable=no-member; false positive
        )
        self.add_message("syntax-error", line=importnode.lineno, args=message)

    except astroid.AstroidBuildingException:
        if not self.linter.is_message_enabled("import-error"):
            return None
        if _ignore_import_failure(importnode, modname, self._ignored_modules):
            return None
        # Imports guarded by a try/except fallback are tolerated unless
        # analyse-fallback-blocks is enabled.
        if not self.config.analyse_fallback_blocks and is_from_fallback_block(
            importnode
        ):
            return None

        dotted_modname = _get_import_name(importnode, modname)
        self.add_message("import-error", args=repr(dotted_modname), node=importnode)
|
||||
|
||||
def _check_relative_import(
    self, modnode, importnode, importedmodnode, importedasname
):
    """check relative import. node is either an Import or From node, modname
    the imported module name.
    """
    if not self.linter.is_message_enabled("relative-import"):
        return None
    if importedmodnode.file is None:
        # built-in module
        return False
    if modnode is importedmodnode:
        # module importing itself
        return False
    explicitly_relative = getattr(importnode, "level", None)
    if modnode.absolute_import_activated() or explicitly_relative:
        return False
    if importedmodnode.name == importedasname:
        return None
    # The names differ: this must be an implicit relative import.
    self.add_message(
        "relative-import",
        args=(importedasname, importedmodnode.name),
        node=importnode,
    )
    return None
|
||||
|
||||
def _add_imported_module(self, node, importedmodname):
    """notify an imported module, used to analyze dependencies

    Updates the dependency stats and the import graph, and emits
    import-self when a module imports itself.
    """
    module_file = node.root().file
    context_name = node.root().name
    base = os.path.splitext(os.path.basename(module_file))[0]

    try:
        # Resolve e.g. 'pkg.mod.attr' down to the actual module part.
        importedmodname = get_module_part(importedmodname, module_file)
    except ImportError:
        # Best effort: keep the unresolved name.
        pass

    if context_name == importedmodname:
        self.add_message("import-self", node=node)

    elif not is_standard_module(importedmodname):
        # if this is not a package __init__ module
        if base != "__init__" and context_name not in self._module_pkg:
            # record the module's parent, or the module itself if this is
            # a top level module, as the package it belongs to
            self._module_pkg[context_name] = context_name.rsplit(".", 1)[0]

        # handle dependencies
        importedmodnames = self.stats["dependencies"].setdefault(
            importedmodname, set()
        )
        if context_name not in importedmodnames:
            importedmodnames.add(context_name)

        # update import graph
        self.import_graph[context_name].add(importedmodname)
        if not self.linter.is_message_enabled("cyclic-import", line=node.lineno):
            # cyclic-import is disabled here: exclude this edge from
            # the cycle detection pass
            self._excluded_edges[context_name].add(importedmodname)
|
||||
|
||||
def _check_deprecated_module(self, node, mod_path):
    """check if the module is deprecated"""
    # Deprecated if it is one of the configured modules or lives inside
    # one (dotted-prefix match).
    for deprecated in self.config.deprecated_modules:
        matches = mod_path == deprecated or mod_path.startswith(deprecated + ".")
        if matches:
            self.add_message("deprecated-module", node=node, args=mod_path)
|
||||
|
||||
def _check_import_as_rename(self, node):
    """Emit useless-import-alias for aliases that rename a name to itself.

    Only plain self-renames such as ``import x as x`` or
    ``from y import x as x`` are flagged; ``import x.y.z as z`` is a
    legitimate shortening and is ignored.
    """
    for real_name, imported_name in node.names:
        if not (real_name and imported_name):
            # This entry has no alias (or no name): nothing to check for
            # it, but later entries of the same statement may still carry
            # a useless alias. The previous code returned here, silently
            # skipping the remaining names.
            continue

        dotted_parts = real_name.rsplit(".")
        last_component = dotted_parts[-1]
        # Flag only `import x as x` (single component); `import x.y.z as z`
        # is excluded even though the last component matches.
        if last_component == imported_name and len(dotted_parts) == 1:
            self.add_message("useless-import-alias", node=node)
|
||||
|
||||
def _check_reimport(self, node, basename=None, level=None):
    """check if the import is necessary (i.e. not already done)"""
    if not self.linter.is_message_enabled("reimported"):
        return

    # Search for an earlier equivalent import in the enclosing frame and,
    # when different, in the module scope as well.
    frame = node.frame()
    root = node.root()
    contexts = [(frame, level)]
    if root is not frame:
        contexts.append((root, None))

    for scope, scope_level in contexts:
        for name, alias in node.names:
            earlier = _get_first_import(
                node, scope, name, basename, scope_level, alias
            )
            if earlier is None:
                continue
            self.add_message(
                "reimported", node=node, args=(name, earlier.fromlineno)
            )
|
||||
|
||||
def _report_external_dependencies(self, sect, _, _dummy):
    """return a verbatim layout for displaying dependencies"""
    dep_info = _make_tree_defs(self._external_dependencies_info().items())
    if not dep_info:
        # Nothing to report: signal the framework to skip this section.
        raise EmptyReportError()
    sect.append(VerbatimText(_repr_tree_defs(dep_info)))
|
||||
|
||||
def _report_dependencies_graph(self, sect, _, _dummy):
    """write dependencies as a dot (graphviz) file"""
    dep_info = self.stats["dependencies"]
    requested = (
        self.config.import_graph,
        self.config.ext_import_graph,
        self.config.int_import_graph,
    )
    if not dep_info or not any(requested):
        # No dependencies or no graph output requested.
        raise EmptyReportError()
    if self.config.import_graph:
        _make_graph(self.config.import_graph, dep_info, sect, "")
    if self.config.ext_import_graph:
        _make_graph(
            self.config.ext_import_graph,
            self._external_dependencies_info(),
            sect,
            "external ",
        )
    if self.config.int_import_graph:
        _make_graph(
            self.config.int_import_graph,
            self._internal_dependencies_info(),
            sect,
            "internal ",
        )
|
||||
|
||||
def _filter_dependencies_graph(self, internal):
    """Build the internal or the external dependency graph.

    Returns a mapping importee -> set of importers, keeping only the
    edges whose "importer's package contains importee" status matches
    the `internal` flag.
    """
    graph = collections.defaultdict(set)
    for importee, importers in self.stats["dependencies"].items():
        for importer in importers:
            package = self._module_pkg.get(importer, importer)
            is_inside = importee.startswith(package)
            # Internal graph keeps inside edges, external graph keeps the
            # rest. Equivalent to the original, harder-to-read
            # `is_inside and internal or not is_inside and not internal`.
            if is_inside == internal:
                graph[importee].add(importer)
    return graph
|
||||
|
||||
@decorators.cached
def _external_dependencies_info(self):
    """return cached external dependencies information or build and
    cache them

    Maps importee -> importers for edges crossing package boundaries.
    """
    return self._filter_dependencies_graph(internal=False)
|
||||
|
||||
@decorators.cached
def _internal_dependencies_info(self):
    """return cached internal dependencies information or build and
    cache them

    Maps importee -> importers for edges within the same package.
    """
    return self._filter_dependencies_graph(internal=True)
|
||||
|
||||
def _check_wildcard_imports(self, node, imported_module):
    """Emit wildcard-import for `from X import *` unless it is allowed."""
    if node.root().package:
        # Skip the check if in __init__.py issue #2026
        return

    if self._wildcard_import_is_allowed(imported_module):
        return
    for name, _ in node.names:
        if name == "*":
            self.add_message("wildcard-import", args=node.modname, node=node)
|
||||
|
||||
def _wildcard_import_is_allowed(self, imported_module):
    """True when wildcard imports from `imported_module` are tolerated:
    the option is on and the module declares __all__."""
    if not self.config.allow_wildcard_with_all:
        return False
    if imported_module is None:
        return False
    return "__all__" in imported_module.locals
|
||||
|
||||
|
||||
def register(linter):
    """Required method to auto-register this checker."""
    linter.register_checker(ImportsChecker(linter))
|
375
venv/lib/python3.6/site-packages/pylint/checkers/logging.py
Normal file
375
venv/lib/python3.6/site-packages/pylint/checkers/logging.py
Normal file
@@ -0,0 +1,375 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright (c) 2009-2011, 2013-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
|
||||
# Copyright (c) 2009, 2012, 2014 Google, Inc.
|
||||
# Copyright (c) 2012 Mike Bryant <leachim@leachim.info>
|
||||
# Copyright (c) 2014 Brett Cannon <brett@python.org>
|
||||
# Copyright (c) 2014 Arun Persaud <arun@nubati.net>
|
||||
# Copyright (c) 2015-2018 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2015 Ionel Cristian Maries <contact@ionelmc.ro>
|
||||
# Copyright (c) 2016 Chris Murray <chris@chrismurray.scot>
|
||||
# Copyright (c) 2016 Ashley Whetter <ashley@awhetter.co.uk>
|
||||
# Copyright (c) 2017 guillaume2 <guillaume.peillex@gmail.col>
|
||||
# Copyright (c) 2017 Łukasz Rogalski <rogalski.91@gmail.com>
|
||||
# Copyright (c) 2018 Mike Frysinger <vapier@gmail.com>
|
||||
# Copyright (c) 2018 Mariatta Wijaya <mariatta@python.org>
|
||||
# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
|
||||
# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
|
||||
|
||||
"""checker for use of Python logging
|
||||
"""
|
||||
import string
|
||||
|
||||
import astroid
|
||||
|
||||
from pylint import checkers
|
||||
from pylint import interfaces
|
||||
from pylint.checkers import utils
|
||||
from pylint.checkers.utils import check_messages
|
||||
|
||||
|
||||
# Message definitions emitted by LoggingChecker: W12xx are style warnings
# about eager string interpolation, E12xx are format-string errors.
MSGS = {
    "W1201": (
        "Specify string format arguments as logging function parameters",
        "logging-not-lazy",
        "Used when a logging statement has a call form of "
        '"logging.<logging method>(format_string % (format_args...))". '
        "Such calls should leave string interpolation to the logging "
        "method itself and be written "
        '"logging.<logging method>(format_string, format_args...)" '
        "so that the program may avoid incurring the cost of the "
        "interpolation in those cases in which no message will be "
        "logged. For more, see "
        "http://www.python.org/dev/peps/pep-0282/.",
    ),
    "W1202": (
        "Use % formatting in logging functions and pass the % "
        "parameters as arguments",
        "logging-format-interpolation",
        "Used when a logging statement has a call form of "
        '"logging.<logging method>(format_string.format(format_args...))"'
        ". Such calls should use % formatting instead, but leave "
        "interpolation to the logging function by passing the parameters "
        "as arguments.",
    ),
    "W1203": (
        "Use % formatting in logging functions and pass the % "
        "parameters as arguments",
        "logging-fstring-interpolation",
        "Used when a logging statement has a call form of "
        '"logging.method(f"..."))"'
        ". Such calls should use % formatting instead, but leave "
        "interpolation to the logging function by passing the parameters "
        "as arguments.",
    ),
    "E1200": (
        "Unsupported logging format character %r (%#02x) at index %d",
        "logging-unsupported-format",
        "Used when an unsupported format character is used in a logging "
        "statement format string.",
    ),
    "E1201": (
        "Logging format string ends in middle of conversion specifier",
        "logging-format-truncated",
        "Used when a logging statement format string terminates before "
        "the end of a conversion specifier.",
    ),
    "E1205": (
        "Too many arguments for logging format string",
        "logging-too-many-args",
        "Used when a logging format string is given too many arguments.",
    ),
    "E1206": (
        "Not enough arguments for logging format string",
        "logging-too-few-args",
        "Used when a logging format string is given too few arguments.",
    ),
}
|
||||
|
||||
|
||||
# Logging convenience methods taking (format_string, *format_args); calls
# to these are subject to the format-string checks (the `log` method takes
# an extra leading level argument and is handled separately).
CHECKED_CONVENIENCE_FUNCTIONS = {
    "critical",
    "debug",
    "error",
    "exception",
    "fatal",
    "info",
    "warn",
    "warning",
}
|
||||
|
||||
|
||||
def is_method_call(func, types=(), methods=()):
    """Determines if a BoundMethod node represents a method call.

    Args:
      func (astroid.BoundMethod): The BoundMethod AST node to check.
      types (Optional[String]): Optional sequence of caller type names to restrict check.
      methods (Optional[String]): Optional sequence of method names to restrict check.

    Returns:
      bool: true if the node represents a method call for the given type and
      method names, False otherwise.
    """
    if not isinstance(func, astroid.BoundMethod):
        return False
    if not isinstance(func.bound, astroid.Instance):
        return False
    # Empty `types`/`methods` means "no restriction".
    if types and func.bound.name not in types:
        return False
    if methods and func.name not in methods:
        return False
    return True
|
||||
|
||||
|
||||
class LoggingChecker(checkers.BaseChecker):
    """Checks use of the logging module."""

    __implements__ = interfaces.IAstroidChecker
    name = "logging"
    msgs = MSGS

    options = (
        (
            "logging-modules",
            {
                "default": ("logging",),
                "type": "csv",
                "metavar": "<comma separated list>",
                "help": "Logging modules to check that the string format "
                "arguments are in logging function parameter format.",
            },
        ),
        (
            "logging-format-style",
            {
                "default": "old",
                "type": "choice",
                "metavar": "<old (%) or new ({)>",
                "choices": ["old", "new"],
                "help": "Format style used to check logging format string. "
                "`old` means using % formatting, while `new` is for `{}` formatting.",
            },
        ),
    )

    def visit_module(self, node):  # pylint: disable=unused-argument
        """Clears any state left in this checker from last module checked."""
        # The code being checked can just as easily "import logging as foo",
        # so it is necessary to process the imports and store in this field
        # what name the logging module is actually given.
        self._logging_names = set()
        logging_mods = self.config.logging_modules

        self._format_style = self.config.logging_format_style
        self._logging_modules = set(logging_mods)
        # Maps parent package -> submodule for dotted logging modules,
        # e.g. "a.logging" -> {"a": "logging"}.
        self._from_imports = {}
        for logging_mod in logging_mods:
            parts = logging_mod.rsplit(".", 1)
            if len(parts) > 1:
                self._from_imports[parts[0]] = parts[1]

    def visit_importfrom(self, node):
        """Checks to see if a module uses a non-Python logging module."""
        try:
            logging_name = self._from_imports[node.modname]
            for module, as_name in node.names:
                if module == logging_name:
                    self._logging_names.add(as_name or module)
        except KeyError:
            # modname is not a tracked logging parent package.
            pass

    def visit_import(self, node):
        """Checks to see if this module uses Python's built-in logging."""
        for module, as_name in node.names:
            if module in self._logging_modules:
                self._logging_names.add(as_name or module)

    @check_messages(*MSGS)
    def visit_call(self, node):
        """Checks calls to logging methods."""

        def is_logging_name():
            # e.g. `logging.info(...)` through one of the recorded aliases.
            return (
                isinstance(node.func, astroid.Attribute)
                and isinstance(node.func.expr, astroid.Name)
                and node.func.expr.name in self._logging_names
            )

        def is_logger_class():
            # e.g. `self.logger.info(...)` where the inferred bound method
            # belongs to logging.Logger or one of its subclasses.
            try:
                for inferred in node.func.infer():
                    if isinstance(inferred, astroid.BoundMethod):
                        parent = inferred._proxied.parent
                        if isinstance(parent, astroid.ClassDef) and (
                            parent.qname() == "logging.Logger"
                            or any(
                                ancestor.qname() == "logging.Logger"
                                for ancestor in parent.ancestors()
                            )
                        ):
                            return True, inferred._proxied.name
            except astroid.exceptions.InferenceError:
                pass
            return False, None

        if is_logging_name():
            name = node.func.attrname
        else:
            result, name = is_logger_class()
            if not result:
                return
        self._check_log_method(node, name)

    def _check_log_method(self, node, name):
        """Checks calls to logging.log(level, format, *format_args)."""
        if name == "log":
            if node.starargs or node.kwargs or len(node.args) < 2:
                # Either a malformed call, star args, or double-star args. Beyond
                # the scope of this checker.
                return
            # log(level, format, ...): format string is the second argument.
            format_pos = 1
        elif name in CHECKED_CONVENIENCE_FUNCTIONS:
            if node.starargs or node.kwargs or not node.args:
                # Either no args, star args, or double-star args. Beyond the
                # scope of this checker.
                return
            # info(format, ...): format string is the first argument.
            format_pos = 0
        else:
            return

        if isinstance(node.args[format_pos], astroid.BinOp):
            binop = node.args[format_pos]
            # `%` means eager interpolation; `+` counts literal string
            # operands to decide whether concatenation builds the message.
            emit = binop.op == "%"
            if binop.op == "+":
                total_number_of_strings = sum(
                    1
                    for operand in (binop.left, binop.right)
                    if self._is_operand_literal_str(utils.safe_infer(operand))
                )
                emit = total_number_of_strings > 0
            if emit:
                self.add_message("logging-not-lazy", node=node)
        elif isinstance(node.args[format_pos], astroid.Call):
            self._check_call_func(node.args[format_pos])
        elif isinstance(node.args[format_pos], astroid.Const):
            self._check_format_string(node, format_pos)
        elif isinstance(
            node.args[format_pos], (astroid.FormattedValue, astroid.JoinedStr)
        ):
            # f-string passed directly to the logging call.
            self.add_message("logging-fstring-interpolation", node=node)

    @staticmethod
    def _is_operand_literal_str(operand):
        """
        Return True if the operand in argument is a literal string
        """
        # NOTE(review): compares `operand.name`, not `operand.pytype()` —
        # presumably this matches the astroid version in use; confirm.
        return isinstance(operand, astroid.Const) and operand.name == "str"

    def _check_call_func(self, node):
        """Checks that function call is not format_string.format().

        Args:
          node (astroid.node_classes.Call):
            Call AST node to be checked.
        """
        func = utils.safe_infer(node.func)
        types = ("str", "unicode")
        methods = ("format",)
        # Strings with complex format specs are exempt: they cannot
        # necessarily be rewritten with %-formatting.
        if is_method_call(func, types, methods) and not is_complex_format_str(
            func.bound
        ):
            self.add_message("logging-format-interpolation", node=node)

    def _check_format_string(self, node, format_arg):
        """Checks that format string tokens match the supplied arguments.

        Args:
          node (astroid.node_classes.NodeNG): AST node to be checked.
          format_arg (int): Index of the format string in the node arguments.
        """
        num_args = _count_supplied_tokens(node.args[format_arg + 1 :])
        if not num_args:
            # If no args were supplied the string is not interpolated and can contain
            # formatting characters - it's used verbatim. Don't check any further.
            return
        format_string = node.args[format_arg].value
        if not isinstance(format_string, str):
            # If the log format is constant non-string (e.g. logging.debug(5)),
            # ensure there are no arguments.
            required_num_args = 0
        else:
            try:
                if self._format_style == "old":
                    # %-style: count positional conversion specifiers.
                    keyword_args, required_num_args, _, _ = utils.parse_format_string(
                        format_string
                    )
                    if keyword_args:
                        # Keyword checking on logging strings is complicated by
                        # special keywords - out of scope.
                        return
                elif self._format_style == "new":
                    # {}-style: count named + implicit + explicit fields.
                    keyword_arguments, implicit_pos_args, explicit_pos_args = utils.parse_format_method_string(
                        format_string
                    )

                    keyword_args_cnt = len(
                        set(k for k, l in keyword_arguments if not isinstance(k, int))
                    )
                    required_num_args = (
                        keyword_args_cnt + implicit_pos_args + explicit_pos_args
                    )
            except utils.UnsupportedFormatCharacter as ex:
                char = format_string[ex.index]
                self.add_message(
                    "logging-unsupported-format",
                    node=node,
                    args=(char, ord(char), ex.index),
                )
                return
            except utils.IncompleteFormatString:
                self.add_message("logging-format-truncated", node=node)
                return
        if num_args > required_num_args:
            self.add_message("logging-too-many-args", node=node)
        elif num_args < required_num_args:
            self.add_message("logging-too-few-args", node=node)
|
||||
|
||||
|
||||
def is_complex_format_str(node):
    """Checks if node represents a string with complex formatting specs.

    Args:
      node (astroid.node_classes.NodeNG): AST node to check
    Returns:
      bool: True if inferred string uses complex formatting, False otherwise
    """
    inferred = utils.safe_infer(node)
    if inferred is None or not isinstance(inferred.value, str):
        # Cannot prove it is a simple string: err on the side of "complex".
        return True
    try:
        fields = list(string.Formatter().parse(inferred.value))
    except ValueError:
        # This format string is invalid
        return False
    # Any non-empty format spec (e.g. "{x:>10}") makes it complex.
    return any(format_spec for _, _, format_spec, _ in fields)
|
||||
|
||||
|
||||
def _count_supplied_tokens(args):
    """Counts the number of tokens in an args list.

    The Python log functions allow for special keyword arguments: func,
    exc_info and extra. To handle these cases correctly, we only count
    arguments that aren't keywords.

    Args:
      args (list): AST nodes that are arguments for a log format string.

    Returns:
      int: Number of AST nodes that aren't keywords.
    """
    positional = [arg for arg in args if not isinstance(arg, astroid.Keyword)]
    return len(positional)
|
||||
|
||||
|
||||
def register(linter):
    """Required method to auto-register this checker."""
    linter.register_checker(LoggingChecker(linter))
|
182
venv/lib/python3.6/site-packages/pylint/checkers/misc.py
Normal file
182
venv/lib/python3.6/site-packages/pylint/checkers/misc.py
Normal file
@@ -0,0 +1,182 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright (c) 2006, 2009-2013 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
|
||||
# Copyright (c) 2012-2014 Google, Inc.
|
||||
# Copyright (c) 2014-2018 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2014 Brett Cannon <brett@python.org>
|
||||
# Copyright (c) 2014 Alexandru Coman <fcoman@bitdefender.com>
|
||||
# Copyright (c) 2014 Arun Persaud <arun@nubati.net>
|
||||
# Copyright (c) 2015 Ionel Cristian Maries <contact@ionelmc.ro>
|
||||
# Copyright (c) 2016 Łukasz Rogalski <rogalski.91@gmail.com>
|
||||
# Copyright (c) 2016 glegoux <gilles.legoux@gmail.com>
|
||||
# Copyright (c) 2017-2018 hippo91 <guillaume.peillex@gmail.com>
|
||||
# Copyright (c) 2017 Mikhail Fesenko <proggga@gmail.com>
|
||||
# Copyright (c) 2018 Ville Skyttä <ville.skytta@iki.fi>
|
||||
|
||||
# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
|
||||
# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
|
||||
|
||||
|
||||
"""Check source code is ascii only or has an encoding declaration (PEP 263)"""
|
||||
|
||||
# pylint: disable=W0511
|
||||
|
||||
import tokenize
|
||||
|
||||
import re
|
||||
|
||||
from pylint.interfaces import IRawChecker, ITokenChecker
|
||||
from pylint.checkers import BaseChecker
|
||||
from pylint.utils import OPTION_RGX, MessagesHandlerMixIn
|
||||
|
||||
|
||||
class ByIdManagedMessagesChecker(BaseChecker):

    """checks for messages that are enabled or disabled by id instead of symbol."""

    __implements__ = IRawChecker

    # configuration section name
    name = "miscellaneous"
    msgs = {
        "I0023": (
            "%s",
            "use-symbolic-message-instead",
            "Used when a message is enabled or disabled by id.",
        )
    }

    options = ()

    def process_module(self, module):
        """inspect the source file to find messages activated or deactivated by id."""
        managed_msgs = MessagesHandlerMixIn.get_by_id_managed_msgs()
        for mod_name, msg_id, msg_symbol, lineno, is_disabled in managed_msgs:
            if mod_name != module.name:
                continue
            # Both branches of the original produced the same sentence
            # apart from the verb; build it once.
            verb = "disable" if is_disabled else "enable"
            txt = "Id '{ident}' is used to {verb} '{symbol}' message emission".format(
                ident=msg_id, verb=verb, symbol=msg_symbol
            )
            self.add_message("use-symbolic-message-instead", line=lineno, args=txt)
        MessagesHandlerMixIn.clear_by_id_managed_msgs()
|
||||
|
||||
|
||||
class EncodingChecker(BaseChecker):

    """checks for:
    * warning notes in the code like FIXME, XXX
    * encoding issues.
    """

    __implements__ = (IRawChecker, ITokenChecker)

    # configuration section name
    name = "miscellaneous"
    msgs = {
        "W0511": (
            "%s",
            "fixme",
            "Used when a warning note as FIXME or XXX is detected.",
        ),
        "W0512": (
            'Cannot decode using encoding "%s", unexpected byte at position %d',
            "invalid-encoded-data",
            "Used when a source line cannot be decoded using the specified "
            "source file encoding.",
            # decoding failures can only be reported on Python 2 readers
            {"maxversion": (3, 0)},
        ),
    }

    options = (
        (
            "notes",
            {
                "type": "csv",
                "metavar": "<comma separated values>",
                "default": ("FIXME", "XXX", "TODO"),
                "help": (
                    "List of note tags to take in consideration, "
                    "separated by a comma."
                ),
            },
        ),
    )

    def open(self):
        super().open()
        # Compiled once per run: matches "# FIXME"-style notes,
        # case-insensitively, at a word boundary.
        self._fixme_pattern = re.compile(
            r"#\s*(%s)\b" % "|".join(map(re.escape, self.config.notes)), re.I
        )

    def _check_encoding(self, lineno, line, file_encoding):
        """Try to decode *line* (bytes) with *file_encoding*; report failures.

        Returns the decoded string on success, None otherwise.
        """
        try:
            return line.decode(file_encoding)
        except UnicodeDecodeError as ex:
            self.add_message(
                "invalid-encoded-data", line=lineno, args=(file_encoding, ex.args[2])
            )
        except LookupError:
            # BUG FIX: *line* is bytes here, so str operands in the original
            # (line.startswith("#"), "coding" in line) raised TypeError on
            # Python 3 whenever the declared encoding was unknown.
            if (
                line.startswith(b"#")
                and "coding" in str(line)
                and file_encoding in str(line)
            ):
                self.add_message(
                    "syntax-error",
                    line=lineno,
                    args='Cannot decode using encoding "{}",'
                    " bad encoding".format(file_encoding),
                )
        return None

    def process_module(self, module):
        """inspect the source file to find encoding problem"""
        # fall back to ASCII when the module declares no explicit coding
        if module.file_encoding:
            encoding = module.file_encoding
        else:
            encoding = "ascii"

        with module.stream() as stream:
            # stream yields raw bytes lines; line numbers are 1-based
            for lineno, line in enumerate(stream):
                self._check_encoding(lineno + 1, line, encoding)

    def process_tokens(self, tokens):
        """inspect the source to find fixme problems"""
        if not self.config.notes:
            return
        comments = (
            token_info for token_info in tokens if token_info.type == tokenize.COMMENT
        )
        for comment in comments:
            comment_text = comment.string[1:].lstrip()  # trim '#' and whitespaces

            # handle pylint disable clauses: a note tag that is itself being
            # disabled (e.g. "# pylint: disable=fixme,XXX") must not warn
            disable_option_match = OPTION_RGX.search(comment_text)
            if disable_option_match:
                try:
                    _, value = disable_option_match.group(1).split("=", 1)
                    values = [_val.strip().upper() for _val in value.split(",")]
                    if set(values) & set(self.config.notes):
                        continue
                except ValueError:
                    self.add_message(
                        "bad-inline-option",
                        args=disable_option_match.group(1).strip(),
                        # BUG FIX: ``line`` expects a line number; the original
                        # passed the comment text (comment.string) instead.
                        line=comment.start[0],
                    )
                    continue

            # emit warnings if necessary
            match = self._fixme_pattern.search("#" + comment_text.lower())
            if match:
                note = match.group(1)
                self.add_message(
                    "fixme",
                    col_offset=comment.string.lower().index(note.lower()),
                    args=comment_text,
                    line=comment.start[0],
                )
|
||||
|
||||
|
||||
def register(linter):
    """required method to auto register this checker"""
    for checker_class in (EncodingChecker, ByIdManagedMessagesChecker):
        linter.register_checker(checker_class(linter))
|
142
venv/lib/python3.6/site-packages/pylint/checkers/newstyle.py
Normal file
142
venv/lib/python3.6/site-packages/pylint/checkers/newstyle.py
Normal file
@@ -0,0 +1,142 @@
|
||||
# Copyright (c) 2006, 2008-2011, 2013-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
|
||||
# Copyright (c) 2012-2014 Google, Inc.
|
||||
# Copyright (c) 2013-2018 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2014 Michal Nowikowski <godfryd@gmail.com>
|
||||
# Copyright (c) 2014 Brett Cannon <brett@python.org>
|
||||
# Copyright (c) 2014 Arun Persaud <arun@nubati.net>
|
||||
# Copyright (c) 2015 Ionel Cristian Maries <contact@ionelmc.ro>
|
||||
# Copyright (c) 2016 Alexander Todorov <atodorov@otb.bg>
|
||||
# Copyright (c) 2016 Jakub Wilk <jwilk@jwilk.net>
|
||||
|
||||
# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
|
||||
# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
|
||||
|
||||
"""check for new / old style related problems
|
||||
"""
|
||||
import sys
|
||||
|
||||
import astroid
|
||||
|
||||
from pylint.interfaces import IAstroidChecker
|
||||
from pylint.checkers import BaseChecker
|
||||
from pylint.checkers.utils import check_messages, node_frame_class, has_known_bases
|
||||
|
||||
# Message definitions for the new-style checker, keyed by pylint message id:
# (format string, symbolic name, description[, extra options]).
MSGS = {
    "E1003": (
        "Bad first argument %r given to super()",
        "bad-super-call",
        "Used when another argument than the current class is given as "
        "first argument of the super builtin.",
    ),
    "E1004": (
        "Missing argument to super()",
        "missing-super-argument",
        "Used when the super builtin didn't receive an argument.",
        # only meaningful on Python 2: Python 3 allows zero-argument super()
        {"maxversion": (3, 0)},
    ),
}
|
||||
|
||||
|
||||
class NewStyleConflictChecker(BaseChecker):
    """checks for usage of new style capabilities on old style classes and
    other new/old styles conflicts problems
    * use of property, __slots__, super
    * "super" usage
    """

    __implements__ = (IAstroidChecker,)

    # configuration section name
    name = "newstyle"
    # messages
    msgs = MSGS
    priority = -2
    # configuration options
    options = ()

    @check_messages("bad-super-call", "missing-super-argument")
    def visit_functiondef(self, node):
        """check use of super

        Walks every call expression inside *node* (a method) and flags
        ``super(...)`` invocations whose first argument is not the enclosing
        class, or (on Python 2) that are missing arguments entirely.
        """
        # ignore actual functions or method within a new style class
        if not node.is_method():
            return
        klass = node.parent.frame()
        for stmt in node.nodes_of_class(astroid.Call):
            if node_frame_class(stmt) != node_frame_class(node):
                # Don't look down in other scopes.
                continue

            expr = stmt.func
            if not isinstance(expr, astroid.Attribute):
                continue

            # the pattern matched is ``super(...).something``: expr.expr is
            # the ``super(...)`` call itself
            call = expr.expr
            # skip the test if using super
            if not (
                isinstance(call, astroid.Call)
                and isinstance(call.func, astroid.Name)
                and call.func.name == "super"
            ):
                continue

            if not klass.newstyle and has_known_bases(klass):
                # super should not be used on an old style class
                continue
            else:
                # super first arg should be the class
                if not call.args:
                    if sys.version_info[0] == 3:
                        # unless Python 3
                        continue
                    else:
                        self.add_message("missing-super-argument", node=call)
                        continue

                # calling super(type(self), self) can lead to recursion loop
                # in derived classes
                arg0 = call.args[0]
                if (
                    isinstance(arg0, astroid.Call)
                    and isinstance(arg0.func, astroid.Name)
                    and arg0.func.name == "type"
                ):
                    self.add_message("bad-super-call", node=call, args=("type",))
                    continue

                # calling super(self.__class__, self) can lead to recursion loop
                # in derived classes
                if (
                    len(call.args) >= 2
                    and isinstance(call.args[1], astroid.Name)
                    and call.args[1].name == "self"
                    and isinstance(arg0, astroid.Attribute)
                    and arg0.attrname == "__class__"
                ):
                    self.add_message(
                        "bad-super-call", node=call, args=("self.__class__",)
                    )
                    continue

                try:
                    # ``call.args and ...`` keeps supcls falsy when there are
                    # no arguments (the Python 3 zero-argument form)
                    supcls = call.args and next(call.args[0].infer(), None)
                except astroid.InferenceError:
                    continue

                if klass is not supcls:
                    name = None
                    # if supcls is not Uninferable, then supcls was infered
                    # and use its name. Otherwise, try to look
                    # for call.args[0].name
                    if supcls:
                        name = supcls.name
                    elif call.args and hasattr(call.args[0], "name"):
                        name = call.args[0].name
                    if name:
                        self.add_message("bad-super-call", node=call, args=(name,))

    # async methods get the same super() checks as regular ones
    visit_asyncfunctiondef = visit_functiondef
|
||||
|
||||
|
||||
def register(linter):
    """required method to auto register this checker"""
    checker = NewStyleConflictChecker(linter)
    linter.register_checker(checker)
|
1398
venv/lib/python3.6/site-packages/pylint/checkers/python3.py
Normal file
1398
venv/lib/python3.6/site-packages/pylint/checkers/python3.py
Normal file
File diff suppressed because it is too large
Load Diff
125
venv/lib/python3.6/site-packages/pylint/checkers/raw_metrics.py
Normal file
125
venv/lib/python3.6/site-packages/pylint/checkers/raw_metrics.py
Normal file
@@ -0,0 +1,125 @@
|
||||
# Copyright (c) 2007, 2010, 2013, 2015 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
|
||||
# Copyright (c) 2013 Google, Inc.
|
||||
# Copyright (c) 2014 Arun Persaud <arun@nubati.net>
|
||||
# Copyright (c) 2015-2017 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2015 Mike Frysinger <vapier@gentoo.org>
|
||||
# Copyright (c) 2015 Ionel Cristian Maries <contact@ionelmc.ro>
|
||||
# Copyright (c) 2016 Glenn Matthews <glenn@e-dad.net>
|
||||
|
||||
# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
|
||||
# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
|
||||
|
||||
""" Copyright (c) 2003-2010 LOGILAB S.A. (Paris, FRANCE).
|
||||
http://www.logilab.fr/ -- mailto:contact@logilab.fr
|
||||
|
||||
Raw metrics checker
|
||||
"""
|
||||
|
||||
import tokenize
|
||||
from typing import Any
|
||||
|
||||
from pylint.interfaces import ITokenChecker
|
||||
from pylint.exceptions import EmptyReportError
|
||||
from pylint.checkers import BaseTokenChecker
|
||||
from pylint.reporters import diff_string
|
||||
from pylint.reporters.ureports.nodes import Table
|
||||
|
||||
|
||||
def report_raw_stats(sect, stats, old_stats):
    """calculate percentage of code / doc / comment / empty

    Builds a 5-column table (type, number, %, previous, difference) and
    appends it to *sect*; raises EmptyReportError when nothing was analyzed.
    """
    total_lines = stats["total_lines"]
    if not total_lines:
        raise EmptyReportError()
    sect.description = "%s lines have been analyzed" % total_lines
    # table content is a flat tuple; the header row comes first (cols=5 below)
    lines = ("type", "number", "%", "previous", "difference")
    for node_type in ("code", "docstring", "comment", "empty"):
        key = node_type + "_lines"
        total = stats[key]
        percent = float(total * 100) / total_lines
        old = old_stats.get(key, None)
        if old is not None:
            diff_str = diff_string(old, total)
        else:
            # "NC" = no comparison: there is no previous run for this key
            old, diff_str = "NC", "NC"
        lines += (node_type, str(total), "%.2f" % percent, str(old), diff_str)
    sect.append(Table(children=lines, cols=5, rheaders=1))
|
||||
|
||||
|
||||
class RawMetricsChecker(BaseTokenChecker):
    """does not check anything but gives some raw metrics :
    * total number of lines
    * total number of code lines
    * total number of docstring lines
    * total number of comments lines
    * total number of empty lines
    """

    __implements__ = (ITokenChecker,)

    # configuration section name
    name = "metrics"
    # configuration options
    options = ()
    # messages (none: this checker only contributes the RP0701 report)
    msgs = {}  # type: Any
    # reports
    reports = (("RP0701", "Raw metrics", report_raw_stats),)

    def __init__(self, linter):
        BaseTokenChecker.__init__(self, linter)
        # per-run statistics dict, (re)created in open()
        self.stats = None

    def open(self):
        """init statistics"""
        self.stats = self.linter.add_stats(
            total_lines=0,
            code_lines=0,
            empty_lines=0,
            docstring_lines=0,
            comment_lines=0,
        )

    def process_tokens(self, tokens):
        """update stats"""
        i = 0
        tokens = list(tokens)
        while i < len(tokens):
            # get_type consumes all tokens belonging to one source line and
            # returns the index of the first token of the next one
            i, lines_number, line_type = get_type(tokens, i)
            self.stats["total_lines"] += lines_number
            self.stats[line_type] += lines_number
|
||||
|
||||
|
||||
# Token kinds that carry no content of their own when classifying a line.
JUNK = (tokenize.NL, tokenize.INDENT, tokenize.NEWLINE, tokenize.ENDMARKER)


def get_type(tokens, start_index):
    """return the line type : docstring, comment, code, empty"""
    idx = start_index
    first_pos = tokens[idx][2]
    last_pos = first_pos
    category = None
    # consume every token that starts on the same physical line
    while idx < len(tokens) and tokens[idx][2][0] == first_pos[0]:
        kind = tokens[idx][0]
        last_pos = tokens[idx][3]
        if category is None:
            if kind == tokenize.STRING:
                category = "docstring_lines"
            elif kind == tokenize.COMMENT:
                category = "comment_lines"
            elif kind not in JUNK:
                category = "code_lines"
        idx += 1
    if category is None:
        category = "empty_lines"
    elif idx < len(tokens) and tokens[idx][0] == tokenize.NEWLINE:
        # swallow the trailing NEWLINE of a multi-line statement
        idx += 1
    return idx, last_pos[0] - first_pos[0] + 1, category
|
||||
|
||||
|
||||
def register(linter):
    """required method to auto register this checker"""
    checker = RawMetricsChecker(linter)
    linter.register_checker(checker)
|
1444
venv/lib/python3.6/site-packages/pylint/checkers/refactoring.py
Normal file
1444
venv/lib/python3.6/site-packages/pylint/checkers/refactoring.py
Normal file
File diff suppressed because it is too large
Load Diff
449
venv/lib/python3.6/site-packages/pylint/checkers/similar.py
Normal file
449
venv/lib/python3.6/site-packages/pylint/checkers/similar.py
Normal file
@@ -0,0 +1,449 @@
|
||||
# Copyright (c) 2006, 2008-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
|
||||
# Copyright (c) 2012 Ry4an Brase <ry4an-hg@ry4an.org>
|
||||
# Copyright (c) 2012 Google, Inc.
|
||||
# Copyright (c) 2012 Anthony VEREZ <anthony.verez.external@cassidian.com>
|
||||
# Copyright (c) 2014-2018 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2014 Brett Cannon <brett@python.org>
|
||||
# Copyright (c) 2014 Arun Persaud <arun@nubati.net>
|
||||
# Copyright (c) 2015 Ionel Cristian Maries <contact@ionelmc.ro>
|
||||
# Copyright (c) 2017 Anthony Sottile <asottile@umich.edu>
|
||||
# Copyright (c) 2017 Mikhail Fesenko <proggga@gmail.com>
|
||||
# Copyright (c) 2018 ssolanki <sushobhitsolanki@gmail.com>
|
||||
|
||||
# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
|
||||
# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
|
||||
|
||||
# pylint: disable=W0622
|
||||
"""a similarities / code duplication command line tool and pylint checker
|
||||
"""
|
||||
|
||||
from __future__ import print_function
|
||||
import sys
|
||||
from collections import defaultdict
|
||||
from itertools import groupby
|
||||
|
||||
import astroid
|
||||
|
||||
from pylint.utils import decoding_stream
|
||||
from pylint.interfaces import IRawChecker
|
||||
from pylint.checkers import BaseChecker, table_lines_from_stats
|
||||
from pylint.reporters.ureports.nodes import Table
|
||||
|
||||
|
||||
class Similar:
    """finds copy-pasted lines of code in a project"""

    def __init__(
        self,
        min_lines=4,
        ignore_comments=False,
        ignore_docstrings=False,
        ignore_imports=False,
    ):
        # minimum number of successive similar lines to report a duplication
        self.min_lines = min_lines
        self.ignore_comments = ignore_comments
        self.ignore_docstrings = ignore_docstrings
        self.ignore_imports = ignore_imports
        # one LineSet per appended stream
        self.linesets = []

    def append_stream(self, streamid, stream, encoding=None):
        """append a file to search for similarities"""
        if encoding is None:
            readlines = stream.readlines
        else:
            readlines = decoding_stream(stream, encoding).readlines
        try:
            self.linesets.append(
                LineSet(
                    streamid,
                    readlines(),
                    self.ignore_comments,
                    self.ignore_docstrings,
                    self.ignore_imports,
                )
            )
        except UnicodeDecodeError:
            # undecodable streams are skipped silently (best effort)
            pass

    def run(self):
        """start looking for similarities and display results on stdout"""
        self._display_sims(self._compute_sims())

    def _compute_sims(self):
        """compute similarities in appended files

        Returns a list of (num_lines, couples) pairs sorted by decreasing
        num_lines, where couples is a set of (lineset, start_index) locations.
        """
        no_duplicates = defaultdict(list)
        for num, lineset1, idx1, lineset2, idx2 in self._iter_sims():
            duplicate = no_duplicates[num]
            # merge overlapping reports of the same length into one location set
            for couples in duplicate:
                if (lineset1, idx1) in couples or (lineset2, idx2) in couples:
                    couples.add((lineset1, idx1))
                    couples.add((lineset2, idx2))
                    break
            else:
                duplicate.append({(lineset1, idx1), (lineset2, idx2)})
        sims = []
        for num, ensembles in no_duplicates.items():
            for couples in ensembles:
                sims.append((num, couples))
        sims.sort()
        sims.reverse()
        return sims

    def _display_sims(self, sims):
        """display computed similarities on stdout"""
        nb_lignes_dupliquees = 0
        for num, couples in sims:
            print()
            print(num, "similar lines in", len(couples), "files")
            couples = sorted(couples)
            for lineset, idx in couples:
                print("==%s:%s" % (lineset.name, idx))
            # deliberately uses the loop variables leaked from the loop above
            # (any member of the couple has the same stripped content)
            # pylint: disable=W0631
            for line in lineset._real_lines[idx : idx + num]:
                print("  ", line.rstrip())
            nb_lignes_dupliquees += num * (len(couples) - 1)
        nb_total_lignes = sum([len(lineset) for lineset in self.linesets])
        print(
            "TOTAL lines=%s duplicates=%s percent=%.2f"
            % (
                nb_total_lignes,
                nb_lignes_dupliquees,
                nb_lignes_dupliquees * 100. / nb_total_lignes,
            )
        )

    def _find_common(self, lineset1, lineset2):
        """find similarities in the two given linesets

        Yields (num_lines, lineset1, start1, lineset2, start2) tuples for
        every run of more than min_lines equal non-blank stripped lines.
        """
        lines1 = lineset1.enumerate_stripped
        lines2 = lineset2.enumerate_stripped
        find = lineset2.find
        index1 = 0
        min_lines = self.min_lines
        while index1 < len(lineset1):
            skip = 1
            num = 0
            for index2 in find(lineset1[index1]):
                non_blank = 0
                for num, ((_, line1), (_, line2)) in enumerate(
                    zip(lines1(index1), lines2(index2))
                ):
                    if line1 != line2:
                        if non_blank > min_lines:
                            yield num, lineset1, index1, lineset2, index2
                            skip = max(skip, num)
                        break
                    if line1:
                        non_blank += 1
                else:
                    # we may have reach the end
                    num += 1
                    if non_blank > min_lines:
                        yield num, lineset1, index1, lineset2, index2
                        skip = max(skip, num)
            # jump past the longest match found from index1
            index1 += skip

    def _iter_sims(self):
        """iterate on similarities among all files, by making a cartesian
        product
        """
        for idx, lineset in enumerate(self.linesets[:-1]):
            for lineset2 in self.linesets[idx + 1 :]:
                for sim in self._find_common(lineset, lineset2):
                    yield sim
|
||||
|
||||
|
||||
def stripped_lines(lines, ignore_comments, ignore_docstrings, ignore_imports):
    """return lines with leading/trailing whitespace and any ignored code
    features removed
    """
    if ignore_imports:
        # Map each statement's starting line number to True when every
        # statement beginning there is an import; later lines of a multi-line
        # statement inherit the flag of the line that started it.
        tree = astroid.parse("".join(lines))
        import_flags = (
            (node.lineno, isinstance(node, (astroid.Import, astroid.ImportFrom)))
            for node in tree.body
        )
        line_begins_import = {
            lineno: all(flag for _, flag in grouped)
            for lineno, grouped in groupby(import_flags, key=lambda item: item[0])
        }
        current_line_is_import = False

    result = []
    doc_delim = None
    for lineno, raw_line in enumerate(lines, start=1):
        text = raw_line.strip()
        if ignore_docstrings:
            if doc_delim is None and text.startswith(('"""', "'''")):
                doc_delim = text[:3]
                text = text[3:]
            if doc_delim is not None:
                if text.endswith(doc_delim):
                    doc_delim = None
                # blank every line inside (and closing) the docstring
                text = ""
        if ignore_imports:
            current_line_is_import = line_begins_import.get(
                lineno, current_line_is_import
            )
            if current_line_is_import:
                text = ""
        if ignore_comments:
            # XXX should use regex in checkers/format to avoid cutting
            # at a "#" in a string
            text = text.split("#", 1)[0].strip()
        result.append(text)
    return result
|
||||
|
||||
|
||||
class LineSet:
    """Holds and indexes all the lines of a single source file"""

    def __init__(
        self,
        name,
        lines,
        ignore_comments=False,
        ignore_docstrings=False,
        ignore_imports=False,
    ):
        self.name = name
        # raw lines as read from the stream (used when printing reports)
        self._real_lines = lines
        # normalized lines actually compared for similarity
        self._stripped_lines = stripped_lines(
            lines, ignore_comments, ignore_docstrings, ignore_imports
        )
        self._index = self._mk_index()

    def __str__(self):
        return "<Lineset for %s>" % self.name

    def __len__(self):
        return len(self._real_lines)

    def __getitem__(self, index):
        return self._stripped_lines[index]

    def __lt__(self, other):
        return self.name < other.name

    def __hash__(self):
        # identity hash: linesets are used as set/dict keys, one per stream
        return id(self)

    def enumerate_stripped(self, start_at=0):
        """return an iterator on stripped lines, starting from a given index
        if specified, else 0
        """
        idx = start_at
        if start_at:
            lines = self._stripped_lines[start_at:]
        else:
            lines = self._stripped_lines
        for line in lines:
            # if line:
            yield idx, line
            idx += 1

    def find(self, stripped_line):
        """return positions of the given stripped line in this set"""
        return self._index.get(stripped_line, ())

    def _mk_index(self):
        """create the index for this set"""
        index = defaultdict(list)
        for line_no, line in enumerate(self._stripped_lines):
            if line:
                # blank lines are not indexed: they would match everywhere
                index[line].append(line_no)
        return index
|
||||
|
||||
|
||||
# Message definitions for the duplicate-code checker, keyed by message id:
# (format string, symbolic name, description).
MSGS = {
    "R0801": (
        "Similar lines in %s files\n%s",
        "duplicate-code",
        "Indicates that a set of similar lines has been detected "
        "among multiple file. This usually means that the code should "
        "be refactored to avoid this duplication.",
    )
}
|
||||
|
||||
|
||||
def report_similarities(sect, stats, old_stats):
    """make a layout with some stats about duplication

    Appends a 4-column table (metric, now, previous, difference) to *sect*.
    """
    lines = ["", "now", "previous", "difference"]
    lines += table_lines_from_stats(
        stats, old_stats, ("nb_duplicated_lines", "percent_duplicated_lines")
    )
    sect.append(Table(children=lines, cols=4, rheaders=1, cheaders=1))
|
||||
|
||||
|
||||
# wrapper to get a pylint checker from the similar class
|
||||
class SimilarChecker(BaseChecker, Similar):
|
||||
"""checks for similarities and duplicated code. This computation may be
|
||||
memory / CPU intensive, so you should disable it if you experiment some
|
||||
problems.
|
||||
"""
|
||||
|
||||
__implements__ = (IRawChecker,)
|
||||
# configuration section name
|
||||
name = "similarities"
|
||||
# messages
|
||||
msgs = MSGS
|
||||
# configuration options
|
||||
# for available dict keys/values see the optik parser 'add_option' method
|
||||
options = (
|
||||
(
|
||||
"min-similarity-lines", # type: ignore
|
||||
{
|
||||
"default": 4,
|
||||
"type": "int",
|
||||
"metavar": "<int>",
|
||||
"help": "Minimum lines number of a similarity.",
|
||||
},
|
||||
),
|
||||
(
|
||||
"ignore-comments",
|
||||
{
|
||||
"default": True,
|
||||
"type": "yn",
|
||||
"metavar": "<y or n>",
|
||||
"help": "Ignore comments when computing similarities.",
|
||||
},
|
||||
),
|
||||
(
|
||||
"ignore-docstrings",
|
||||
{
|
||||
"default": True,
|
||||
"type": "yn",
|
||||
"metavar": "<y or n>",
|
||||
"help": "Ignore docstrings when computing similarities.",
|
||||
},
|
||||
),
|
||||
(
|
||||
"ignore-imports",
|
||||
{
|
||||
"default": False,
|
||||
"type": "yn",
|
||||
"metavar": "<y or n>",
|
||||
"help": "Ignore imports when computing similarities.",
|
||||
},
|
||||
),
|
||||
)
|
||||
# reports
|
||||
reports = (("RP0801", "Duplication", report_similarities),) # type: ignore
|
||||
|
||||
def __init__(self, linter=None):
|
||||
BaseChecker.__init__(self, linter)
|
||||
Similar.__init__(
|
||||
self, min_lines=4, ignore_comments=True, ignore_docstrings=True
|
||||
)
|
||||
self.stats = None
|
||||
|
||||
def set_option(self, optname, value, action=None, optdict=None):
|
||||
"""method called to set an option (registered in the options list)
|
||||
|
||||
overridden to report options setting to Similar
|
||||
"""
|
||||
BaseChecker.set_option(self, optname, value, action, optdict)
|
||||
if optname == "min-similarity-lines":
|
||||
self.min_lines = self.config.min_similarity_lines
|
||||
elif optname == "ignore-comments":
|
||||
self.ignore_comments = self.config.ignore_comments
|
||||
elif optname == "ignore-docstrings":
|
||||
self.ignore_docstrings = self.config.ignore_docstrings
|
||||
elif optname == "ignore-imports":
|
||||
self.ignore_imports = self.config.ignore_imports
|
||||
|
||||
def open(self):
|
||||
"""init the checkers: reset linesets and statistics information"""
|
||||
self.linesets = []
|
||||
self.stats = self.linter.add_stats(
|
||||
nb_duplicated_lines=0, percent_duplicated_lines=0
|
||||
)
|
||||
|
||||
def process_module(self, node):
|
||||
"""process a module
|
||||
|
||||
the module's content is accessible via the stream object
|
||||
|
||||
stream must implement the readlines method
|
||||
"""
|
||||
with node.stream() as stream:
|
||||
self.append_stream(self.linter.current_name, stream, node.file_encoding)
|
||||
|
||||
def close(self):
|
||||
"""compute and display similarities on closing (i.e. end of parsing)"""
|
||||
total = sum(len(lineset) for lineset in self.linesets)
|
||||
duplicated = 0
|
||||
stats = self.stats
|
||||
for num, couples in self._compute_sims():
|
||||
msg = []
|
||||
for lineset, idx in couples:
|
||||
msg.append("==%s:%s" % (lineset.name, idx))
|
||||
msg.sort()
|
||||
# pylint: disable=W0631
|
||||
for line in lineset._real_lines[idx : idx + num]:
|
||||
msg.append(line.rstrip())
|
||||
self.add_message("R0801", args=(len(couples), "\n".join(msg)))
|
||||
duplicated += num * (len(couples) - 1)
|
||||
stats["nb_duplicated_lines"] = duplicated
|
||||
stats["percent_duplicated_lines"] = total and duplicated * 100. / total
|
||||
|
||||
|
||||
def register(linter):
    """required method to auto register this checker"""
    checker = SimilarChecker(linter)
    linter.register_checker(checker)
|
||||
|
||||
|
||||
def usage(status=0):
    """display command line usage information"""
    banner = "finds copy pasted blocks in a set of files"
    usage_line = (
        "Usage: symilar [-d|--duplicates min_duplicated_lines] "
        "[-i|--ignore-comments] [--ignore-docstrings] [--ignore-imports] file1..."
    )
    print(banner)
    print()
    print(usage_line)
    sys.exit(status)
|
||||
|
||||
|
||||
def Run(argv=None):
    """standalone command line access point

    Parses *argv* (defaults to sys.argv[1:]), runs the Similar engine on the
    given files and exits the process (never returns).
    """
    if argv is None:
        argv = sys.argv[1:]
    from getopt import getopt

    # short options: -h(elp), -d(uplicates), -i(gnore-comments)
    s_opts = "hdi"
    l_opts = (
        "help",
        "duplicates=",
        "ignore-comments",
        "ignore-imports",
        "ignore-docstrings",
    )
    min_lines = 4
    ignore_comments = False
    ignore_docstrings = False
    ignore_imports = False
    opts, args = getopt(argv, s_opts, l_opts)
    for opt, val in opts:
        if opt in ("-d", "--duplicates"):
            min_lines = int(val)
        elif opt in ("-h", "--help"):
            usage()
        elif opt in ("-i", "--ignore-comments"):
            ignore_comments = True
        elif opt in ("--ignore-docstrings",):
            ignore_docstrings = True
        elif opt in ("--ignore-imports",):
            ignore_imports = True
    if not args:
        # no file to analyze: print usage and exit with an error status
        usage(1)
    sim = Similar(min_lines, ignore_comments, ignore_docstrings, ignore_imports)
    for filename in args:
        with open(filename) as stream:
            sim.append_stream(filename, stream)
    sim.run()
    sys.exit(0)
|
||||
|
||||
|
||||
# allow running this module as the standalone "symilar" command line tool
if __name__ == "__main__":
    Run()
|
403
venv/lib/python3.6/site-packages/pylint/checkers/spelling.py
Normal file
403
venv/lib/python3.6/site-packages/pylint/checkers/spelling.py
Normal file
@@ -0,0 +1,403 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright (c) 2014-2017 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2014 Michal Nowikowski <godfryd@gmail.com>
|
||||
# Copyright (c) 2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
|
||||
# Copyright (c) 2015 Pavel Roskin <proski@gnu.org>
|
||||
# Copyright (c) 2015 Ionel Cristian Maries <contact@ionelmc.ro>
|
||||
# Copyright (c) 2016-2017 Pedro Algarvio <pedro@algarvio.me>
|
||||
# Copyright (c) 2016 Alexander Todorov <atodorov@otb.bg>
|
||||
# Copyright (c) 2017 Łukasz Rogalski <rogalski.91@gmail.com>
|
||||
# Copyright (c) 2017 Mikhail Fesenko <proggga@gmail.com>
|
||||
# Copyright (c) 2018 Mike Frysinger <vapier@gmail.com>
|
||||
# Copyright (c) 2018 Sushobhit <31987769+sushobhit27@users.noreply.github.com>
|
||||
# Copyright (c) 2018 Anthony Sottile <asottile@umich.edu>
|
||||
|
||||
# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
|
||||
# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
|
||||
|
||||
"""Checker for spelling errors in comments and docstrings.
|
||||
"""
|
||||
|
||||
import os
|
||||
import tokenize
|
||||
import re
|
||||
|
||||
try:
|
||||
import enchant
|
||||
from enchant.tokenize import ( # type: ignore
|
||||
get_tokenizer,
|
||||
Chunker,
|
||||
Filter,
|
||||
EmailFilter,
|
||||
URLFilter,
|
||||
WikiWordFilter,
|
||||
)
|
||||
except ImportError:
|
||||
enchant = None
|
||||
# pylint: disable=no-init
|
||||
class Filter: # type: ignore
|
||||
def _skip(self, word):
|
||||
raise NotImplementedError
|
||||
|
||||
class Chunker: # type: ignore
|
||||
pass
|
||||
|
||||
|
||||
from pylint.interfaces import ITokenChecker, IAstroidChecker
|
||||
from pylint.checkers import BaseTokenChecker
|
||||
from pylint.checkers.utils import check_messages
|
||||
|
||||
if enchant is not None:
|
||||
br = enchant.Broker()
|
||||
dicts = br.list_dicts()
|
||||
dict_choices = [""] + [d[0] for d in dicts]
|
||||
dicts = ["%s (%s)" % (d[0], d[1].name) for d in dicts]
|
||||
dicts = ", ".join(dicts)
|
||||
instr = ""
|
||||
else:
|
||||
dicts = "none"
|
||||
dict_choices = [""]
|
||||
instr = " To make it working install python-enchant package."
|
||||
|
||||
|
||||
class WordsWithDigigtsFilter(Filter):
    """Skips words with digits.

    Tokens such as "utf8" or "py3k" are identifiers rather than prose, so
    they are excluded from spell checking.
    """

    def _skip(self, word):
        # Equivalent to the original explicit char loop; any() short-circuits
        # on the first digit found.
        return any(char.isdigit() for char in word)
|
||||
|
||||
|
||||
class WordsWithUnderscores(Filter):
    """Skips words with underscores.

    They are probably function parameter names.
    """

    def _skip(self, word):
        # snake_case identifiers are not natural-language words
        return "_" in word
|
||||
|
||||
|
||||
class CamelCasedWord(Filter):
    r"""Filter skipping over camelCasedWords.

    This filter skips any words matching the following regular expression:

        ^([a-z]+([\d]|[A-Z])(?:\w+)?)

    That is, any word beginning with a lowercase run followed by a digit or
    an uppercase letter — camelCasedWords as well as lowercase-prefixed
    identifiers such as ``py3k``.
    """
    # NOTE(review): the pattern also skips lowercase+digit tokens, not only
    # strict camelCase; the docstring above reflects the actual regex.
    _pattern = re.compile(r"^([a-z]+([\d]|[A-Z])(?:\w+)?)")

    def _skip(self, word):
        # Skip any token the pattern recognises as an identifier-like word.
        return bool(self._pattern.match(word))
|
||||
|
||||
|
||||
class SphinxDirectives(Filter):
    r"""Skip Sphinx cross-reference directives.

    Any token of the form ``:role:`target``` (for example
    ``:class:`BaseQuery```) is markup, not prose, and must not be
    spell-checked.
    """
    # The trailing backtick is optional because enchant strips it out
    # before the token reaches this filter.
    _pattern = re.compile(r"^:([a-z]+):`([^`]+)(`)?")

    def _skip(self, word):
        return self._pattern.match(word) is not None
|
||||
|
||||
|
||||
class ForwardSlashChunkder(Chunker):
    """
    This chunker allows splitting words like 'before/after' into 'before' and 'after'
    """

    def next(self):
        # Iterator-protocol entry point: return the next chunk of self._text,
        # splitting on "/" only when both sides look like real words.
        while True:
            if not self._text:
                raise StopIteration()
            if "/" not in self._text:
                # No slash left: emit the remainder as a single chunk.
                text = self._text
                self._offset = 0
                self._text = ""
                return (text, 0)
            pre_text, post_text = self._text.split("/", 1)
            self._text = post_text
            self._offset = 0
            if (
                not pre_text
                or not post_text
                or not pre_text[-1].isalpha()
                or not post_text[0].isalpha()
            ):
                # Not a word/word split (e.g. "a//b" or "1/2"): emit the
                # original "pre/post" text unchanged and stop chunking.
                self._text = ""
                self._offset = 0
                return (pre_text + "/" + post_text, 0)
            return (pre_text, 0)

    def _next(self):
        # NOTE(review): appears unused — `next` above implements the
        # chunking; kept byte-identical.
        while True:
            if "/" not in self._text:
                return (self._text, 0)
            pre_text, post_text = self._text.split("/", 1)
            if not pre_text or not post_text:
                break
            if not pre_text[-1].isalpha() or not post_text[0].isalpha():
                raise StopIteration()
            self._text = pre_text + " " + post_text
        raise StopIteration()
|
||||
|
||||
|
||||
class SpellingChecker(BaseTokenChecker):
    """Check spelling in comments and docstrings"""

    __implements__ = (ITokenChecker, IAstroidChecker)
    name = "spelling"
    msgs = {
        "C0401": (
            "Wrong spelling of a word '%s' in a comment:\n%s\n"
            "%s\nDid you mean: '%s'?",
            "wrong-spelling-in-comment",
            "Used when a word in comment is not spelled correctly.",
        ),
        "C0402": (
            "Wrong spelling of a word '%s' in a docstring:\n%s\n"
            "%s\nDid you mean: '%s'?",
            "wrong-spelling-in-docstring",
            "Used when a word in docstring is not spelled correctly.",
        ),
        "C0403": (
            "Invalid characters %r in a docstring",
            "invalid-characters-in-docstring",
            "Used when a word in docstring cannot be checked by enchant.",
        ),
    }
    options = (
        (
            "spelling-dict",
            {
                "default": "",
                "type": "choice",
                "metavar": "<dict name>",
                "choices": dict_choices,
                "help": "Spelling dictionary name. "
                "Available dictionaries: %s.%s." % (dicts, instr),
            },
        ),
        (
            "spelling-ignore-words",
            {
                "default": "",
                "type": "string",
                "metavar": "<comma separated words>",
                "help": "List of comma separated words that " "should not be checked.",
            },
        ),
        (
            "spelling-private-dict-file",
            {
                "default": "",
                "type": "string",
                "metavar": "<path to file>",
                "help": "A path to a file that contains private "
                "dictionary; one word per line.",
            },
        ),
        (
            "spelling-store-unknown-words",
            {
                "default": "n",
                "type": "yn",
                "metavar": "<y_or_n>",
                "help": "Tells whether to store unknown words to "
                "indicated private dictionary in "
                "--spelling-private-dict-file option instead of "
                "raising a message.",
            },
        ),
        (
            "max-spelling-suggestions",
            {
                "default": 4,
                "type": "int",
                "metavar": "N",
                "help": "Limits count of emitted suggestions for " "spelling mistakes.",
            },
        ),
    )

    def open(self):
        """Set up the enchant dictionary and tokenizer from the config.

        Leaves ``self.initialized`` False (disabling all checks) when
        enchant is missing or no spelling dictionary was configured.
        """
        self.initialized = False
        self.private_dict_file = None

        if enchant is None:
            return
        dict_name = self.config.spelling_dict
        if not dict_name:
            return

        self.ignore_list = [
            w.strip() for w in self.config.spelling_ignore_words.split(",")
        ]
        # "param" appears in docstring in param description and
        # "pylint" appears in comments in pylint pragmas.
        self.ignore_list.extend(["param", "pylint"])

        # Expand tilde to allow e.g. spelling-private-dict-file = ~/.pylintdict
        if self.config.spelling_private_dict_file:
            self.config.spelling_private_dict_file = os.path.expanduser(
                self.config.spelling_private_dict_file
            )

        if self.config.spelling_private_dict_file:
            # DictWithPWL merges the system dictionary with the personal
            # word list; the file is also opened for appending unknown words.
            self.spelling_dict = enchant.DictWithPWL(
                dict_name, self.config.spelling_private_dict_file
            )
            self.private_dict_file = open(self.config.spelling_private_dict_file, "a")
        else:
            self.spelling_dict = enchant.Dict(dict_name)

        if self.config.spelling_store_unknown_words:
            self.unknown_words = set()

        self.tokenizer = get_tokenizer(
            dict_name,
            chunkers=[ForwardSlashChunkder],
            filters=[
                EmailFilter,
                URLFilter,
                WikiWordFilter,
                WordsWithDigigtsFilter,
                WordsWithUnderscores,
                CamelCasedWord,
                SphinxDirectives,
            ],
        )
        self.initialized = True

    def close(self):
        """Close the personal-dictionary file, if one was opened."""
        if self.private_dict_file:
            self.private_dict_file.close()

    def _check_spelling(self, msgid, line, line_num):
        """Spell-check one line of text and emit *msgid* for each mistake."""
        original_line = line
        if line.strip().startswith("#"):
            line = line.strip()[1:]
            starts_with_comment = True
        else:
            starts_with_comment = False
        for word, _ in self.tokenizer(line.strip()):
            lower_cased_word = word.casefold()

            # Skip words from ignore list.
            if word in self.ignore_list or lower_cased_word in self.ignore_list:
                continue

            # Strip starting u' from unicode literals and r' from raw strings.
            if word.startswith(("u'", 'u"', "r'", 'r"')) and len(word) > 2:
                word = word[2:]
                lower_cased_word = lower_cased_word[2:]

            # If it is a known word, then continue.
            try:
                if self.spelling_dict.check(lower_cased_word):
                    # The lower cased version of word passed spell checking
                    continue

                # If we reached this far, it means there was a spelling mistake.
                # Let's retry with the original work because 'unicode' is a
                # spelling mistake but 'Unicode' is not
                if self.spelling_dict.check(word):
                    continue
            except enchant.errors.Error:
                # enchant could not process the token at all (e.g. invalid
                # characters) — report that instead of a spelling mistake.
                self.add_message(
                    "invalid-characters-in-docstring", line=line_num, args=(word,)
                )
                continue

            # Store word to private dict or raise a message.
            if self.config.spelling_store_unknown_words:
                if lower_cased_word not in self.unknown_words:
                    self.private_dict_file.write("%s\n" % lower_cased_word)
                    self.unknown_words.add(lower_cased_word)
            else:
                # Present up to N suggestions.
                suggestions = self.spelling_dict.suggest(word)
                del suggestions[self.config.max_spelling_suggestions :]

                # Locate the word in the line to build the "^^^" indicator.
                m = re.search(r"(\W|^)(%s)(\W|$)" % word, line)
                if m:
                    # Start position of second group in regex.
                    col = m.regs[2][0]
                else:
                    col = line.index(word)

                # Shift by one for the '#' stripped off a comment line.
                if starts_with_comment:
                    col += 1
                indicator = (" " * col) + ("^" * len(word))

                self.add_message(
                    msgid,
                    line=line_num,
                    args=(
                        word,
                        original_line,
                        indicator,
                        "'{}'".format("' or '".join(suggestions)),
                    ),
                )

    def process_tokens(self, tokens):
        """Spell-check comment tokens of the module being linted."""
        if not self.initialized:
            return

        # Process tokens and look for comments.
        for (tok_type, token, (start_row, _), _, _) in tokens:
            if tok_type == tokenize.COMMENT:
                if start_row == 1 and token.startswith("#!/"):
                    # Skip shebang lines
                    continue
                if token.startswith("# pylint:"):
                    # Skip pylint enable/disable comments
                    continue
                self._check_spelling("wrong-spelling-in-comment", token, start_row)

    @check_messages("wrong-spelling-in-docstring")
    def visit_module(self, node):
        """Spell-check the module docstring."""
        if not self.initialized:
            return
        self._check_docstring(node)

    @check_messages("wrong-spelling-in-docstring")
    def visit_classdef(self, node):
        """Spell-check a class docstring."""
        if not self.initialized:
            return
        self._check_docstring(node)

    @check_messages("wrong-spelling-in-docstring")
    def visit_functiondef(self, node):
        """Spell-check a function or method docstring."""
        if not self.initialized:
            return
        self._check_docstring(node)

    visit_asyncfunctiondef = visit_functiondef

    def _check_docstring(self, node):
        """check the node has any spelling errors"""
        docstring = node.doc
        if not docstring:
            return

        # Docstring body starts on the line after the def/class statement.
        start_line = node.lineno + 1

        # Go through lines of docstring
        for idx, line in enumerate(docstring.splitlines()):
            self._check_spelling("wrong-spelling-in-docstring", line, start_line + idx)
|
||||
|
||||
|
||||
def register(linter):
    """Required method to auto-register this checker with *linter*."""
    checker = SpellingChecker(linter)
    linter.register_checker(checker)
|
441
venv/lib/python3.6/site-packages/pylint/checkers/stdlib.py
Normal file
441
venv/lib/python3.6/site-packages/pylint/checkers/stdlib.py
Normal file
@@ -0,0 +1,441 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright (c) 2013-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
|
||||
# Copyright (c) 2013-2014 Google, Inc.
|
||||
# Copyright (c) 2014-2018 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2014 Cosmin Poieana <cmin@ropython.org>
|
||||
# Copyright (c) 2014 Vlad Temian <vladtemian@gmail.com>
|
||||
# Copyright (c) 2014 Arun Persaud <arun@nubati.net>
|
||||
# Copyright (c) 2015 Cezar <celnazli@bitdefender.com>
|
||||
# Copyright (c) 2015 Chris Rebert <code@rebertia.com>
|
||||
# Copyright (c) 2015 Ionel Cristian Maries <contact@ionelmc.ro>
|
||||
# Copyright (c) 2016 Jared Garst <cultofjared@gmail.com>
|
||||
# Copyright (c) 2017 Renat Galimov <renat2017@gmail.com>
|
||||
# Copyright (c) 2017 Martin <MartinBasti@users.noreply.github.com>
|
||||
# Copyright (c) 2017 Christopher Zurcher <zurcher@users.noreply.github.com>
|
||||
# Copyright (c) 2017 Łukasz Rogalski <rogalski.91@gmail.com>
|
||||
# Copyright (c) 2018 Banjamin Freeman <befreeman@users.noreply.github.com>
|
||||
# Copyright (c) 2018 Ioana Tagirta <ioana.tagirta@gmail.com>
|
||||
|
||||
# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
|
||||
# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
|
||||
|
||||
"""Checkers for various standard library functions."""
|
||||
|
||||
import sys
|
||||
|
||||
import astroid
|
||||
from astroid.bases import Instance
|
||||
from astroid.node_classes import Const
|
||||
from pylint.interfaces import IAstroidChecker
|
||||
from pylint.checkers import BaseChecker
|
||||
from pylint.checkers import utils
|
||||
|
||||
|
||||
# Qualified names that the stdlib checker special-cases.
OPEN_FILES = {"open", "file"}  # builtins returning file objects
UNITTEST_CASE = "unittest.case"
THREADING_THREAD = "threading.Thread"
COPY_COPY = "copy.copy"
OS_ENVIRON = "os._Environ"
ENV_GETTERS = {"os.getenv"}
SUBPROCESS_POPEN = "subprocess.Popen"

# open() is defined in _io on Python 3 and in __builtin__ on Python 2.
OPEN_MODULE = "_io" if sys.version_info >= (3, 0) else "__builtin__"
|
||||
|
||||
|
||||
def _check_mode_str(mode):
|
||||
# check type
|
||||
if not isinstance(mode, str):
|
||||
return False
|
||||
# check syntax
|
||||
modes = set(mode)
|
||||
_mode = "rwatb+Ux"
|
||||
creating = "x" in modes
|
||||
if modes - set(_mode) or len(mode) > len(modes):
|
||||
return False
|
||||
# check logic
|
||||
reading = "r" in modes
|
||||
writing = "w" in modes
|
||||
appending = "a" in modes
|
||||
text = "t" in modes
|
||||
binary = "b" in modes
|
||||
if "U" in modes:
|
||||
if writing or appending or creating:
|
||||
return False
|
||||
reading = True
|
||||
if text and binary:
|
||||
return False
|
||||
total = reading + writing + appending + creating
|
||||
if total > 1:
|
||||
return False
|
||||
if not (reading or writing or appending or creating):
|
||||
return False
|
||||
return True
|
||||
|
||||
|
||||
class StdlibChecker(BaseChecker):
    """Checks for problematic usages of standard-library functions:
    bad open() modes, deprecated methods, datetime.time truthiness,
    redundant unittest asserts, threading/subprocess pitfalls and
    os.environ misuse.
    """

    __implements__ = (IAstroidChecker,)
    name = "stdlib"

    msgs = {
        "W1501": (
            '"%s" is not a valid mode for open.',
            "bad-open-mode",
            "Python supports: r, w, a[, x] modes with b, +, "
            "and U (only with r) options. "
            "See http://docs.python.org/2/library/functions.html#open",
        ),
        "W1502": (
            "Using datetime.time in a boolean context.",
            "boolean-datetime",
            "Using datetime.time in a boolean context can hide "
            "subtle bugs when the time they represent matches "
            "midnight UTC. This behaviour was fixed in Python 3.5. "
            "See http://bugs.python.org/issue13936 for reference.",
            {"maxversion": (3, 5)},
        ),
        "W1503": (
            "Redundant use of %s with constant value %r",
            "redundant-unittest-assert",
            "The first argument of assertTrue and assertFalse is "
            "a condition. If a constant is passed as parameter, that "
            "condition will be always true. In this case a warning "
            "should be emitted.",
        ),
        "W1505": (
            "Using deprecated method %s()",
            "deprecated-method",
            "The method is marked as deprecated and will be removed in "
            "a future version of Python. Consider looking for an "
            "alternative in the documentation.",
        ),
        "W1506": (
            "threading.Thread needs the target function",
            "bad-thread-instantiation",
            "The warning is emitted when a threading.Thread class "
            "is instantiated without the target function being passed. "
            "By default, the first parameter is the group param, not the target param. ",
        ),
        "W1507": (
            "Using copy.copy(os.environ). Use os.environ.copy() instead. ",
            "shallow-copy-environ",
            "os.environ is not a dict object but proxy object, so "
            "shallow copy has still effects on original object. "
            "See https://bugs.python.org/issue15373 for reference. ",
        ),
        "E1507": (
            "%s does not support %s type argument",
            "invalid-envvar-value",
            "Env manipulation functions support only string type arguments. "
            "See https://docs.python.org/3/library/os.html#os.getenv. ",
        ),
        "W1508": (
            "%s default type is %s. Expected str or None.",
            "invalid-envvar-default",
            "Env manipulation functions return None or str values. "
            "Supplying anything different as a default may cause bugs. "
            "See https://docs.python.org/3/library/os.html#os.getenv. ",
        ),
        "W1509": (
            "Using preexec_fn keyword which may be unsafe in the presence "
            "of threads",
            "subprocess-popen-preexec-fn",
            "The preexec_fn parameter is not safe to use in the presence "
            "of threads in your application. The child process could "
            "deadlock before exec is called. If you must use it, keep it "
            "trivial! Minimize the number of libraries you call into."
            "https://docs.python.org/3/library/subprocess.html#popen-constructor",
        ),
    }

    # Deprecated qualified names, keyed by Python major version:
    # key 0 holds version-independent entries; keys 2 and 3 map a
    # (major, minor, micro) "deprecated since" tuple to a set of names.
    deprecated = {
        0: {
            "cgi.parse_qs",
            "cgi.parse_qsl",
            "ctypes.c_buffer",
            "distutils.command.register.register.check_metadata",
            "distutils.command.sdist.sdist.check_metadata",
            "tkinter.Misc.tk_menuBar",
            "tkinter.Menu.tk_bindForTraversal",
        },
        2: {
            (2, 6, 0): {
                "commands.getstatus",
                "os.popen2",
                "os.popen3",
                "os.popen4",
                "macostools.touched",
            },
            (2, 7, 0): {
                "unittest.case.TestCase.assertEquals",
                "unittest.case.TestCase.assertNotEquals",
                "unittest.case.TestCase.assertAlmostEquals",
                "unittest.case.TestCase.assertNotAlmostEquals",
                "unittest.case.TestCase.assert_",
                "xml.etree.ElementTree.Element.getchildren",
                "xml.etree.ElementTree.Element.getiterator",
                "xml.etree.ElementTree.XMLParser.getiterator",
                "xml.etree.ElementTree.XMLParser.doctype",
            },
        },
        3: {
            (3, 0, 0): {
                "inspect.getargspec",
                "failUnlessEqual",
                "assertEquals",
                "failIfEqual",
                "assertNotEquals",
                "failUnlessAlmostEqual",
                "assertAlmostEquals",
                "failIfAlmostEqual",
                "assertNotAlmostEquals",
                "failUnless",
                "assert_",
                "failUnlessRaises",
                "failIf",
                "assertRaisesRegexp",
                "assertRegexpMatches",
                "assertNotRegexpMatches",
            },
            (3, 1, 0): {
                "base64.encodestring",
                "base64.decodestring",
                "ntpath.splitunc",
            },
            (3, 2, 0): {
                "cgi.escape",
                "configparser.RawConfigParser.readfp",
                "xml.etree.ElementTree.Element.getchildren",
                "xml.etree.ElementTree.Element.getiterator",
                "xml.etree.ElementTree.XMLParser.getiterator",
                "xml.etree.ElementTree.XMLParser.doctype",
            },
            (3, 3, 0): {
                "inspect.getmoduleinfo",
                "logging.warn",
                "logging.Logger.warn",
                "logging.LoggerAdapter.warn",
                "nntplib._NNTPBase.xpath",
                "platform.popen",
            },
            (3, 4, 0): {
                "importlib.find_loader",
                "plistlib.readPlist",
                "plistlib.writePlist",
                "plistlib.readPlistFromBytes",
                "plistlib.writePlistToBytes",
            },
            (3, 4, 4): {"asyncio.tasks.async"},
            (3, 5, 0): {
                "fractions.gcd",
                "inspect.getargvalues",
                "inspect.formatargspec",
                "inspect.formatargvalues",
                "inspect.getcallargs",
                "platform.linux_distribution",
                "platform.dist",
            },
            (3, 6, 0): {"importlib._bootstrap_external.FileLoader.load_module"},
        },
    }

    def _check_bad_thread_instantiation(self, node):
        """Warn when threading.Thread is created without a target argument."""
        if not node.kwargs and not node.keywords and len(node.args) <= 1:
            self.add_message("bad-thread-instantiation", node=node)

    def _check_for_preexec_fn_in_Popen(self, node):
        """Warn when subprocess.Popen is called with a preexec_fn keyword."""
        if node.keywords:
            for keyword in node.keywords:
                if keyword.arg == "preexec_fn":
                    self.add_message("subprocess-popen-preexec-fn", node=node)

    def _check_shallow_copy_environ(self, node):
        """Warn when copy.copy is applied to os.environ."""
        arg = utils.get_argument_from_call(node, position=0)
        for inferred in arg.inferred():
            if inferred.qname() == OS_ENVIRON:
                self.add_message("shallow-copy-environ", node=node)
                break

    @utils.check_messages(
        "bad-open-mode",
        "redundant-unittest-assert",
        "deprecated-method",
        "bad-thread-instantiation",
        "shallow-copy-environ",
        "invalid-envvar-value",
        "invalid-envvar-default",
        "subprocess-popen-preexec-fn",
    )
    def visit_call(self, node):
        """Visit a Call node."""
        try:
            # Dispatch to the specific check for whatever the called
            # object is inferred to be.
            for inferred in node.func.infer():
                if inferred is astroid.Uninferable:
                    continue
                elif inferred.root().name == OPEN_MODULE:
                    if getattr(node.func, "name", None) in OPEN_FILES:
                        self._check_open_mode(node)
                elif inferred.root().name == UNITTEST_CASE:
                    self._check_redundant_assert(node, inferred)
                elif isinstance(inferred, astroid.ClassDef):
                    if inferred.qname() == THREADING_THREAD:
                        self._check_bad_thread_instantiation(node)
                    elif inferred.qname() == SUBPROCESS_POPEN:
                        self._check_for_preexec_fn_in_Popen(node)
                elif isinstance(inferred, astroid.FunctionDef):
                    name = inferred.qname()
                    if name == COPY_COPY:
                        self._check_shallow_copy_environ(node)
                    elif name in ENV_GETTERS:
                        self._check_env_function(node, inferred)
                # Deprecation applies to every inferred callable.
                self._check_deprecated_method(node, inferred)
        except astroid.InferenceError:
            return

    @utils.check_messages("boolean-datetime")
    def visit_unaryop(self, node):
        """Check ``not <expr>`` operands for datetime.time truthiness."""
        if node.op == "not":
            self._check_datetime(node.operand)

    @utils.check_messages("boolean-datetime")
    def visit_if(self, node):
        """Check ``if`` conditions for datetime.time truthiness."""
        self._check_datetime(node.test)

    @utils.check_messages("boolean-datetime")
    def visit_ifexp(self, node):
        """Check conditional-expression tests for datetime.time truthiness."""
        self._check_datetime(node.test)

    @utils.check_messages("boolean-datetime")
    def visit_boolop(self, node):
        """Check ``and``/``or`` operands for datetime.time truthiness."""
        for value in node.values:
            self._check_datetime(value)

    def _check_deprecated_method(self, node, inferred):
        """Emit deprecated-method if the called name is in self.deprecated."""
        py_vers = sys.version_info[0]

        if isinstance(node.func, astroid.Attribute):
            func_name = node.func.attrname
        elif isinstance(node.func, astroid.Name):
            func_name = node.func.name
        else:
            # Not interested in other nodes.
            return

        # Reject nodes which aren't of interest to us.
        acceptable_nodes = (
            astroid.BoundMethod,
            astroid.UnboundMethod,
            astroid.FunctionDef,
        )
        if not isinstance(inferred, acceptable_nodes):
            return

        qname = inferred.qname()
        if any(name in self.deprecated[0] for name in (qname, func_name)):
            self.add_message("deprecated-method", node=node, args=(func_name,))
        else:
            # Only names deprecated at or before the running interpreter
            # version are reported.
            for since_vers, func_list in self.deprecated[py_vers].items():
                if since_vers <= sys.version_info and any(
                    name in func_list for name in (qname, func_name)
                ):
                    self.add_message("deprecated-method", node=node, args=(func_name,))
                    break

    def _check_redundant_assert(self, node, infer):
        """Warn when assertTrue/assertFalse receives a constant condition."""
        if (
            isinstance(infer, astroid.BoundMethod)
            and node.args
            and isinstance(node.args[0], astroid.Const)
            and infer.name in ["assertTrue", "assertFalse"]
        ):
            self.add_message(
                "redundant-unittest-assert",
                args=(infer.name, node.args[0].value),
                node=node,
            )

    def _check_datetime(self, node):
        """ Check that a datetime was inferred.
        If so, emit boolean-datetime warning.
        """
        try:
            infered = next(node.infer())
        except astroid.InferenceError:
            return
        if isinstance(infered, Instance) and infered.qname() == "datetime.time":
            self.add_message("boolean-datetime", node=node)

    def _check_open_mode(self, node):
        """Check that the mode argument of an open or file call is valid."""
        try:
            mode_arg = utils.get_argument_from_call(node, position=1, keyword="mode")
        except utils.NoSuchArgumentError:
            return
        if mode_arg:
            mode_arg = utils.safe_infer(mode_arg)
            if isinstance(mode_arg, astroid.Const) and not _check_mode_str(
                mode_arg.value
            ):
                self.add_message("bad-open-mode", node=node, args=mode_arg.value)

    def _check_env_function(self, node, infer):
        """Validate the key and default arguments of an os.getenv-style call."""
        env_name_kwarg = "key"
        env_value_kwarg = "default"
        if node.keywords:
            kwargs = {keyword.arg: keyword.value for keyword in node.keywords}
        else:
            kwargs = None
        # The environment-variable name: first positional or "key" keyword.
        if node.args:
            env_name_arg = node.args[0]
        elif kwargs and env_name_kwarg in kwargs:
            env_name_arg = kwargs[env_name_kwarg]
        else:
            env_name_arg = None

        if env_name_arg:
            self._check_invalid_envvar_value(
                node=node,
                message="invalid-envvar-value",
                call_arg=utils.safe_infer(env_name_arg),
                infer=infer,
                allow_none=False,
            )

        # The default value: second positional or "default" keyword.
        if len(node.args) == 2:
            env_value_arg = node.args[1]
        elif kwargs and env_value_kwarg in kwargs:
            env_value_arg = kwargs[env_value_kwarg]
        else:
            env_value_arg = None

        if env_value_arg:
            self._check_invalid_envvar_value(
                node=node,
                infer=infer,
                message="invalid-envvar-default",
                call_arg=utils.safe_infer(env_value_arg),
                allow_none=True,
            )

    def _check_invalid_envvar_value(self, node, infer, message, call_arg, allow_none):
        """Emit *message* when *call_arg* is not a str (or allowed None)."""
        if call_arg in (astroid.Uninferable, None):
            return

        name = infer.qname()
        if isinstance(call_arg, Const):
            emit = False
            if call_arg.value is None:
                emit = not allow_none
            elif not isinstance(call_arg.value, str):
                emit = True
            if emit:
                self.add_message(message, node=node, args=(name, call_arg.pytype()))
        else:
            # Non-constant argument of a non-str node type: always report.
            self.add_message(message, node=node, args=(name, call_arg.pytype()))
|
||||
|
||||
|
||||
def register(linter):
    """Required method to auto-register this checker with *linter*."""
    checker = StdlibChecker(linter)
    linter.register_checker(checker)
|
763
venv/lib/python3.6/site-packages/pylint/checkers/strings.py
Normal file
763
venv/lib/python3.6/site-packages/pylint/checkers/strings.py
Normal file
@@ -0,0 +1,763 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright (c) 2009 Charles Hebert <charles.hebert@logilab.fr>
|
||||
# Copyright (c) 2010-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
|
||||
# Copyright (c) 2010 Daniel Harding <dharding@gmail.com>
|
||||
# Copyright (c) 2012-2014 Google, Inc.
|
||||
# Copyright (c) 2013-2018 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2014 Brett Cannon <brett@python.org>
|
||||
# Copyright (c) 2014 Arun Persaud <arun@nubati.net>
|
||||
# Copyright (c) 2015 Rene Zhang <rz99@cornell.edu>
|
||||
# Copyright (c) 2015 Ionel Cristian Maries <contact@ionelmc.ro>
|
||||
# Copyright (c) 2016, 2018 Jakub Wilk <jwilk@jwilk.net>
|
||||
# Copyright (c) 2016 Peter Dawyndt <Peter.Dawyndt@UGent.be>
|
||||
# Copyright (c) 2017 Łukasz Rogalski <rogalski.91@gmail.com>
|
||||
# Copyright (c) 2017 Ville Skyttä <ville.skytta@iki.fi>
|
||||
# Copyright (c) 2018 Nick Drozd <nicholasdrozd@gmail.com>
|
||||
# Copyright (c) 2018 Anthony Sottile <asottile@umich.edu>
|
||||
|
||||
|
||||
# Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
|
||||
# For details: https://github.com/PyCQA/pylint/blob/master/COPYING
|
||||
|
||||
"""Checker for string formatting operations.
|
||||
"""
|
||||
|
||||
import builtins
|
||||
import sys
|
||||
import tokenize
|
||||
import numbers
|
||||
from collections import Counter
|
||||
|
||||
import astroid
|
||||
from astroid.arguments import CallSite
|
||||
from astroid.node_classes import Const
|
||||
from pylint.interfaces import ITokenChecker, IAstroidChecker, IRawChecker
|
||||
from pylint.checkers import BaseChecker, BaseTokenChecker
|
||||
from pylint.checkers import utils
|
||||
from pylint.checkers.utils import check_messages
|
||||
|
||||
# Qualified type names that count as "a string node" on Python 2 and 3.
_AST_NODE_STR_TYPES = ("__builtin__.unicode", "__builtin__.str", "builtins.str")

# Interpreter-version flags used to select format-checking behaviour.
_PY3K = sys.version_info[:2] >= (3, 0)
_PY27 = sys.version_info[:2] == (2, 7)
|
||||
|
||||
# Message table shared by the string checkers in this module: each entry
# maps a pylint message id to (template, symbolic-name, description[, opts]).
MSGS = {
    "E1300": (
        "Unsupported format character %r (%#02x) at index %d",
        "bad-format-character",
        "Used when an unsupported format character is used in a format string.",
    ),
    "E1301": (
        "Format string ends in middle of conversion specifier",
        "truncated-format-string",
        "Used when a format string terminates before the end of a "
        "conversion specifier.",
    ),
    "E1302": (
        "Mixing named and unnamed conversion specifiers in format string",
        "mixed-format-string",
        "Used when a format string contains both named (e.g. '%(foo)d') "
        "and unnamed (e.g. '%d') conversion specifiers. This is also "
        "used when a named conversion specifier contains * for the "
        "minimum field width and/or precision.",
    ),
    "E1303": (
        "Expected mapping for format string, not %s",
        "format-needs-mapping",
        "Used when a format string that uses named conversion specifiers "
        "is used with an argument that is not a mapping.",
    ),
    "W1300": (
        "Format string dictionary key should be a string, not %s",
        "bad-format-string-key",
        "Used when a format string that uses named conversion specifiers "
        "is used with a dictionary whose keys are not all strings.",
    ),
    "W1301": (
        "Unused key %r in format string dictionary",
        "unused-format-string-key",
        "Used when a format string that uses named conversion specifiers "
        "is used with a dictionary that contains keys not required by the "
        "format string.",
    ),
    "E1304": (
        "Missing key %r in format string dictionary",
        "missing-format-string-key",
        "Used when a format string that uses named conversion specifiers "
        "is used with a dictionary that doesn't contain all the keys "
        "required by the format string.",
    ),
    "E1305": (
        "Too many arguments for format string",
        "too-many-format-args",
        "Used when a format string that uses unnamed conversion "
        "specifiers is given too many arguments.",
    ),
    "E1306": (
        "Not enough arguments for format string",
        "too-few-format-args",
        "Used when a format string that uses unnamed conversion "
        "specifiers is given too few arguments",
    ),
    "E1307": (
        "Argument %r does not match format type %r",
        "bad-string-format-type",
        "Used when a type required by format string "
        "is not suitable for actual argument type",
    ),
    "E1310": (
        "Suspicious argument in %s.%s call",
        "bad-str-strip-call",
        "The argument to a str.{l,r,}strip call contains a duplicate character, ",
    ),
    "W1302": (
        "Invalid format string",
        "bad-format-string",
        "Used when a PEP 3101 format string is invalid.",
    ),
    "W1303": (
        "Missing keyword argument %r for format string",
        "missing-format-argument-key",
        "Used when a PEP 3101 format string that uses named fields "
        "doesn't receive one or more required keywords.",
    ),
    "W1304": (
        "Unused format argument %r",
        "unused-format-string-argument",
        "Used when a PEP 3101 format string that uses named "
        "fields is used with an argument that "
        "is not required by the format string.",
    ),
    "W1305": (
        "Format string contains both automatic field numbering "
        "and manual field specification",
        "format-combined-specification",
        "Used when a PEP 3101 format string contains both automatic "
        "field numbering (e.g. '{}') and manual field "
        "specification (e.g. '{0}').",
    ),
    "W1306": (
        "Missing format attribute %r in format specifier %r",
        "missing-format-attribute",
        "Used when a PEP 3101 format string uses an "
        "attribute specifier ({0.length}), but the argument "
        "passed for formatting doesn't have that attribute.",
    ),
    "W1307": (
        "Using invalid lookup key %r in format specifier %r",
        "invalid-format-index",
        "Used when a PEP 3101 format string uses a lookup specifier "
        "({a[1]}), but the argument passed for formatting "
        "doesn't contain or doesn't have that key as an attribute.",
    ),
    "W1308": (
        "Duplicate string formatting argument %r, consider passing as named argument",
        "duplicate-string-formatting-argument",
        "Used when we detect that a string formatting is "
        "repeating an argument instead of using named string arguments",
    ),
}
|
||||
|
||||
# astroid node classes that can appear as the right-hand side of a
# %-formatting expression without being a tuple or mapping.
OTHER_NODES = (
    astroid.Const,
    astroid.List,
    astroid.Lambda,
    astroid.FunctionDef,
    astroid.ListComp,
    astroid.SetComp,
    astroid.GeneratorExp,
)

# Fully-qualified names of the builtin scalar types, built from the
# running interpreter's builtins module name.
BUILTINS_STR = builtins.__name__ + ".str"
BUILTINS_FLOAT = builtins.__name__ + ".float"
BUILTINS_INT = builtins.__name__ + ".int"
|
||||
|
||||
|
||||
def get_access_path(key, parts):
    """Build the textual access path described by *parts*.

    *parts* is a sequence of ``(is_attribute, specifier)`` pairs; attribute
    accesses render as ``.name`` and item accesses as ``[repr]``, producing
    e.g. ``a.b.c[0][1]`` when prefixed with *key*.
    """
    suffix = "".join(
        ".{}".format(spec) if is_attr else "[{!r}]".format(spec)
        for is_attr, spec in parts
    )
    return str(key) + suffix
|
||||
|
||||
|
||||
def arg_matches_format_type(arg_type, format_type):
    """Tell whether *arg_type* is an acceptable operand for the %-conversion
    character *format_type*.

    Anything can be rendered with ``%s``/``%r``; for inferred instances of
    the builtin str/float/int types the conversion character is checked,
    and any other inferred type is rejected.  Non-instances are accepted
    because nothing useful can be verified about them.
    """
    if format_type in "sr":
        # Every object supports %s and %r.
        return True
    if not isinstance(arg_type, astroid.Instance):
        # Not an inferred instance: nothing we can verify here.
        return True
    qualified = arg_type.pytype()
    if qualified == BUILTINS_STR:
        # A plain string is only valid for the %c conversion.
        return format_type == "c"
    if qualified == BUILTINS_FLOAT:
        # Floats work with the numeric conversions only.
        return format_type in "deEfFgGn%"
    # Integers are accepted by every conversion; any other builtin or
    # user-defined instance is rejected.
    return qualified == BUILTINS_INT
|
||||
|
||||
|
||||
class StringFormatChecker(BaseChecker):
    """Checks string formatting operations to ensure that the format string
    is valid and the arguments match the format string.
    """

    __implements__ = (IAstroidChecker,)
    name = "string"
    msgs = MSGS

    # pylint: disable=too-many-branches
    @check_messages(*MSGS)
    def visit_binop(self, node):
        """Check a ``%`` formatting operation whose left side is a constant
        string: parse the format string and compare its named keys /
        positional specifiers against the right-hand side operand.
        """
        if node.op != "%":
            return
        left = node.left
        args = node.right

        # Only constant string format strings can be analysed; anything
        # else (a name, a computed string) is skipped.
        if not (isinstance(left, astroid.Const) and isinstance(left.value, str)):
            return
        format_string = left.value
        try:
            required_keys, required_num_args, required_key_types, required_arg_types = utils.parse_format_string(
                format_string
            )
        except utils.UnsupportedFormatCharacter as e:
            c = format_string[e.index]
            self.add_message(
                "bad-format-character", node=node, args=(c, ord(c), e.index)
            )
            return
        except utils.IncompleteFormatString:
            self.add_message("truncated-format-string", node=node)
            return
        if required_keys and required_num_args:
            # The format string uses both named and unnamed format
            # specifiers.
            self.add_message("mixed-format-string", node=node)
        elif required_keys:
            # The format string uses only named format specifiers.
            # Check that the RHS of the % operator is a mapping object
            # that contains precisely the set of keys required by the
            # format string.
            if isinstance(args, astroid.Dict):
                keys = set()
                unknown_keys = False
                for k, _ in args.items:
                    if isinstance(k, astroid.Const):
                        key = k.value
                        if isinstance(key, str):
                            keys.add(key)
                        else:
                            # Constant but non-string key: %-formatting with
                            # named specifiers requires string keys.
                            self.add_message(
                                "bad-format-string-key", node=node, args=key
                            )
                    else:
                        # One of the keys was something other than a
                        # constant. Since we can't tell what it is,
                        # suppress checks for missing keys in the
                        # dictionary.
                        unknown_keys = True
                if not unknown_keys:
                    for key in required_keys:
                        if key not in keys:
                            self.add_message(
                                "missing-format-string-key", node=node, args=key
                            )
                # Extra keys are reported even when some keys are unknown.
                for key in keys:
                    if key not in required_keys:
                        self.add_message(
                            "unused-format-string-key", node=node, args=key
                        )
                # Finally, type-check each constant-keyed value against the
                # conversion character its key requires.
                for key, arg in args.items:
                    if not isinstance(key, astroid.Const):
                        continue
                    format_type = required_key_types.get(key.value, None)
                    arg_type = utils.safe_infer(arg)
                    if (
                        format_type is not None
                        and arg_type not in (None, astroid.Uninferable)
                        and not arg_matches_format_type(arg_type, format_type)
                    ):
                        self.add_message(
                            "bad-string-format-type",
                            node=node,
                            args=(arg_type.pytype(), format_type),
                        )
            elif isinstance(args, (OTHER_NODES, astroid.Tuple)):
                # The RHS is syntactically something that cannot be a
                # mapping (a literal, lambda, tuple, comprehension, ...).
                type_name = type(args).__name__
                self.add_message("format-needs-mapping", node=node, args=type_name)
            # else:
            # The RHS of the format specifier is a name or
            # expression. It may be a mapping object, so
            # there's nothing we can check.
        else:
            # The format string uses only unnamed format specifiers.
            # Check that the number of arguments passed to the RHS of
            # the % operator matches the number required by the format
            # string.
            args_elts = ()
            if isinstance(args, astroid.Tuple):
                rhs_tuple = utils.safe_infer(args)
                num_args = None
                if hasattr(rhs_tuple, "elts"):
                    args_elts = rhs_tuple.elts
                    num_args = len(args_elts)
            elif isinstance(args, (OTHER_NODES, (astroid.Dict, astroid.DictComp))):
                # A single non-tuple literal counts as exactly one argument.
                args_elts = [args]
                num_args = 1
            else:
                # The RHS of the format specifier is a name or
                # expression. It could be a tuple of unknown size, so
                # there's nothing we can check.
                num_args = None
            if num_args is not None:
                if num_args > required_num_args:
                    self.add_message("too-many-format-args", node=node)
                elif num_args < required_num_args:
                    self.add_message("too-few-format-args", node=node)
            # Pairwise type-check each positional argument against the
            # conversion character at the same position.
            for arg, format_type in zip(args_elts, required_arg_types):
                if not arg:
                    continue
                arg_type = utils.safe_infer(arg)
                if arg_type not in (
                    None,
                    astroid.Uninferable,
                ) and not arg_matches_format_type(arg_type, format_type):
                    self.add_message(
                        "bad-string-format-type",
                        node=node,
                        args=(arg_type.pytype(), format_type),
                    )

    @check_messages(*MSGS)
    def visit_call(self, node):
        """Check calls of str/unicode/bytes bound methods: flag
        ``strip``/``lstrip``/``rstrip`` called with duplicated characters,
        and dispatch ``.format(...)`` calls to _check_new_format.
        """
        func = utils.safe_infer(node.func)
        if (
            isinstance(func, astroid.BoundMethod)
            and isinstance(func.bound, astroid.Instance)
            and func.bound.name in ("str", "unicode", "bytes")
        ):
            if func.name in ("strip", "lstrip", "rstrip") and node.args:
                arg = utils.safe_infer(node.args[0])
                if not isinstance(arg, astroid.Const) or not isinstance(arg.value, str):
                    return
                # strip() treats its argument as a set of characters, so
                # duplicates suggest the caller misunderstood the API.
                if len(arg.value) != len(set(arg.value)):
                    self.add_message(
                        "bad-str-strip-call",
                        node=node,
                        args=(func.bound.name, func.name),
                    )
            elif func.name == "format":
                self._check_new_format(node, func)

    def _detect_vacuous_formatting(self, node, positional_arguments):
        """Emit duplicate-string-formatting-argument for every name passed
        more than once positionally to ``.format(...)``.
        """
        counter = Counter(
            arg.name for arg in positional_arguments if isinstance(arg, astroid.Name)
        )
        for name, count in counter.items():
            if count == 1:
                continue
            self.add_message(
                "duplicate-string-formatting-argument", node=node, args=(name,)
            )

    def _check_new_format(self, node, func):
        """ Check the new string formatting. """
        # TODO: skip (for now) format nodes which don't have
        # an explicit string on the left side of the format operation.
        # We do this because our inference engine can't properly handle
        # redefinitions of the original string.
        # For more details, see issue 287.
        #
        # Note that there may not be any left side at all, if the format method
        # has been assigned to another variable. See issue 351. For example:
        #
        #    fmt = 'some string {}'.format
        #    fmt('arg')
        if isinstance(node.func, astroid.Attribute) and not isinstance(
            node.func.expr, astroid.Const
        ):
            return
        # *args / **kwargs make the argument count unknowable.
        if node.starargs or node.kwargs:
            return
        try:
            strnode = next(func.bound.infer())
        except astroid.InferenceError:
            return
        if not (isinstance(strnode, astroid.Const) and isinstance(strnode.value, str)):
            return
        try:
            call_site = CallSite.from_call(node)
        except astroid.InferenceError:
            return

        try:
            fields, num_args, manual_pos = utils.parse_format_method_string(
                strnode.value
            )
        except utils.IncompleteFormatString:
            self.add_message("bad-format-string", node=node)
            return

        positional_arguments = call_site.positional_arguments
        named_arguments = call_site.keyword_arguments
        named_fields = {field[0] for field in fields if isinstance(field[0], str)}
        if num_args and manual_pos:
            # Mixing '{}' auto-numbering with '{0}' manual numbering is a
            # ValueError at runtime; nothing further can be checked.
            self.add_message("format-combined-specification", node=node)
            return

        check_args = False
        # Consider "{[0]} {[1]}" as num_args.
        num_args += sum(1 for field in named_fields if field == "")
        if named_fields:
            for field in named_fields:
                if field and field not in named_arguments:
                    self.add_message(
                        "missing-format-argument-key", node=node, args=(field,)
                    )
            for field in named_arguments:
                if field not in named_fields:
                    self.add_message(
                        "unused-format-string-argument", node=node, args=(field,)
                    )
            # num_args can be 0 if manual_pos is not.
            num_args = num_args or manual_pos
            if positional_arguments or num_args:
                empty = any(True for field in named_fields if field == "")
                if named_arguments or empty:
                    # Verify the required number of positional arguments
                    # only if the .format got at least one keyword argument.
                    # This means that the format strings accepts both
                    # positional and named fields and we should warn
                    # when one of the them is missing or is extra.
                    check_args = True
        else:
            check_args = True
        if check_args:
            # num_args can be 0 if manual_pos is not.
            num_args = num_args or manual_pos
            if len(positional_arguments) > num_args:
                self.add_message("too-many-format-args", node=node)
            elif len(positional_arguments) < num_args:
                self.add_message("too-few-format-args", node=node)

        self._detect_vacuous_formatting(node, positional_arguments)
        self._check_new_format_specifiers(node, fields, named_arguments)

    def _check_new_format_specifiers(self, node, fields, named):
        """
        Check attribute and index access in the format
        string ("{0.a}" and "{0[a]}").
        """
        for key, specifiers in fields:
            # Obtain the argument. If it can't be obtained
            # or infered, skip this check.
            if key == "":
                # {[0]} will have an unnamed argument, defaulting
                # to 0. It will not be present in `named`, so use the value
                # 0 for it.
                key = 0
            if isinstance(key, numbers.Number):
                try:
                    argname = utils.get_argument_from_call(node, key)
                except utils.NoSuchArgumentError:
                    continue
            else:
                if key not in named:
                    continue
                argname = named[key]
            if argname in (astroid.Uninferable, None):
                continue
            try:
                argument = next(argname.infer())
            except astroid.InferenceError:
                continue
            if not specifiers or argument is astroid.Uninferable:
                # No need to check this key if it doesn't
                # use attribute / item access
                continue
            if argument.parent and isinstance(argument.parent, astroid.Arguments):
                # Ignore any object coming from an argument,
                # because we can't infer its value properly.
                continue
            # Walk the chain of accesses, inferring one step at a time;
            # `parsed` accumulates the steps taken so far so error messages
            # can show the full access path.
            previous = argument
            parsed = []
            for is_attribute, specifier in specifiers:
                if previous is astroid.Uninferable:
                    break
                parsed.append((is_attribute, specifier))
                if is_attribute:
                    try:
                        previous = previous.getattr(specifier)[0]
                    except astroid.NotFoundError:
                        if (
                            hasattr(previous, "has_dynamic_getattr")
                            and previous.has_dynamic_getattr()
                        ):
                            # Don't warn if the object has a custom __getattr__
                            break
                        path = get_access_path(key, parsed)
                        self.add_message(
                            "missing-format-attribute",
                            args=(specifier, path),
                            node=node,
                        )
                        break
                else:
                    warn_error = False
                    if hasattr(previous, "getitem"):
                        try:
                            previous = previous.getitem(astroid.Const(specifier))
                        except (
                            astroid.AstroidIndexError,
                            astroid.AstroidTypeError,
                            astroid.AttributeInferenceError,
                        ):
                            warn_error = True
                        except astroid.InferenceError:
                            break
                        if previous is astroid.Uninferable:
                            break
                    else:
                        try:
                            # Lookup __getitem__ in the current node,
                            # but skip further checks, because we can't
                            # retrieve the looked object
                            previous.getattr("__getitem__")
                            break
                        except astroid.NotFoundError:
                            warn_error = True
                    if warn_error:
                        path = get_access_path(key, parsed)
                        self.add_message(
                            "invalid-format-index", args=(specifier, path), node=node
                        )
                        break

                try:
                    previous = next(previous.infer())
                except astroid.InferenceError:
                    # can't check further if we can't infer it
                    break
|
||||
|
||||
|
||||
class StringConstantChecker(BaseTokenChecker):
    """Check string literals"""

    __implements__ = (IAstroidChecker, ITokenChecker, IRawChecker)
    name = "string"
    msgs = {
        "W1401": (
            "Anomalous backslash in string: '%s'. "
            "String constant might be missing an r prefix.",
            "anomalous-backslash-in-string",
            "Used when a backslash is in a literal string but not as an escape.",
        ),
        "W1402": (
            "Anomalous Unicode escape in byte string: '%s'. "
            "String constant might be missing an r or u prefix.",
            "anomalous-unicode-escape-in-string",
            "Used when an escape like \\u is encountered in a byte "
            "string where it has no effect.",
        ),
        "W1403": (
            "Implicit string concatenation found in %s",
            "implicit-str-concat-in-sequence",
            "String literals are implicitly concatenated in a "
            "literal iterable definition : "
            "maybe a comma is missing ?",
        ),
    }
    options = (
        (
            "check-str-concat-over-line-jumps",
            {
                "default": False,
                "type": "yn",
                "metavar": "<y_or_n>",
                "help": "This flag controls whether the "
                "implicit-str-concat-in-sequence should generate a warning "
                "on implicit string concatenation in sequences defined over "
                "several lines.",
            },
        ),
    )

    # Characters that have a special meaning after a backslash in either
    # Unicode or byte strings.
    ESCAPE_CHARACTERS = "abfnrtvx\n\r\t\\'\"01234567"

    # TODO(mbp): Octal characters are quite an edge case today; people may
    # prefer a separate warning where they occur.  \0 should be allowed.

    # Characters that have a special meaning after a backslash but only in
    # Unicode strings.
    UNICODE_ESCAPE_CHARACTERS = "uUN"

    def __init__(self, *args, **kwargs):
        """Initialize the checker; string_tokens is filled by process_tokens
        and consumed by check_for_concatenated_strings.
        """
        super(StringConstantChecker, self).__init__(*args, **kwargs)
        self.string_tokens = {}  # token position -> (token value, next token)

    def process_module(self, module):
        # Remember whether ``from __future__ import unicode_literals`` is in
        # effect; it changes which escapes are anomalous in Python 2.
        self._unicode_literals = "unicode_literals" in module.future_imports

    def process_tokens(self, tokens):
        """Scan the token stream: check each STRING token for bad escapes and
        record its evaluated value plus the following significant token, so
        implicit concatenation can be detected later from the AST.
        """
        encoding = "ascii"
        for i, (tok_type, token, start, _, line) in enumerate(tokens):
            if tok_type == tokenize.ENCODING:
                # this is always the first token processed
                encoding = token
            elif tok_type == tokenize.STRING:
                # 'token' is the whole un-parsed token; we can look at the start
                # of it to see whether it's a raw or unicode string etc.
                self.process_string_token(token, start[0])
                # We figure the next token, ignoring comments & newlines:
                j = i + 1
                while j < len(tokens) and tokens[j].type in (
                    tokenize.NEWLINE,
                    tokenize.NL,
                    tokenize.COMMENT,
                ):
                    j += 1
                next_token = tokens[j] if j < len(tokens) else None
                if encoding != "ascii":
                    # We convert `tokenize` character count into a byte count,
                    # to match with astroid `.col_offset`
                    start = (start[0], len(line[: start[1]].encode(encoding)))
                self.string_tokens[start] = (str_eval(token), next_token)

    @check_messages(*(msgs.keys()))
    def visit_list(self, node):
        # List literals may contain implicitly concatenated strings.
        self.check_for_concatenated_strings(node, "list")

    @check_messages(*(msgs.keys()))
    def visit_set(self, node):
        # Set literals may contain implicitly concatenated strings.
        self.check_for_concatenated_strings(node, "set")

    @check_messages(*(msgs.keys()))
    def visit_tuple(self, node):
        # Tuple literals may contain implicitly concatenated strings.
        self.check_for_concatenated_strings(node, "tuple")

    def check_for_concatenated_strings(self, iterable_node, iterable_type):
        """Flag elements of a literal iterable whose AST value spans more
        than one source string token (i.e. implicit concatenation, which is
        usually a missing comma).
        """
        for elt in iterable_node.elts:
            if isinstance(elt, Const) and elt.pytype() in _AST_NODE_STR_TYPES:
                if elt.col_offset < 0:
                    # This can happen in case of escaped newlines
                    continue
                if (elt.lineno, elt.col_offset) not in self.string_tokens:
                    # This may happen with Latin1 encoding
                    # cf. https://github.com/PyCQA/pylint/issues/2610
                    continue
                matching_token, next_token = self.string_tokens[
                    (elt.lineno, elt.col_offset)
                ]
                # We detect string concatenation: the AST Const is the
                # combination of 2 string tokens
                if matching_token != elt.value and next_token is not None:
                    if next_token.type == tokenize.STRING and (
                        next_token.start[0] == elt.lineno
                        or self.config.check_str_concat_over_line_jumps
                    ):
                        self.add_message(
                            "implicit-str-concat-in-sequence",
                            line=elt.lineno,
                            args=(iterable_type,),
                        )

    def process_string_token(self, token, start_row):
        """Split a raw STRING token into prefix, quotes and body, then check
        the body for bad escapes unless the string is raw.
        """
        quote_char = None
        index = None
        for index, c in enumerate(token):
            if c in "'\"":
                quote_char = c
                break
        if quote_char is None:
            return

        prefix = token[:index].lower()  # markers like u, b, r.
        after_prefix = token[index:]
        if after_prefix[:3] == after_prefix[-3:] == 3 * quote_char:
            string_body = after_prefix[3:-3]
        else:
            string_body = after_prefix[1:-1]  # Chop off quotes
        # No special checks on raw strings at the moment.
        if "r" not in prefix:
            self.process_non_raw_string_token(prefix, string_body, start_row)

    def process_non_raw_string_token(self, prefix, string_body, start_row):
        """check for bad escapes in a non-raw string.

        prefix: lowercase string of eg 'ur' string prefix markers.
        string_body: the un-parsed body of the string, not including the quote
        marks.
        start_row: integer line number in the source.
        """
        # Walk through the string; if we see a backslash then escape the next
        # character, and skip over it.  If we see a non-escaped character,
        # alert, and continue.
        #
        # Accept a backslash when it escapes a backslash, or a quote, or
        # end-of-line, or one of the letters that introduce a special escape
        # sequence <http://docs.python.org/reference/lexical_analysis.html>
        #
        # TODO(mbp): Maybe give a separate warning about the rarely-used
        # \a \b \v \f?
        #
        # TODO(mbp): We could give the column of the problem character, but
        # add_message doesn't seem to have a way to pass it through at present.
        i = 0
        while True:
            i = string_body.find("\\", i)
            if i == -1:
                break
            # There must be a next character; having a backslash at the end
            # of the string would be a SyntaxError.
            next_char = string_body[i + 1]
            match = string_body[i : i + 2]
            if next_char in self.UNICODE_ESCAPE_CHARACTERS:
                if "u" in prefix:
                    pass
                elif (_PY3K or self._unicode_literals) and "b" not in prefix:
                    pass  # unicode by default
                else:
                    self.add_message(
                        "anomalous-unicode-escape-in-string",
                        line=start_row,
                        args=(match,),
                    )
            elif next_char not in self.ESCAPE_CHARACTERS:
                self.add_message(
                    "anomalous-backslash-in-string", line=start_row, args=(match,)
                )
            # Whether it was a valid escape or not, backslash followed by
            # another character can always be consumed whole: the second
            # character can never be the start of a new backslash escape.
            i += 2
|
||||
|
||||
|
||||
def register(linter):
    """required method to auto register this checker """
    for checker_class in (StringFormatChecker, StringConstantChecker):
        linter.register_checker(checker_class(linter))
|
||||
|
||||
|
||||
def str_eval(token):
    """
    Mostly replicate `ast.literal_eval(token)` manually to avoid any performance hit.
    This supports f-strings, contrary to `ast.literal_eval`.
    We have to support all string literal notations:
    https://docs.python.org/3/reference/lexical_analysis.html#string-and-bytes-literals

    Returns the body of the literal (between the quotes) as a str.
    """
    # Strip any run of string-prefix letters (r, u, f, b in any case and
    # order: "b", "rb", "Rb", "BR", "fr", ...).  The previous version only
    # handled "fr"/"rf" and single "r"/"u"/"f", so bytes literals such as
    # b'abc' kept the prefix letter glued to the returned body ("'abc"),
    # which broke the token/AST value comparison in process_tokens.
    index = 0
    while token[index] in "rRuUfFbB":
        index += 1
    token = token[index:]
    # Triple-quoted vs. single-quoted body.
    if token[0:3] in ('"""', "'''"):
        return token[3:-3]
    return token[1:-1]
|
1644
venv/lib/python3.6/site-packages/pylint/checkers/typecheck.py
Normal file
1644
venv/lib/python3.6/site-packages/pylint/checkers/typecheck.py
Normal file
File diff suppressed because it is too large
Load Diff
1211
venv/lib/python3.6/site-packages/pylint/checkers/utils.py
Normal file
1211
venv/lib/python3.6/site-packages/pylint/checkers/utils.py
Normal file
File diff suppressed because it is too large
Load Diff
1858
venv/lib/python3.6/site-packages/pylint/checkers/variables.py
Normal file
1858
venv/lib/python3.6/site-packages/pylint/checkers/variables.py
Normal file
File diff suppressed because it is too large
Load Diff
Reference in New Issue
Block a user