8th day of python challenges 111-117

This commit is contained in:
abd.shallal
2019-08-04 15:26:35 +03:00
parent b04c1b055f
commit 627802c383
3215 changed files with 760227 additions and 491 deletions

View File

@@ -58,6 +58,16 @@
<option value="E501" />
<option value="W29" />
<option value="E501" />
<option value="W29" />
<option value="E501" />
<option value="W29" />
<option value="E501" />
<option value="W29" />
<option value="E501" />
<option value="W29" />
<option value="E501" />
<option value="W29" />
<option value="E501" />
</list>
</option>
</inspection_tool>

3818
.idea/workspace.xml generated

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,6 @@
To Kill A Mockingbird, Harper Lee, 1960
A Brief History of Time, Stephan Hawking, 1988
The Man Who Mistook His Wife for a Hat, F.Scott Fitzerald, 1922
Pride and Prejudice, Jane Austen, 1813
my life through shit, abdullah shallal, 2020
just do it, abdullah shallal, 1933
1 To Kill A Mockingbird Harper Lee 1960
2 A Brief History of Time Stephan Hawking 1988
3 The Man Who Mistook His Wife for a Hat F.Scott Fitzerald 1922
4 Pride and Prejudice Jane Austen 1813
5 my life through shit abdullah shallal 2020
6 just do it abdullah shallal 1933

View File

@@ -0,0 +1 @@
abdullah, 115, 118, 115, 115
1 abdullah 115 118 115 115

View File

@@ -0,0 +1,5 @@
# Seed Books.csv with four sample records, one per line, formatted as
# "<title>, <author>, <year>".
seed_books = [
    ('To Kill A Mockingbird', 'Harper Lee', '1960'),
    ('A Brief History of Time', 'Stephan Hawking', '1988'),
    ('The Man Who Mistook His Wife for a Hat', 'F.Scott Fitzerald', '1922'),
    ('Pride and Prejudice', 'Jane Austen', '1813'),
]
with open('Books.csv', 'w') as file:
    for title, author, year in seed_books:
        file.write(title + ', ' + author + ', ' + year + '\n')

View File

@@ -0,0 +1,16 @@
# Show the current contents of Books.csv, append one book entered by the
# user, then show the updated file.

def _print_books():
    # Each stored row already ends with '\n', and print() adds another,
    # preserving the original double-spaced output.
    with open('Books.csv', 'r') as book_file:
        for row in book_file:
            print(row)

_print_books()
# input() already returns str; the original's str(...) wrapper was redundant.
ask_name = input('Enter Book Name : ')
ask_author = input('Enter Book Author : ')
ask_year = input('Enter Book Year Released : ')
# 'with' guarantees the file is closed even if the write raises.
with open('Books.csv', 'a') as file_write:
    file_write.write(ask_name + ', ' + ask_author + ', ' + ask_year + '\n')
_print_books()

View File

@@ -0,0 +1,21 @@
# Print Books.csv, append a user-chosen number of books, then list every
# row matching a given author.

def _show_file(path):
    # Rows keep their trailing newline; print() adds one more, matching the
    # original output spacing.
    with open(path, 'r') as handle:
        for row in handle:
            print(row)

_show_file('Books.csv')
ask_count_books = int(input('Enter how many books you want to add to file : '))
# Open the file once for all appends instead of reopening it per book;
# 'with' also guarantees it is closed on error.
with open('Books.csv', 'a') as file_write:
    for _ in range(ask_count_books):
        ask_name = input('Enter Book Name : ')
        ask_author = input('Enter Book Author : ')
        ask_year = input('Enter Book Year Released : ')
        file_write.write(ask_name + ', ' + ask_author + ', ' + ask_year + '\n')
ask_author_to_show = input('Enter Book Author to show : ')
with open('Books.csv', 'r') as file_read:
    for row in file_read:
        # Simple substring match, as in the original (matches anywhere in
        # the row, not only the author field).
        if ask_author_to_show in row:
            print(row)

View File

@@ -0,0 +1,17 @@
import csv

# Print Books.csv, then show only the books released strictly between two
# user-supplied years (exclusive bounds, matching the original comparison).
with open('Books.csv', 'r') as file_read:
    for row in file_read:
        print(row)
start_year = int(input('Enter start year : '))
end_year = int(input('Enter end year : '))
# The original also built an unused tmp = [] list; it has been removed.
with open('Books.csv', 'r') as file_read:
    # csv.reader splits "title, author, year" rows; row[2] is the year
    # (it carries a leading space, which int() tolerates).
    for row in csv.reader(file_read):
        if start_year < int(row[2]) < end_year:
            print(row)

View File

@@ -0,0 +1,6 @@
# Print every row of Books.csv prefixed with its 0-based row number.
# enumerate() replaces the original's manual counter, and 'with' replaces
# the manual open/close pair.
with open('Books.csv', 'r') as file_read:
    for count_row, row in enumerate(file_read):
        print('Row ' + str(count_row) + ' : ' + row)

View File

@@ -0,0 +1,40 @@
import csv

# Interactively delete one row of Books.csv, edit one field of another row,
# then write the result back to the file.

def _show_rows(rows):
    # Print each row (as its list repr) prefixed with its 0-based index.
    for index, row in enumerate(rows):
        print('Row ' + str(index) + ' : ' + str(row))

# Read all rows once. The original shadowed the builtin name 'list' and
# copied the already-materialized reader output into a second list.
with open('Books.csv') as source:
    rows = list(csv.reader(source))
_show_rows(rows)
ask = int(input('Enter row number to delete : '))
rows.pop(ask)
_show_rows(rows)
# Note: indices entered here refer to the list AFTER the deletion above.
ask = int(input('Enter row number to edit : '))
print('1) Edit book name')
print('2) Edit author')
print('3) Release Year ')
selection_input = int(input('Make a selection 1, 2 or 3 : '))
if selection_input == 1:
    rows[ask][0] = input('Enter Book Name : ')
elif selection_input == 2:
    rows[ask][1] = input('Enter Book Author : ')
elif selection_input == 3:
    rows[ask][2] = input('Enter Book Year Released : ')
with open('Books.csv', 'w') as file_write:
    for row in rows:
        # Re-join only the first three fields with ', ', matching the
        # original file format (fields parsed by csv.reader keep their
        # leading space, exactly as the original wrote them back).
        file_write.write(row[0] + ', ' + row[1] + ', ' + row[2] + '\n')

View File

@@ -0,0 +1,23 @@
import csv
import random
# Two-question addition quiz: ask the player two random sums, track the
# score, and append the attempt to QuizScore.csv. The original duplicated
# the question logic (with 'qustion_*' typos); a loop replaces both copies.
score = 0
ask_name = input('Enter your name : ')
given_answers = []    # what the player typed, per question
correct_answers = []  # the true sum, per question
for _ in range(2):
    num_1 = random.randint(1, 100)
    num_2 = random.randint(1, 100)
    correct = num_1 + num_2
    response = int(input(str(num_1) + '+' + str(num_2) + '= '))
    if response == correct:
        score += 1
    given_answers.append(response)
    correct_answers.append(correct)
# Row format (matching the original): name, given_1, correct_1, given_2, correct_2
with open('QuizScore.csv', 'a') as file:
    file.write(ask_name + ', ' + str(given_answers[0]) + ', ' + str(correct_answers[0])
               + ', ' + str(given_answers[1]) + ', ' + str(correct_answers[1]) + '\n')

View File

@@ -0,0 +1,3 @@
name,address,date joined
john smith,1132 Anywhere Lane Hoboken NJ, 07030,Jan 4
erica meyers,1234 Smith Lane Hoboken NJ, 07030,March 2

View File

@@ -0,0 +1,3 @@
name,department,birthday month
John Smith,Accounting,November
Erica Meyers,IT,March

View File

@@ -0,0 +1,2 @@
John Smith,Accounting,November
Erica Meyers,IT,March
1 John Smith Accounting November
2 Erica Meyers IT March

View File

@@ -0,0 +1,3 @@
emp_name,dept,birth_month
John Smith,Accounting,November
Erica Meyers,IT,March
1 emp_name dept birth_month
2 John Smith Accounting November
3 Erica Meyers IT March

View File

@@ -0,0 +1,6 @@
Graham Chapman,03/15/14,50000.00,10
John Cleese,06/01/15,65000.00,8
Eric Idle,05/12/14,45000.00,10
Terry Jones,11/01/13,70000.00,3
Terry Gilliam,08/12/14,48000.00,7
Michael Palin,05/23/13,66000.00,8
1 Graham Chapman 03/15/14 50000.00 10
2 John Cleese 06/01/15 65000.00 8
3 Eric Idle 05/12/14 45000.00 10
4 Terry Jones 11/01/13 70000.00 3
5 Terry Gilliam 08/12/14 48000.00 7
6 Michael Palin 05/23/13 66000.00 8

View File

@@ -0,0 +1,7 @@
Name,Hire Date,Salary,Sick Days remaining
Graham Chapman,03/15/14,50000.00,10
John Cleese,06/01/15,65000.00,8
Eric Idle,05/12/14,45000.00,10
Terry Jones,11/01/13,70000.00,3
Terry Gilliam,08/12/14,48000.00,7
Michael Palin,05/23/13,66000.00,8
1 Name Hire Date Salary Sick Days remaining
2 Graham Chapman 03/15/14 50000.00 10
3 John Cleese 06/01/15 65000.00 8
4 Eric Idle 05/12/14 45000.00 10
5 Terry Jones 11/01/13 70000.00 3
6 Terry Gilliam 08/12/14 48000.00 7
7 Michael Palin 05/23/13 66000.00 8

View File

@@ -0,0 +1,7 @@
Employee,Hired,Salary,Sick Days
Graham Chapman,2014-03-15,50000.0,10
John Cleese,2015-06-01,65000.0,8
Eric Idle,2014-05-12,45000.0,10
Terry Jones,2013-11-01,70000.0,3
Terry Gilliam,2014-08-12,48000.0,7
Michael Palin,2013-05-23,66000.0,8
1 Employee Hired Salary Sick Days
2 Graham Chapman 2014-03-15 50000.0 10
3 John Cleese 2015-06-01 65000.0 8
4 Eric Idle 2014-05-12 45000.0 10
5 Terry Jones 2013-11-01 70000.0 3
6 Terry Gilliam 2014-08-12 48000.0 7
7 Michael Palin 2013-05-23 66000.0 8

View File

@@ -0,0 +1,59 @@
import csv

# --------- READING CSV ----------
# 1 - csv.reader: each row is a list, e.g. ['John Smith', 'Accounting', 'November']
with open('employee_birthday.txt') as csv_file:
    csv_reader = csv.reader(csv_file)
    count = 0
    for row in csv_reader:
        print(row)
        # Bug fix: the increment lived only in commented-out code, so the
        # summary below always reported 0 lines.
        count += 1
print(f'Processed {count} lines.')

# 2 - csv.DictReader: each row is a mapping of column name -> value.
with open('employee_birthday.txt') as csv_file:
    csv_reader = csv.DictReader(csv_file)
    count = 0
    for row in csv_reader:
        print(row)
        count += 1
print(f'Processed {count} lines.')

# --------- WRITING CSV ----------
# newline='' is the documented way to open files for the csv module; it
# prevents doubled line endings on Windows.
# 1 - csv.writer with explicit delimiter/quoting settings.
with open('employee_file.csv', mode='w', newline='') as employee_file:
    employee_writer = csv.writer(employee_file, delimiter=',', quotechar='"', quoting=csv.QUOTE_MINIMAL)
    employee_writer.writerow(['John Smith', 'Accounting', 'November'])
    employee_writer.writerow(['Erica Meyers', 'IT', 'March'])

# 2 - csv.DictWriter: write a header row, then one dict per record.
with open('employee_file2.csv', mode='w', newline='') as csv_file:
    fieldnames = ['emp_name', 'dept', 'birth_month']
    writer = csv.DictWriter(csv_file, fieldnames=fieldnames)
    writer.writeheader()
    writer.writerow({'emp_name': 'John Smith', 'dept': 'Accounting', 'birth_month': 'November'})
    writer.writerow({'emp_name': 'Erica Meyers', 'dept': 'IT', 'birth_month': 'March'})

View File

@@ -0,0 +1,32 @@
import pandas

# --------- READING CSV ----------
# Default read: pandas adds its own integer index.
df = pandas.read_csv('hrdata.csv')
print(df)
# Use the 'Name' column as the row index instead.
df = pandas.read_csv('hrdata.csv', index_col='Name')
print(df)
# Additionally parse the 'Hire Date' strings into datetimes.
df = pandas.read_csv('hrdata.csv', index_col='Name', parse_dates=['Hire Date'])
print(df)
# Bug fix: hrdata-noheader.csv has no header row, so header must be None.
# The original passed header=0, which made pandas treat the first data
# record ("Graham Chapman,...") as a header and drop it.
dfNh = pandas.read_csv('hrdata-noheader.csv',
                       index_col='Employee',
                       parse_dates=['Hired'],
                       header=None,
                       names=['Employee', 'Hired', 'Salary', 'Sick Days'])
print(dfNh)

# --------- WRITING CSV ----------
# hrdata.csv DOES have a header, so header=0 + names= replaces the original
# column names before writing the result back out.
df = pandas.read_csv('hrdata.csv',
                     index_col='Employee',
                     parse_dates=['Hired'],
                     header=0,
                     names=['Employee', 'Hired', 'Salary', 'Sick Days'])
df.to_csv('hrdata_modified.csv')

10
venv/bin/epylint Executable file
View File

@@ -0,0 +1,10 @@
#!/home/abd/PycharmProjects/paython-by-learn/venv/bin/python3.6
# -*- coding: utf-8 -*-
# pip/setuptools-generated console-script wrapper for pylint's "epylint"
# entry point. Generated code — do not edit; reinstall pylint to regenerate.
import re
import sys
from pylint import run_epylint
if __name__ == '__main__':
    # Strip a Windows "-script.pyw" / ".exe" suffix from argv[0] so the tool
    # sees its plain command name.
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(run_epylint())

10
venv/bin/f2py Executable file
View File

@@ -0,0 +1,10 @@
#!/home/abd/PycharmProjects/paython-by-learn/venv/bin/python
# -*- coding: utf-8 -*-
# pip/setuptools-generated console-script wrapper for numpy's f2py tool.
# Generated code — do not edit; reinstall numpy to regenerate.
import re
import sys
from numpy.f2py.f2py2e import main
if __name__ == '__main__':
    # Strip a Windows "-script.pyw" / ".exe" suffix from argv[0].
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(main())

10
venv/bin/f2py3 Executable file
View File

@@ -0,0 +1,10 @@
#!/home/abd/PycharmProjects/paython-by-learn/venv/bin/python
# -*- coding: utf-8 -*-
# pip/setuptools-generated console-script wrapper for numpy's f2py tool
# (f2py3 alias). Generated code — do not edit; reinstall numpy to regenerate.
import re
import sys
from numpy.f2py.f2py2e import main
if __name__ == '__main__':
    # Strip a Windows "-script.pyw" / ".exe" suffix from argv[0].
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(main())

10
venv/bin/f2py3.6 Executable file
View File

@@ -0,0 +1,10 @@
#!/home/abd/PycharmProjects/paython-by-learn/venv/bin/python
# -*- coding: utf-8 -*-
# pip/setuptools-generated console-script wrapper for numpy's f2py tool
# (f2py3.6 alias). Generated code — do not edit; reinstall numpy to regenerate.
import re
import sys
from numpy.f2py.f2py2e import main
if __name__ == '__main__':
    # Strip a Windows "-script.pyw" / ".exe" suffix from argv[0].
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(main())

10
venv/bin/isort Executable file
View File

@@ -0,0 +1,10 @@
#!/home/abd/PycharmProjects/paython-by-learn/venv/bin/python3.6
# -*- coding: utf-8 -*-
# pip/setuptools-generated console-script wrapper for isort.
# Generated code — do not edit; reinstall isort to regenerate.
import re
import sys
from isort.main import main
if __name__ == '__main__':
    # Strip a Windows "-script.pyw" / ".exe" suffix from argv[0].
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(main())

10
venv/bin/pylint Executable file
View File

@@ -0,0 +1,10 @@
#!/home/abd/PycharmProjects/paython-by-learn/venv/bin/python3.6
# -*- coding: utf-8 -*-
# pip/setuptools-generated console-script wrapper for pylint.
# Generated code — do not edit; reinstall pylint to regenerate.
import re
import sys
from pylint import run_pylint
if __name__ == '__main__':
    # Strip a Windows "-script.pyw" / ".exe" suffix from argv[0].
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(run_pylint())

10
venv/bin/pyreverse Executable file
View File

@@ -0,0 +1,10 @@
#!/home/abd/PycharmProjects/paython-by-learn/venv/bin/python3.6
# -*- coding: utf-8 -*-
# pip/setuptools-generated console-script wrapper for pylint's "pyreverse"
# UML tool. Generated code — do not edit; reinstall pylint to regenerate.
import re
import sys
from pylint import run_pyreverse
if __name__ == '__main__':
    # Strip a Windows "-script.pyw" / ".exe" suffix from argv[0].
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(run_pyreverse())

10
venv/bin/symilar Executable file
View File

@@ -0,0 +1,10 @@
#!/home/abd/PycharmProjects/paython-by-learn/venv/bin/python3.6
# -*- coding: utf-8 -*-
# pip/setuptools-generated console-script wrapper for pylint's "symilar"
# duplicate-code checker. Generated code — do not edit; reinstall pylint
# to regenerate.
import re
import sys
from pylint import run_symilar
if __name__ == '__main__':
    # Strip a Windows "-script.pyw" / ".exe" suffix from argv[0].
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(run_symilar())

View File

@@ -0,0 +1,339 @@
GNU GENERAL PUBLIC LICENSE
Version 2, June 1991
Copyright (C) 1989, 1991 Free Software Foundation, Inc.,
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.
Preamble
The licenses for most software are designed to take away your
freedom to share and change it. By contrast, the GNU General Public
License is intended to guarantee your freedom to share and change free
software--to make sure the software is free for all its users. This
General Public License applies to most of the Free Software
Foundation's software and to any other program whose authors commit to
using it. (Some other Free Software Foundation software is covered by
the GNU Lesser General Public License instead.) You can apply it to
your programs, too.
When we speak of free software, we are referring to freedom, not
price. Our General Public Licenses are designed to make sure that you
have the freedom to distribute copies of free software (and charge for
this service if you wish), that you receive source code or can get it
if you want it, that you can change the software or use pieces of it
in new free programs; and that you know you can do these things.
To protect your rights, we need to make restrictions that forbid
anyone to deny you these rights or to ask you to surrender the rights.
These restrictions translate to certain responsibilities for you if you
distribute copies of the software, or if you modify it.
For example, if you distribute copies of such a program, whether
gratis or for a fee, you must give the recipients all the rights that
you have. You must make sure that they, too, receive or can get the
source code. And you must show them these terms so they know their
rights.
We protect your rights with two steps: (1) copyright the software, and
(2) offer you this license which gives you legal permission to copy,
distribute and/or modify the software.
Also, for each author's protection and ours, we want to make certain
that everyone understands that there is no warranty for this free
software. If the software is modified by someone else and passed on, we
want its recipients to know that what they have is not the original, so
that any problems introduced by others will not reflect on the original
authors' reputations.
Finally, any free program is threatened constantly by software
patents. We wish to avoid the danger that redistributors of a free
program will individually obtain patent licenses, in effect making the
program proprietary. To prevent this, we have made it clear that any
patent must be licensed for everyone's free use or not licensed at all.
The precise terms and conditions for copying, distribution and
modification follow.
GNU GENERAL PUBLIC LICENSE
TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
0. This License applies to any program or other work which contains
a notice placed by the copyright holder saying it may be distributed
under the terms of this General Public License. The "Program", below,
refers to any such program or work, and a "work based on the Program"
means either the Program or any derivative work under copyright law:
that is to say, a work containing the Program or a portion of it,
either verbatim or with modifications and/or translated into another
language. (Hereinafter, translation is included without limitation in
the term "modification".) Each licensee is addressed as "you".
Activities other than copying, distribution and modification are not
covered by this License; they are outside its scope. The act of
running the Program is not restricted, and the output from the Program
is covered only if its contents constitute a work based on the
Program (independent of having been made by running the Program).
Whether that is true depends on what the Program does.
1. You may copy and distribute verbatim copies of the Program's
source code as you receive it, in any medium, provided that you
conspicuously and appropriately publish on each copy an appropriate
copyright notice and disclaimer of warranty; keep intact all the
notices that refer to this License and to the absence of any warranty;
and give any other recipients of the Program a copy of this License
along with the Program.
You may charge a fee for the physical act of transferring a copy, and
you may at your option offer warranty protection in exchange for a fee.
2. You may modify your copy or copies of the Program or any portion
of it, thus forming a work based on the Program, and copy and
distribute such modifications or work under the terms of Section 1
above, provided that you also meet all of these conditions:
a) You must cause the modified files to carry prominent notices
stating that you changed the files and the date of any change.
b) You must cause any work that you distribute or publish, that in
whole or in part contains or is derived from the Program or any
part thereof, to be licensed as a whole at no charge to all third
parties under the terms of this License.
c) If the modified program normally reads commands interactively
when run, you must cause it, when started running for such
interactive use in the most ordinary way, to print or display an
announcement including an appropriate copyright notice and a
notice that there is no warranty (or else, saying that you provide
a warranty) and that users may redistribute the program under
these conditions, and telling the user how to view a copy of this
License. (Exception: if the Program itself is interactive but
does not normally print such an announcement, your work based on
the Program is not required to print an announcement.)
These requirements apply to the modified work as a whole. If
identifiable sections of that work are not derived from the Program,
and can be reasonably considered independent and separate works in
themselves, then this License, and its terms, do not apply to those
sections when you distribute them as separate works. But when you
distribute the same sections as part of a whole which is a work based
on the Program, the distribution of the whole must be on the terms of
this License, whose permissions for other licensees extend to the
entire whole, and thus to each and every part regardless of who wrote it.
Thus, it is not the intent of this section to claim rights or contest
your rights to work written entirely by you; rather, the intent is to
exercise the right to control the distribution of derivative or
collective works based on the Program.
In addition, mere aggregation of another work not based on the Program
with the Program (or with a work based on the Program) on a volume of
a storage or distribution medium does not bring the other work under
the scope of this License.
3. You may copy and distribute the Program (or a work based on it,
under Section 2) in object code or executable form under the terms of
Sections 1 and 2 above provided that you also do one of the following:
a) Accompany it with the complete corresponding machine-readable
source code, which must be distributed under the terms of Sections
1 and 2 above on a medium customarily used for software interchange; or,
b) Accompany it with a written offer, valid for at least three
years, to give any third party, for a charge no more than your
cost of physically performing source distribution, a complete
machine-readable copy of the corresponding source code, to be
distributed under the terms of Sections 1 and 2 above on a medium
customarily used for software interchange; or,
c) Accompany it with the information you received as to the offer
to distribute corresponding source code. (This alternative is
allowed only for noncommercial distribution and only if you
received the program in object code or executable form with such
an offer, in accord with Subsection b above.)
The source code for a work means the preferred form of the work for
making modifications to it. For an executable work, complete source
code means all the source code for all modules it contains, plus any
associated interface definition files, plus the scripts used to
control compilation and installation of the executable. However, as a
special exception, the source code distributed need not include
anything that is normally distributed (in either source or binary
form) with the major components (compiler, kernel, and so on) of the
operating system on which the executable runs, unless that component
itself accompanies the executable.
If distribution of executable or object code is made by offering
access to copy from a designated place, then offering equivalent
access to copy the source code from the same place counts as
distribution of the source code, even though third parties are not
compelled to copy the source along with the object code.
4. You may not copy, modify, sublicense, or distribute the Program
except as expressly provided under this License. Any attempt
otherwise to copy, modify, sublicense or distribute the Program is
void, and will automatically terminate your rights under this License.
However, parties who have received copies, or rights, from you under
this License will not have their licenses terminated so long as such
parties remain in full compliance.
5. You are not required to accept this License, since you have not
signed it. However, nothing else grants you permission to modify or
distribute the Program or its derivative works. These actions are
prohibited by law if you do not accept this License. Therefore, by
modifying or distributing the Program (or any work based on the
Program), you indicate your acceptance of this License to do so, and
all its terms and conditions for copying, distributing or modifying
the Program or works based on it.
6. Each time you redistribute the Program (or any work based on the
Program), the recipient automatically receives a license from the
original licensor to copy, distribute or modify the Program subject to
these terms and conditions. You may not impose any further
restrictions on the recipients' exercise of the rights granted herein.
You are not responsible for enforcing compliance by third parties to
this License.
7. If, as a consequence of a court judgment or allegation of patent
infringement or for any other reason (not limited to patent issues),
conditions are imposed on you (whether by court order, agreement or
otherwise) that contradict the conditions of this License, they do not
excuse you from the conditions of this License. If you cannot
distribute so as to satisfy simultaneously your obligations under this
License and any other pertinent obligations, then as a consequence you
may not distribute the Program at all. For example, if a patent
license would not permit royalty-free redistribution of the Program by
all those who receive copies directly or indirectly through you, then
the only way you could satisfy both it and this License would be to
refrain entirely from distribution of the Program.
If any portion of this section is held invalid or unenforceable under
any particular circumstance, the balance of the section is intended to
apply and the section as a whole is intended to apply in other
circumstances.
It is not the purpose of this section to induce you to infringe any
patents or other property right claims or to contest validity of any
such claims; this section has the sole purpose of protecting the
integrity of the free software distribution system, which is
implemented by public license practices. Many people have made
generous contributions to the wide range of software distributed
through that system in reliance on consistent application of that
system; it is up to the author/donor to decide if he or she is willing
to distribute software through any other system and a licensee cannot
impose that choice.
This section is intended to make thoroughly clear what is believed to
be a consequence of the rest of this License.
8. If the distribution and/or use of the Program is restricted in
certain countries either by patents or by copyrighted interfaces, the
original copyright holder who places the Program under this License
may add an explicit geographical distribution limitation excluding
those countries, so that distribution is permitted only in or among
countries not thus excluded. In such case, this License incorporates
the limitation as if written in the body of this License.
9. The Free Software Foundation may publish revised and/or new versions
of the General Public License from time to time. Such new versions will
be similar in spirit to the present version, but may differ in detail to
address new problems or concerns.
Each version is given a distinguishing version number. If the Program
specifies a version number of this License which applies to it and "any
later version", you have the option of following the terms and conditions
either of that version or of any later version published by the Free
Software Foundation. If the Program does not specify a version number of
this License, you may choose any version ever published by the Free Software
Foundation.
10. If you wish to incorporate parts of the Program into other free
programs whose distribution conditions are different, write to the author
to ask for permission. For software which is copyrighted by the Free
Software Foundation, write to the Free Software Foundation; we sometimes
make exceptions for this. Our decision will be guided by the two goals
of preserving the free status of all derivatives of our free software and
of promoting the sharing and reuse of software generally.
NO WARRANTY
11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY
FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN
OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES
PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED
OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS
TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE
PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING,
REPAIR OR CORRECTION.
12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR
REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES,
INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING
OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED
TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY
YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER
PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE
POSSIBILITY OF SUCH DAMAGES.
END OF TERMS AND CONDITIONS
How to Apply These Terms to Your New Programs
If you develop a new program, and you want it to be of the greatest
possible use to the public, the best way to achieve this is to make it
free software which everyone can redistribute and change under these terms.
To do so, attach the following notices to the program. It is safest
to attach them to the start of each source file to most effectively
convey the exclusion of warranty; and each file should have at least
the "copyright" line and a pointer to where the full notice is found.
<one line to give the program's name and a brief idea of what it does.>
Copyright (C) <year> <name of author>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License along
with this program; if not, write to the Free Software Foundation, Inc.,
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
Also add information on how to contact you by electronic and paper mail.
If the program is interactive, make it output a short notice like this
when it starts in an interactive mode:
Gnomovision version 69, Copyright (C) year name of author
Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
This is free software, and you are welcome to redistribute it
under certain conditions; type `show c' for details.
The hypothetical commands `show w' and `show c' should show the appropriate
parts of the General Public License. Of course, the commands you use may
be called something other than `show w' and `show c'; they could even be
mouse-clicks or menu items--whatever suits your program.
You should also get your employer (if you work as a programmer) or your
school, if any, to sign a "copyright disclaimer" for the program, if
necessary. Here is a sample; alter the names:
Yoyodyne, Inc., hereby disclaims all copyright interest in the program
`Gnomovision' (which makes passes at compilers) written by James Hacker.
<signature of Ty Coon>, 1 April 1989
Ty Coon, President of Vice
This General Public License does not permit incorporating your program into
proprietary programs. If your program is a subroutine library, you may
consider it more useful to permit linking proprietary applications with the
library. If this is what you want to do, use the GNU Lesser General
Public License instead of this License.

View File

@@ -0,0 +1,510 @@
GNU LESSER GENERAL PUBLIC LICENSE
Version 2.1, February 1999
Copyright (C) 1991, 1999 Free Software Foundation, Inc.
51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.
[This is the first released version of the Lesser GPL. It also counts
as the successor of the GNU Library Public License, version 2, hence
the version number 2.1.]
Preamble
The licenses for most software are designed to take away your
freedom to share and change it. By contrast, the GNU General Public
Licenses are intended to guarantee your freedom to share and change
free software--to make sure the software is free for all its users.
This license, the Lesser General Public License, applies to some
specially designated software packages--typically libraries--of the
Free Software Foundation and other authors who decide to use it. You
can use it too, but we suggest you first think carefully about whether
this license or the ordinary General Public License is the better
strategy to use in any particular case, based on the explanations
below.
When we speak of free software, we are referring to freedom of use,
not price. Our General Public Licenses are designed to make sure that
you have the freedom to distribute copies of free software (and charge
for this service if you wish); that you receive source code or can get
it if you want it; that you can change the software and use pieces of
it in new free programs; and that you are informed that you can do
these things.
To protect your rights, we need to make restrictions that forbid
distributors to deny you these rights or to ask you to surrender these
rights. These restrictions translate to certain responsibilities for
you if you distribute copies of the library or if you modify it.
For example, if you distribute copies of the library, whether gratis
or for a fee, you must give the recipients all the rights that we gave
you. You must make sure that they, too, receive or can get the source
code. If you link other code with the library, you must provide
complete object files to the recipients, so that they can relink them
with the library after making changes to the library and recompiling
it. And you must show them these terms so they know their rights.
We protect your rights with a two-step method: (1) we copyright the
library, and (2) we offer you this license, which gives you legal
permission to copy, distribute and/or modify the library.
To protect each distributor, we want to make it very clear that
there is no warranty for the free library. Also, if the library is
modified by someone else and passed on, the recipients should know
that what they have is not the original version, so that the original
author's reputation will not be affected by problems that might be
introduced by others.
Finally, software patents pose a constant threat to the existence of
any free program. We wish to make sure that a company cannot
effectively restrict the users of a free program by obtaining a
restrictive license from a patent holder. Therefore, we insist that
any patent license obtained for a version of the library must be
consistent with the full freedom of use specified in this license.
Most GNU software, including some libraries, is covered by the
ordinary GNU General Public License. This license, the GNU Lesser
General Public License, applies to certain designated libraries, and
is quite different from the ordinary General Public License. We use
this license for certain libraries in order to permit linking those
libraries into non-free programs.
When a program is linked with a library, whether statically or using
a shared library, the combination of the two is legally speaking a
combined work, a derivative of the original library. The ordinary
General Public License therefore permits such linking only if the
entire combination fits its criteria of freedom. The Lesser General
Public License permits more lax criteria for linking other code with
the library.
We call this license the "Lesser" General Public License because it
does Less to protect the user's freedom than the ordinary General
Public License. It also provides other free software developers Less
of an advantage over competing non-free programs. These disadvantages
are the reason we use the ordinary General Public License for many
libraries. However, the Lesser license provides advantages in certain
special circumstances.
For example, on rare occasions, there may be a special need to
encourage the widest possible use of a certain library, so that it
becomes a de-facto standard. To achieve this, non-free programs must
be allowed to use the library. A more frequent case is that a free
library does the same job as widely used non-free libraries. In this
case, there is little to gain by limiting the free library to free
software only, so we use the Lesser General Public License.
In other cases, permission to use a particular library in non-free
programs enables a greater number of people to use a large body of
free software. For example, permission to use the GNU C Library in
non-free programs enables many more people to use the whole GNU
operating system, as well as its variant, the GNU/Linux operating
system.
Although the Lesser General Public License is Less protective of the
users' freedom, it does ensure that the user of a program that is
linked with the Library has the freedom and the wherewithal to run
that program using a modified version of the Library.
The precise terms and conditions for copying, distribution and
modification follow. Pay close attention to the difference between a
"work based on the library" and a "work that uses the library". The
former contains code derived from the library, whereas the latter must
be combined with the library in order to run.
GNU LESSER GENERAL PUBLIC LICENSE
TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
0. This License Agreement applies to any software library or other
program which contains a notice placed by the copyright holder or
other authorized party saying it may be distributed under the terms of
this Lesser General Public License (also called "this License").
Each licensee is addressed as "you".
A "library" means a collection of software functions and/or data
prepared so as to be conveniently linked with application programs
(which use some of those functions and data) to form executables.
The "Library", below, refers to any such software library or work
which has been distributed under these terms. A "work based on the
Library" means either the Library or any derivative work under
copyright law: that is to say, a work containing the Library or a
portion of it, either verbatim or with modifications and/or translated
straightforwardly into another language. (Hereinafter, translation is
included without limitation in the term "modification".)
"Source code" for a work means the preferred form of the work for
making modifications to it. For a library, complete source code means
all the source code for all modules it contains, plus any associated
interface definition files, plus the scripts used to control
compilation and installation of the library.
Activities other than copying, distribution and modification are not
covered by this License; they are outside its scope. The act of
running a program using the Library is not restricted, and output from
such a program is covered only if its contents constitute a work based
on the Library (independent of the use of the Library in a tool for
writing it). Whether that is true depends on what the Library does
and what the program that uses the Library does.
1. You may copy and distribute verbatim copies of the Library's
complete source code as you receive it, in any medium, provided that
you conspicuously and appropriately publish on each copy an
appropriate copyright notice and disclaimer of warranty; keep intact
all the notices that refer to this License and to the absence of any
warranty; and distribute a copy of this License along with the
Library.
You may charge a fee for the physical act of transferring a copy,
and you may at your option offer warranty protection in exchange for a
fee.
2. You may modify your copy or copies of the Library or any portion
of it, thus forming a work based on the Library, and copy and
distribute such modifications or work under the terms of Section 1
above, provided that you also meet all of these conditions:
a) The modified work must itself be a software library.
b) You must cause the files modified to carry prominent notices
stating that you changed the files and the date of any change.
c) You must cause the whole of the work to be licensed at no
charge to all third parties under the terms of this License.
d) If a facility in the modified Library refers to a function or a
table of data to be supplied by an application program that uses
the facility, other than as an argument passed when the facility
is invoked, then you must make a good faith effort to ensure that,
in the event an application does not supply such function or
table, the facility still operates, and performs whatever part of
its purpose remains meaningful.
(For example, a function in a library to compute square roots has
a purpose that is entirely well-defined independent of the
application. Therefore, Subsection 2d requires that any
application-supplied function or table used by this function must
be optional: if the application does not supply it, the square
root function must still compute square roots.)
These requirements apply to the modified work as a whole. If
identifiable sections of that work are not derived from the Library,
and can be reasonably considered independent and separate works in
themselves, then this License, and its terms, do not apply to those
sections when you distribute them as separate works. But when you
distribute the same sections as part of a whole which is a work based
on the Library, the distribution of the whole must be on the terms of
this License, whose permissions for other licensees extend to the
entire whole, and thus to each and every part regardless of who wrote
it.
Thus, it is not the intent of this section to claim rights or contest
your rights to work written entirely by you; rather, the intent is to
exercise the right to control the distribution of derivative or
collective works based on the Library.
In addition, mere aggregation of another work not based on the Library
with the Library (or with a work based on the Library) on a volume of
a storage or distribution medium does not bring the other work under
the scope of this License.
3. You may opt to apply the terms of the ordinary GNU General Public
License instead of this License to a given copy of the Library. To do
this, you must alter all the notices that refer to this License, so
that they refer to the ordinary GNU General Public License, version 2,
instead of to this License. (If a newer version than version 2 of the
ordinary GNU General Public License has appeared, then you can specify
that version instead if you wish.) Do not make any other change in
these notices.
Once this change is made in a given copy, it is irreversible for
that copy, so the ordinary GNU General Public License applies to all
subsequent copies and derivative works made from that copy.
This option is useful when you wish to copy part of the code of
the Library into a program that is not a library.
4. You may copy and distribute the Library (or a portion or
derivative of it, under Section 2) in object code or executable form
under the terms of Sections 1 and 2 above provided that you accompany
it with the complete corresponding machine-readable source code, which
must be distributed under the terms of Sections 1 and 2 above on a
medium customarily used for software interchange.
If distribution of object code is made by offering access to copy
from a designated place, then offering equivalent access to copy the
source code from the same place satisfies the requirement to
distribute the source code, even though third parties are not
compelled to copy the source along with the object code.
5. A program that contains no derivative of any portion of the
Library, but is designed to work with the Library by being compiled or
linked with it, is called a "work that uses the Library". Such a
work, in isolation, is not a derivative work of the Library, and
therefore falls outside the scope of this License.
However, linking a "work that uses the Library" with the Library
creates an executable that is a derivative of the Library (because it
contains portions of the Library), rather than a "work that uses the
library". The executable is therefore covered by this License.
Section 6 states terms for distribution of such executables.
When a "work that uses the Library" uses material from a header file
that is part of the Library, the object code for the work may be a
derivative work of the Library even though the source code is not.
Whether this is true is especially significant if the work can be
linked without the Library, or if the work is itself a library. The
threshold for this to be true is not precisely defined by law.
If such an object file uses only numerical parameters, data
structure layouts and accessors, and small macros and small inline
functions (ten lines or less in length), then the use of the object
file is unrestricted, regardless of whether it is legally a derivative
work. (Executables containing this object code plus portions of the
Library will still fall under Section 6.)
Otherwise, if the work is a derivative of the Library, you may
distribute the object code for the work under the terms of Section 6.
Any executables containing that work also fall under Section 6,
whether or not they are linked directly with the Library itself.
6. As an exception to the Sections above, you may also combine or
link a "work that uses the Library" with the Library to produce a
work containing portions of the Library, and distribute that work
under terms of your choice, provided that the terms permit
modification of the work for the customer's own use and reverse
engineering for debugging such modifications.
You must give prominent notice with each copy of the work that the
Library is used in it and that the Library and its use are covered by
this License. You must supply a copy of this License. If the work
during execution displays copyright notices, you must include the
copyright notice for the Library among them, as well as a reference
directing the user to the copy of this License. Also, you must do one
of these things:
a) Accompany the work with the complete corresponding
machine-readable source code for the Library including whatever
changes were used in the work (which must be distributed under
Sections 1 and 2 above); and, if the work is an executable linked
with the Library, with the complete machine-readable "work that
uses the Library", as object code and/or source code, so that the
user can modify the Library and then relink to produce a modified
executable containing the modified Library. (It is understood
that the user who changes the contents of definitions files in the
Library will not necessarily be able to recompile the application
to use the modified definitions.)
b) Use a suitable shared library mechanism for linking with the
Library. A suitable mechanism is one that (1) uses at run time a
copy of the library already present on the user's computer system,
rather than copying library functions into the executable, and (2)
will operate properly with a modified version of the library, if
the user installs one, as long as the modified version is
interface-compatible with the version that the work was made with.
c) Accompany the work with a written offer, valid for at least
three years, to give the same user the materials specified in
Subsection 6a, above, for a charge no more than the cost of
performing this distribution.
d) If distribution of the work is made by offering access to copy
from a designated place, offer equivalent access to copy the above
specified materials from the same place.
e) Verify that the user has already received a copy of these
materials or that you have already sent this user a copy.
For an executable, the required form of the "work that uses the
Library" must include any data and utility programs needed for
reproducing the executable from it. However, as a special exception,
the materials to be distributed need not include anything that is
normally distributed (in either source or binary form) with the major
components (compiler, kernel, and so on) of the operating system on
which the executable runs, unless that component itself accompanies
the executable.
It may happen that this requirement contradicts the license
restrictions of other proprietary libraries that do not normally
accompany the operating system. Such a contradiction means you cannot
use both them and the Library together in an executable that you
distribute.
7. You may place library facilities that are a work based on the
Library side-by-side in a single library together with other library
facilities not covered by this License, and distribute such a combined
library, provided that the separate distribution of the work based on
the Library and of the other library facilities is otherwise
permitted, and provided that you do these two things:
a) Accompany the combined library with a copy of the same work
based on the Library, uncombined with any other library
facilities. This must be distributed under the terms of the
Sections above.
b) Give prominent notice with the combined library of the fact
that part of it is a work based on the Library, and explaining
where to find the accompanying uncombined form of the same work.
8. You may not copy, modify, sublicense, link with, or distribute
the Library except as expressly provided under this License. Any
attempt otherwise to copy, modify, sublicense, link with, or
distribute the Library is void, and will automatically terminate your
rights under this License. However, parties who have received copies,
or rights, from you under this License will not have their licenses
terminated so long as such parties remain in full compliance.
9. You are not required to accept this License, since you have not
signed it. However, nothing else grants you permission to modify or
distribute the Library or its derivative works. These actions are
prohibited by law if you do not accept this License. Therefore, by
modifying or distributing the Library (or any work based on the
Library), you indicate your acceptance of this License to do so, and
all its terms and conditions for copying, distributing or modifying
the Library or works based on it.
10. Each time you redistribute the Library (or any work based on the
Library), the recipient automatically receives a license from the
original licensor to copy, distribute, link with or modify the Library
subject to these terms and conditions. You may not impose any further
restrictions on the recipients' exercise of the rights granted herein.
You are not responsible for enforcing compliance by third parties with
this License.
11. If, as a consequence of a court judgment or allegation of patent
infringement or for any other reason (not limited to patent issues),
conditions are imposed on you (whether by court order, agreement or
otherwise) that contradict the conditions of this License, they do not
excuse you from the conditions of this License. If you cannot
distribute so as to satisfy simultaneously your obligations under this
License and any other pertinent obligations, then as a consequence you
may not distribute the Library at all. For example, if a patent
license would not permit royalty-free redistribution of the Library by
all those who receive copies directly or indirectly through you, then
the only way you could satisfy both it and this License would be to
refrain entirely from distribution of the Library.
If any portion of this section is held invalid or unenforceable under
any particular circumstance, the balance of the section is intended to
apply, and the section as a whole is intended to apply in other
circumstances.
It is not the purpose of this section to induce you to infringe any
patents or other property right claims or to contest validity of any
such claims; this section has the sole purpose of protecting the
integrity of the free software distribution system which is
implemented by public license practices. Many people have made
generous contributions to the wide range of software distributed
through that system in reliance on consistent application of that
system; it is up to the author/donor to decide if he or she is willing
to distribute software through any other system and a licensee cannot
impose that choice.
This section is intended to make thoroughly clear what is believed to
be a consequence of the rest of this License.
12. If the distribution and/or use of the Library is restricted in
certain countries either by patents or by copyrighted interfaces, the
original copyright holder who places the Library under this License
may add an explicit geographical distribution limitation excluding those
countries, so that distribution is permitted only in or among
countries not thus excluded. In such case, this License incorporates
the limitation as if written in the body of this License.
13. The Free Software Foundation may publish revised and/or new
versions of the Lesser General Public License from time to time.
Such new versions will be similar in spirit to the present version,
but may differ in detail to address new problems or concerns.
Each version is given a distinguishing version number. If the Library
specifies a version number of this License which applies to it and
"any later version", you have the option of following the terms and
conditions either of that version or of any later version published by
the Free Software Foundation. If the Library does not specify a
license version number, you may choose any version ever published by
the Free Software Foundation.
14. If you wish to incorporate parts of the Library into other free
programs whose distribution conditions are incompatible with these,
write to the author to ask for permission. For software which is
copyrighted by the Free Software Foundation, write to the Free
Software Foundation; we sometimes make exceptions for this. Our
decision will be guided by the two goals of preserving the free status
of all derivatives of our free software and of promoting the sharing
and reuse of software generally.
NO WARRANTY
15. BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO
WARRANTY FOR THE LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW.
EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR
OTHER PARTIES PROVIDE THE LIBRARY "AS IS" WITHOUT WARRANTY OF ANY
KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE
LIBRARY IS WITH YOU. SHOULD THE LIBRARY PROVE DEFECTIVE, YOU ASSUME
THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
16. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN
WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY
AND/OR REDISTRIBUTE THE LIBRARY AS PERMITTED ABOVE, BE LIABLE TO YOU
FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR
CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE
LIBRARY (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING
RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A
FAILURE OF THE LIBRARY TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF
SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
DAMAGES.
END OF TERMS AND CONDITIONS
How to Apply These Terms to Your New Libraries
If you develop a new library, and you want it to be of the greatest
possible use to the public, we recommend making it free software that
everyone can redistribute and change. You can do so by permitting
redistribution under these terms (or, alternatively, under the terms
of the ordinary General Public License).
To apply these terms, attach the following notices to the library.
It is safest to attach them to the start of each source file to most
effectively convey the exclusion of warranty; and each file should
have at least the "copyright" line and a pointer to where the full
notice is found.
<one line to give the library's name and a brief idea of what it does.>
Copyright (C) <year> <name of author>
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
Also add information on how to contact you by electronic and paper mail.
You should also get your employer (if you work as a programmer) or
your school, if any, to sign a "copyright disclaimer" for the library,
if necessary. Here is a sample; alter the names:
Yoyodyne, Inc., hereby disclaims all copyright interest in the
library `Frob' (a library for tweaking knobs) written by James
Random Hacker.
<signature of Ty Coon>, 1 April 1990
Ty Coon, President of Vice
That's all there is to it!

View File

@@ -0,0 +1 @@
pip

View File

@@ -0,0 +1,102 @@
Metadata-Version: 2.1
Name: astroid
Version: 2.2.5
Summary: An abstract syntax tree for Python with inference support.
Home-page: https://github.com/PyCQA/astroid
Author: Python Code Quality Authority
Author-email: code-quality@python.org
License: LGPL
Platform: UNKNOWN
Classifier: Topic :: Software Development :: Libraries :: Python Modules
Classifier: Topic :: Software Development :: Quality Assurance
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.4
Classifier: Programming Language :: Python :: 3.5
Classifier: Programming Language :: Python :: 3.6
Classifier: Programming Language :: Python :: 3.7
Classifier: Programming Language :: Python :: Implementation :: CPython
Classifier: Programming Language :: Python :: Implementation :: PyPy
Requires-Python: >=3.4.*
Requires-Dist: lazy-object-proxy
Requires-Dist: six
Requires-Dist: wrapt
Requires-Dist: typed-ast (>=1.3.0) ; implementation_name == "cpython"
Requires-Dist: typing ; python_version < "3.5"
Astroid
=======
.. image:: https://travis-ci.org/PyCQA/astroid.svg?branch=master
:target: https://travis-ci.org/PyCQA/astroid
.. image:: https://ci.appveyor.com/api/projects/status/co3u42kunguhbh6l/branch/master?svg=true
:alt: AppVeyor Build Status
:target: https://ci.appveyor.com/project/PCManticore/astroid
.. image:: https://coveralls.io/repos/github/PyCQA/astroid/badge.svg?branch=master
:target: https://coveralls.io/github/PyCQA/astroid?branch=master
.. image:: https://readthedocs.org/projects/astroid/badge/?version=latest
:target: http://astroid.readthedocs.io/en/latest/?badge=latest
:alt: Documentation Status
.. image:: https://img.shields.io/badge/code%20style-black-000000.svg
:target: https://github.com/ambv/black
What's this?
------------
The aim of this module is to provide a common base representation of
python source code. It is currently the library powering pylint's capabilities.
It provides a compatible representation which comes from the `_ast`
module. It rebuilds the tree generated by the builtin _ast module by
recursively walking down the AST and building an extended ast. The new
node classes have additional methods and attributes for different
usages. They include some support for static inference and local name
scopes. Furthermore, astroid can also build partial trees by inspecting living
objects.
Installation
------------
Extract the tarball, jump into the created directory and run::
pip install .
If you want to do an editable installation, you can run::
pip install -e .
If you have any questions, please mail the code-quality@python.org
mailing list for support. See
http://mail.python.org/mailman/listinfo/code-quality for subscription
information and archives.
Documentation
-------------
http://astroid.readthedocs.io/en/latest/
Python Versions
---------------
astroid 2.0 is currently available for Python 3 only. If you want Python 2
support, older versions of astroid will still be supported until 2020.
Test
----
Tests are in the 'test' subdirectory. To launch the whole tests suite, you can use
either `tox` or `pytest`::
tox
pytest astroid

View File

@@ -0,0 +1,125 @@
astroid-2.2.5.dist-info/COPYING,sha256=qxX9UmvY3Rip5368E5ZWv00z6X_HI4zRG_YOK5uGZsY,17987
astroid-2.2.5.dist-info/COPYING.LESSER,sha256=qb3eVhbs3R6YC0TzYGAO6Hg7H5m4zIOivrFjoKOQ6GE,26527
astroid-2.2.5.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
astroid-2.2.5.dist-info/METADATA,sha256=_0Eu_wSg7s0nPtqYiIZZYG7Owax3vxAcNUDiBrCfYho,3213
astroid-2.2.5.dist-info/RECORD,,
astroid-2.2.5.dist-info/WHEEL,sha256=U88EhGIw8Sj2_phqajeu_EAi3RAo8-C6zV3REsWbWbs,92
astroid-2.2.5.dist-info/top_level.txt,sha256=HsdW4O2x7ZXRj6k-agi3RaQybGLobI3VSE-jt4vQUXM,8
astroid/__init__.py,sha256=tJJMsKzMv8hUgw3y0VQAAMx9BO-nrNUcNy_wI0XBFXo,5538
astroid/__pkginfo__.py,sha256=-QFbcr8CyWJD6Vd4hR2kiaSly4ZjvwHKv91DJ81fvnE,2125
astroid/__pycache__/__init__.cpython-36.pyc,,
astroid/__pycache__/__pkginfo__.cpython-36.pyc,,
astroid/__pycache__/_ast.cpython-36.pyc,,
astroid/__pycache__/arguments.cpython-36.pyc,,
astroid/__pycache__/as_string.cpython-36.pyc,,
astroid/__pycache__/bases.cpython-36.pyc,,
astroid/__pycache__/builder.cpython-36.pyc,,
astroid/__pycache__/context.cpython-36.pyc,,
astroid/__pycache__/decorators.cpython-36.pyc,,
astroid/__pycache__/exceptions.cpython-36.pyc,,
astroid/__pycache__/helpers.cpython-36.pyc,,
astroid/__pycache__/inference.cpython-36.pyc,,
astroid/__pycache__/manager.cpython-36.pyc,,
astroid/__pycache__/mixins.cpython-36.pyc,,
astroid/__pycache__/modutils.cpython-36.pyc,,
astroid/__pycache__/node_classes.cpython-36.pyc,,
astroid/__pycache__/nodes.cpython-36.pyc,,
astroid/__pycache__/objects.cpython-36.pyc,,
astroid/__pycache__/protocols.cpython-36.pyc,,
astroid/__pycache__/raw_building.cpython-36.pyc,,
astroid/__pycache__/rebuilder.cpython-36.pyc,,
astroid/__pycache__/scoped_nodes.cpython-36.pyc,,
astroid/__pycache__/test_utils.cpython-36.pyc,,
astroid/__pycache__/transforms.cpython-36.pyc,,
astroid/__pycache__/util.cpython-36.pyc,,
astroid/_ast.py,sha256=osewGNkfO4LbgzI-mJOWg8Seo9YgtxCo6p5VrqIa8vs,1186
astroid/arguments.py,sha256=cui-UmbEeywSk0eitSrOhi9F0Ci2clS4qYXTi8uXRs4,11783
astroid/as_string.py,sha256=WOnmoyFPbRV3M_Oe5ykkncPlaXY_eu9nQzzREHoRFRg,22231
astroid/bases.py,sha256=Da-eo4o0VAIidQn2aHINZXa20_SEbc59HlrgiaeS1qU,19030
astroid/brain/__pycache__/brain_argparse.cpython-36.pyc,,
astroid/brain/__pycache__/brain_attrs.cpython-36.pyc,,
astroid/brain/__pycache__/brain_builtin_inference.cpython-36.pyc,,
astroid/brain/__pycache__/brain_collections.cpython-36.pyc,,
astroid/brain/__pycache__/brain_curses.cpython-36.pyc,,
astroid/brain/__pycache__/brain_dateutil.cpython-36.pyc,,
astroid/brain/__pycache__/brain_fstrings.cpython-36.pyc,,
astroid/brain/__pycache__/brain_functools.cpython-36.pyc,,
astroid/brain/__pycache__/brain_gi.cpython-36.pyc,,
astroid/brain/__pycache__/brain_hashlib.cpython-36.pyc,,
astroid/brain/__pycache__/brain_http.cpython-36.pyc,,
astroid/brain/__pycache__/brain_io.cpython-36.pyc,,
astroid/brain/__pycache__/brain_mechanize.cpython-36.pyc,,
astroid/brain/__pycache__/brain_multiprocessing.cpython-36.pyc,,
astroid/brain/__pycache__/brain_namedtuple_enum.cpython-36.pyc,,
astroid/brain/__pycache__/brain_nose.cpython-36.pyc,,
astroid/brain/__pycache__/brain_numpy.cpython-36.pyc,,
astroid/brain/__pycache__/brain_pkg_resources.cpython-36.pyc,,
astroid/brain/__pycache__/brain_pytest.cpython-36.pyc,,
astroid/brain/__pycache__/brain_qt.cpython-36.pyc,,
astroid/brain/__pycache__/brain_random.cpython-36.pyc,,
astroid/brain/__pycache__/brain_re.cpython-36.pyc,,
astroid/brain/__pycache__/brain_six.cpython-36.pyc,,
astroid/brain/__pycache__/brain_ssl.cpython-36.pyc,,
astroid/brain/__pycache__/brain_subprocess.cpython-36.pyc,,
astroid/brain/__pycache__/brain_threading.cpython-36.pyc,,
astroid/brain/__pycache__/brain_typing.cpython-36.pyc,,
astroid/brain/__pycache__/brain_uuid.cpython-36.pyc,,
astroid/brain/brain_argparse.py,sha256=VEeMCr3OIjHmCy35uc-kX6nJ5_NUOAimpGJMr6CChoA,1024
astroid/brain/brain_attrs.py,sha256=c5l1ijYZB-6HWvPX_5Kb9AlNpMJXDZZ1UK58qtqaUVo,1992
astroid/brain/brain_builtin_inference.py,sha256=ymD7so94WDz16Wtgn7JFuNpdjPMwty8wtD5vMAGPOy4,26926
astroid/brain/brain_collections.py,sha256=XBlyS-6J3rlEqv_44EyB6z6YbJq4KJCoN_pKTxdBhOA,2828
astroid/brain/brain_curses.py,sha256=tDnlCP1bEvleqCMz856yua9mM5um1p_JendFhT4rBFk,3303
astroid/brain/brain_dateutil.py,sha256=q2dyV2907Bw4n7m2W4EEdok3Ndv8NzeIQxAZwXBiS14,795
astroid/brain/brain_fstrings.py,sha256=VKVMijgLE2pg2dtXM6GGFgONOxOg8qA9D5V6dYzWTbQ,2121
astroid/brain/brain_functools.py,sha256=hjSZleIDmFBbnFwrOFlZJJIDNF3FA5fR3zEYywS0rZY,5342
astroid/brain/brain_gi.py,sha256=-EpcKf9z3wT_7v0k0WXIZtgk3-213lkfUX9bxeKOM3Y,6810
astroid/brain/brain_hashlib.py,sha256=cp30hX5HhWqbWG3zqcNu8N3aHGeQK4DPi4ac8owBonU,2163
astroid/brain/brain_http.py,sha256=th-m5kE890y1M2cyr2AtaiYz4HT46Pk5Tordh_Piug4,3476
astroid/brain/brain_io.py,sha256=DJcTFMTexrsHaGg2-kHoXwonddu13ImT7NEjiF1xPiU,1470
astroid/brain/brain_mechanize.py,sha256=xTBc-u2DMmMPeci7DVFs4L2T98DwwLF_Ob5YZviLPp8,889
astroid/brain/brain_multiprocessing.py,sha256=zXqLXg6dVYTkik1qSjo1cPJMZAGrkobVslp7ArwEXYQ,3108
astroid/brain/brain_namedtuple_enum.py,sha256=ginA4riLe-gQSIrpWz0Z8mvWYdXEi9sI8J1hy4MvAxU,15473
astroid/brain/brain_nose.py,sha256=kECw2jHmX0IUPX4Gx3XVGrflKGnlgPB79QHt6WU2cwQ,2211
astroid/brain/brain_numpy.py,sha256=VSrOi7Jbdr_eAGOgFmh-8W2rUrUEsoz8OP6Czaep65A,20546
astroid/brain/brain_pkg_resources.py,sha256=S_5UED1Zg8ObEJumRdpYGnjxZzemh_G_NFj3p5NGPfc,2262
astroid/brain/brain_pytest.py,sha256=RXaNUVqy2R0et0Upn4GJkVgq5SG8Pl7zLlhqQg8Xx3Q,2384
astroid/brain/brain_qt.py,sha256=U3VOrHer0cLvpxJC55MLEYrKn1Z3RQRXsc6fQ7u5hvI,2437
astroid/brain/brain_random.py,sha256=2RZY-QEXMNWp7E6h0l0-ke-DtjKTOFlTdjiQZi3XdQc,2432
astroid/brain/brain_re.py,sha256=le7VJHUAf80HyE_aQCh7_8FyDVK6JwNWA--c9RaMVQ8,1128
astroid/brain/brain_six.py,sha256=6QHcKXoYf8yMMXWkx3g3lK0kqB5OFeYcXwjUTdgWTMw,6146
astroid/brain/brain_ssl.py,sha256=2quiZVA_BW8PWmkAsOuYanq9Hvb93LT7c9YVslw3r14,3634
astroid/brain/brain_subprocess.py,sha256=TLl7qrvP-3XUeE7g9sebmjPN4_m5KKnDbsTzFBzBUu4,4228
astroid/brain/brain_threading.py,sha256=Qv06IeuEwDlk8cibAlUxlPx2FWMyRpoSCMQTkCrL04Q,767
astroid/brain/brain_typing.py,sha256=iFw33beNCitCJjJNvccIY6SsFJcdKVDdl-56DxDioh0,2780
astroid/brain/brain_uuid.py,sha256=flWrk1Ve7oqYrO8GTZ3To8RBYteRfYwvash-s9KiU9o,564
astroid/builder.py,sha256=0wrC4-ausU_nEEkgI8LJTsrNFN_XCbOkqoG2DsKCsks,16023
astroid/context.py,sha256=VsyUDVB1J9fk1o8MQoE4ygfC7gdNjVYVUD4Bhgs9JM0,5164
astroid/decorators.py,sha256=m0v63YRiQKc66-g8ckkYeJ0d5cji8AhkUxFPbTfLVDc,4229
astroid/exceptions.py,sha256=_IJRdLfyNSPVjxYgEd11Uu9XpdqE7uBCVOEIxt3ua70,7047
astroid/helpers.py,sha256=3r5ip51zPeOTCKgG4fYWjWxAzjaJEFot3TYnZDnBJWs,9079
astroid/inference.py,sha256=zwJDRemh5NuvW0FaPGKkJQyNL0zW6LEefWnfidjlfeQ,31454
astroid/interpreter/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
astroid/interpreter/__pycache__/__init__.cpython-36.pyc,,
astroid/interpreter/__pycache__/dunder_lookup.cpython-36.pyc,,
astroid/interpreter/__pycache__/objectmodel.cpython-36.pyc,,
astroid/interpreter/_import/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
astroid/interpreter/_import/__pycache__/__init__.cpython-36.pyc,,
astroid/interpreter/_import/__pycache__/spec.cpython-36.pyc,,
astroid/interpreter/_import/__pycache__/util.cpython-36.pyc,,
astroid/interpreter/_import/spec.py,sha256=oDdgvZZCuagrsjaiNOq3MbmEhabWyWUfX7xjJaTXEpE,10942
astroid/interpreter/_import/util.py,sha256=inubUz6F3_kaMFaeleKUW6E6wCMIPrhU882zvwEZ02I,255
astroid/interpreter/dunder_lookup.py,sha256=dP-AZU_aGPNt03b1ttrMglxzeU3NtgnG0MfpSLPH6sg,2155
astroid/interpreter/objectmodel.py,sha256=0jLiTbN8I8GZKF4Rf61YfD-xZCJOOigwKn2EC3BwYlg,23114
astroid/manager.py,sha256=y1LOtZYGhoeOSmyu5k46kPQfSiQmyJDwo3Bs54u3YTI,12573
astroid/mixins.py,sha256=F2rv2Ow7AU3YT_2jitVJik95ZWRVK6hpf8BrkkspzUY,5571
astroid/modutils.py,sha256=8zpeNSGwKLA_hlk36o0Q7m3bzCumVP5c-1DJ1dkllYU,23568
astroid/node_classes.py,sha256=DeAi6YMtUmv-9dbP1iJOro5z1Irw4lsGHn5uSMCL-T0,137726
astroid/nodes.py,sha256=FIa3fF-rJ5BlhlGIcJGrEoyO_r5rMmWeajdAxtGEYFk,2927
astroid/objects.py,sha256=q6ffgYLpyHENUY8BtiZAPHhnz91LJbQFkuaQnrNtf7g,9879
astroid/protocols.py,sha256=fGHk6aTmHiH62sbHqZilRpqBsjOi-BwBWTBuiy94FxE,26513
astroid/raw_building.py,sha256=stLA5VZqM25LtIwge3YkOKaZyjj85kxSWcp8Pqz-zn0,16111
astroid/rebuilder.py,sha256=lUpUlCMJvQrbN-b4QELzgPfkJ4LM99V2Fw0ZK6SwlFY,40506
astroid/scoped_nodes.py,sha256=2O7kodUoSRp052BGHu7eWPsTPvz-XGB8f9AKSk1KOmk,92399
astroid/test_utils.py,sha256=NmVu0GTYA3Fz3BG9sF0KQt2nrd6vMSBoKpGkkIjKz28,2309
astroid/transforms.py,sha256=1npwJWcQUSIjcpcWd1pc-dJhtHOyiboQHsETAIQd5co,3377
astroid/util.py,sha256=jg5LnqbWSZTZP1KgpxGBuC6Lfwhn9Jb2T2TohXghmC0,4785

View File

@@ -0,0 +1,5 @@
Wheel-Version: 1.0
Generator: bdist_wheel (0.33.1)
Root-Is-Purelib: true
Tag: py3-none-any

View File

@@ -0,0 +1 @@
astroid

View File

@@ -0,0 +1,166 @@
# Copyright (c) 2006-2013, 2015 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
# Copyright (c) 2014 Google, Inc.
# Copyright (c) 2014 Eevee (Alex Munroe) <amunroe@yelp.com>
# Copyright (c) 2015-2016, 2018 Claudiu Popa <pcmanticore@gmail.com>
# Copyright (c) 2015-2016 Ceridwen <ceridwenv@gmail.com>
# Copyright (c) 2016 Derek Gustafson <degustaf@gmail.com>
# Copyright (c) 2016 Moises Lopez <moylop260@vauxoo.com>
# Copyright (c) 2018 Bryce Guinta <bryce.paul.guinta@gmail.com>
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
"""Python Abstract Syntax Tree New Generation
The aim of this module is to provide a common base representation of
python source code for projects such as pychecker, pyreverse,
pylint... Well, actually the development of this library is essentially
governed by pylint's needs.
It extends class defined in the python's _ast module with some
additional methods and attributes. Instance attributes are added by a
builder object, which can either generate extended ast (let's call
them astroid ;) by visiting an existent ast tree or by inspecting living
object. Methods are added by monkey patching ast classes.
Main modules are:
* nodes and scoped_nodes for more information about methods and
attributes added to different node classes
* the manager contains a high level object to get astroid trees from
source files and living objects. It maintains a cache of previously
constructed tree for quick access
* builder contains the class responsible to build astroid trees
"""
import enum
import itertools
import os
import sys
import wrapt
# Expression contexts for name nodes, mirroring the stdlib ast module's
# ast.Load / ast.Store / ast.Del distinction.
_Context = enum.Enum("Context", "Load Store Del")
Load = _Context.Load
Store = _Context.Store
Del = _Context.Del
# Only the three members are part of the public API; drop the enum itself.
del _Context
from .__pkginfo__ import version as __version__
# WARNING: internal imports order matters !
# pylint: disable=redefined-builtin
# make all exception classes accessible from astroid package
from astroid.exceptions import *
# make all node classes accessible from astroid package
from astroid.nodes import *
# trigger extra monkey-patching
from astroid import inference
# more stuff available
from astroid import raw_building
from astroid.bases import BaseInstance, Instance, BoundMethod, UnboundMethod
from astroid.node_classes import are_exclusive, unpack_infer
from astroid.scoped_nodes import builtin_lookup
from astroid.builder import parse, extract_node
from astroid.util import Uninferable
# make a manager instance (borg) accessible from astroid package
from astroid.manager import AstroidManager
MANAGER = AstroidManager()
del AstroidManager
# transform utilities (filters and decorator)
# pylint: disable=dangerous-default-value
@wrapt.decorator
def _inference_tip_cached(func, instance, args, kwargs, _cache={}):
    """Cache decorator used for inference tips"""
    # NOTE: the mutable default argument is intentional — it serves as a
    # module-level cache shared across every wrapped inference tip (hence
    # the pylint disable above).
    node = args[0]
    try:
        # Cached results are stored as lists so each hit gets a fresh iterator.
        return iter(_cache[func, node])
    except KeyError:
        result = func(*args, **kwargs)
        # Need to keep an iterator around
        original, copy = itertools.tee(result)
        _cache[func, node] = list(copy)
        return original
# pylint: enable=dangerous-default-value
def inference_tip(infer_function, raise_on_overwrite=False):
    """Wrap an instance-specific inference function as a transform.

    The returned callable is intended to be handed to
    MANAGER.register_transform so that *infer_function* becomes the
    explicit inference for the matching nodes.

    :param bool raise_on_overwrite: Raise an `InferenceOverwriteError`
        if the inference tip will overwrite another. Used for debugging

    Typical usage

    .. sourcecode:: python

       MANAGER.register_transform(Call, inference_tip(infer_named_tuple),
                                  predicate)

    .. Note::

        Using an inference tip will override
        any previously set inference tip for the given
        node. Use a predicate in the transform to prevent
        excess overwrites.
    """

    def transform(node, infer_function=infer_function):
        existing = node._explicit_inference
        would_overwrite = existing is not None and existing is not infer_function
        if raise_on_overwrite and would_overwrite:
            raise InferenceOverwriteError(
                "Inference already set to {existing_inference}. "
                "Trying to overwrite with {new_inference} for {node}".format(
                    existing_inference=infer_function,
                    new_inference=existing,
                    node=node,
                )
            )
        # pylint: disable=no-value-for-parameter
        node._explicit_inference = _inference_tip_cached(infer_function)
        return node

    return transform
def register_module_extender(manager, module_name, get_extension_mod):
    """Register a transform merging an extension module's locals into the
    module named *module_name*, re-parenting objects that the extension
    module owns."""

    def transform(node):
        ext_mod = get_extension_mod()
        for name, objs in ext_mod.locals.items():
            node.locals[name] = objs
            for obj in objs:
                if obj.parent is ext_mod:
                    obj.parent = node

    def matches(n):
        return n.name == module_name

    manager.register_transform(Module, transform, matches)
# load brain plugins
BRAIN_MODULES_DIR = os.path.join(os.path.dirname(__file__), "brain")
if BRAIN_MODULES_DIR not in sys.path:
    # add it to the end of the list so user path take precedence
    sys.path.append(BRAIN_MODULES_DIR)
# load modules in this directory
for module in os.listdir(BRAIN_MODULES_DIR):
    if module.endswith(".py"):
        # Import by bare module name; resolvable because the brain directory
        # was appended to sys.path above.
        __import__(module[:-3])

View File

@@ -0,0 +1,57 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2006-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
# Copyright (c) 2014-2018 Claudiu Popa <pcmanticore@gmail.com>
# Copyright (c) 2014 Google, Inc.
# Copyright (c) 2015-2017 Ceridwen <ceridwenv@gmail.com>
# Copyright (c) 2015 Florian Bruhin <me@the-compiler.org>
# Copyright (c) 2015 Radosław Ganczarek <radoslaw@ganczarek.in>
# Copyright (c) 2016 Moises Lopez <moylop260@vauxoo.com>
# Copyright (c) 2017 Hugo <hugovk@users.noreply.github.com>
# Copyright (c) 2017 Łukasz Rogalski <rogalski.91@gmail.com>
# Copyright (c) 2017 Calen Pennington <cale@edx.org>
# Copyright (c) 2018 Ashley Whetter <ashley@awhetter.co.uk>
# Copyright (c) 2018 Bryce Guinta <bryce.paul.guinta@gmail.com>
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
"""astroid packaging information"""
# Distribution and importable module names (identical for astroid).
distname = "astroid"
modname = "astroid"

version = "2.2.5"
# Numeric version tuple derived from the dotted string, e.g. (2, 2, 5).
numversion = tuple(int(elem) for elem in version.split(".") if elem.isdigit())

# Packaging metadata consumed by setup.py.
extras_require = {}
install_requires = [
    "lazy_object_proxy",
    "six",
    "wrapt",
    'typing;python_version<"3.5"',
    'typed-ast>=1.3.0;implementation_name== "cpython"',
]

# pylint: disable=redefined-builtin; why license is a builtin anyway?
license = "LGPL"

author = "Python Code Quality Authority"
author_email = "code-quality@python.org"
mailinglist = "mailto://%s" % author_email
web = "https://github.com/PyCQA/astroid"

description = "An abstract syntax tree for Python with inference support."

classifiers = [
    "Topic :: Software Development :: Libraries :: Python Modules",
    "Topic :: Software Development :: Quality Assurance",
    "Programming Language :: Python",
    "Programming Language :: Python :: 3",
    "Programming Language :: Python :: 3.4",
    "Programming Language :: Python :: 3.5",
    "Programming Language :: Python :: 3.6",
    "Programming Language :: Python :: 3.7",
    "Programming Language :: Python :: Implementation :: CPython",
    "Programming Language :: Python :: Implementation :: PyPy",
]

View File

@@ -0,0 +1,40 @@
import ast
from collections import namedtuple
from functools import partial
from typing import Optional
import sys
# Prefer the typed_ast parsers (which understand type comments) when the
# package is installed; otherwise both stay None and the stdlib ``ast``
# module is used as a fallback.
_ast_py2 = _ast_py3 = None
try:
    import typed_ast.ast3 as _ast_py3
    import typed_ast.ast27 as _ast_py2
except ImportError:
    pass

# (argument annotations, return annotation) parsed from a
# ``# type: (...) -> ...`` function type comment.
FunctionType = namedtuple("FunctionType", ["argtypes", "returns"])
def _get_parser_module(parse_python_two: bool = False):
    """Select the AST parser module for the requested Python major version.

    Returns the matching typed_ast module when installed, falling back to
    the stdlib ``ast`` module otherwise.
    """
    chosen = _ast_py2 if parse_python_two else _ast_py3
    return chosen or ast
def _parse(string: str, parse_python_two: bool = False):
    """Parse *string* into an AST, honouring the Python-2 compatibility flag."""
    module = _get_parser_module(parse_python_two=parse_python_two)
    parse = module.parse
    if _ast_py3 and not parse_python_two:
        # typed_ast must be told which Python 3 minor grammar to apply.
        parse = partial(parse, feature_version=sys.version_info.minor)
    return parse(string)
def parse_function_type_comment(type_comment: str) -> Optional[FunctionType]:
    """Given a correct type comment, obtain a FunctionType object"""
    if _ast_py3 is not None:
        parsed = _ast_py3.parse(type_comment, "<type_comment>", "func_type")
        return FunctionType(argtypes=parsed.argtypes, returns=parsed.returns)
    # typed_ast is required for func_type parsing; without it we cannot help.
    return None

View File

@@ -0,0 +1,285 @@
# Copyright (c) 2015-2016, 2018 Claudiu Popa <pcmanticore@gmail.com>
# Copyright (c) 2015-2016 Ceridwen <ceridwenv@gmail.com>
# Copyright (c) 2018 Bryce Guinta <bryce.paul.guinta@gmail.com>
# Copyright (c) 2018 Nick Drozd <nicholasdrozd@gmail.com>
# Copyright (c) 2018 Anthony Sottile <asottile@umich.edu>
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
from astroid import bases
from astroid import context as contextmod
from astroid import exceptions
from astroid import nodes
from astroid import util
class CallSite:
    """Class for understanding arguments passed into a call site

    It needs a call context, which contains the arguments and the
    keyword arguments that were passed into a given call site.
    In order to infer what an argument represents, call
    :meth:`infer_argument` with the corresponding function node
    and the argument name.
    """

    def __init__(self, callcontext, argument_context_map=None):
        # Optional mapping of argument node -> inference context, installed
        # as ``extra_context`` while unpacking starred/keyword arguments.
        if argument_context_map is None:
            argument_context_map = {}
        self.argument_context_map = argument_context_map
        args = callcontext.args
        keywords = callcontext.keywords
        # Keyword names supplied more than once (detected during ** unpacking).
        self.duplicated_keywords = set()
        self._unpacked_args = self._unpack_args(args)
        self._unpacked_kwargs = self._unpack_keywords(keywords)
        # Keep only the arguments whose values could actually be inferred;
        # the raw unpacked forms above retain the Uninferable placeholders.
        self.positional_arguments = [
            arg for arg in self._unpacked_args if arg is not util.Uninferable
        ]
        self.keyword_arguments = {
            key: value
            for key, value in self._unpacked_kwargs.items()
            if value is not util.Uninferable
        }

    @classmethod
    def from_call(cls, call_node):
        """Get a CallSite object from the given Call node."""
        callcontext = contextmod.CallContext(call_node.args, call_node.keywords)
        return cls(callcontext)

    def has_invalid_arguments(self):
        """Check if in the current CallSite were passed *invalid* arguments

        This can mean multiple things. For instance, if an unpacking
        of an invalid object was passed, then this method will return True.
        Other cases can be when the arguments can't be inferred by astroid,
        for example, by passing objects which aren't known statically.
        """
        return len(self.positional_arguments) != len(self._unpacked_args)

    def has_invalid_keywords(self):
        """Check if in the current CallSite were passed *invalid* keyword arguments

        For instance, unpacking a dictionary with integer keys is invalid
        (**{1:2}), because the keys must be strings, which will make this
        method to return True. Other cases where this might return True if
        objects which can't be inferred were passed.
        """
        return len(self.keyword_arguments) != len(self._unpacked_kwargs)

    def _unpack_keywords(self, keywords):
        """Expand ``**mapping`` entries into individual keyword items.

        Anything that cannot be resolved is recorded as ``Uninferable``.
        """
        values = {}
        context = contextmod.InferenceContext()
        context.extra_context = self.argument_context_map
        for name, value in keywords:
            if name is None:
                # Then it's an unpacking operation (**)
                try:
                    inferred = next(value.infer(context=context))
                except exceptions.InferenceError:
                    values[name] = util.Uninferable
                    continue

                if not isinstance(inferred, nodes.Dict):
                    # Not something we can work with.
                    values[name] = util.Uninferable
                    continue

                for dict_key, dict_value in inferred.items:
                    try:
                        dict_key = next(dict_key.infer(context=context))
                    except exceptions.InferenceError:
                        values[name] = util.Uninferable
                        continue
                    if not isinstance(dict_key, nodes.Const):
                        values[name] = util.Uninferable
                        continue
                    if not isinstance(dict_key.value, str):
                        # Dict keys unpacked with ** must be strings.
                        values[name] = util.Uninferable
                        continue
                    if dict_key.value in values:
                        # The name is already in the dictionary
                        values[dict_key.value] = util.Uninferable
                        self.duplicated_keywords.add(dict_key.value)
                        continue
                    values[dict_key.value] = dict_value
            else:
                values[name] = value
        return values

    def _unpack_args(self, args):
        """Expand ``*iterable`` entries into their elements.

        Starred values that cannot be inferred, or that have no ``elts``
        attribute, become a single ``Uninferable`` placeholder.
        """
        values = []
        context = contextmod.InferenceContext()
        context.extra_context = self.argument_context_map
        for arg in args:
            if isinstance(arg, nodes.Starred):
                try:
                    inferred = next(arg.value.infer(context=context))
                except exceptions.InferenceError:
                    values.append(util.Uninferable)
                    continue

                if inferred is util.Uninferable:
                    values.append(util.Uninferable)
                    continue
                if not hasattr(inferred, "elts"):
                    values.append(util.Uninferable)
                    continue
                values.extend(inferred.elts)
            else:
                values.append(arg)
        return values

    def infer_argument(self, funcnode, name, context):
        """infer a function argument value according to the call context

        Arguments:
            funcnode: The function being called.
            name: The name of the argument whose value is being inferred.
            context: Inference context object
        """
        if name in self.duplicated_keywords:
            raise exceptions.InferenceError(
                "The arguments passed to {func!r} " " have duplicate keywords.",
                call_site=self,
                func=funcnode,
                arg=name,
                context=context,
            )

        # Look into the keywords first, maybe it's already there.
        try:
            return self.keyword_arguments[name].infer(context)
        except KeyError:
            pass

        # Too many arguments given and no variable arguments.
        if len(self.positional_arguments) > len(funcnode.args.args):
            if not funcnode.args.vararg:
                raise exceptions.InferenceError(
                    "Too many positional arguments "
                    "passed to {func!r} that does "
                    "not have *args.",
                    call_site=self,
                    func=funcnode,
                    arg=name,
                    context=context,
                )

        # Split the positionals into those consumed by named parameters and
        # the overflow that would be captured by *args.
        positional = self.positional_arguments[: len(funcnode.args.args)]
        vararg = self.positional_arguments[len(funcnode.args.args) :]
        argindex = funcnode.args.find_argname(name)[0]
        kwonlyargs = {arg.name for arg in funcnode.args.kwonlyargs}
        kwargs = {
            key: value
            for key, value in self.keyword_arguments.items()
            if key not in kwonlyargs
        }
        # If there are too few positionals compared to
        # what the function expects to receive, check to see
        # if the missing positional arguments were passed
        # as keyword arguments and if so, place them into the
        # positional args list.
        if len(positional) < len(funcnode.args.args):
            for func_arg in funcnode.args.args:
                if func_arg.name in kwargs:
                    arg = kwargs.pop(func_arg.name)
                    positional.append(arg)

        if argindex is not None:
            # 2. first argument of instance/class method
            if argindex == 0 and funcnode.type in ("method", "classmethod"):
                if context.boundnode is not None:
                    boundnode = context.boundnode
                else:
                    # XXX can do better ?
                    boundnode = funcnode.parent.frame()

                if isinstance(boundnode, nodes.ClassDef):
                    # Verify that we're accessing a method
                    # of the metaclass through a class, as in
                    # `cls.metaclass_method`. In this case, the
                    # first argument is always the class.
                    method_scope = funcnode.parent.scope()
                    if method_scope is boundnode.metaclass():
                        return iter((boundnode,))

                if funcnode.type == "method":
                    if not isinstance(boundnode, bases.Instance):
                        boundnode = bases.Instance(boundnode)
                    return iter((boundnode,))
                if funcnode.type == "classmethod":
                    return iter((boundnode,))
            # if we have a method, extract one position
            # from the index, so we'll take in account
            # the extra parameter represented by `self` or `cls`
            if funcnode.type in ("method", "classmethod"):
                argindex -= 1
            # 2. search arg index
            try:
                return self.positional_arguments[argindex].infer(context)
            except IndexError:
                pass

        if funcnode.args.kwarg == name:
            # It wants all the keywords that were passed into
            # the call site.
            if self.has_invalid_keywords():
                raise exceptions.InferenceError(
                    "Inference failed to find values for all keyword arguments "
                    "to {func!r}: {unpacked_kwargs!r} doesn't correspond to "
                    "{keyword_arguments!r}.",
                    keyword_arguments=self.keyword_arguments,
                    unpacked_kwargs=self._unpacked_kwargs,
                    call_site=self,
                    func=funcnode,
                    arg=name,
                    context=context,
                )
            # Synthesize a Dict node holding the leftover keyword arguments.
            kwarg = nodes.Dict(
                lineno=funcnode.args.lineno,
                col_offset=funcnode.args.col_offset,
                parent=funcnode.args,
            )
            kwarg.postinit(
                [(nodes.const_factory(key), value) for key, value in kwargs.items()]
            )
            return iter((kwarg,))
        if funcnode.args.vararg == name:
            # It wants all the args that were passed into
            # the call site.
            if self.has_invalid_arguments():
                raise exceptions.InferenceError(
                    "Inference failed to find values for all positional "
                    "arguments to {func!r}: {unpacked_args!r} doesn't "
                    "correspond to {positional_arguments!r}.",
                    positional_arguments=self.positional_arguments,
                    unpacked_args=self._unpacked_args,
                    call_site=self,
                    func=funcnode,
                    arg=name,
                    context=context,
                )
            # Synthesize a Tuple node holding the overflow positionals.
            args = nodes.Tuple(
                lineno=funcnode.args.lineno,
                col_offset=funcnode.args.col_offset,
                parent=funcnode.args,
            )
            args.postinit(vararg)
            return iter((args,))

        # Check if it's a default parameter.
        try:
            return funcnode.args.default_value(name).infer(context)
        except exceptions.NoDefault:
            pass
        raise exceptions.InferenceError(
            "No value found for argument {name} to " "{func!r}",
            call_site=self,
            func=funcnode,
            arg=name,
            context=context,
        )

View File

@@ -0,0 +1,630 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2009-2011, 2013-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
# Copyright (c) 2010 Daniel Harding <dharding@gmail.com>
# Copyright (c) 2013-2016, 2018 Claudiu Popa <pcmanticore@gmail.com>
# Copyright (c) 2013-2014 Google, Inc.
# Copyright (c) 2015-2016 Ceridwen <ceridwenv@gmail.com>
# Copyright (c) 2016 Jared Garst <jgarst@users.noreply.github.com>
# Copyright (c) 2016 Jakub Wilk <jwilk@jwilk.net>
# Copyright (c) 2017 Łukasz Rogalski <rogalski.91@gmail.com>
# Copyright (c) 2017 rr- <rr-@sakuya.pl>
# Copyright (c) 2018 brendanator <brendan.maginnis@gmail.com>
# Copyright (c) 2018 Nick Drozd <nicholasdrozd@gmail.com>
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
"""This module renders Astroid nodes as string:
* :func:`to_code` function return equivalent (hopefully valid) python string
* :func:`dump` function return an internal representation of nodes found
in the tree, useful for debugging or understanding the tree structure
"""
import sys
# pylint: disable=unused-argument
DOC_NEWLINE = "\0"
class AsStringVisitor:
"""Visitor to render an Astroid node as a valid python code string"""
def __init__(self, indent):
self.indent = indent
def __call__(self, node):
"""Makes this visitor behave as a simple function"""
return node.accept(self).replace(DOC_NEWLINE, "\n")
def _docs_dedent(self, doc):
"""Stop newlines in docs being indented by self._stmt_list"""
return '\n%s"""%s"""' % (self.indent, doc.replace("\n", DOC_NEWLINE))
def _stmt_list(self, stmts, indent=True):
"""return a list of nodes to string"""
stmts = "\n".join(nstr for nstr in [n.accept(self) for n in stmts] if nstr)
if indent:
return self.indent + stmts.replace("\n", "\n" + self.indent)
return stmts
def _precedence_parens(self, node, child, is_left=True):
"""Wrap child in parens only if required to keep same semantics"""
if self._should_wrap(node, child, is_left):
return "(%s)" % child.accept(self)
return child.accept(self)
def _should_wrap(self, node, child, is_left):
"""Wrap child if:
- it has lower precedence
- same precedence with position opposite to associativity direction
"""
node_precedence = node.op_precedence()
child_precedence = child.op_precedence()
if node_precedence > child_precedence:
# 3 * (4 + 5)
return True
if (
node_precedence == child_precedence
and is_left != node.op_left_associative()
):
# 3 - (4 - 5)
# (2**3)**4
return True
return False
## visit_<node> methods ###########################################
def visit_arguments(self, node):
"""return an astroid.Function node as string"""
return node.format_args()
def visit_assignattr(self, node):
"""return an astroid.AssAttr node as string"""
return self.visit_attribute(node)
def visit_assert(self, node):
"""return an astroid.Assert node as string"""
if node.fail:
return "assert %s, %s" % (node.test.accept(self), node.fail.accept(self))
return "assert %s" % node.test.accept(self)
def visit_assignname(self, node):
"""return an astroid.AssName node as string"""
return node.name
def visit_assign(self, node):
"""return an astroid.Assign node as string"""
lhs = " = ".join(n.accept(self) for n in node.targets)
return "%s = %s" % (lhs, node.value.accept(self))
def visit_augassign(self, node):
"""return an astroid.AugAssign node as string"""
return "%s %s %s" % (node.target.accept(self), node.op, node.value.accept(self))
def visit_annassign(self, node):
"""Return an astroid.AugAssign node as string"""
target = node.target.accept(self)
annotation = node.annotation.accept(self)
if node.value is None:
return "%s: %s" % (target, annotation)
return "%s: %s = %s" % (target, annotation, node.value.accept(self))
def visit_repr(self, node):
"""return an astroid.Repr node as string"""
return "`%s`" % node.value.accept(self)
def visit_binop(self, node):
"""return an astroid.BinOp node as string"""
left = self._precedence_parens(node, node.left)
right = self._precedence_parens(node, node.right, is_left=False)
if node.op == "**":
return "%s%s%s" % (left, node.op, right)
return "%s %s %s" % (left, node.op, right)
def visit_boolop(self, node):
"""return an astroid.BoolOp node as string"""
values = ["%s" % self._precedence_parens(node, n) for n in node.values]
return (" %s " % node.op).join(values)
def visit_break(self, node):
"""return an astroid.Break node as string"""
return "break"
def visit_call(self, node):
"""return an astroid.Call node as string"""
expr_str = self._precedence_parens(node, node.func)
args = [arg.accept(self) for arg in node.args]
if node.keywords:
keywords = [kwarg.accept(self) for kwarg in node.keywords]
else:
keywords = []
args.extend(keywords)
return "%s(%s)" % (expr_str, ", ".join(args))
def visit_classdef(self, node):
"""return an astroid.ClassDef node as string"""
decorate = node.decorators.accept(self) if node.decorators else ""
bases = ", ".join(n.accept(self) for n in node.bases)
metaclass = node.metaclass()
if metaclass and not node.has_metaclass_hack():
if bases:
bases = "(%s, metaclass=%s)" % (bases, metaclass.name)
else:
bases = "(metaclass=%s)" % metaclass.name
else:
bases = "(%s)" % bases if bases else ""
docs = self._docs_dedent(node.doc) if node.doc else ""
return "\n\n%sclass %s%s:%s\n%s\n" % (
decorate,
node.name,
bases,
docs,
self._stmt_list(node.body),
)
def visit_compare(self, node):
"""return an astroid.Compare node as string"""
rhs_str = " ".join(
[
"%s %s" % (op, self._precedence_parens(node, expr, is_left=False))
for op, expr in node.ops
]
)
return "%s %s" % (self._precedence_parens(node, node.left), rhs_str)
def visit_comprehension(self, node):
"""return an astroid.Comprehension node as string"""
ifs = "".join(" if %s" % n.accept(self) for n in node.ifs)
return "for %s in %s%s" % (
node.target.accept(self),
node.iter.accept(self),
ifs,
)
def visit_const(self, node):
"""return an astroid.Const node as string"""
if node.value is Ellipsis:
return "..."
return repr(node.value)
def visit_continue(self, node):
"""return an astroid.Continue node as string"""
return "continue"
def visit_delete(self, node): # XXX check if correct
"""return an astroid.Delete node as string"""
return "del %s" % ", ".join(child.accept(self) for child in node.targets)
def visit_delattr(self, node):
"""return an astroid.DelAttr node as string"""
return self.visit_attribute(node)
def visit_delname(self, node):
"""return an astroid.DelName node as string"""
return node.name
def visit_decorators(self, node):
"""return an astroid.Decorators node as string"""
return "@%s\n" % "\n@".join(item.accept(self) for item in node.nodes)
def visit_dict(self, node):
"""return an astroid.Dict node as string"""
return "{%s}" % ", ".join(self._visit_dict(node))
def _visit_dict(self, node):
for key, value in node.items:
key = key.accept(self)
value = value.accept(self)
if key == "**":
# It can only be a DictUnpack node.
yield key + value
else:
yield "%s: %s" % (key, value)
def visit_dictunpack(self, node):
return "**"
def visit_dictcomp(self, node):
"""return an astroid.DictComp node as string"""
return "{%s: %s %s}" % (
node.key.accept(self),
node.value.accept(self),
" ".join(n.accept(self) for n in node.generators),
)
def visit_expr(self, node):
"""return an astroid.Discard node as string"""
return node.value.accept(self)
def visit_emptynode(self, node):
"""dummy method for visiting an Empty node"""
return ""
def visit_excepthandler(self, node):
if node.type:
if node.name:
excs = "except %s, %s" % (
node.type.accept(self),
node.name.accept(self),
)
else:
excs = "except %s" % node.type.accept(self)
else:
excs = "except"
return "%s:\n%s" % (excs, self._stmt_list(node.body))
def visit_ellipsis(self, node):
"""return an astroid.Ellipsis node as string"""
return "..."
def visit_empty(self, node):
"""return an Empty node as string"""
return ""
def visit_exec(self, node):
"""return an astroid.Exec node as string"""
if node.locals:
return "exec %s in %s, %s" % (
node.expr.accept(self),
node.locals.accept(self),
node.globals.accept(self),
)
if node.globals:
return "exec %s in %s" % (node.expr.accept(self), node.globals.accept(self))
return "exec %s" % node.expr.accept(self)
def visit_extslice(self, node):
"""return an astroid.ExtSlice node as string"""
return ", ".join(dim.accept(self) for dim in node.dims)
def visit_for(self, node):
"""return an astroid.For node as string"""
fors = "for %s in %s:\n%s" % (
node.target.accept(self),
node.iter.accept(self),
self._stmt_list(node.body),
)
if node.orelse:
fors = "%s\nelse:\n%s" % (fors, self._stmt_list(node.orelse))
return fors
def visit_importfrom(self, node):
"""return an astroid.ImportFrom node as string"""
return "from %s import %s" % (
"." * (node.level or 0) + node.modname,
_import_string(node.names),
)
def visit_functiondef(self, node):
"""return an astroid.Function node as string"""
decorate = node.decorators.accept(self) if node.decorators else ""
docs = self._docs_dedent(node.doc) if node.doc else ""
trailer = ":"
if node.returns:
return_annotation = "->" + node.returns.as_string()
trailer = return_annotation + ":"
def_format = "\n%sdef %s(%s)%s%s\n%s"
return def_format % (
decorate,
node.name,
node.args.accept(self),
trailer,
docs,
self._stmt_list(node.body),
)
def visit_generatorexp(self, node):
"""return an astroid.GeneratorExp node as string"""
return "(%s %s)" % (
node.elt.accept(self),
" ".join(n.accept(self) for n in node.generators),
)
def visit_attribute(self, node):
"""return an astroid.Getattr node as string"""
return "%s.%s" % (self._precedence_parens(node, node.expr), node.attrname)
def visit_global(self, node):
"""return an astroid.Global node as string"""
return "global %s" % ", ".join(node.names)
def visit_if(self, node):
"""return an astroid.If node as string"""
ifs = ["if %s:\n%s" % (node.test.accept(self), self._stmt_list(node.body))]
if node.has_elif_block():
ifs.append("el%s" % self._stmt_list(node.orelse, indent=False))
elif node.orelse:
ifs.append("else:\n%s" % self._stmt_list(node.orelse))
return "\n".join(ifs)
def visit_ifexp(self, node):
"""return an astroid.IfExp node as string"""
return "%s if %s else %s" % (
self._precedence_parens(node, node.body, is_left=True),
self._precedence_parens(node, node.test, is_left=True),
self._precedence_parens(node, node.orelse, is_left=False),
)
def visit_import(self, node):
"""return an astroid.Import node as string"""
return "import %s" % _import_string(node.names)
def visit_keyword(self, node):
"""return an astroid.Keyword node as string"""
if node.arg is None:
return "**%s" % node.value.accept(self)
return "%s=%s" % (node.arg, node.value.accept(self))
def visit_lambda(self, node):
"""return an astroid.Lambda node as string"""
args = node.args.accept(self)
body = node.body.accept(self)
if args:
return "lambda %s: %s" % (args, body)
return "lambda: %s" % body
def visit_list(self, node):
"""return an astroid.List node as string"""
return "[%s]" % ", ".join(child.accept(self) for child in node.elts)
def visit_listcomp(self, node):
"""return an astroid.ListComp node as string"""
return "[%s %s]" % (
node.elt.accept(self),
" ".join(n.accept(self) for n in node.generators),
)
def visit_module(self, node):
"""return an astroid.Module node as string"""
docs = '"""%s"""\n\n' % node.doc if node.doc else ""
return docs + "\n".join(n.accept(self) for n in node.body) + "\n\n"
def visit_name(self, node):
"""return an astroid.Name node as string"""
return node.name
def visit_pass(self, node):
"""return an astroid.Pass node as string"""
return "pass"
def visit_print(self, node):
"""return an astroid.Print node as string"""
nodes = ", ".join(n.accept(self) for n in node.values)
if not node.nl:
nodes = "%s," % nodes
if node.dest:
return "print >> %s, %s" % (node.dest.accept(self), nodes)
return "print %s" % nodes
def visit_raise(self, node):
"""return an astroid.Raise node as string"""
if node.exc:
if node.inst:
if node.tback:
return "raise %s, %s, %s" % (
node.exc.accept(self),
node.inst.accept(self),
node.tback.accept(self),
)
return "raise %s, %s" % (node.exc.accept(self), node.inst.accept(self))
return "raise %s" % node.exc.accept(self)
return "raise"
def visit_return(self, node):
"""return an astroid.Return node as string"""
if node.is_tuple_return() and len(node.value.elts) > 1:
elts = [child.accept(self) for child in node.value.elts]
return "return %s" % ", ".join(elts)
if node.value:
return "return %s" % node.value.accept(self)
return "return"
def visit_index(self, node):
"""return an astroid.Index node as string"""
return node.value.accept(self)
def visit_set(self, node):
"""return an astroid.Set node as string"""
return "{%s}" % ", ".join(child.accept(self) for child in node.elts)
def visit_setcomp(self, node):
"""return an astroid.SetComp node as string"""
return "{%s %s}" % (
node.elt.accept(self),
" ".join(n.accept(self) for n in node.generators),
)
def visit_slice(self, node):
"""return an astroid.Slice node as string"""
lower = node.lower.accept(self) if node.lower else ""
upper = node.upper.accept(self) if node.upper else ""
step = node.step.accept(self) if node.step else ""
if step:
return "%s:%s:%s" % (lower, upper, step)
return "%s:%s" % (lower, upper)
def visit_subscript(self, node):
"""return an astroid.Subscript node as string"""
idx = node.slice
if idx.__class__.__name__.lower() == "index":
idx = idx.value
idxstr = idx.accept(self)
if idx.__class__.__name__.lower() == "tuple" and idx.elts:
# Remove parenthesis in tuple and extended slice.
# a[(::1, 1:)] is not valid syntax.
idxstr = idxstr[1:-1]
return "%s[%s]" % (self._precedence_parens(node, node.value), idxstr)
def visit_tryexcept(self, node):
"""return an astroid.TryExcept node as string"""
trys = ["try:\n%s" % self._stmt_list(node.body)]
for handler in node.handlers:
trys.append(handler.accept(self))
if node.orelse:
trys.append("else:\n%s" % self._stmt_list(node.orelse))
return "\n".join(trys)
def visit_tryfinally(self, node):
"""return an astroid.TryFinally node as string"""
return "try:\n%s\nfinally:\n%s" % (
self._stmt_list(node.body),
self._stmt_list(node.finalbody),
)
def visit_tuple(self, node):
"""return an astroid.Tuple node as string"""
if len(node.elts) == 1:
return "(%s, )" % node.elts[0].accept(self)
return "(%s)" % ", ".join(child.accept(self) for child in node.elts)
def visit_unaryop(self, node):
"""return an astroid.UnaryOp node as string"""
if node.op == "not":
operator = "not "
else:
operator = node.op
return "%s%s" % (operator, self._precedence_parens(node, node.operand))
def visit_while(self, node):
"""return an astroid.While node as string"""
whiles = "while %s:\n%s" % (node.test.accept(self), self._stmt_list(node.body))
if node.orelse:
whiles = "%s\nelse:\n%s" % (whiles, self._stmt_list(node.orelse))
return whiles
def visit_with(self, node): # 'with' without 'as' is possible
"""return an astroid.With node as string"""
items = ", ".join(
("%s" % expr.accept(self)) + (vars and " as %s" % (vars.accept(self)) or "")
for expr, vars in node.items
)
return "with %s:\n%s" % (items, self._stmt_list(node.body))
def visit_yield(self, node):
"""yield an ast.Yield node as string"""
yi_val = (" " + node.value.accept(self)) if node.value else ""
expr = "yield" + yi_val
if node.parent.is_statement:
return expr
return "(%s)" % (expr,)
def visit_starred(self, node):
"""return Starred node as string"""
return "*" + node.value.accept(self)
# These aren't for real AST nodes, but for inference objects.
def visit_frozenset(self, node):
return node.parent.accept(self)
def visit_super(self, node):
return node.parent.accept(self)
def visit_uninferable(self, node):
return str(node)
class AsStringVisitor3(AsStringVisitor):
    """Python 3 specialisation of AsStringVisitor."""
    def visit_excepthandler(self, node):
        """Render an except handler using Python 3 ``as`` syntax."""
        if not node.type:
            header = "except"
        elif node.name:
            header = "except %s as %s" % (
                node.type.accept(self),
                node.name.accept(self),
            )
        else:
            header = "except %s" % node.type.accept(self)
        return "%s:\n%s" % (header, self._stmt_list(node.body))
    def visit_nonlocal(self, node):
        """Render an astroid.Nonlocal node as source text."""
        return "nonlocal %s" % ", ".join(node.names)
    def visit_raise(self, node):
        """Render a Python 3 raise statement as source text."""
        if not node.exc:
            return "raise"
        if node.cause:
            return "raise %s from %s" % (
                node.exc.accept(self),
                node.cause.accept(self),
            )
        return "raise %s" % node.exc.accept(self)
    def visit_yieldfrom(self, node):
        """Render an astroid.YieldFrom node as source text."""
        suffix = " " + node.value.accept(self) if node.value else ""
        rendered = "yield from" + suffix
        if node.parent.is_statement:
            return rendered
        return "(%s)" % (rendered,)
    def visit_asyncfunctiondef(self, node):
        """Render an async function definition as source text."""
        sync_src = super(AsStringVisitor3, self).visit_functiondef(node)
        return "async " + sync_src.strip()
    def visit_await(self, node):
        """Render an astroid.Await node as source text."""
        return "await %s" % node.value.accept(self)
    def visit_asyncwith(self, node):
        """Render an async with block as source text."""
        return "async %s" % self.visit_with(node)
    def visit_asyncfor(self, node):
        """Render an async for loop as source text."""
        return "async %s" % self.visit_for(node)
    def visit_joinedstr(self, node):
        """Render an f-string; Const parts are joined literally, not repr'd."""
        pieces = []
        for value in node.values:
            if type(value).__name__ == "Const":
                pieces.append(value.value)
            else:
                pieces.append(value.accept(self))
        return "f'%s'" % "".join(pieces)
    def visit_formattedvalue(self, node):
        """Render a formatted value inside an f-string."""
        return "{%s}" % node.value.accept(self)
    def visit_comprehension(self, node):
        """Render a comprehension clause, honouring ``async for``."""
        rendered = super(AsStringVisitor3, self).visit_comprehension(node)
        prefix = "async " if node.is_async else ""
        return "%s%s" % (prefix, rendered)
def _import_string(names):
"""return a list of (name, asname) formatted as a string"""
_names = []
for name, asname in names:
if asname is not None:
_names.append("%s as %s" % (name, asname))
else:
_names.append(name)
return ", ".join(_names)
if sys.version_info >= (3, 0):
    # On Python 3, the subclass with py3-only visit methods replaces the base.
    AsStringVisitor = AsStringVisitor3
# This sets the default indent to 4 spaces.
# NOTE(review): the literal below renders as a single space in this view;
# the comment above suggests it should be four spaces — confirm upstream.
to_code = AsStringVisitor(" ")

View File

@@ -0,0 +1,547 @@
# Copyright (c) 2009-2011, 2013-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
# Copyright (c) 2012 FELD Boris <lothiraldan@gmail.com>
# Copyright (c) 2014-2018 Claudiu Popa <pcmanticore@gmail.com>
# Copyright (c) 2014 Google, Inc.
# Copyright (c) 2014 Eevee (Alex Munroe) <amunroe@yelp.com>
# Copyright (c) 2015-2016 Ceridwen <ceridwenv@gmail.com>
# Copyright (c) 2015 Florian Bruhin <me@the-compiler.org>
# Copyright (c) 2016-2017 Derek Gustafson <degustaf@gmail.com>
# Copyright (c) 2017 Calen Pennington <calen.pennington@gmail.com>
# Copyright (c) 2018 Bryce Guinta <bryce.paul.guinta@gmail.com>
# Copyright (c) 2018 Nick Drozd <nicholasdrozd@gmail.com>
# Copyright (c) 2018 Daniel Colascione <dancol@dancol.org>
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
"""This module contains base classes and functions for the nodes and some
inference utils.
"""
import builtins
import collections
import sys
from astroid import context as contextmod
from astroid import exceptions
from astroid import util
# Lazy imports break the circular dependencies between core astroid modules.
objectmodel = util.lazy_import("interpreter.objectmodel")
helpers = util.lazy_import("helpers")
BUILTINS = builtins.__name__
manager = util.lazy_import("manager")
MANAGER = manager.AstroidManager()
# Re-assigned below per interpreter version (shadows the value above).
if sys.version_info >= (3, 0):
    # TODO: check if needs special treatment
    BUILTINS = "builtins"
    BOOL_SPECIAL_METHOD = "__bool__"
else:
    BUILTINS = "__builtin__"
    BOOL_SPECIAL_METHOD = "__nonzero__"
# Fully qualified names of the builtin property decorators.
PROPERTIES = {BUILTINS + ".property", "abc.abstractproperty"}
# List of possible property names. We use this list in order
# to see if a method is a property or not. This should be
# pretty reliable and fast, the alternative being to check each
# decorator to see if its a real property-like descriptor, which
# can be too complicated.
# Also, these aren't qualified, because each project can
# define them, we shouldn't expect to know every possible
# property-like decorator!
POSSIBLE_PROPERTIES = {
    "cached_property",
    "cachedproperty",
    "lazyproperty",
    "lazy_property",
    "reify",
    "lazyattribute",
    "lazy_attribute",
    "LazyProperty",
    "lazy",
    "cache_readonly",
}
def _is_property(meth):
    """Return True when *meth* is decorated so that it behaves like a property."""
    decorator_names = meth.decoratornames()
    if PROPERTIES.intersection(decorator_names):
        return True
    # Compare unqualified decorator names against known property-like ones.
    stripped = {
        name.split(".")[-1]
        for name in decorator_names
        if name is not util.Uninferable
    }
    if stripped.intersection(POSSIBLE_PROPERTIES):
        return True
    # Lookup for subclasses of *property*
    if not meth.decorators:
        return False
    for decorator in meth.decorators.nodes or ():
        inferred = helpers.safe_infer(decorator)
        if inferred is None or inferred is util.Uninferable:
            continue
        if inferred.__class__.__name__ != "ClassDef":
            continue
        for base_class in inferred.bases:
            if base_class.__class__.__name__ != "Name":
                continue
            module, _ = base_class.lookup(base_class.name)
            if module.name == BUILTINS and base_class.name == "property":
                return True
    return False
class Proxy:
    """a simple proxy object

    Note:
        Subclasses of this object will need a custom __getattr__
        if new instance attributes are created. See the Const class
    """

    _proxied = None  # proxied object may be set by class or by instance

    def __init__(self, proxied=None):
        # Keep the class-level default when nothing is supplied.
        if proxied is not None:
            self._proxied = proxied

    def __getattr__(self, name):
        # Guard against infinite recursion: resolve _proxied on the class.
        if name == "_proxied":
            return getattr(self.__class__, "_proxied")
        if name in self.__dict__:
            return self.__dict__[name]
        # Everything else is delegated to the proxied object.
        return getattr(self._proxied, name)

    def infer(self, context=None):
        # A proxy infers to itself.
        yield self
def _infer_stmts(stmts, context, frame=None):
    """Return an iterator on statements inferred by each statement in *stmts*.

    Uninferable markers in *stmts* are passed through unchanged; an
    InferenceError is raised only when nothing at all could be yielded.
    """
    # Flag recording whether anything was yielded; NOTE the same name is
    # also rebound to each inferred value inside the loop below.
    inferred = False
    if context is not None:
        name = context.lookupname
        context = context.clone()
    else:
        name = None
        context = contextmod.InferenceContext()
    for stmt in stmts:
        if stmt is util.Uninferable:
            yield stmt
            inferred = True
            continue
        # Narrow the lookup name to this particular statement.
        context.lookupname = stmt._infer_name(frame, name)
        try:
            for inferred in stmt.infer(context=context):
                yield inferred
                inferred = True
        except exceptions.NameInferenceError:
            # The name simply does not resolve here; try the next statement.
            continue
        except exceptions.InferenceError:
            # Inference failed for this statement: surface a marker instead
            # of aborting the whole iteration.
            yield util.Uninferable
            inferred = True
    if not inferred:
        raise exceptions.InferenceError(
            "Inference failed for all members of {stmts!r}.",
            stmts=stmts,
            frame=frame,
            context=context,
        )
def _infer_method_result_truth(instance, method_name, context):
    """Infer the truth value of calling *method_name* on *instance*.

    Returns util.Uninferable when the method is missing, not callable,
    or its result cannot be inferred.
    """
    # Get the method from the instance and try to infer
    # its return's truth value.
    meth = next(instance.igetattr(method_name, context=context), None)
    if meth and hasattr(meth, "infer_call_result"):
        if not meth.callable():
            return util.Uninferable
        try:
            for value in meth.infer_call_result(instance, context=context):
                if value is util.Uninferable:
                    return value
                # Only the first inferable call result is considered.
                inferred = next(value.infer(context=context))
                return inferred.bool_value()
        except exceptions.InferenceError:
            pass
    return util.Uninferable
class BaseInstance(Proxy):
    """An instance base class, which provides lookup methods for potential instances."""

    # Subclasses set this to an object model describing special attributes;
    # None means no special attributes are available.
    special_attributes = None

    def display_type(self):
        """Return a human-readable kind label for messages."""
        return "Instance of"

    def getattr(self, name, context=None, lookupclass=True):
        """Return the list of nodes bound to *name* on this instance.

        Instance attributes are searched first; when *lookupclass* is true,
        class attributes are used as a fallback and merged into the result.
        Raises AttributeInferenceError when nothing matches.
        """
        try:
            values = self._proxied.instance_attr(name, context)
        except exceptions.AttributeInferenceError as exc:
            if self.special_attributes and name in self.special_attributes:
                return [self.special_attributes.lookup(name)]
            if lookupclass:
                # Class attributes not available through the instance
                # unless they are explicitly defined.
                return self._proxied.getattr(name, context, class_context=False)
            raise exceptions.AttributeInferenceError(
                target=self, attribute=name, context=context
            ) from exc
        # since we've no context information, return matching class members as
        # well
        if lookupclass:
            try:
                return values + self._proxied.getattr(
                    name, context, class_context=False
                )
            except exceptions.AttributeInferenceError:
                pass
        return values

    def igetattr(self, name, context=None):
        """inferred getattr"""
        if not context:
            context = contextmod.InferenceContext()
        try:
            # avoid recursively inferring the same attr on the same class
            if context.push((self._proxied, name)):
                return
            # XXX frame should be self._proxied, or not ?
            get_attr = self.getattr(name, context, lookupclass=False)
            yield from _infer_stmts(
                self._wrap_attr(get_attr, context), context, frame=self
            )
        except exceptions.AttributeInferenceError as error:
            try:
                # fallback to class.igetattr since it has some logic to handle
                # descriptors
                # But only if the _proxied is the Class.
                if self._proxied.__class__.__name__ != "ClassDef":
                    raise exceptions.InferenceError(**vars(error)) from error
                attrs = self._proxied.igetattr(name, context, class_context=False)
                yield from self._wrap_attr(attrs, context)
            except exceptions.AttributeInferenceError as error:
                raise exceptions.InferenceError(**vars(error)) from error

    def _wrap_attr(self, attrs, context=None):
        """wrap bound methods of attrs in a InstanceMethod proxies"""
        for attr in attrs:
            if isinstance(attr, UnboundMethod):
                if _is_property(attr):
                    # Properties yield their computed value, not the method.
                    yield from attr.infer_call_result(self, context)
                else:
                    yield BoundMethod(attr, self)
            elif hasattr(attr, "name") and attr.name == "<lambda>":
                # This is a lambda function defined at class level,
                # since its scope is the underlying _proxied class.
                # Unfortunately, we can't do an isinstance check here,
                # because of the circular dependency between astroid.bases
                # and astroid.scoped_nodes.
                if attr.statement().scope() == self._proxied:
                    if attr.args.args and attr.args.args[0].name == "self":
                        yield BoundMethod(attr, self)
                        continue
                yield attr
            else:
                yield attr

    def infer_call_result(self, caller, context=None):
        """infer what a class instance is returning when called"""
        context = contextmod.bind_context_to_node(context, self)
        inferred = False
        for node in self._proxied.igetattr("__call__", context):
            if node is util.Uninferable or not node.callable():
                continue
            for res in node.infer_call_result(caller, context):
                inferred = True
                yield res
        if not inferred:
            raise exceptions.InferenceError(node=self, caller=caller, context=context)
class Instance(BaseInstance):
    """A special node representing a class instance."""

    # pylint: disable=unnecessary-lambda
    special_attributes = util.lazy_descriptor(lambda: objectmodel.InstanceModel())

    def __repr__(self):
        return "<Instance of %s.%s at 0x%s>" % (
            self._proxied.root().name,
            self._proxied.name,
            id(self),
        )

    def __str__(self):
        return "Instance of %s.%s" % (self._proxied.root().name, self._proxied.name)

    def callable(self):
        """Return True when the instance's class defines __call__."""
        try:
            self._proxied.getattr("__call__", class_context=False)
            return True
        except exceptions.AttributeInferenceError:
            return False

    def pytype(self):
        """Return the qualified name of the instance's class."""
        return self._proxied.qname()

    def display_type(self):
        return "Instance of"

    def bool_value(self):
        """Infer the truth value for an Instance

        The truth value of an instance is determined by these conditions:

           * if it implements __bool__ on Python 3 or __nonzero__
             on Python 2, then its bool value will be determined by
             calling this special method and checking its result.
           * when this method is not defined, __len__() is called, if it
             is defined, and the object is considered true if its result is
             nonzero. If a class defines neither __len__() nor __bool__(),
             all its instances are considered true.
        """
        context = contextmod.InferenceContext()
        context.callcontext = contextmod.CallContext(args=[])
        context.boundnode = self
        try:
            result = _infer_method_result_truth(self, BOOL_SPECIAL_METHOD, context)
        except (exceptions.InferenceError, exceptions.AttributeInferenceError):
            # Fallback to __len__.
            try:
                result = _infer_method_result_truth(self, "__len__", context)
            except (exceptions.AttributeInferenceError, exceptions.InferenceError):
                return True
        return result

    # This is set in inference.py.
    def getitem(self, index, context=None):
        pass
class UnboundMethod(Proxy):
    """a special node representing a method not bound to an instance"""

    # pylint: disable=unnecessary-lambda
    special_attributes = util.lazy_descriptor(lambda: objectmodel.UnboundMethodModel())

    def __repr__(self):
        frame = self._proxied.parent.frame()
        return "<%s %s of %s at 0x%s" % (
            self.__class__.__name__,
            self._proxied.name,
            frame.qname(),
            id(self),
        )

    def implicit_parameters(self):
        """Unbound methods receive no implicit first argument."""
        return 0

    def is_bound(self):
        return False

    def getattr(self, name, context=None):
        # Special attributes shadow those of the proxied function.
        if name in self.special_attributes:
            return [self.special_attributes.lookup(name)]
        return self._proxied.getattr(name, context)

    def igetattr(self, name, context=None):
        """Inferred getattr: special attributes first, then the proxied function."""
        if name in self.special_attributes:
            return iter((self.special_attributes.lookup(name),))
        return self._proxied.igetattr(name, context)

    def infer_call_result(self, caller, context):
        """
        The boundnode of the regular context with a function called
        on ``object.__new__`` will be of type ``object``,
        which is incorrect for the argument in general.
        If no context is given the ``object.__new__`` call argument will
        correctly inferred except when inside a call that requires
        the additional context (such as a classmethod) of the boundnode
        to determine which class the method was called from
        """

        # If we're unbound method __new__ of builtin object, the result is an
        # instance of the class given as first argument.
        if (
            self._proxied.name == "__new__"
            and self._proxied.parent.frame().qname() == "%s.object" % BUILTINS
        ):
            if caller.args:
                node_context = context.extra_context.get(caller.args[0])
                infer = caller.args[0].infer(context=node_context)
            else:
                infer = []
            return (Instance(x) if x is not util.Uninferable else x for x in infer)
        return self._proxied.infer_call_result(caller, context)

    def bool_value(self):
        # Method objects are always truthy.
        return True
class BoundMethod(UnboundMethod):
    """a special node representing a method bound to an instance"""

    # pylint: disable=unnecessary-lambda
    special_attributes = util.lazy_descriptor(lambda: objectmodel.BoundMethodModel())

    def __init__(self, proxy, bound):
        UnboundMethod.__init__(self, proxy)
        # The instance (or class) the method is bound to.
        self.bound = bound

    def implicit_parameters(self):
        # 'self' (or 'cls') is supplied implicitly.
        return 1

    def is_bound(self):
        return True

    def _infer_type_new_call(self, caller, context):
        """Try to infer what type.__new__(mcs, name, bases, attrs) returns.

        In order for such call to be valid, the metaclass needs to be
        a subtype of ``type``, the name needs to be a string, the bases
        needs to be a tuple of classes
        """
        from astroid import node_classes

        # Verify the metaclass
        mcs = next(caller.args[0].infer(context=context))
        if mcs.__class__.__name__ != "ClassDef":
            # Not a valid first argument.
            return None
        if not mcs.is_subtype_of("%s.type" % BUILTINS):
            # Not a valid metaclass.
            return None

        # Verify the name
        name = next(caller.args[1].infer(context=context))
        if name.__class__.__name__ != "Const":
            # Not a valid name, needs to be a const.
            return None
        if not isinstance(name.value, str):
            # Needs to be a string.
            return None

        # Verify the bases
        bases = next(caller.args[2].infer(context=context))
        if bases.__class__.__name__ != "Tuple":
            # Needs to be a tuple.
            return None
        inferred_bases = [next(elt.infer(context=context)) for elt in bases.elts]
        if any(base.__class__.__name__ != "ClassDef" for base in inferred_bases):
            # All the bases needs to be Classes
            return None

        # Verify the attributes.
        attrs = next(caller.args[3].infer(context=context))
        if attrs.__class__.__name__ != "Dict":
            # Needs to be a dictionary.
            return None
        cls_locals = collections.defaultdict(list)
        for key, value in attrs.items:
            key = next(key.infer(context=context))
            value = next(value.infer(context=context))
            # Ignore non string keys
            if key.__class__.__name__ == "Const" and isinstance(key.value, str):
                cls_locals[key.value].append(value)

        # Build the class from now.
        cls = mcs.__class__(
            name=name.value,
            lineno=caller.lineno,
            col_offset=caller.col_offset,
            parent=caller,
        )
        # A Pass node keeps the synthesized class body non-empty.
        empty = node_classes.Pass()
        cls.postinit(
            bases=bases.elts,
            body=[empty],
            decorators=[],
            newstyle=True,
            metaclass=mcs,
            keywords=[],
        )
        cls.locals = cls_locals
        return cls

    def infer_call_result(self, caller, context=None):
        """Infer the result of calling this bound method."""
        context = contextmod.bind_context_to_node(context, self.bound)
        if (
            self.bound.__class__.__name__ == "ClassDef"
            and self.bound.name == "type"
            and self.name == "__new__"
            and len(caller.args) == 4
        ):
            # Check if we have a ``type.__new__(mcs, name, bases, attrs)`` call.
            new_cls = self._infer_type_new_call(caller, context)
            if new_cls:
                return iter((new_cls,))
        return super(BoundMethod, self).infer_call_result(caller, context)

    def bool_value(self):
        # Method objects are always truthy.
        return True
class Generator(BaseInstance):
    """a special node representing a generator.

    Proxied class is set once for all in raw_building.
    """

    # pylint: disable=unnecessary-lambda
    special_attributes = util.lazy_descriptor(lambda: objectmodel.GeneratorModel())

    # pylint: disable=super-init-not-called
    def __init__(self, parent=None):
        self.parent = parent

    def callable(self):
        # A generator object itself is not callable.
        return False

    def pytype(self):
        return "%s.generator" % BUILTINS

    def display_type(self):
        return "Generator"

    def bool_value(self):
        # Generator objects are always truthy.
        return True

    def __repr__(self):
        return "<Generator(%s) l.%s at 0x%s>" % (
            self._proxied.name,
            self.lineno,
            id(self),
        )

    def __str__(self):
        return "Generator(%s)" % (self._proxied.name)
class AsyncGenerator(Generator):
    """Special node representing an async generator"""

    def pytype(self):
        return "%s.async_generator" % BUILTINS

    def display_type(self):
        return "AsyncGenerator"

    def __repr__(self):
        return "<AsyncGenerator(%s) l.%s at 0x%s>" % (
            self._proxied.name,
            self.lineno,
            id(self),
        )

    def __str__(self):
        return "AsyncGenerator(%s)" % (self._proxied.name)

View File

@@ -0,0 +1,33 @@
from astroid import MANAGER, arguments, nodes, inference_tip, UseInferenceDefault
def infer_namespace(node, context=None):
    """Infer an ``argparse.Namespace(**kwargs)`` call as a fake class instance."""
    callsite = arguments.CallSite.from_call(node)
    if not callsite.keyword_arguments:
        # Cannot make sense of it.
        raise UseInferenceDefault()
    namespace_cls = nodes.ClassDef("Namespace", "docstring")
    namespace_cls.parent = node.parent
    # Each keyword argument becomes an instance attribute of the fake class.
    for attribute in set(callsite.keyword_arguments):
        placeholder = nodes.EmptyNode()
        placeholder.parent = namespace_cls
        placeholder.attrname = attribute
        namespace_cls.instance_attrs[attribute] = [placeholder]
    return iter((namespace_cls.instantiate_class(),))
def _looks_like_namespace(node):
    """Return True for calls of the exact form ``argparse.Namespace(...)``."""
    func = node.func
    if not isinstance(func, nodes.Attribute):
        return False
    if func.attrname != "Namespace":
        return False
    return isinstance(func.expr, nodes.Name) and func.expr.name == "argparse"
# Replace the default inference of argparse.Namespace(...) call sites.
MANAGER.register_transform(
    nodes.Call, inference_tip(infer_namespace), _looks_like_namespace
)

View File

@@ -0,0 +1,59 @@
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
"""
Astroid hook for the attrs library
Without this hook pylint reports unsupported-assignment-operation
for attrs classes
"""
import astroid
from astroid import MANAGER
# Names under which the attr attribute factory may appear in source.
ATTRIB_NAMES = frozenset(("attr.ib", "attrib", "attr.attrib"))
# Class decorators that mark a class as an attrs class.
ATTRS_NAMES = frozenset(("attr.s", "attrs", "attr.attrs", "attr.attributes"))
def is_decorated_with_attrs(node, decorator_names=ATTRS_NAMES):
    """Return True when *node* carries one of the attrs class decorators."""
    if not node.decorators:
        return False
    for decorator in node.decorators.nodes:
        if isinstance(decorator, astroid.Call):  # decorator with arguments
            decorator = decorator.func
        if decorator.as_string() in decorator_names:
            return True
    return False
def attr_attributes_transform(node):
    """Rewrite attr.ib() class attributes of an attrs class as instance attributes."""
    # Astroid can't infer this attribute properly
    # Prevents https://github.com/PyCQA/pylint/issues/1884
    node.locals["__attrs_attrs__"] = [astroid.Unknown(parent=node)]
    for body_node in node.body:
        if not isinstance(body_node, astroid.Assign):
            continue
        if not isinstance(body_node.value, astroid.Call):
            continue
        if body_node.value.func.as_string() not in ATTRIB_NAMES:
            continue
        for target in body_node.targets:
            placeholder = astroid.Unknown(
                lineno=body_node.lineno,
                col_offset=body_node.col_offset,
                parent=body_node,
            )
            node.locals[target.name] = [placeholder]
# Apply the rewrite to every class decorated with an attrs decorator.
MANAGER.register_transform(
    astroid.ClassDef, attr_attributes_transform, is_decorated_with_attrs
)

View File

@@ -0,0 +1,822 @@
# Copyright (c) 2014-2018 Claudiu Popa <pcmanticore@gmail.com>
# Copyright (c) 2014-2015 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
# Copyright (c) 2015-2016 Ceridwen <ceridwenv@gmail.com>
# Copyright (c) 2015 Rene Zhang <rz99@cornell.edu>
# Copyright (c) 2018 Bryce Guinta <bryce.paul.guinta@gmail.com>
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
"""Astroid hooks for various builtins."""
from functools import partial
from textwrap import dedent
import six
from astroid import (
MANAGER,
Instance,
UseInferenceDefault,
AttributeInferenceError,
inference_tip,
InferenceError,
NameInferenceError,
AstroidTypeError,
MroError,
)
from astroid import arguments
from astroid.builder import AstroidBuilder
from astroid import helpers
from astroid import nodes
from astroid import objects
from astroid import scoped_nodes
from astroid import util
# Qualified name of object.__new__, special-cased during call inference.
OBJECT_DUNDER_NEW = "object.__new__"
def _extend_str(class_node, rvalue):
    """function to extend builtin str/unicode class

    Builds a fake class whose methods return *rvalue* and grafts those
    methods onto *class_node* so that str/bytes method calls infer to a
    value of the right type.
    """
    code = dedent(
        """
    class whatever(object):
        def join(self, iterable):
            return {rvalue}
        def replace(self, old, new, count=None):
            return {rvalue}
        def format(self, *args, **kwargs):
            return {rvalue}
        def encode(self, encoding='ascii', errors=None):
            return ''
        def decode(self, encoding='ascii', errors=None):
            return u''
        def capitalize(self):
            return {rvalue}
        def title(self):
            return {rvalue}
        def lower(self):
            return {rvalue}
        def upper(self):
            return {rvalue}
        def swapcase(self):
            return {rvalue}
        def index(self, sub, start=None, end=None):
            return 0
        def find(self, sub, start=None, end=None):
            return 0
        def count(self, sub, start=None, end=None):
            return 0
        def strip(self, chars=None):
            return {rvalue}
        def lstrip(self, chars=None):
            return {rvalue}
        def rstrip(self, chars=None):
            return {rvalue}
        def rjust(self, width, fillchar=None):
            return {rvalue}
        def center(self, width, fillchar=None):
            return {rvalue}
        def ljust(self, width, fillchar=None):
            return {rvalue}
    """
    )
    code = code.format(rvalue=rvalue)
    fake = AstroidBuilder(MANAGER).string_build(code)["whatever"]
    for method in fake.mymethods():
        # Re-parent each fake method onto the real builtin class and
        # strip location info inherited from the synthetic module.
        # (The original code assigned method.parent twice; the redundant
        # duplicate assignment at the end of the loop has been removed.)
        method.parent = class_node
        method.lineno = None
        method.col_offset = None
        if "__class__" in method.locals:
            method.locals["__class__"] = [class_node]
        class_node.locals[method.name] = [method]
def _extend_builtins(class_transforms):
    """Apply each transform in *class_transforms* to its builtins class."""
    builtins_ast = MANAGER.builtins_module
    for name, apply_transform in class_transforms.items():
        apply_transform(builtins_ast[name])
# Give str and bytes their full complement of C-level methods at load time.
_extend_builtins(
    {
        "bytes": partial(_extend_str, rvalue="b''"),
        "str": partial(_extend_str, rvalue="''"),
    }
)
def _builtin_filter_predicate(node, builtin_name):
    """Return True when *node* is a call to the builtin *builtin_name*.

    Two call shapes are recognised: a plain name call such as ``len(...)``
    and the special-cased classmethod call ``dict.fromkeys(...)``.
    """
    if isinstance(node.func, nodes.Name) and node.func.name == builtin_name:
        return True
    if isinstance(node.func, nodes.Attribute):
        # Only the "dict.fromkeys" transform may match an attribute call;
        # without this guard, every registered builtin transform's predicate
        # would return True for a dict.fromkeys() call site.
        return (
            builtin_name == "dict.fromkeys"
            and node.func.attrname == "fromkeys"
            and isinstance(node.func.expr, nodes.Name)
            and node.func.expr.name == "dict"
        )
    return False
def register_builtin_transform(transform, builtin_name):
    """Register a new transform function for the given *builtin_name*.
    The transform function must accept two parameters, a node and
    an optional context.
    """
    def _transform_wrapper(node, context=None):
        # Adapt a plain transform into an inference-tip generator, filling in
        # parent and position information when the transform left them unset.
        result = transform(node, context=context)
        if result:
            if not result.parent:
                # Let the transformation function determine
                # the parent for its result. Otherwise,
                # we set it to be the node we transformed from.
                result.parent = node
            if result.lineno is None:
                result.lineno = node.lineno
            if result.col_offset is None:
                result.col_offset = node.col_offset
        return iter([result])
    # Hook the wrapper onto Call nodes whose callee matches *builtin_name*.
    MANAGER.register_transform(
        nodes.Call,
        inference_tip(_transform_wrapper),
        partial(_builtin_filter_predicate, builtin_name=builtin_name),
    )
def _generic_inference(node, context, node_type, transform):
    """Infer a one-argument builtin container call.

    *transform* is first tried on the literal argument node; when that yields
    nothing, the argument is inferred and *transform* is applied to the
    inferred node. Raises UseInferenceDefault when no result can be produced.
    """
    args = node.args
    if not args:
        # Zero-argument call infers to the empty container, e.g. tuple().
        return node_type()
    if len(node.args) > 1:
        raise UseInferenceDefault()
    arg, = args
    transformed = transform(arg)
    if not transformed:
        try:
            inferred = next(arg.infer(context=context))
        except (InferenceError, StopIteration):
            raise UseInferenceDefault()
        if inferred is util.Uninferable:
            raise UseInferenceDefault()
        transformed = transform(inferred)
    if not transformed or transformed is util.Uninferable:
        raise UseInferenceDefault()
    return transformed
def _generic_transform(arg, klass, iterables, build_elts):
    """Convert literal node *arg* into a *klass* node built with *build_elts*.

    Returns None when *arg* is not a supported literal, letting the caller
    fall back to inference.
    """
    if isinstance(arg, klass):
        # Already the requested node type - nothing to convert.
        return arg
    elif isinstance(arg, iterables):
        if not all(isinstance(elt, nodes.Const) for elt in arg.elts):
            raise UseInferenceDefault()
        elts = [elt.value for elt in arg.elts]
    elif isinstance(arg, nodes.Dict):
        # Iterating a dict yields its keys.
        if not all(isinstance(elt[0], nodes.Const) for elt in arg.items):
            raise UseInferenceDefault()
        elts = [item[0].value for item in arg.items]
    elif isinstance(arg, nodes.Const) and isinstance(
        arg.value, (six.string_types, six.binary_type)
    ):
        # Strings/bytes iterate into their individual characters.
        elts = arg.value
    else:
        return
    return klass.from_constants(elts=build_elts(elts))
def _infer_builtin(node, context, klass=None, iterables=None, build_elts=None):
    """Shared inference entry point for the container builtins."""
    return _generic_inference(
        node,
        context,
        klass,
        partial(
            _generic_transform, klass=klass, iterables=iterables, build_elts=build_elts
        ),
    )
# pylint: disable=invalid-name
# Concrete inference functions for the container builtins: each accepts the
# listed literal node types and rebuilds the elements with the matching
# Python constructor.
infer_tuple = partial(
    _infer_builtin,
    klass=nodes.Tuple,
    iterables=(
        nodes.List,
        nodes.Set,
        objects.FrozenSet,
        objects.DictItems,
        objects.DictKeys,
        objects.DictValues,
    ),
    build_elts=tuple,
)
infer_list = partial(
    _infer_builtin,
    klass=nodes.List,
    iterables=(
        nodes.Tuple,
        nodes.Set,
        objects.FrozenSet,
        objects.DictItems,
        objects.DictKeys,
        objects.DictValues,
    ),
    build_elts=list,
)
infer_set = partial(
    _infer_builtin,
    klass=nodes.Set,
    iterables=(nodes.List, nodes.Tuple, objects.FrozenSet, objects.DictKeys),
    build_elts=set,
)
infer_frozenset = partial(
    _infer_builtin,
    klass=objects.FrozenSet,
    iterables=(nodes.List, nodes.Tuple, nodes.Set, objects.FrozenSet, objects.DictKeys),
    build_elts=frozenset,
)
def _get_elts(arg, context):
    """Infer *arg* and return its contents as a list of (key, value) pairs."""
    def _is_pair_iterable(candidate):
        return isinstance(candidate, (nodes.List, nodes.Tuple, nodes.Set))
    try:
        inferred = next(arg.infer(context))
    except (InferenceError, NameInferenceError):
        raise UseInferenceDefault()
    if isinstance(inferred, nodes.Dict):
        return inferred.items
    if not _is_pair_iterable(inferred):
        raise UseInferenceDefault()
    pairs = []
    for entry in inferred.elts:
        # Each entry must itself be a two-item iterable. Only hashable keys
        # are considered: tuples and consts; Names are accepted as well.
        if not _is_pair_iterable(entry):
            raise UseInferenceDefault()
        if len(entry.elts) != 2:
            raise UseInferenceDefault()
        if not isinstance(entry.elts[0], (nodes.Tuple, nodes.Const, nodes.Name)):
            raise UseInferenceDefault()
        pairs.append(tuple(entry.elts))
    return pairs
def infer_dict(node, context=None):
    """Try to infer a dict call to a Dict node.
    The function treats the following cases:
        * dict()
        * dict(mapping)
        * dict(iterable)
        * dict(iterable, **kwargs)
        * dict(mapping, **kwargs)
        * dict(**kwargs)
    If a case can't be inferred, we'll fallback to default inference.
    """
    call = arguments.CallSite.from_call(node)
    if call.has_invalid_arguments() or call.has_invalid_keywords():
        raise UseInferenceDefault
    args = call.positional_arguments
    kwargs = list(call.keyword_arguments.items())
    if not args and not kwargs:
        # dict()
        return nodes.Dict()
    elif kwargs and not args:
        # dict(a=1, b=2, c=4)
        items = [(nodes.Const(key), value) for key, value in kwargs]
    elif len(args) == 1 and kwargs:
        # dict(some_iterable, b=2, c=4)
        elts = _get_elts(args[0], context)
        keys = [(nodes.Const(key), value) for key, value in kwargs]
        items = elts + keys
    elif len(args) == 1:
        # dict(mapping) or dict(iterable)
        items = _get_elts(args[0], context)
    else:
        raise UseInferenceDefault()
    # Place the resulting Dict node at the call site.
    value = nodes.Dict(
        col_offset=node.col_offset, lineno=node.lineno, parent=node.parent
    )
    value.postinit(items)
    return value
def infer_super(node, context=None):
    """Understand super calls.
    There are some restrictions for what can be understood:
        * unbounded super (one argument form) is not understood.
        * if the super call is not inside a function (classmethod or method),
          then the default inference will be used.
        * if the super arguments can't be inferred, the default inference
          will be used.
    """
    if len(node.args) == 1:
        # Ignore unbounded super.
        raise UseInferenceDefault
    scope = node.scope()
    if not isinstance(scope, nodes.FunctionDef):
        # Ignore non-method uses of super.
        raise UseInferenceDefault
    if scope.type not in ("classmethod", "method"):
        # Not interested in staticmethods.
        raise UseInferenceDefault
    cls = scoped_nodes.get_wrapping_class(scope)
    if not len(node.args):
        # Zero-argument super(): derive both arguments from the enclosing scope.
        mro_pointer = cls
        # If we are in a classmethod, the interpreter will fill
        # automatically the class as the second argument, not an instance.
        if scope.type == "classmethod":
            mro_type = cls
        else:
            mro_type = cls.instantiate_class()
    else:
        # Two-argument super(type, obj_or_type): infer both explicitly.
        try:
            mro_pointer = next(node.args[0].infer(context=context))
        except InferenceError:
            raise UseInferenceDefault
        try:
            mro_type = next(node.args[1].infer(context=context))
        except InferenceError:
            raise UseInferenceDefault
    if mro_pointer is util.Uninferable or mro_type is util.Uninferable:
        # No way we could understand this.
        raise UseInferenceDefault
    super_obj = objects.Super(
        mro_pointer=mro_pointer, mro_type=mro_type, self_class=cls, scope=scope
    )
    super_obj.parent = node
    return super_obj
def _infer_getattr_args(node, context):
    """Infer and validate the (object, attribute name) pair of a getattr/hasattr call."""
    if len(node.args) not in (2, 3):
        # Not a valid getattr call.
        raise UseInferenceDefault
    try:
        obj = next(node.args[0].infer(context=context))
        attr = next(node.args[1].infer(context=context))
    except InferenceError:
        raise UseInferenceDefault
    if obj is util.Uninferable or attr is util.Uninferable:
        # If one of the arguments is something we can't infer,
        # then also make the result of the getattr call something
        # which is unknown.
        return util.Uninferable, util.Uninferable
    is_string = isinstance(attr, nodes.Const) and isinstance(
        attr.value, six.string_types
    )
    if not is_string:
        # The attribute name must be a constant string.
        raise UseInferenceDefault
    return obj, attr.value
def infer_getattr(node, context=None):
    """Understand getattr calls
    If one of the arguments is an Uninferable object, then the
    result will be an Uninferable object. Otherwise, the normal attribute
    lookup will be done.
    """
    obj, attr = _infer_getattr_args(node, context)
    if (
        obj is util.Uninferable
        or attr is util.Uninferable
        or not hasattr(obj, "igetattr")
    ):
        return util.Uninferable
    try:
        return next(obj.igetattr(attr, context=context))
    except (StopIteration, InferenceError, AttributeInferenceError):
        # Attribute lookup failed - fall back to the three-argument form's
        # default value when one was given.
        if len(node.args) == 3:
            # Try to infer the default and return it instead.
            try:
                return next(node.args[2].infer(context=context))
            except InferenceError:
                raise UseInferenceDefault
    raise UseInferenceDefault
def infer_hasattr(node, context=None):
    """Understand hasattr calls
    This always guarantees three possible outcomes for calling
    hasattr: Const(False) when we are sure that the object
    doesn't have the intended attribute, Const(True) when
    we know that the object has the attribute and Uninferable
    when we are unsure of the outcome of the function call.
    """
    try:
        obj, attr = _infer_getattr_args(node, context)
        if (
            obj is util.Uninferable
            or attr is util.Uninferable
            or not hasattr(obj, "getattr")
        ):
            return util.Uninferable
        # getattr raises AttributeInferenceError when the attribute is absent.
        obj.getattr(attr, context=context)
    except UseInferenceDefault:
        # Can't infer something from this function call.
        return util.Uninferable
    except AttributeInferenceError:
        # Doesn't have it.
        return nodes.Const(False)
    return nodes.Const(True)
def infer_callable(node, context=None):
    """Understand ``callable`` calls.

    Mirrors Python's semantics: an object counts as callable when it exposes
    a ``__call__`` attribute, even if that attribute itself cannot be called.
    """
    if len(node.args) != 1:
        # Not a valid callable() invocation.
        raise UseInferenceDefault
    try:
        inferred = next(node.args[0].infer(context=context))
    except InferenceError:
        return util.Uninferable
    if inferred is util.Uninferable:
        return util.Uninferable
    return nodes.Const(inferred.callable())
def infer_bool(node, context=None):
    """Understand bool calls."""
    if len(node.args) > 1:
        # bool() accepts at most one argument.
        raise UseInferenceDefault
    if not node.args:
        # bool() with no argument is False.
        return nodes.Const(False)
    try:
        inferred = next(node.args[0].infer(context=context))
    except InferenceError:
        return util.Uninferable
    if inferred is util.Uninferable:
        return util.Uninferable
    truth = inferred.bool_value()
    return util.Uninferable if truth is util.Uninferable else nodes.Const(truth)
def infer_type(node, context=None):
    """Understand the one-argument form of *type*."""
    if len(node.args) == 1:
        return helpers.object_type(node.args[0], context)
    raise UseInferenceDefault
def infer_slice(node, context=None):
    """Understand `slice` calls."""
    args = node.args
    if not 0 < len(args) <= 3:
        # slice() takes between one and three arguments.
        raise UseInferenceDefault
    infer_func = partial(helpers.safe_infer, context=context)
    args = [infer_func(arg) for arg in args]
    for arg in args:
        # Every argument must infer to an int or None constant.
        if not arg or arg is util.Uninferable:
            raise UseInferenceDefault
        if not isinstance(arg, nodes.Const):
            raise UseInferenceDefault
        if not isinstance(arg.value, (type(None), int)):
            raise UseInferenceDefault
    if len(args) < 3:
        # Make sure we have 3 arguments.
        args.extend([None] * (3 - len(args)))
    slice_node = nodes.Slice(
        lineno=node.lineno, col_offset=node.col_offset, parent=node.parent
    )
    slice_node.postinit(*args)
    return slice_node
def _infer_object__new__decorator(node, context=None):
    """Infer a class decorated with ``@object.__new__`` as an instance of itself."""
    # object.__new__ instantiates the class immediately at decoration time.
    return iter([node.instantiate_class()])
def _infer_object__new__decorator_check(node):
    """Predicate before inference_tip
    Check if the given ClassDef has an @object.__new__ decorator
    """
    if not node.decorators:
        return False
    return any(
        isinstance(decorator, nodes.Attribute)
        and decorator.as_string() == OBJECT_DUNDER_NEW
        for decorator in node.decorators.nodes
    )
def infer_issubclass(callnode, context=None):
    """Infer issubclass() calls
    :param nodes.Call callnode: an `issubclass` call
    :param InferenceContext: the context for the inference
    :rtype nodes.Const: Boolean Const value of the `issubclass` call
    :raises UseInferenceDefault: If the node cannot be inferred
    """
    call = arguments.CallSite.from_call(callnode)
    if call.keyword_arguments:
        # issubclass doesn't support keyword arguments
        raise UseInferenceDefault("TypeError: issubclass() takes no keyword arguments")
    if len(call.positional_arguments) != 2:
        raise UseInferenceDefault(
            "Expected two arguments, got {count}".format(
                count=len(call.positional_arguments)
            )
        )
    # The left hand argument is the obj to be checked
    obj_node, class_or_tuple_node = call.positional_arguments
    try:
        obj_type = next(obj_node.infer(context=context))
    except InferenceError as exc:
        raise UseInferenceDefault from exc
    if not isinstance(obj_type, nodes.ClassDef):
        raise UseInferenceDefault("TypeError: arg 1 must be class")
    # The right hand argument is the class(es) that the given
    # object is to be checked against.
    try:
        class_container = _class_or_tuple_to_container(
            class_or_tuple_node, context=context
        )
    except InferenceError as exc:
        raise UseInferenceDefault from exc
    try:
        issubclass_bool = helpers.object_issubclass(obj_type, class_container, context)
    except AstroidTypeError as exc:
        raise UseInferenceDefault("TypeError: " + str(exc)) from exc
    except MroError as exc:
        raise UseInferenceDefault from exc
    return nodes.Const(issubclass_bool)
def infer_isinstance(callnode, context=None):
    """Infer isinstance calls
    :param nodes.Call callnode: an isinstance call
    :param InferenceContext: context for call
        (currently unused but is a common interface for inference)
    :rtype nodes.Const: Boolean Const value of isinstance call
    :raises UseInferenceDefault: If the node cannot be inferred
    """
    call = arguments.CallSite.from_call(callnode)
    if call.keyword_arguments:
        # isinstance doesn't support keyword arguments
        raise UseInferenceDefault("TypeError: isinstance() takes no keyword arguments")
    if len(call.positional_arguments) != 2:
        raise UseInferenceDefault(
            "Expected two arguments, got {count}".format(
                count=len(call.positional_arguments)
            )
        )
    # The left hand argument is the obj to be checked
    obj_node, class_or_tuple_node = call.positional_arguments
    # The right hand argument is the class(es) that the given
    # obj is to be checked against.
    try:
        class_container = _class_or_tuple_to_container(
            class_or_tuple_node, context=context
        )
    except InferenceError as exc:
        # Chain the original failure, matching infer_issubclass.
        raise UseInferenceDefault from exc
    try:
        isinstance_bool = helpers.object_isinstance(obj_node, class_container, context)
    except AstroidTypeError as exc:
        raise UseInferenceDefault("TypeError: " + str(exc)) from exc
    except MroError as exc:
        raise UseInferenceDefault from exc
    if isinstance_bool is util.Uninferable:
        raise UseInferenceDefault
    return nodes.Const(isinstance_bool)
def _class_or_tuple_to_container(node, context=None):
    """Normalize isinstance/issubclass's second argument into a list of classes.

    Raises InferenceError when any of the inferences fall through.
    """
    inferred = next(node.infer(context=context))
    # arg2 MUST be a type or a TUPLE of types for isinstance.
    if not isinstance(inferred, nodes.Tuple):
        return [inferred]
    members = (next(elt.infer(context=context)) for elt in inferred.elts)
    return [member for member in members if member is not None]
def infer_len(node, context=None):
    """Infer length calls
    :param nodes.Call node: len call to infer
    :param context.InferenceContext: node context
    :rtype nodes.Const: a Const node with the inferred length, if possible
    """
    call = arguments.CallSite.from_call(node)
    if call.keyword_arguments:
        raise UseInferenceDefault("TypeError: len() must take no keyword arguments")
    if len(call.positional_arguments) != 1:
        raise UseInferenceDefault(
            "TypeError: len() must take exactly one argument "
            "({len}) given".format(len=len(call.positional_arguments))
        )
    [argument_node] = call.positional_arguments
    try:
        # object_len raises for objects whose length cannot be determined.
        return nodes.Const(helpers.object_len(argument_node))
    except (AstroidTypeError, InferenceError) as exc:
        raise UseInferenceDefault(str(exc)) from exc
def infer_str(node, context=None):
    """Infer str() calls
    :param nodes.Call node: str() call to infer
    :param context.InferenceContext: node context
    :rtype nodes.Const: a Const containing an empty string
    """
    call = arguments.CallSite.from_call(node)
    if call.keyword_arguments:
        raise UseInferenceDefault("TypeError: str() must take no keyword arguments")
    # Both str() and str(obj) are modelled as a constant (empty) string.
    # Constructing the Const cannot raise, so the previous try/except around
    # it was unreachable and has been removed.
    return nodes.Const("")
def infer_int(node, context=None):
    """Infer int() calls
    :param nodes.Call node: int() call to infer
    :param context.InferenceContext: node context
    :rtype nodes.Const: a Const containing the integer value of the int() call
    """
    call = arguments.CallSite.from_call(node)
    if call.keyword_arguments:
        raise UseInferenceDefault("TypeError: int() must take no keyword arguments")
    if call.positional_arguments:
        try:
            first_value = next(call.positional_arguments[0].infer(context=context))
        except InferenceError as exc:
            raise UseInferenceDefault(str(exc)) from exc
        if first_value is util.Uninferable:
            raise UseInferenceDefault
        if isinstance(first_value, nodes.Const) and isinstance(
            first_value.value, (int, str)
        ):
            try:
                actual_value = int(first_value.value)
            except ValueError:
                # NOTE(review): an unparsable string infers to 0 rather than
                # raising, which diverges from runtime int() behaviour.
                return nodes.Const(0)
            return nodes.Const(actual_value)
    # int() with no (usable) argument defaults to 0.
    return nodes.Const(0)
def infer_dict_fromkeys(node, context=None):
    """Infer dict.fromkeys
    :param nodes.Call node: dict.fromkeys() call to infer
    :param context.InferenceContext: node context
    :rtype nodes.Dict:
        a Dictionary containing the values that astroid was able to infer.
        In case the inference failed for any reason, an empty dictionary
        will be inferred instead.
    """
    def _build_dict_with_elements(elements):
        # Wrap *elements* ((key, value) pairs) in a Dict node at the call site.
        new_node = nodes.Dict(
            col_offset=node.col_offset, lineno=node.lineno, parent=node.parent
        )
        new_node.postinit(elements)
        return new_node
    call = arguments.CallSite.from_call(node)
    if call.keyword_arguments:
        # Error message corrected: it previously referred to int().
        raise UseInferenceDefault(
            "TypeError: dict.fromkeys() takes no keyword arguments"
        )
    if len(call.positional_arguments) not in {1, 2}:
        raise UseInferenceDefault(
            "TypeError: Needs between 1 and 2 positional arguments"
        )
    # Every key maps to None unless inference fails entirely (the optional
    # second argument - a shared default value - is not modelled here).
    default = nodes.Const(None)
    values = call.positional_arguments[0]
    try:
        inferred_values = next(values.infer(context=context))
    except InferenceError:
        return _build_dict_with_elements([])
    if inferred_values is util.Uninferable:
        return _build_dict_with_elements([])
    # Limit to a couple of potential values, as this can become pretty complicated
    accepted_iterable_elements = (nodes.Const,)
    if isinstance(inferred_values, (nodes.List, nodes.Set, nodes.Tuple)):
        elements = inferred_values.elts
        for element in elements:
            if not isinstance(element, accepted_iterable_elements):
                # Fallback to an empty dict
                return _build_dict_with_elements([])
        elements_with_value = [(element, default) for element in elements]
        return _build_dict_with_elements(elements_with_value)
    elif isinstance(inferred_values, nodes.Const) and isinstance(
        inferred_values.value, (str, bytes)
    ):
        # Strings/bytes contribute one key per character.
        elements = [
            (nodes.Const(element), default) for element in inferred_values.value
        ]
        return _build_dict_with_elements(elements)
    elif isinstance(inferred_values, nodes.Dict):
        keys = inferred_values.itered()
        for key in keys:
            if not isinstance(key, accepted_iterable_elements):
                # Fallback to an empty dict
                return _build_dict_with_elements([])
        elements_with_value = [(element, default) for element in keys]
        return _build_dict_with_elements(elements_with_value)
    # Fallback to an empty dictionary
    return _build_dict_with_elements([])
# Builtins inference
# Hook each inference function onto calls of the matching builtin name.
register_builtin_transform(infer_bool, "bool")
register_builtin_transform(infer_super, "super")
register_builtin_transform(infer_callable, "callable")
register_builtin_transform(infer_getattr, "getattr")
register_builtin_transform(infer_hasattr, "hasattr")
register_builtin_transform(infer_tuple, "tuple")
register_builtin_transform(infer_set, "set")
register_builtin_transform(infer_list, "list")
register_builtin_transform(infer_dict, "dict")
register_builtin_transform(infer_frozenset, "frozenset")
register_builtin_transform(infer_type, "type")
register_builtin_transform(infer_slice, "slice")
register_builtin_transform(infer_isinstance, "isinstance")
register_builtin_transform(infer_issubclass, "issubclass")
register_builtin_transform(infer_len, "len")
register_builtin_transform(infer_str, "str")
register_builtin_transform(infer_int, "int")
register_builtin_transform(infer_dict_fromkeys, "dict.fromkeys")
# Infer object.__new__ calls
MANAGER.register_transform(
    nodes.ClassDef,
    inference_tip(_infer_object__new__decorator),
    _infer_object__new__decorator_check,
)

View File

@@ -0,0 +1,82 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2016, 2018 Claudiu Popa <pcmanticore@gmail.com>
# Copyright (c) 2016-2017 Łukasz Rogalski <rogalski.91@gmail.com>
# Copyright (c) 2017 Derek Gustafson <degustaf@gmail.com>
# Copyright (c) 2018 Ioana Tagirta <ioana.tagirta@gmail.com>
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
import sys
import astroid
PY34 = sys.version_info >= (3, 4)
PY35 = sys.version_info >= (3, 5)
def _collections_transform():
    """Build a stub AST for the collections classes astroid can't introspect."""
    return astroid.parse(
        """
    class defaultdict(dict):
        default_factory = None
        def __missing__(self, key): pass
        def __getitem__(self, key): return default_factory
    """
        + _deque_mock()
        + _ordered_dict_mock()
    )
def _deque_mock():
    """Return stub source for collections.deque, adjusted for the Python version."""
    base_deque_class = """
    class deque(object):
        maxlen = 0
        def __init__(self, iterable=None, maxlen=None):
            self.iterable = iterable or []
        def append(self, x): pass
        def appendleft(self, x): pass
        def clear(self): pass
        def count(self, x): return 0
        def extend(self, iterable): pass
        def extendleft(self, iterable): pass
        def pop(self): return self.iterable[0]
        def popleft(self): return self.iterable[0]
        def remove(self, value): pass
        def reverse(self): return reversed(self.iterable)
        def rotate(self, n=1): return self
        def __iter__(self): return self
        def __reversed__(self): return self.iterable[::-1]
        def __getitem__(self, index): return self.iterable[index]
        def __setitem__(self, index, value): pass
        def __delitem__(self, index): pass
        def __bool__(self): return bool(self.iterable)
        def __nonzero__(self): return bool(self.iterable)
        def __contains__(self, o): return o in self.iterable
        def __len__(self): return len(self.iterable)
        def __copy__(self): return deque(self.iterable)"""
    # These deque methods only exist from Python 3.5 onwards.
    if PY35:
        base_deque_class += """
        def copy(self): return deque(self.iterable)
        def index(self, x, start=0, end=0): return 0
        def insert(self, x, i): pass
        def __add__(self, other): pass
        def __iadd__(self, other): pass
        def __mul__(self, other): pass
        def __imul__(self, other): pass
        def __rmul__(self, other): pass"""
    return base_deque_class
def _ordered_dict_mock():
    """Return stub source for collections.OrderedDict, adjusted for the Python version."""
    base_ordered_dict_class = """
    class OrderedDict(dict):
        def __reversed__(self): return self[::-1]
    """
    # move_to_end only exists from Python 3.4 onwards.
    if PY34:
        base_ordered_dict_class += """
        def move_to_end(self, key, last=False): pass"""
    return base_ordered_dict_class
# Extend the real collections module with the stub definitions above.
astroid.register_module_extender(astroid.MANAGER, "collections", _collections_transform)

View File

@@ -0,0 +1,179 @@
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
import astroid
def _curses_transform():
    """Stub the curses constants, which are only defined after initscr() at runtime."""
    return astroid.parse(
        """
    A_ALTCHARSET = 1
    A_BLINK = 1
    A_BOLD = 1
    A_DIM = 1
    A_INVIS = 1
    A_ITALIC = 1
    A_NORMAL = 1
    A_PROTECT = 1
    A_REVERSE = 1
    A_STANDOUT = 1
    A_UNDERLINE = 1
    A_HORIZONTAL = 1
    A_LEFT = 1
    A_LOW = 1
    A_RIGHT = 1
    A_TOP = 1
    A_VERTICAL = 1
    A_CHARTEXT = 1
    A_ATTRIBUTES = 1
    A_CHARTEXT = 1
    A_COLOR = 1
    KEY_MIN = 1
    KEY_BREAK = 1
    KEY_DOWN = 1
    KEY_UP = 1
    KEY_LEFT = 1
    KEY_RIGHT = 1
    KEY_HOME = 1
    KEY_BACKSPACE = 1
    KEY_F0 = 1
    KEY_Fn = 1
    KEY_DL = 1
    KEY_IL = 1
    KEY_DC = 1
    KEY_IC = 1
    KEY_EIC = 1
    KEY_CLEAR = 1
    KEY_EOS = 1
    KEY_EOL = 1
    KEY_SF = 1
    KEY_SR = 1
    KEY_NPAGE = 1
    KEY_PPAGE = 1
    KEY_STAB = 1
    KEY_CTAB = 1
    KEY_CATAB = 1
    KEY_ENTER = 1
    KEY_SRESET = 1
    KEY_RESET = 1
    KEY_PRINT = 1
    KEY_LL = 1
    KEY_A1 = 1
    KEY_A3 = 1
    KEY_B2 = 1
    KEY_C1 = 1
    KEY_C3 = 1
    KEY_BTAB = 1
    KEY_BEG = 1
    KEY_CANCEL = 1
    KEY_CLOSE = 1
    KEY_COMMAND = 1
    KEY_COPY = 1
    KEY_CREATE = 1
    KEY_END = 1
    KEY_EXIT = 1
    KEY_FIND = 1
    KEY_HELP = 1
    KEY_MARK = 1
    KEY_MESSAGE = 1
    KEY_MOVE = 1
    KEY_NEXT = 1
    KEY_OPEN = 1
    KEY_OPTIONS = 1
    KEY_PREVIOUS = 1
    KEY_REDO = 1
    KEY_REFERENCE = 1
    KEY_REFRESH = 1
    KEY_REPLACE = 1
    KEY_RESTART = 1
    KEY_RESUME = 1
    KEY_SAVE = 1
    KEY_SBEG = 1
    KEY_SCANCEL = 1
    KEY_SCOMMAND = 1
    KEY_SCOPY = 1
    KEY_SCREATE = 1
    KEY_SDC = 1
    KEY_SDL = 1
    KEY_SELECT = 1
    KEY_SEND = 1
    KEY_SEOL = 1
    KEY_SEXIT = 1
    KEY_SFIND = 1
    KEY_SHELP = 1
    KEY_SHOME = 1
    KEY_SIC = 1
    KEY_SLEFT = 1
    KEY_SMESSAGE = 1
    KEY_SMOVE = 1
    KEY_SNEXT = 1
    KEY_SOPTIONS = 1
    KEY_SPREVIOUS = 1
    KEY_SPRINT = 1
    KEY_SREDO = 1
    KEY_SREPLACE = 1
    KEY_SRIGHT = 1
    KEY_SRSUME = 1
    KEY_SSAVE = 1
    KEY_SSUSPEND = 1
    KEY_SUNDO = 1
    KEY_SUSPEND = 1
    KEY_UNDO = 1
    KEY_MOUSE = 1
    KEY_RESIZE = 1
    KEY_MAX = 1
    ACS_BBSS = 1
    ACS_BLOCK = 1
    ACS_BOARD = 1
    ACS_BSBS = 1
    ACS_BSSB = 1
    ACS_BSSS = 1
    ACS_BTEE = 1
    ACS_BULLET = 1
    ACS_CKBOARD = 1
    ACS_DARROW = 1
    ACS_DEGREE = 1
    ACS_DIAMOND = 1
    ACS_GEQUAL = 1
    ACS_HLINE = 1
    ACS_LANTERN = 1
    ACS_LARROW = 1
    ACS_LEQUAL = 1
    ACS_LLCORNER = 1
    ACS_LRCORNER = 1
    ACS_LTEE = 1
    ACS_NEQUAL = 1
    ACS_PI = 1
    ACS_PLMINUS = 1
    ACS_PLUS = 1
    ACS_RARROW = 1
    ACS_RTEE = 1
    ACS_S1 = 1
    ACS_S3 = 1
    ACS_S7 = 1
    ACS_S9 = 1
    ACS_SBBS = 1
    ACS_SBSB = 1
    ACS_SBSS = 1
    ACS_SSBB = 1
    ACS_SSBS = 1
    ACS_SSSB = 1
    ACS_SSSS = 1
    ACS_STERLING = 1
    ACS_TTEE = 1
    ACS_UARROW = 1
    ACS_ULCORNER = 1
    ACS_URCORNER = 1
    ACS_VLINE = 1
    COLOR_BLACK = 1
    COLOR_BLUE = 1
    COLOR_CYAN = 1
    COLOR_GREEN = 1
    COLOR_MAGENTA = 1
    COLOR_RED = 1
    COLOR_WHITE = 1
    COLOR_YELLOW = 1
    """
    )
# Extend the real curses module with the constant stubs above.
astroid.register_module_extender(astroid.MANAGER, "curses", _curses_transform)

View File

@@ -0,0 +1,28 @@
# Copyright (c) 2015-2016 Claudiu Popa <pcmanticore@gmail.com>
# Copyright (c) 2015 raylu <lurayl@gmail.com>
# Copyright (c) 2016 Ceridwen <ceridwenv@gmail.com>
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
"""Astroid hooks for dateutil"""
import textwrap
from astroid import MANAGER, register_module_extender
from astroid.builder import AstroidBuilder
def dateutil_transform():
    """Stub dateutil.parser so that parse() infers to a datetime instance."""
    return AstroidBuilder(MANAGER).string_build(
        textwrap.dedent(
            """
    import datetime
    def parse(timestr, parserinfo=None, **kwargs):
        return datetime.datetime()
    """
        )
    )
# Extend the real dateutil.parser module with the stub above.
register_module_extender(MANAGER, "dateutil.parser", dateutil_transform)

View File

@@ -0,0 +1,51 @@
# Copyright (c) 2017 Claudiu Popa <pcmanticore@gmail.com>
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
import collections
import sys
import astroid
def _clone_node_with_lineno(node, parent, lineno):
cls = node.__class__
other_fields = node._other_fields
_astroid_fields = node._astroid_fields
init_params = {"lineno": lineno, "col_offset": node.col_offset, "parent": parent}
postinit_params = {param: getattr(node, param) for param in _astroid_fields}
if other_fields:
init_params.update({param: getattr(node, param) for param in other_fields})
new_node = cls(**init_params)
if hasattr(node, "postinit") and _astroid_fields:
for param, child in postinit_params.items():
if child and not isinstance(child, collections.Sequence):
cloned_child = _clone_node_with_lineno(
node=child, lineno=new_node.lineno, parent=new_node
)
postinit_params[param] = cloned_child
new_node.postinit(**postinit_params)
return new_node
def _transform_formatted_value(node):
    # Rebuild the FormattedValue when the interpolated value claims line 1
    # while the f-string itself sits elsewhere - the symptom of bpo-29051.
    if node.value and node.value.lineno == 1:
        if node.lineno != node.value.lineno:
            new_node = astroid.FormattedValue(
                lineno=node.lineno, col_offset=node.col_offset, parent=node.parent
            )
            # Clone the value subtree with the corrected line number.
            new_value = _clone_node_with_lineno(
                node=node.value, lineno=node.lineno, parent=new_node
            )
            new_node.postinit(value=new_value, format_spec=node.format_spec)
            return new_node
# f-strings only exist from Python 3.6 onwards.
if sys.version_info[:2] >= (3, 6):
    # TODO: this fix tries to *patch* http://bugs.python.org/issue29051
    # The problem is that FormattedValue.value, which is a Name node,
    # has wrong line numbers, usually 1. This creates problems for pylint,
    # which expects correct line numbers for things such as message control.
    astroid.MANAGER.register_transform(
        astroid.FormattedValue, _transform_formatted_value
    )

View File

@@ -0,0 +1,157 @@
# Copyright (c) 2016, 2018 Claudiu Popa <pcmanticore@gmail.com>
# Copyright (c) 2018 Bryce Guinta <bryce.paul.guinta@gmail.com>
"""Astroid hooks for understanding functools library module."""
from functools import partial
from itertools import chain
import astroid
from astroid import arguments
from astroid import BoundMethod
from astroid import extract_node
from astroid import helpers
from astroid.interpreter import objectmodel
from astroid import MANAGER
from astroid import objects
# Fully qualified name of the decorator this brain special-cases.
LRU_CACHE = "functools.lru_cache"
class LruWrappedModel(objectmodel.FunctionModel):
    """Special attribute model for functions decorated with functools.lru_cache.
    The said decorators patches at decoration time some functions onto
    the decorated function.
    """
    @property
    def attr___wrapped__(self):
        # The wrapped function is modelled as the decorated function itself.
        return self._instance
    @property
    def attr_cache_info(self):
        # cache_info() returns a functools._CacheInfo namedtuple; model it as
        # a zeroed-out instance.
        cache_info = extract_node(
            """
        from functools import _CacheInfo
        _CacheInfo(0, 0, 0, 0)
        """
        )
        class CacheInfoBoundMethod(BoundMethod):
            def infer_call_result(self, caller, context=None):
                yield helpers.safe_infer(cache_info)
        return CacheInfoBoundMethod(proxy=self._instance, bound=self._instance)
    @property
    def attr_cache_clear(self):
        # cache_clear() is modelled as a no-op bound method.
        node = extract_node("""def cache_clear(self): pass""")
        return BoundMethod(proxy=node, bound=self._instance.parent.scope())
def _transform_lru_cache(node, context=None):
    """Attach the lru_cache special-attribute model to the decorated function node."""
    # TODO: this is not ideal, since the node should be immutable,
    # but due to https://github.com/PyCQA/astroid/issues/354,
    # there's not much we can do now.
    # Replacing the node would work partially, because,
    # in pylint, the old node would still be available, leading
    # to spurious false positives.
    node.special_attributes = LruWrappedModel()(node)
    return
def _functools_partial_inference(node, context=None):
    """Infer a functools.partial(...) call as a PartialFunction object.

    Raises UseInferenceDefault when the call shape or the wrapped function
    cannot be validated, letting the default inference take over.
    """
    call = arguments.CallSite.from_call(node)
    number_of_positional = len(call.positional_arguments)
    if number_of_positional < 1:
        raise astroid.UseInferenceDefault(
            "functools.partial takes at least one argument"
        )
    if number_of_positional == 1 and not call.keyword_arguments:
        raise astroid.UseInferenceDefault(
            "functools.partial needs at least to have some filled arguments"
        )
    partial_function = call.positional_arguments[0]
    try:
        inferred_wrapped_function = next(partial_function.infer(context=context))
    except astroid.InferenceError as exc:
        raise astroid.UseInferenceDefault from exc
    if inferred_wrapped_function is astroid.Uninferable:
        raise astroid.UseInferenceDefault("Cannot infer the wrapped function")
    if not isinstance(inferred_wrapped_function, astroid.FunctionDef):
        raise astroid.UseInferenceDefault("The wrapped function is not a function")
    # Determine if the passed keywords into the callsite are supported
    # by the wrapped function.
    function_parameters = chain(
        inferred_wrapped_function.args.args or (),
        inferred_wrapped_function.args.kwonlyargs or (),
    )
    parameter_names = set(
        param.name
        for param in function_parameters
        if isinstance(param, astroid.AssignName)
    )
    if set(call.keyword_arguments) - parameter_names:
        raise astroid.UseInferenceDefault(
            "wrapped function received unknown parameters"
        )
    # Build the PartialFunction proxy, reusing the wrapped function's
    # signature, body and position information.
    partial_function = objects.PartialFunction(
        call,
        name=inferred_wrapped_function.name,
        doc=inferred_wrapped_function.doc,
        lineno=inferred_wrapped_function.lineno,
        col_offset=inferred_wrapped_function.col_offset,
        parent=inferred_wrapped_function.parent,
    )
    partial_function.postinit(
        args=inferred_wrapped_function.args,
        body=inferred_wrapped_function.body,
        decorators=inferred_wrapped_function.decorators,
        returns=inferred_wrapped_function.returns,
        type_comment_returns=inferred_wrapped_function.type_comment_returns,
        type_comment_args=inferred_wrapped_function.type_comment_args,
    )
    return iter((partial_function,))
def _looks_like_lru_cache(node):
    """Check if the given function node is decorated with lru_cache."""
    if not node.decorators:
        return False
    return any(
        isinstance(decorator, astroid.Call)
        and _looks_like_functools_member(decorator, "lru_cache")
        for decorator in node.decorators.nodes
    )
def _looks_like_functools_member(node, member):
    """Check if the given Call node invokes the *member* function of functools.

    Matches both the bare name (``member(...)``) and the qualified form
    (``functools.member(...)``).
    """
    if isinstance(node.func, astroid.Name):
        return node.func.name == member
    if isinstance(node.func, astroid.Attribute):
        return (
            node.func.attrname == member
            and isinstance(node.func.expr, astroid.Name)
            and node.func.expr.name == "functools"
        )
    # Explicit False instead of falling through with an implicit None, so the
    # predicate always returns a bool (e.g. for Subscript/Lambda callees).
    return False
# Convenience predicate: a Call node that looks like functools.partial(...).
_looks_like_partial = partial(_looks_like_functools_member, member="partial")

# Hook the lru_cache and functools.partial handling into astroid.
MANAGER.register_transform(
    astroid.FunctionDef, _transform_lru_cache, _looks_like_lru_cache
)
MANAGER.register_transform(
    astroid.Call,
    astroid.inference_tip(_functools_partial_inference),
    _looks_like_partial,
)

View File

@@ -0,0 +1,220 @@
# Copyright (c) 2013-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
# Copyright (c) 2014 Google, Inc.
# Copyright (c) 2014 Cole Robinson <crobinso@redhat.com>
# Copyright (c) 2015-2016 Claudiu Popa <pcmanticore@gmail.com>
# Copyright (c) 2015-2016 Ceridwen <ceridwenv@gmail.com>
# Copyright (c) 2015 David Shea <dshea@redhat.com>
# Copyright (c) 2016 Jakub Wilk <jwilk@jwilk.net>
# Copyright (c) 2016 Giuseppe Scrivano <gscrivan@redhat.com>
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
"""Astroid hooks for the Python 2 GObject introspection bindings.
Helps with understanding everything imported from 'gi.repository'
"""
import inspect
import itertools
import sys
import re
import warnings
from astroid import MANAGER, AstroidBuildingError, nodes
from astroid.builder import AstroidBuilder
_inspected_modules = {}
_identifier_re = r"^[A-Za-z_]\w*$"
def _gi_build_stub(parent):
    """
    Inspect the passed module recursively and build stubs for functions,
    classes, etc.

    Returns Python source text that mirrors *parent*'s public attributes:
    constants keep their (int/str) value, callables become no-op ``def``
    stubs, and classes are emitted recursively.
    """
    classes = {}
    functions = {}
    constants = {}
    methods = {}
    for name in dir(parent):
        if name.startswith("__"):
            continue
        # Check if this is a valid name in python
        if not re.match(_identifier_re, name):
            continue
        try:
            obj = getattr(parent, name)
        except Exception:
            # gi attributes are resolved lazily and getattr can raise
            # arbitrary errors; skip the attribute rather than aborting.
            # (Narrowed from a bare ``except:`` so SystemExit and
            # KeyboardInterrupt are no longer swallowed.)
            continue

        if inspect.isclass(obj):
            classes[name] = obj
        elif inspect.isfunction(obj) or inspect.isbuiltin(obj):
            functions[name] = obj
        elif inspect.ismethod(obj) or inspect.ismethoddescriptor(obj):
            methods[name] = obj
        elif (
            str(obj).startswith("<flags")
            or str(obj).startswith("<enum ")
            or str(obj).startswith("<GType ")
            or inspect.isdatadescriptor(obj)
        ):
            constants[name] = 0
        elif isinstance(obj, (int, str)):
            constants[name] = obj
        elif callable(obj):
            # Fall back to a function for anything callable
            functions[name] = obj
        else:
            # Assume everything else is some manner of constant
            constants[name] = 0

    ret = ""

    if constants:
        ret += "# %s constants\n\n" % parent.__name__
    for name in sorted(constants):
        if name[0].isdigit():
            # GDK has some busted constant names like
            # Gdk.EventType.2BUTTON_PRESS
            continue
        val = constants[name]
        strval = str(val)
        if isinstance(val, str):
            # Escape backslashes so the emitted literal round-trips.
            strval = '"%s"' % str(val).replace("\\", "\\\\")
        ret += "%s = %s\n" % (name, strval)

    if ret:
        ret += "\n\n"
    if functions:
        ret += "# %s functions\n\n" % parent.__name__
    for name in sorted(functions):
        ret += "def %s(*args, **kwargs):\n" % name
        ret += "    pass\n"

    if ret:
        ret += "\n\n"
    if methods:
        ret += "# %s methods\n\n" % parent.__name__
    for name in sorted(methods):
        ret += "def %s(self, *args, **kwargs):\n" % name
        ret += "    pass\n"

    if ret:
        ret += "\n\n"
    if classes:
        ret += "# %s classes\n\n" % parent.__name__
    for name, obj in sorted(classes.items()):
        base = "object"
        if issubclass(obj, Exception):
            base = "Exception"
        ret += "class %s(%s):\n" % (name, base)

        classret = _gi_build_stub(obj)
        if not classret:
            classret = "pass\n"

        for line in classret.splitlines():
            ret += "    " + line + "\n"
        ret += "\n"

    return ret
def _import_gi_module(modname):
    """Failed-import hook: build a stub AST for ``gi.repository`` submodules.

    Imports the real module, runs :func:`_gi_build_stub` over it and compiles
    the resulting source with AstroidBuilder.  Results — including failures,
    stored as ``None`` — are cached in ``_inspected_modules`` so each module
    is only introspected once.  Raises AstroidBuildingError for anything that
    is not a gi.repository submodule or that could not be imported.
    """
    # we only consider gi.repository submodules
    if not modname.startswith("gi.repository."):
        raise AstroidBuildingError(modname=modname)
    # build astroid representation unless we already tried so
    if modname not in _inspected_modules:
        modnames = [modname]
        optional_modnames = []

        # GLib and GObject may have some special case handling
        # in pygobject that we need to cope with. However at
        # least as of pygobject3-3.13.91 the _glib module doesn't
        # exist anymore, so if treat these modules as optional.
        if modname == "gi.repository.GLib":
            optional_modnames.append("gi._glib")
        elif modname == "gi.repository.GObject":
            optional_modnames.append("gi._gobject")

        try:
            modcode = ""
            for m in itertools.chain(modnames, optional_modnames):
                try:
                    with warnings.catch_warnings():
                        # Just inspecting the code can raise gi deprecation
                        # warnings, so ignore them.
                        try:
                            from gi import PyGIDeprecationWarning, PyGIWarning

                            warnings.simplefilter("ignore", PyGIDeprecationWarning)
                            warnings.simplefilter("ignore", PyGIWarning)
                        except Exception:
                            # Older pygobject versions lack these warning
                            # classes; proceed without the filters.
                            pass

                        __import__(m)
                        modcode += _gi_build_stub(sys.modules[m])
                except ImportError:
                    # Optional helper modules may legitimately be missing.
                    if m not in optional_modnames:
                        raise
        except ImportError:
            # Cache the failure so we do not retry the import on every call.
            astng = _inspected_modules[modname] = None
        else:
            astng = AstroidBuilder(MANAGER).string_build(modcode, modname)
            _inspected_modules[modname] = astng
    else:
        astng = _inspected_modules[modname]
    if astng is None:
        raise AstroidBuildingError(modname=modname)
    return astng
def _looks_like_require_version(node):
# Return whether this looks like a call to gi.require_version(<name>, <version>)
# Only accept function calls with two constant arguments
if len(node.args) != 2:
return False
if not all(isinstance(arg, nodes.Const) for arg in node.args):
return False
func = node.func
if isinstance(func, nodes.Attribute):
if func.attrname != "require_version":
return False
if isinstance(func.expr, nodes.Name) and func.expr.name == "gi":
return True
return False
if isinstance(func, nodes.Name):
return func.name == "require_version"
return False
def _register_require_version(node):
# Load the gi.require_version locally
try:
import gi
gi.require_version(node.args[0].value, node.args[1].value)
except Exception:
pass
return node
# Build stubs for any gi.repository module that fails to import normally,
# and replay gi.require_version() calls found in analysed code.
MANAGER.register_failed_import_hook(_import_gi_module)
MANAGER.register_transform(
    nodes.Call, _register_require_version, _looks_like_require_version
)

View File

@@ -0,0 +1,67 @@
# Copyright (c) 2016, 2018 Claudiu Popa <pcmanticore@gmail.com>
# Copyright (c) 2018 Ioana Tagirta <ioana.tagirta@gmail.com>
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
import sys
import six
import astroid
PY36 = sys.version_info >= (3, 6)
def _hashlib_transform():
    """Build astroid stubs for the ``hashlib`` hash-constructor classes.

    Every supported algorithm becomes a small class exposing the standard
    hash-object API (``update``/``digest``/``hexdigest``/``copy`` plus the
    ``name``/``block_size``/``digest_size`` properties).  On Python 3.6+
    the SHA-3/SHAKE and keyword-only blake2 constructors are added too.
    """
    signature = "value=''"
    template = """
    class %(name)s(object):
        def __init__(self, %(signature)s): pass
        def digest(self):
            return %(digest)s
        def copy(self):
            return self
        def update(self, value): pass
        def hexdigest(self):
            return ''
        @property
        def name(self):
            return %(name)r
        @property
        def block_size(self):
            return 1
        @property
        def digest_size(self):
            return 1
    """
    algorithms_with_signature = dict.fromkeys(
        ["md5", "sha1", "sha224", "sha256", "sha384", "sha512"], signature
    )
    if PY36:
        # blake2 constructors take a long keyword-only signature.
        blake2b_signature = "data=b'', *, digest_size=64, key=b'', salt=b'', \
person=b'', fanout=1, depth=1, leaf_size=0, node_offset=0, \
node_depth=0, inner_size=0, last_node=False"
        blake2s_signature = "data=b'', *, digest_size=32, key=b'', salt=b'', \
person=b'', fanout=1, depth=1, leaf_size=0, node_offset=0, \
node_depth=0, inner_size=0, last_node=False"
        new_algorithms = dict.fromkeys(
            ["sha3_224", "sha3_256", "sha3_384", "sha3_512", "shake_128", "shake_256"],
            signature,
        )
        algorithms_with_signature.update(new_algorithms)
        algorithms_with_signature.update(
            {"blake2b": blake2b_signature, "blake2s": blake2s_signature}
        )
    # Instantiate the template once per algorithm and parse the whole batch.
    classes = "".join(
        template
        % {
            "name": hashfunc,
            "digest": 'b""' if six.PY3 else '""',
            "signature": signature,
        }
        for hashfunc, signature in algorithms_with_signature.items()
    )
    return astroid.parse(classes)


astroid.register_module_extender(astroid.MANAGER, "hashlib", _hashlib_transform)

View File

@@ -0,0 +1,81 @@
# Copyright (c) 2018 Claudiu Popa <pcmanticore@gmail.com>
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
"""Astroid brain hints for some of the `http` module."""
import textwrap
import astroid
from astroid.builder import AstroidBuilder
def _http_transform():
    """Expose every ``http.HTTPStatus`` member as a module-level constant.

    ``http.client`` re-exports the status codes dynamically via a loop at
    import time, which static analysis cannot see; this stub spells out each
    alias explicitly so the names resolve.
    """
    return AstroidBuilder(astroid.MANAGER).string_build(
        textwrap.dedent(
            """
    from http import HTTPStatus

    CONTINUE = HTTPStatus.CONTINUE
    SWITCHING_PROTOCOLS = HTTPStatus.SWITCHING_PROTOCOLS
    PROCESSING = HTTPStatus.PROCESSING
    OK = HTTPStatus.OK
    CREATED = HTTPStatus.CREATED
    ACCEPTED = HTTPStatus.ACCEPTED
    NON_AUTHORITATIVE_INFORMATION = HTTPStatus.NON_AUTHORITATIVE_INFORMATION
    NO_CONTENT = HTTPStatus.NO_CONTENT
    RESET_CONTENT = HTTPStatus.RESET_CONTENT
    PARTIAL_CONTENT = HTTPStatus.PARTIAL_CONTENT
    MULTI_STATUS = HTTPStatus.MULTI_STATUS
    ALREADY_REPORTED = HTTPStatus.ALREADY_REPORTED
    IM_USED = HTTPStatus.IM_USED
    MULTIPLE_CHOICES = HTTPStatus.MULTIPLE_CHOICES
    MOVED_PERMANENTLY = HTTPStatus.MOVED_PERMANENTLY
    FOUND = HTTPStatus.FOUND
    SEE_OTHER = HTTPStatus.SEE_OTHER
    NOT_MODIFIED = HTTPStatus.NOT_MODIFIED
    USE_PROXY = HTTPStatus.USE_PROXY
    TEMPORARY_REDIRECT = HTTPStatus.TEMPORARY_REDIRECT
    PERMANENT_REDIRECT = HTTPStatus.PERMANENT_REDIRECT
    BAD_REQUEST = HTTPStatus.BAD_REQUEST
    UNAUTHORIZED = HTTPStatus.UNAUTHORIZED
    PAYMENT_REQUIRED = HTTPStatus.PAYMENT_REQUIRED
    FORBIDDEN = HTTPStatus.FORBIDDEN
    NOT_FOUND = HTTPStatus.NOT_FOUND
    METHOD_NOT_ALLOWED = HTTPStatus.METHOD_NOT_ALLOWED
    NOT_ACCEPTABLE = HTTPStatus.NOT_ACCEPTABLE
    PROXY_AUTHENTICATION_REQUIRED = HTTPStatus.PROXY_AUTHENTICATION_REQUIRED
    REQUEST_TIMEOUT = HTTPStatus.REQUEST_TIMEOUT
    CONFLICT = HTTPStatus.CONFLICT
    GONE = HTTPStatus.GONE
    LENGTH_REQUIRED = HTTPStatus.LENGTH_REQUIRED
    PRECONDITION_FAILED = HTTPStatus.PRECONDITION_FAILED
    REQUEST_ENTITY_TOO_LARGE = HTTPStatus.REQUEST_ENTITY_TOO_LARGE
    REQUEST_URI_TOO_LONG = HTTPStatus.REQUEST_URI_TOO_LONG
    UNSUPPORTED_MEDIA_TYPE = HTTPStatus.UNSUPPORTED_MEDIA_TYPE
    REQUESTED_RANGE_NOT_SATISFIABLE = HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE
    EXPECTATION_FAILED = HTTPStatus.EXPECTATION_FAILED
    UNPROCESSABLE_ENTITY = HTTPStatus.UNPROCESSABLE_ENTITY
    LOCKED = HTTPStatus.LOCKED
    FAILED_DEPENDENCY = HTTPStatus.FAILED_DEPENDENCY
    UPGRADE_REQUIRED = HTTPStatus.UPGRADE_REQUIRED
    PRECONDITION_REQUIRED = HTTPStatus.PRECONDITION_REQUIRED
    TOO_MANY_REQUESTS = HTTPStatus.TOO_MANY_REQUESTS
    REQUEST_HEADER_FIELDS_TOO_LARGE = HTTPStatus.REQUEST_HEADER_FIELDS_TOO_LARGE
    INTERNAL_SERVER_ERROR = HTTPStatus.INTERNAL_SERVER_ERROR
    NOT_IMPLEMENTED = HTTPStatus.NOT_IMPLEMENTED
    BAD_GATEWAY = HTTPStatus.BAD_GATEWAY
    SERVICE_UNAVAILABLE = HTTPStatus.SERVICE_UNAVAILABLE
    GATEWAY_TIMEOUT = HTTPStatus.GATEWAY_TIMEOUT
    HTTP_VERSION_NOT_SUPPORTED = HTTPStatus.HTTP_VERSION_NOT_SUPPORTED
    VARIANT_ALSO_NEGOTIATES = HTTPStatus.VARIANT_ALSO_NEGOTIATES
    INSUFFICIENT_STORAGE = HTTPStatus.INSUFFICIENT_STORAGE
    LOOP_DETECTED = HTTPStatus.LOOP_DETECTED
    NOT_EXTENDED = HTTPStatus.NOT_EXTENDED
    NETWORK_AUTHENTICATION_REQUIRED = HTTPStatus.NETWORK_AUTHENTICATION_REQUIRED
    """
        )
    )


astroid.register_module_extender(astroid.MANAGER, "http.client", _http_transform)

View File

@@ -0,0 +1,45 @@
# Copyright (c) 2016 Claudiu Popa <pcmanticore@gmail.com>
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
"""Astroid brain hints for some of the _io C objects."""
import astroid
BUFFERED = {"BufferedWriter", "BufferedReader"}
TextIOWrapper = "TextIOWrapper"
FileIO = "FileIO"
BufferedWriter = "BufferedWriter"
def _generic_io_transform(node, name, cls):
    """Attach an instance of ``_io.<cls>`` to *node* as the local *name*."""
    # Pull the concrete class out of the compiled _io module and expose an
    # instantiated version of it on the transformed node.
    stub_class = astroid.MANAGER.ast_from_module_name("_io")[cls]
    node.locals[name] = [stub_class.instantiate_class()]
def _transform_text_io_wrapper(node):
    """Expose a ``buffer`` attribute on TextIOWrapper instances."""
    # This is not always correct, since it can vary with the type of the descriptor,
    # being stdout, stderr or stdin. But we cannot get access to the name of the
    # stream, which is why we are using the BufferedWriter class as a default
    # value
    return _generic_io_transform(node, name="buffer", cls=BufferedWriter)
def _transform_buffered(node):
    """Expose a ``raw`` attribute (a FileIO instance) on buffered streams."""
    return _generic_io_transform(node, name="raw", cls=FileIO)


# Attach the synthetic members to the relevant _io classes.
astroid.MANAGER.register_transform(
    astroid.ClassDef, _transform_buffered, lambda node: node.name in BUFFERED
)
astroid.MANAGER.register_transform(
    astroid.ClassDef,
    _transform_text_io_wrapper,
    lambda node: node.name == TextIOWrapper,
)

View File

@@ -0,0 +1,29 @@
# Copyright (c) 2012-2013 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
# Copyright (c) 2014 Google, Inc.
# Copyright (c) 2015-2016 Claudiu Popa <pcmanticore@gmail.com>
# Copyright (c) 2016 Ceridwen <ceridwenv@gmail.com>
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
from astroid import MANAGER, register_module_extender
from astroid.builder import AstroidBuilder
def mechanize_transform():
    """Stub ``mechanize.Browser``'s open methods so they infer to ``None``
    instead of failing on mechanize's dynamic implementation."""
    return AstroidBuilder(MANAGER).string_build(
        """

class Browser(object):
    def open(self, url, data=None, timeout=None):
        return None
    def open_novisit(self, url, data=None, timeout=None):
        return None
    def open_local_file(self, filename):
        return None

"""
    )


register_module_extender(MANAGER, "mechanize", mechanize_transform)

View File

@@ -0,0 +1,112 @@
# Copyright (c) 2016 Claudiu Popa <pcmanticore@gmail.com>
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
import sys
import astroid
from astroid import exceptions
PY34 = sys.version_info >= (3, 4)
def _multiprocessing_transform():
    """Build a stub ``multiprocessing`` module exposing ``Manager()``.

    On Python 3.4+ the real module resolves most of its API through context
    objects at runtime, so the locals of DefaultContext/BaseContext instances
    are copied onto the stub module (functions re-bound so they lose the
    implicit ``self``).
    """
    module = astroid.parse(
        """
    from multiprocessing.managers import SyncManager
    def Manager():
        return SyncManager()
    """
    )
    if not PY34:
        return module

    # On Python 3.4, multiprocessing uses a getattr lookup inside contexts,
    # in order to get the attributes they need. Since it's extremely
    # dynamic, we use this approach to fake it.
    node = astroid.parse(
        """
    from multiprocessing.context import DefaultContext, BaseContext
    default = DefaultContext()
    base = BaseContext()
    """
    )
    try:
        context = next(node["default"].infer())
        base = next(node["base"].infer())
    except exceptions.InferenceError:
        # Without the contexts we can still provide the basic stub.
        return module

    for node in (context, base):
        for key, value in node.locals.items():
            if key.startswith("_"):
                continue

            value = value[0]
            if isinstance(value, astroid.FunctionDef):
                # We need to rebound this, since otherwise
                # it will have an extra argument (self).
                value = astroid.BoundMethod(value, node)
            module[key] = value
    return module
def _multiprocessing_managers_transform():
    """Stub ``multiprocessing.managers`` with a static ``SyncManager``.

    The real SyncManager registers its proxy types dynamically; this stub
    maps them to plain threading/queue/pool equivalents so attribute access
    on manager objects can be inferred.
    """
    return astroid.parse(
        """
    import array
    import threading
    import multiprocessing.pool as pool

    import six

    class Namespace(object):
        pass

    class Value(object):
        def __init__(self, typecode, value, lock=True):
            self._typecode = typecode
            self._value = value
        def get(self):
            return self._value
        def set(self, value):
            self._value = value
        def __repr__(self):
            return '%s(%r, %r)'%(type(self).__name__, self._typecode, self._value)
        value = property(get, set)

    def Array(typecode, sequence, lock=True):
        return array.array(typecode, sequence)

    class SyncManager(object):
        Queue = JoinableQueue = six.moves.queue.Queue
        Event = threading.Event
        RLock = threading.RLock
        BoundedSemaphore = threading.BoundedSemaphore
        Condition = threading.Condition
        Barrier = threading.Barrier
        Pool = pool.Pool
        list = list
        dict = dict
        Value = Value
        Array = Array
        Namespace = Namespace
        __enter__ = lambda self: self
        __exit__ = lambda *args: args

        def start(self, initializer=None, initargs=None):
            pass
        def shutdown(self):
            pass
    """
    )


astroid.register_module_extender(
    astroid.MANAGER, "multiprocessing.managers", _multiprocessing_managers_transform
)
astroid.register_module_extender(
    astroid.MANAGER, "multiprocessing", _multiprocessing_transform
)

View File

@@ -0,0 +1,440 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2012-2015 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
# Copyright (c) 2013-2014 Google, Inc.
# Copyright (c) 2014-2018 Claudiu Popa <pcmanticore@gmail.com>
# Copyright (c) 2014 Eevee (Alex Munroe) <amunroe@yelp.com>
# Copyright (c) 2015-2016 Ceridwen <ceridwenv@gmail.com>
# Copyright (c) 2015 Dmitry Pribysh <dmand@yandex.ru>
# Copyright (c) 2015 David Shea <dshea@redhat.com>
# Copyright (c) 2015 Philip Lorenz <philip@bithub.de>
# Copyright (c) 2016 Jakub Wilk <jwilk@jwilk.net>
# Copyright (c) 2016 Mateusz Bysiek <mb@mbdev.pl>
# Copyright (c) 2017 Hugo <hugovk@users.noreply.github.com>
# Copyright (c) 2017 Łukasz Rogalski <rogalski.91@gmail.com>
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
"""Astroid hooks for the Python standard library."""
import functools
import keyword
from textwrap import dedent
from astroid import MANAGER, UseInferenceDefault, inference_tip, InferenceError
from astroid import arguments
from astroid import exceptions
from astroid import nodes
from astroid.builder import AstroidBuilder, extract_node
from astroid import util
TYPING_NAMEDTUPLE_BASENAMES = {"NamedTuple", "typing.NamedTuple"}
ENUM_BASE_NAMES = {
"Enum",
"IntEnum",
"enum.Enum",
"enum.IntEnum",
"IntFlag",
"enum.IntFlag",
}
def _infer_first(node, context):
    """Return the first inferred value of *node*.

    Raises UseInferenceDefault when the node (or its first inferred value)
    is Uninferable, and InferenceError when inference yields nothing.
    """
    if node is util.Uninferable:
        raise UseInferenceDefault
    try:
        value = next(node.infer(context=context))
    except StopIteration as exc:
        # Chain the cause so the original inference failure is preserved.
        raise InferenceError() from exc
    # Keep this check outside the try block: only next() should be guarded.
    if value is util.Uninferable:
        raise UseInferenceDefault()
    return value
def _find_func_form_arguments(node, context):
    """Extract the (typename, field_names) pair from a namedtuple/enum call.

    Both values may be given positionally or by keyword; raises
    UseInferenceDefault if either one cannot be found.
    """
    def _extract_namedtuple_arg_or_keyword(position, key_name=None):
        # Positional arguments win over keywords; returns None when absent.
        if len(args) > position:
            return _infer_first(args[position], context)
        if key_name and key_name in found_keywords:
            return _infer_first(found_keywords[key_name], context)

    args = node.args
    keywords = node.keywords
    found_keywords = (
        {keyword.arg: keyword.value for keyword in keywords} if keywords else {}
    )

    name = _extract_namedtuple_arg_or_keyword(position=0, key_name="typename")
    names = _extract_namedtuple_arg_or_keyword(position=1, key_name="field_names")
    if name and names:
        return name.value, names

    raise UseInferenceDefault()
def infer_func_form(node, base_type, context=None, enum=False):
    """Specific inference function for namedtuple or Python 3 enum.

    Returns a ``(class_node, name, attributes)`` tuple: a synthetic ClassDef
    deriving from *base_type* with one instance attribute per field name.
    Raises UseInferenceDefault when the call's arguments cannot be decoded.
    """
    # node is a Call node, class name as first argument and generated class
    # attributes as second argument

    # namedtuple or enums list of attributes can be a list of strings or a
    # whitespace-separate string
    try:
        name, names = _find_func_form_arguments(node, context)
        try:
            attributes = names.value.replace(",", " ").split()
        except AttributeError:
            # names is not a string constant; fall back to container forms.
            if not enum:
                attributes = [
                    _infer_first(const, context).value for const in names.elts
                ]
            else:
                # Enums supports either iterator of (name, value) pairs
                # or mappings.
                if hasattr(names, "items") and isinstance(names.items, list):
                    attributes = [
                        _infer_first(const[0], context).value
                        for const in names.items
                        if isinstance(const[0], nodes.Const)
                    ]
                elif hasattr(names, "elts"):
                    # Enums can support either ["a", "b", "c"]
                    # or [("a", 1), ("b", 2), ...], but they can't
                    # be mixed.
                    if all(isinstance(const, nodes.Tuple) for const in names.elts):
                        attributes = [
                            _infer_first(const.elts[0], context).value
                            for const in names.elts
                            if isinstance(const, nodes.Tuple)
                        ]
                    else:
                        attributes = [
                            _infer_first(const, context).value for const in names.elts
                        ]
                else:
                    raise AttributeError
            if not attributes:
                raise AttributeError
    except (AttributeError, exceptions.InferenceError):
        raise UseInferenceDefault()

    # If we can't infer the name of the class, don't crash, up to this point
    # we know it is a namedtuple anyway.
    name = name or "Uninferable"
    # we want to return a Class node instance with proper attributes set
    class_node = nodes.ClassDef(name, "docstring")
    class_node.parent = node.parent
    # set base class=tuple
    class_node.bases.append(base_type)
    # XXX add __init__(*attributes) method
    for attr in attributes:
        fake_node = nodes.EmptyNode()
        fake_node.parent = class_node
        fake_node.attrname = attr
        class_node.instance_attrs[attr] = [fake_node]
    return class_node, name, attributes
def _has_namedtuple_base(node):
    """Predicate for class inference tip

    :type node: ClassDef
    :rtype: bool
    """
    # Coerce to bool so the predicate matches its documented return type
    # instead of leaking the intermediate set intersection.
    return bool(set(node.basenames) & TYPING_NAMEDTUPLE_BASENAMES)
def _looks_like(node, name):
    """Return True when the Call node invokes *name*, either as a bare name
    (``name(...)``) or as an attribute (``obj.name(...)``)."""
    callee = node.func
    if isinstance(callee, nodes.Attribute):
        return callee.attrname == name
    if isinstance(callee, nodes.Name):
        return callee.name == name
    return False
# Pre-bound call predicates used when registering the transforms below.
_looks_like_namedtuple = functools.partial(_looks_like, name="namedtuple")
_looks_like_enum = functools.partial(_looks_like, name="Enum")
_looks_like_typing_namedtuple = functools.partial(_looks_like, name="NamedTuple")
def infer_named_tuple(node, context=None):
    """Specific inference function for namedtuple Call node.

    Builds a tuple-derived ClassDef via infer_func_form, honours the
    ``rename`` argument, and grafts the namedtuple API (_asdict, _make,
    _replace, _fields and one property per field) onto it.  Returns an
    iterator with the single class node.
    """
    tuple_base_name = nodes.Name(name="tuple", parent=node.root())
    class_node, name, attributes = infer_func_form(
        node, tuple_base_name, context=context
    )
    call_site = arguments.CallSite.from_call(node)
    func = next(extract_node("import collections; collections.namedtuple").infer())
    try:
        rename = next(call_site.infer_argument(func, "rename", context)).bool_value()
    except InferenceError:
        rename = False

    if rename:
        # rename=True replaces invalid/duplicate fields with _%d names.
        attributes = _get_renamed_namedtuple_attributes(attributes)

    replace_args = ", ".join("{arg}=None".format(arg=arg) for arg in attributes)

    field_def = (
        "    {name} = property(lambda self: self[{index:d}], "
        "doc='Alias for field number {index:d}')"
    )
    field_defs = "\n".join(
        field_def.format(name=name, index=index)
        for index, name in enumerate(attributes)
    )

    fake = AstroidBuilder(MANAGER).string_build(
        """
class %(name)s(tuple):
    __slots__ = ()
    _fields = %(fields)r
    def _asdict(self):
        return self.__dict__
    @classmethod
    def _make(cls, iterable, new=tuple.__new__, len=len):
        return new(cls, iterable)
    def _replace(self, %(replace_args)s):
        return self
    def __getnewargs__(self):
        return tuple(self)
%(field_defs)s
    """
        % {
            "name": name,
            "fields": attributes,
            "field_defs": field_defs,
            "replace_args": replace_args,
        }
    )
    class_node.locals["_asdict"] = fake.body[0].locals["_asdict"]
    class_node.locals["_make"] = fake.body[0].locals["_make"]
    class_node.locals["_replace"] = fake.body[0].locals["_replace"]
    class_node.locals["_fields"] = fake.body[0].locals["_fields"]
    for attr in attributes:
        class_node.locals[attr] = fake.body[0].locals[attr]
    # we use UseInferenceDefault, we can't be a generator so return an iterator
    return iter([class_node])
def _get_renamed_namedtuple_attributes(field_names):
names = list(field_names)
seen = set()
for i, name in enumerate(field_names):
if (
not all(c.isalnum() or c == "_" for c in name)
or keyword.iskeyword(name)
or not name
or name[0].isdigit()
or name.startswith("_")
or name in seen
):
names[i] = "_%d" % i
seen.add(name)
return tuple(names)
def infer_enum(node, context=None):
    """ Specific inference function for enum Call node.

    Builds a class deriving from a fake EnumMeta that models calling,
    iteration, reversal and subscripting of enum types, and returns an
    iterator with one instantiated class.
    """
    enum_meta = extract_node(
        """
    class EnumMeta(object):
        'docstring'
        def __call__(self, node):
            class EnumAttribute(object):
                name = ''
                value = 0
            return EnumAttribute()
        def __iter__(self):
            class EnumAttribute(object):
                name = ''
                value = 0
            return [EnumAttribute()]
        def __reversed__(self):
            class EnumAttribute(object):
                name = ''
                value = 0
            return (EnumAttribute, )
        def __next__(self):
            return next(iter(self))
        def __getitem__(self, attr):
            class Value(object):
                @property
                def name(self):
                    return ''
                @property
                def value(self):
                    return attr
            return Value()
        __members__ = ['']
        """
    )
    class_node = infer_func_form(node, enum_meta, context=context, enum=True)[0]
    return iter([class_node.instantiate_class()])
INT_FLAG_ADDITION_METHODS = """
def __or__(self, other):
return {name}(self.value | other.value)
def __and__(self, other):
return {name}(self.value & other.value)
def __xor__(self, other):
return {name}(self.value ^ other.value)
def __add__(self, other):
return {name}(self.value + other.value)
def __div__(self, other):
return {name}(self.value / other.value)
def __invert__(self):
return {name}(~self.value)
def __mul__(self, other):
return {name}(self.value * other.value)
"""
def infer_enum_class(node):
    """ Specific inference for enums.

    Replaces each member assignment of an Enum subclass with a synthetic
    class exposing ``name``/``value`` properties, so member access infers.
    Returns the (mutated) class node.
    """
    for basename in node.basenames:
        # TODO: doesn't handle subclasses yet. This implementation
        # is a hack to support enums.
        if basename not in ENUM_BASE_NAMES:
            continue
        if node.root().name == "enum":
            # Skip if the class is directly from enum module.
            break
        for local, values in node.locals.items():
            if any(not isinstance(value, nodes.AssignName) for value in values):
                continue

            targets = []
            stmt = values[0].statement()
            if isinstance(stmt, nodes.Assign):
                if isinstance(stmt.targets[0], nodes.Tuple):
                    targets = stmt.targets[0].itered()
                else:
                    targets = stmt.targets
            elif isinstance(stmt, nodes.AnnAssign):
                targets = [stmt.target]

            inferred_return_value = None
            if isinstance(stmt, nodes.Assign):
                if isinstance(stmt.value, nodes.Const):
                    if isinstance(stmt.value.value, str):
                        # Quote strings so they survive the format() below.
                        inferred_return_value = repr(stmt.value.value)
                    else:
                        inferred_return_value = stmt.value.value
                else:
                    inferred_return_value = stmt.value.as_string()

            new_targets = []
            for target in targets:
                # Replace all the assignments with our mocked class.
                classdef = dedent(
                    """
                class {name}({types}):
                    @property
                    def value(self):
                        return {return_value}
                    @property
                    def name(self):
                        return "{name}"
                """.format(
                        name=target.name,
                        types=", ".join(node.basenames),
                        return_value=inferred_return_value,
                    )
                )
                if "IntFlag" in basename:
                    # Alright, we need to add some additional methods.
                    # Unfortunately we still can't infer the resulting objects as
                    # Enum members, but once we'll be able to do that, the following
                    # should result in some nice symbolic execution
                    classdef += INT_FLAG_ADDITION_METHODS.format(name=target.name)

                fake = AstroidBuilder(MANAGER).string_build(classdef)[target.name]
                fake.parent = target.parent
                for method in node.mymethods():
                    fake.locals[method.name] = [method]
                new_targets.append(fake.instantiate_class())
            node.locals[local] = new_targets
        break
    return node
def infer_typing_namedtuple_class(class_node, context=None):
    """Infer a subclass of typing.NamedTuple.

    Rewrites the annotated class body as an equivalent
    ``collections.namedtuple(...)`` call, infers that, then copies the
    subclass's own methods onto the generated class.
    """
    # Check if it has the corresponding bases
    annassigns_fields = [
        annassign.target.name
        for annassign in class_node.body
        if isinstance(annassign, nodes.AnnAssign)
    ]
    code = dedent(
        """
    from collections import namedtuple
    namedtuple({typename!r}, {fields!r})
    """
    ).format(typename=class_node.name, fields=",".join(annassigns_fields))
    node = extract_node(code)
    generated_class_node = next(infer_named_tuple(node, context))
    for method in class_node.mymethods():
        generated_class_node.locals[method.name] = [method]
    return iter((generated_class_node,))
def infer_typing_namedtuple(node, context=None):
    """Infer a typing.NamedTuple(...) call."""
    # This is essentially a namedtuple with different arguments
    # so we extract the args and infer a named tuple.
    try:
        func = next(node.func.infer())
    except InferenceError:
        raise UseInferenceDefault
    if func.qname() != "typing.NamedTuple":
        raise UseInferenceDefault
    if len(node.args) != 2:
        raise UseInferenceDefault
    # The second argument must be a list/tuple of (name, type) pairs.
    if not isinstance(node.args[1], (nodes.List, nodes.Tuple)):
        raise UseInferenceDefault

    names = []
    for elt in node.args[1].elts:
        if not isinstance(elt, (nodes.List, nodes.Tuple)):
            raise UseInferenceDefault
        if len(elt.elts) != 2:
            raise UseInferenceDefault
        names.append(elt.elts[0].as_string())

    typename = node.args[0].as_string()
    if names:
        field_names = "({},)".format(",".join(names))
    else:
        field_names = "''"
    # Delegate to the regular namedtuple inference on a rewritten call.
    node = extract_node(
        "namedtuple({typename}, {fields})".format(typename=typename, fields=field_names)
    )
    return infer_named_tuple(node, context)
# Hook the namedtuple/enum inference tips into astroid's transform pipeline.
MANAGER.register_transform(
    nodes.Call, inference_tip(infer_named_tuple), _looks_like_namedtuple
)
MANAGER.register_transform(nodes.Call, inference_tip(infer_enum), _looks_like_enum)
MANAGER.register_transform(
    nodes.ClassDef,
    infer_enum_class,
    predicate=lambda cls: any(
        basename for basename in cls.basenames if basename in ENUM_BASE_NAMES
    ),
)
MANAGER.register_transform(
    nodes.ClassDef, inference_tip(infer_typing_namedtuple_class), _has_namedtuple_base
)
MANAGER.register_transform(
    nodes.Call, inference_tip(infer_typing_namedtuple), _looks_like_typing_namedtuple
)

View File

@@ -0,0 +1,77 @@
# Copyright (c) 2015-2016 Claudiu Popa <pcmanticore@gmail.com>
# Copyright (c) 2016 Ceridwen <ceridwenv@gmail.com>
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
"""Hooks for nose library."""
import re
import textwrap
import astroid
import astroid.builder
_BUILDER = astroid.builder.AstroidBuilder(astroid.MANAGER)
def _pep8(name, caps=re.compile("([A-Z])")):
return caps.sub(lambda m: "_" + m.groups()[0].lower(), name)
def _nose_tools_functions():
    """Get an iterator of names and bound methods.

    Yields ``(pep8_name, BoundMethod)`` pairs for every underscore-free
    ``assert*`` method of unittest.TestCase, mirroring what nose.tools
    generates dynamically.
    """
    module = _BUILDER.string_build(
        textwrap.dedent(
            """
    import unittest

    class Test(unittest.TestCase):
        pass
    a = Test()
    """
        )
    )
    try:
        case = next(module["a"].infer())
    except astroid.InferenceError:
        # If we can't infer the TestCase instance, expose nothing.
        return
    for method in case.methods():
        if method.name.startswith("assert") and "_" not in method.name:
            pep8_name = _pep8(method.name)
            yield pep8_name, astroid.BoundMethod(method, case)
        if method.name == "assertEqual":
            # nose also exports assert_equals.
            yield "assert_equals", astroid.BoundMethod(method, case)
def _nose_tools_transform(node):
    """Inject every pep8-aliased assert helper into the nose.tools module."""
    for method_name, method in _nose_tools_functions():
        node.locals[method_name] = [method]
def _nose_tools_trivial_transform():
    """Custom transform for the nose.tools module."""
    stub = _BUILDER.string_build("""__all__ = []""")
    all_entries = ["ok_", "eq_"]

    for pep8_name, method in _nose_tools_functions():
        all_entries.append(pep8_name)
        stub[pep8_name] = method

    # Update the __all__ variable, since nose.tools
    # does this manually with .append.
    all_assign = stub["__all__"].parent
    all_object = astroid.List(all_entries)
    all_object.parent = all_assign
    all_assign.value = all_object
    return stub


# nose.tools.trivial gets the generated stub; nose.tools itself gets the
# helpers injected directly.
astroid.register_module_extender(
    astroid.MANAGER, "nose.tools.trivial", _nose_tools_trivial_transform
)
astroid.MANAGER.register_transform(
    astroid.Module, _nose_tools_transform, lambda n: n.name == "nose.tools"
)

View File

@@ -0,0 +1,557 @@
# Copyright (c) 2015-2016, 2018 Claudiu Popa <pcmanticore@gmail.com>
# Copyright (c) 2016 Ceridwen <ceridwenv@gmail.com>
# Copyright (c) 2017-2018 hippo91 <guillaume.peillex@gmail.com>
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
"""Astroid hooks for numpy."""
import functools
import astroid
def numpy_random_mtrand_transform():
    """Stub every ``numpy.random.mtrand`` sampling function.

    Only the signatures matter for analysis; each body returns the name
    ``uninferable`` so astroid will not claim to know the result values.
    """
    return astroid.parse(
        """
    def beta(a, b, size=None): return uninferable
    def binomial(n, p, size=None): return uninferable
    def bytes(length): return uninferable
    def chisquare(df, size=None): return uninferable
    def choice(a, size=None, replace=True, p=None): return uninferable
    def dirichlet(alpha, size=None): return uninferable
    def exponential(scale=1.0, size=None): return uninferable
    def f(dfnum, dfden, size=None): return uninferable
    def gamma(shape, scale=1.0, size=None): return uninferable
    def geometric(p, size=None): return uninferable
    def get_state(): return uninferable
    def gumbel(loc=0.0, scale=1.0, size=None): return uninferable
    def hypergeometric(ngood, nbad, nsample, size=None): return uninferable
    def laplace(loc=0.0, scale=1.0, size=None): return uninferable
    def logistic(loc=0.0, scale=1.0, size=None): return uninferable
    def lognormal(mean=0.0, sigma=1.0, size=None): return uninferable
    def logseries(p, size=None): return uninferable
    def multinomial(n, pvals, size=None): return uninferable
    def multivariate_normal(mean, cov, size=None): return uninferable
    def negative_binomial(n, p, size=None): return uninferable
    def noncentral_chisquare(df, nonc, size=None): return uninferable
    def noncentral_f(dfnum, dfden, nonc, size=None): return uninferable
    def normal(loc=0.0, scale=1.0, size=None): return uninferable
    def pareto(a, size=None): return uninferable
    def permutation(x): return uninferable
    def poisson(lam=1.0, size=None): return uninferable
    def power(a, size=None): return uninferable
    def rand(*args): return uninferable
    def randint(low, high=None, size=None, dtype='l'): return uninferable
    def randn(*args): return uninferable
    def random_integers(low, high=None, size=None): return uninferable
    def random_sample(size=None): return uninferable
    def rayleigh(scale=1.0, size=None): return uninferable
    def seed(seed=None): return uninferable
    def set_state(state): return uninferable
    def shuffle(x): return uninferable
    def standard_cauchy(size=None): return uninferable
    def standard_exponential(size=None): return uninferable
    def standard_gamma(shape, size=None): return uninferable
    def standard_normal(size=None): return uninferable
    def standard_t(df, size=None): return uninferable
    def triangular(left, mode, right, size=None): return uninferable
    def uniform(low=0.0, high=1.0, size=None): return uninferable
    def vonmises(mu, kappa, size=None): return uninferable
    def wald(mean, scale, size=None): return uninferable
    def weibull(a, size=None): return uninferable
    def zipf(a, size=None): return uninferable
    """
    )
def numpy_core_umath_transform():
    """Return a stub module for ``numpy.core.umath``.

    Constants are given their literal values; every ufunc stub returns
    ``uninferable``.  The shared optional keyword arguments of ufuncs are
    spliced into each signature via ``str.format``.
    """
    # Keyword arguments accepted by every numpy ufunc (implicit string
    # concatenation keeps the line length manageable).
    ufunc_optional_keyword_arguments = (
        """out=None, where=True, casting='same_kind', order='K', """
        """dtype=None, subok=True"""
    )
    return astroid.parse(
        """
    # Constants
    e = 2.718281828459045
    euler_gamma = 0.5772156649015329

    # No arg functions
    def geterrobj(): return uninferable

    # One arg functions
    def seterrobj(errobj): return uninferable

    # One arg functions with optional kwargs
    def arccos(x, {opt_args:s}): return uninferable
    def arccosh(x, {opt_args:s}): return uninferable
    def arcsin(x, {opt_args:s}): return uninferable
    def arcsinh(x, {opt_args:s}): return uninferable
    def arctan(x, {opt_args:s}): return uninferable
    def arctanh(x, {opt_args:s}): return uninferable
    def cbrt(x, {opt_args:s}): return uninferable
    def conj(x, {opt_args:s}): return uninferable
    def conjugate(x, {opt_args:s}): return uninferable
    def cosh(x, {opt_args:s}): return uninferable
    def deg2rad(x, {opt_args:s}): return uninferable
    def degrees(x, {opt_args:s}): return uninferable
    def exp2(x, {opt_args:s}): return uninferable
    def expm1(x, {opt_args:s}): return uninferable
    def fabs(x, {opt_args:s}): return uninferable
    def frexp(x, {opt_args:s}): return uninferable
    def isfinite(x, {opt_args:s}): return uninferable
    def isinf(x, {opt_args:s}): return uninferable
    def log(x, {opt_args:s}): return uninferable
    def log1p(x, {opt_args:s}): return uninferable
    def log2(x, {opt_args:s}): return uninferable
    def logical_not(x, {opt_args:s}): return uninferable
    def modf(x, {opt_args:s}): return uninferable
    def negative(x, {opt_args:s}): return uninferable
    def rad2deg(x, {opt_args:s}): return uninferable
    def radians(x, {opt_args:s}): return uninferable
    def reciprocal(x, {opt_args:s}): return uninferable
    def rint(x, {opt_args:s}): return uninferable
    def sign(x, {opt_args:s}): return uninferable
    def signbit(x, {opt_args:s}): return uninferable
    def sinh(x, {opt_args:s}): return uninferable
    def spacing(x, {opt_args:s}): return uninferable
    def square(x, {opt_args:s}): return uninferable
    def tan(x, {opt_args:s}): return uninferable
    def tanh(x, {opt_args:s}): return uninferable
    def trunc(x, {opt_args:s}): return uninferable

    # Two args functions with optional kwargs
    def bitwise_and(x1, x2, {opt_args:s}): return uninferable
    def bitwise_or(x1, x2, {opt_args:s}): return uninferable
    def bitwise_xor(x1, x2, {opt_args:s}): return uninferable
    def copysign(x1, x2, {opt_args:s}): return uninferable
    def divide(x1, x2, {opt_args:s}): return uninferable
    def equal(x1, x2, {opt_args:s}): return uninferable
    def float_power(x1, x2, {opt_args:s}): return uninferable
    def floor_divide(x1, x2, {opt_args:s}): return uninferable
    def fmax(x1, x2, {opt_args:s}): return uninferable
    def fmin(x1, x2, {opt_args:s}): return uninferable
    def fmod(x1, x2, {opt_args:s}): return uninferable
    def greater(x1, x2, {opt_args:s}): return uninferable
    def hypot(x1, x2, {opt_args:s}): return uninferable
    def ldexp(x1, x2, {opt_args:s}): return uninferable
    def left_shift(x1, x2, {opt_args:s}): return uninferable
    def less(x1, x2, {opt_args:s}): return uninferable
    def logaddexp(x1, x2, {opt_args:s}): return uninferable
    def logaddexp2(x1, x2, {opt_args:s}): return uninferable
    def logical_and(x1, x2, {opt_args:s}): return uninferable
    def logical_or(x1, x2, {opt_args:s}): return uninferable
    def logical_xor(x1, x2, {opt_args:s}): return uninferable
    def maximum(x1, x2, {opt_args:s}): return uninferable
    def minimum(x1, x2, {opt_args:s}): return uninferable
    def nextafter(x1, x2, {opt_args:s}): return uninferable
    def not_equal(x1, x2, {opt_args:s}): return uninferable
    def power(x1, x2, {opt_args:s}): return uninferable
    def remainder(x1, x2, {opt_args:s}): return uninferable
    def right_shift(x1, x2, {opt_args:s}): return uninferable
    def subtract(x1, x2, {opt_args:s}): return uninferable
    def true_divide(x1, x2, {opt_args:s}): return uninferable
    """.format(
            opt_args=ufunc_optional_keyword_arguments
        )
    )
def numpy_core_numerictypes_transform():
    """Return a stub module for ``numpy.core.numerictypes``.

    Mirrors the scalar-type hierarchy (``generic`` -> ``number`` ->
    ``integer``/``inexact`` -> concrete sized types), ``dtype``,
    ``ndarray`` and ``busdaycalendar``; attributes are ``None`` and
    methods return ``uninferable`` so member access infers cleanly
    without astroid guessing wrong concrete types.
    """
    return astroid.parse(
        """
    # different types defined in numerictypes.py
    class generic(object):
        def __init__(self, value):
            self.T = None
            self.base = None
            self.data = None
            self.dtype = None
            self.flags = None
            self.flat = None
            self.imag = None
            self.itemsize = None
            self.nbytes = None
            self.ndim = None
            self.real = None
            self.size = None
            self.strides = None

        def all(self): return uninferable
        def any(self): return uninferable
        def argmax(self): return uninferable
        def argmin(self): return uninferable
        def argsort(self): return uninferable
        def astype(self): return uninferable
        def base(self): return uninferable
        def byteswap(self): return uninferable
        def choose(self): return uninferable
        def clip(self): return uninferable
        def compress(self): return uninferable
        def conj(self): return uninferable
        def conjugate(self): return uninferable
        def copy(self): return uninferable
        def cumprod(self): return uninferable
        def cumsum(self): return uninferable
        def data(self): return uninferable
        def diagonal(self): return uninferable
        def dtype(self): return uninferable
        def dump(self): return uninferable
        def dumps(self): return uninferable
        def fill(self): return uninferable
        def flags(self): return uninferable
        def flat(self): return uninferable
        def flatten(self): return uninferable
        def getfield(self): return uninferable
        def imag(self): return uninferable
        def item(self): return uninferable
        def itemset(self): return uninferable
        def itemsize(self): return uninferable
        def max(self): return uninferable
        def mean(self): return uninferable
        def min(self): return uninferable
        def nbytes(self): return uninferable
        def ndim(self): return uninferable
        def newbyteorder(self): return uninferable
        def nonzero(self): return uninferable
        def prod(self): return uninferable
        def ptp(self): return uninferable
        def put(self): return uninferable
        def ravel(self): return uninferable
        def real(self): return uninferable
        def repeat(self): return uninferable
        def reshape(self): return uninferable
        def resize(self): return uninferable
        def round(self): return uninferable
        def searchsorted(self): return uninferable
        def setfield(self): return uninferable
        def setflags(self): return uninferable
        def shape(self): return uninferable
        def size(self): return uninferable
        def sort(self): return uninferable
        def squeeze(self): return uninferable
        def std(self): return uninferable
        def strides(self): return uninferable
        def sum(self): return uninferable
        def swapaxes(self): return uninferable
        def take(self): return uninferable
        def tobytes(self): return uninferable
        def tofile(self): return uninferable
        def tolist(self): return uninferable
        def tostring(self): return uninferable
        def trace(self): return uninferable
        def transpose(self): return uninferable
        def var(self): return uninferable
        def view(self): return uninferable

    class dtype(object):
        def __init__(self, obj, align=False, copy=False):
            self.alignment = None
            self.base = None
            self.byteorder = None
            self.char = None
            self.descr = None
            self.fields = None
            self.flags = None
            self.hasobject = None
            self.isalignedstruct = None
            self.isbuiltin = None
            self.isnative = None
            self.itemsize = None
            self.kind = None
            self.metadata = None
            self.name = None
            self.names = None
            self.num = None
            self.shape = None
            self.str = None
            self.subdtype = None
            self.type = None

        def newbyteorder(self, new_order='S'): return uninferable
        def __neg__(self): return uninferable

    class ndarray(object):
        def __init__(self, shape, dtype=float, buffer=None, offset=0,
                     strides=None, order=None):
            self.T = None
            self.base = None
            self.ctypes = None
            self.data = None
            self.dtype = None
            self.flags = None
            self.flat = None
            self.imag = None
            self.itemsize = None
            self.nbytes = None
            self.ndim = None
            self.real = None
            self.shape = None
            self.size = None
            self.strides = None

        def __neg__(self): return uninferable
        def __inv__(self): return uninferable
        def __invert__(self): return uninferable
        def all(self): return uninferable
        def any(self): return uninferable
        def argmax(self): return uninferable
        def argmin(self): return uninferable
        def argpartition(self): return uninferable
        def argsort(self): return uninferable
        def astype(self): return uninferable
        def byteswap(self): return uninferable
        def choose(self): return uninferable
        def clip(self): return uninferable
        def compress(self): return uninferable
        def conj(self): return uninferable
        def conjugate(self): return uninferable
        def copy(self): return uninferable
        def cumprod(self): return uninferable
        def cumsum(self): return uninferable
        def diagonal(self): return uninferable
        def dot(self): return uninferable
        def dump(self): return uninferable
        def dumps(self): return uninferable
        def fill(self): return uninferable
        def flatten(self): return uninferable
        def getfield(self): return uninferable
        def item(self): return uninferable
        def itemset(self): return uninferable
        def max(self): return uninferable
        def mean(self): return uninferable
        def min(self): return uninferable
        def newbyteorder(self): return uninferable
        def nonzero(self): return uninferable
        def partition(self): return uninferable
        def prod(self): return uninferable
        def ptp(self): return uninferable
        def put(self): return uninferable
        def ravel(self): return uninferable
        def repeat(self): return uninferable
        def reshape(self): return uninferable
        def resize(self): return uninferable
        def round(self): return uninferable
        def searchsorted(self): return uninferable
        def setfield(self): return uninferable
        def setflags(self): return uninferable
        def sort(self): return uninferable
        def squeeze(self): return uninferable
        def std(self): return uninferable
        def sum(self): return uninferable
        def swapaxes(self): return uninferable
        def take(self): return uninferable
        def tobytes(self): return uninferable
        def tofile(self): return uninferable
        def tolist(self): return uninferable
        def tostring(self): return uninferable
        def trace(self): return uninferable
        def transpose(self): return uninferable
        def var(self): return uninferable
        def view(self): return uninferable

    class busdaycalendar(object):
        def __init__(self, weekmask='1111100', holidays=None):
            self.holidays = None
            self.weekmask = None

    class flexible(generic): pass
    class bool_(generic): pass
    class number(generic):
        def __neg__(self): return uninferable
    class datetime64(generic): pass

    class void(flexible):
        def __init__(self, *args, **kwargs):
            self.base = None
            self.dtype = None
            self.flags = None
        def getfield(self): return uninferable
        def setfield(self): return uninferable

    class character(flexible): pass

    class integer(number):
        def __init__(self, value):
            self.denominator = None
            self.numerator = None

    class inexact(number): pass

    class str_(str, character):
        def maketrans(self, x, y=None, z=None): return uninferable

    class bytes_(bytes, character):
        def fromhex(self, string): return uninferable
        def maketrans(self, frm, to): return uninferable

    class signedinteger(integer): pass

    class unsignedinteger(integer): pass

    class complexfloating(inexact): pass

    class floating(inexact): pass

    class float64(floating, float):
        def fromhex(self, string): return uninferable

    class uint64(unsignedinteger): pass
    class complex64(complexfloating): pass
    class int16(signedinteger): pass
    class float96(floating): pass
    class int8(signedinteger): pass
    class uint32(unsignedinteger): pass
    class uint8(unsignedinteger): pass
    class _typedict(dict): pass
    class complex192(complexfloating): pass
    class timedelta64(signedinteger): pass
    class int32(signedinteger): pass
    class uint16(unsignedinteger): pass
    class float32(floating): pass
    class complex128(complexfloating, complex): pass
    class float16(floating): pass
    class int64(signedinteger): pass

    buffer_type = memoryview
    bool8 = bool_
    byte = int8
    bytes0 = bytes_
    cdouble = complex128
    cfloat = complex128
    clongdouble = complex192
    clongfloat = complex192
    complex_ = complex128
    csingle = complex64
    double = float64
    float_ = float64
    half = float16
    int0 = int32
    int_ = int32
    intc = int32
    intp = int32
    long = int32
    longcomplex = complex192
    longdouble = float96
    longfloat = float96
    longlong = int64
    # NOTE(review): ``object0``/``object_`` reference ``object_`` with no
    # prior definition in this stub — verify against upstream astroid.
    object0 = object_
    object_ = object_
    short = int16
    single = float32
    singlecomplex = complex64
    str0 = str_
    string_ = bytes_
    ubyte = uint8
    uint = uint32
    uint0 = uint32
    uintc = uint32
    uintp = uint32
    ulonglong = uint64
    unicode = str_
    unicode_ = str_
    ushort = uint16
    void0 = void
    """
    )
def numpy_funcs():
    """Stub for top-level numpy helpers astroid cannot introspect.

    Only ``sum`` is covered: it delegates to the builtin so the result of
    ``numpy.sum`` over a plain sequence stays inferable.
    """
    stub_source = """
    import builtins
    def sum(a, axis=None, dtype=None, out=None, keepdims=None):
        return builtins.sum(a)
    """
    return astroid.parse(stub_source)
def _looks_like_numpy_function(func_name, numpy_module_name, node):
"""
Return True if the current node correspond to the function inside
the numpy module in parameters
:param node: the current node
:type node: FunctionDef
:param func_name: name of the function
:type func_name: str
:param numpy_module_name: name of the numpy module
:type numpy_module_name: str
:return: True if the current node correspond to the function looked for
:rtype: bool
"""
return node.name == func_name and node.parent.name == numpy_module_name
def numpy_function_infer_call_result(node):
    """Build a replacement for *node*'s ``infer_call_result`` that drops
    ``List``/``Tuple`` results from the inference.

    :param node: the node whose call-result inference must be filtered
    :type node: FunctionDef
    :return: a drop-in wrapper around ``node.infer_call_result``
    :rtype: function
    """
    # Capture the original bound method before the caller overwrites it.
    bound_infer_call_result = node.infer_call_result

    def _not_list_or_tuple(result):
        """Predicate: keep everything except List/Tuple nodes."""
        return not isinstance(result, (astroid.List, astroid.Tuple))

    def infer_call_result_wrapper(caller=None, context=None):
        """Delegate to the captured method and filter its results."""
        return filter(_not_list_or_tuple, bound_infer_call_result(caller, context))

    return infer_call_result_wrapper
def _replace_numpy_function_infer_call_result(node, context=None):
    # Swap the node's ``infer_call_result`` for the filtering wrapper so
    # unwanted List/Tuple inference results are discarded at call sites.
    node.infer_call_result = numpy_function_infer_call_result(node)
    return
# Patch call-result inference for specific numpy functions whose results
# should not be inferred as plain List/Tuple nodes.
astroid.MANAGER.register_transform(
    astroid.FunctionDef,
    _replace_numpy_function_infer_call_result,
    functools.partial(
        _looks_like_numpy_function, "linspace", "numpy.core.function_base"
    ),
)
astroid.MANAGER.register_transform(
    astroid.FunctionDef,
    _replace_numpy_function_infer_call_result,
    functools.partial(_looks_like_numpy_function, "array", "numpy.core.records"),
)
# Register the stub builders above for their respective numpy submodules.
astroid.register_module_extender(
    astroid.MANAGER, "numpy.core.umath", numpy_core_umath_transform
)
astroid.register_module_extender(
    astroid.MANAGER, "numpy.random.mtrand", numpy_random_mtrand_transform
)
astroid.register_module_extender(
    astroid.MANAGER, "numpy.core.numerictypes", numpy_core_numerictypes_transform
)
astroid.register_module_extender(astroid.MANAGER, "numpy", numpy_funcs)

View File

@@ -0,0 +1,75 @@
# Copyright (c) 2016, 2018 Claudiu Popa <pcmanticore@gmail.com>
# Copyright (c) 2016 Ceridwen <ceridwenv@gmail.com>
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
import astroid
from astroid import parse
from astroid import inference_tip
from astroid import register_module_extender
from astroid import MANAGER
def pkg_resources_transform():
    """Return a stub module extending ``pkg_resources``.

    Provides inferable signatures for the resource/working-set helpers
    that pkg_resources builds dynamically at import time.  Bodies mirror
    the real delegation targets; some reference names (``self``, ``os``,
    ``Distribution``) that only exist in the real module's scope — they
    are stubs for inference, not executable code.
    """
    return parse(
        """
def require(*requirements):
    return pkg_resources.working_set.require(*requirements)

def run_script(requires, script_name):
    return pkg_resources.working_set.run_script(requires, script_name)

def iter_entry_points(group, name=None):
    return pkg_resources.working_set.iter_entry_points(group, name)

def resource_exists(package_or_requirement, resource_name):
    return get_provider(package_or_requirement).has_resource(resource_name)

def resource_isdir(package_or_requirement, resource_name):
    return get_provider(package_or_requirement).resource_isdir(
        resource_name)

def resource_filename(package_or_requirement, resource_name):
    return get_provider(package_or_requirement).get_resource_filename(
        self, resource_name)

def resource_stream(package_or_requirement, resource_name):
    return get_provider(package_or_requirement).get_resource_stream(
        self, resource_name)

def resource_string(package_or_requirement, resource_name):
    return get_provider(package_or_requirement).get_resource_string(
        self, resource_name)

def resource_listdir(package_or_requirement, resource_name):
    return get_provider(package_or_requirement).resource_listdir(
        resource_name)

def extraction_error():
    pass

def get_cache_path(archive_name, names=()):
    extract_path = self.extraction_path or get_default_cache()
    target_path = os.path.join(extract_path, archive_name+'-tmp', *names)
    return target_path

def postprocess(tempname, filename):
    pass

def set_extraction_path(path):
    pass

def cleanup_resources(force=False):
    pass

def get_distribution(dist):
    return Distribution(dist)

_namespace_packages = {}
"""
    )


register_module_extender(MANAGER, "pkg_resources", pkg_resources_transform)

View File

@@ -0,0 +1,88 @@
# Copyright (c) 2014-2016 Claudiu Popa <pcmanticore@gmail.com>
# Copyright (c) 2014 Jeff Quast <contact@jeffquast.com>
# Copyright (c) 2014 Google, Inc.
# Copyright (c) 2016 Florian Bruhin <me@the-compiler.org>
# Copyright (c) 2016 Ceridwen <ceridwenv@gmail.com>
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
"""Astroid hooks for pytest."""
from __future__ import absolute_import
from astroid import MANAGER, register_module_extender
from astroid.builder import AstroidBuilder
def pytest_transform():
    """Return a stub module exposing pytest's public helpers.

    The real ``pytest`` module re-exports names from ``_pytest.*`` at
    runtime; this stub reproduces those aliases so astroid can infer
    them.  The nested try/except blocks mirror the attribute moves
    across pytest versions (pre/post 3.0).
    """
    return AstroidBuilder(MANAGER).string_build(
        """
try:
    import _pytest.mark
    import _pytest.recwarn
    import _pytest.runner
    import _pytest.python
    import _pytest.skipping
    import _pytest.assertion
except ImportError:
    pass
else:
    deprecated_call = _pytest.recwarn.deprecated_call
    warns = _pytest.recwarn.warns

    exit = _pytest.runner.exit
    fail = _pytest.runner.fail
    skip = _pytest.runner.skip
    importorskip = _pytest.runner.importorskip

    xfail = _pytest.skipping.xfail
    mark = _pytest.mark.MarkGenerator()
    raises = _pytest.python.raises

    # New in pytest 3.0
    try:
        approx = _pytest.python.approx
        register_assert_rewrite = _pytest.assertion.register_assert_rewrite
    except AttributeError:
        pass

# Moved in pytest 3.0
try:
    import _pytest.freeze_support
    freeze_includes = _pytest.freeze_support.freeze_includes
except ImportError:
    try:
        import _pytest.genscript
        freeze_includes = _pytest.genscript.freeze_includes
    except ImportError:
        pass

try:
    import _pytest.debugging
    set_trace = _pytest.debugging.pytestPDB().set_trace
except ImportError:
    try:
        import _pytest.pdb
        set_trace = _pytest.pdb.pytestPDB().set_trace
    except ImportError:
        pass

try:
    import _pytest.fixtures
    fixture = _pytest.fixtures.fixture
    yield_fixture = _pytest.fixtures.yield_fixture
except ImportError:
    try:
        import _pytest.python
        fixture = _pytest.python.fixture
        yield_fixture = _pytest.python.yield_fixture
    except ImportError:
        pass
"""
    )


# ``py.test`` is the legacy import name for the same module.
register_module_extender(MANAGER, "pytest", pytest_transform)
register_module_extender(MANAGER, "py.test", pytest_transform)

View File

@@ -0,0 +1,82 @@
# Copyright (c) 2015-2016 Claudiu Popa <pcmanticore@gmail.com>
# Copyright (c) 2016 Ceridwen <ceridwenv@gmail.com>
# Copyright (c) 2017 Roy Wright <roy@wright.org>
# Copyright (c) 2018 Ashley Whetter <ashley@awhetter.co.uk>
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
"""Astroid hooks for the PyQT library."""
from astroid import MANAGER, register_module_extender
from astroid.builder import AstroidBuilder
from astroid import nodes
from astroid import parse
def _looks_like_signal(node, signal_name="pyqtSignal"):
if "__class__" in node.instance_attrs:
try:
cls = node.instance_attrs["__class__"][0]
return cls.name == signal_name
except AttributeError:
# return False if the cls does not have a name attribute
pass
return False
def transform_pyqt_signal(node):
    """Give a ``pyqtSignal`` class node connect/disconnect/emit instance
    attributes so attribute access on signal instances infers cleanly."""
    stub_cls = parse(
        """
    class pyqtSignal(object):
        def connect(self, slot, type=None, no_receiver_check=False):
            pass
        def disconnect(self, slot):
            pass
        def emit(self, *args):
            pass
    """
    )["pyqtSignal"]
    for method_name in ("connect", "disconnect", "emit"):
        node.instance_attrs[method_name] = stub_cls[method_name]
def transform_pyside_signal(node):
    """Give a PySide ``Signal`` class node connect/disconnect/emit instance
    attributes (the stub class is deliberately named differently so it is
    never confused with the real Signal)."""
    stub_cls = parse(
        """
    class NotPySideSignal(object):
        def connect(self, receiver, type=None):
            pass
        def disconnect(self, receiver):
            pass
        def emit(self, *args):
            pass
    """
    )["NotPySideSignal"]
    for method_name in ("connect", "disconnect", "emit"):
        node.instance_attrs[method_name] = stub_cls[method_name]
def pyqt4_qtcore_transform():
    """Return a stub extending ``PyQt4.QtCore`` with the old-style
    ``SIGNAL`` helper and ``QObject.emit``."""
    return AstroidBuilder(MANAGER).string_build(
        """
def SIGNAL(signal_name): pass

class QObject(object):
    def emit(self, signal): pass
"""
    )


register_module_extender(MANAGER, "PyQt4.QtCore", pyqt4_qtcore_transform)
# Attach signal-method stubs to anything that looks like a pyqtSignal, and
# to the PySide Signal class itself.
MANAGER.register_transform(nodes.FunctionDef, transform_pyqt_signal, _looks_like_signal)
MANAGER.register_transform(
    nodes.ClassDef,
    transform_pyside_signal,
    lambda node: node.qname() == "PySide.QtCore.Signal",
)

View File

@@ -0,0 +1,75 @@
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
import random
import astroid
from astroid import helpers
from astroid import MANAGER
ACCEPTED_ITERABLES_FOR_SAMPLE = (astroid.List, astroid.Set, astroid.Tuple)
def _clone_node_with_lineno(node, parent, lineno):
cls = node.__class__
other_fields = node._other_fields
_astroid_fields = node._astroid_fields
init_params = {"lineno": lineno, "col_offset": node.col_offset, "parent": parent}
postinit_params = {param: getattr(node, param) for param in _astroid_fields}
if other_fields:
init_params.update({param: getattr(node, param) for param in other_fields})
new_node = cls(**init_params)
if hasattr(node, "postinit") and _astroid_fields:
new_node.postinit(**postinit_params)
return new_node
def infer_random_sample(node, context=None):
    """Infer ``random.sample(seq, k)`` calls over literal sequences.

    Builds an ``astroid.List`` of ``k`` cloned elements from the inferred
    sequence; raises ``UseInferenceDefault`` (falling back to normal
    inference) whenever the call does not match the supported shape.
    """
    # Exactly two positional arguments: the sequence and the sample size.
    if len(node.args) != 2:
        raise astroid.UseInferenceDefault
    length = node.args[1]
    # The sample size must be a literal int.
    if not isinstance(length, astroid.Const):
        raise astroid.UseInferenceDefault
    if not isinstance(length.value, int):
        raise astroid.UseInferenceDefault
    # The sequence must safely infer to a literal list/set/tuple.
    inferred_sequence = helpers.safe_infer(node.args[0], context=context)
    if not inferred_sequence:
        raise astroid.UseInferenceDefault
    if not isinstance(inferred_sequence, ACCEPTED_ITERABLES_FOR_SAMPLE):
        raise astroid.UseInferenceDefault
    if length.value > len(inferred_sequence.elts):
        # In this case, this will raise a ValueError
        raise astroid.UseInferenceDefault
    try:
        # The concrete elements chosen are arbitrary (random), but the
        # resulting node has the right type and length for inference.
        elts = random.sample(inferred_sequence.elts, length.value)
    except ValueError:
        raise astroid.UseInferenceDefault
    new_node = astroid.List(
        lineno=node.lineno, col_offset=node.col_offset, parent=node.scope()
    )
    # Re-parent each sampled element under the new List node.
    new_elts = [
        _clone_node_with_lineno(elt, parent=new_node, lineno=new_node.lineno)
        for elt in elts
    ]
    new_node.postinit(new_elts)
    return iter((new_node,))
def _looks_like_random_sample(node):
    """Return True when the call node invokes something named ``sample``
    (either ``random.sample(...)`` or a bare ``sample(...)``)."""
    callee = node.func
    if isinstance(callee, astroid.Attribute):
        return callee.attrname == "sample"
    return isinstance(callee, astroid.Name) and callee.name == "sample"
# Replace default inference of matching calls with the sample-aware tip.
MANAGER.register_transform(
    astroid.Call, astroid.inference_tip(infer_random_sample), _looks_like_random_sample
)

View File

@@ -0,0 +1,36 @@
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
import sys
import astroid
# Python >= 3.6 exposes the re flags via the RegexFlag enum.
PY36 = sys.version_info >= (3, 6)

if PY36:
    # Since Python 3.6 there is the RegexFlag enum
    # where every entry will be exposed via updating globals()

    def _re_transform():
        """Stub the ``re`` module's flag constants with their
        ``sre_compile`` sources so astroid can infer them."""
        return astroid.parse(
            """
        import sre_compile
        ASCII = sre_compile.SRE_FLAG_ASCII
        IGNORECASE = sre_compile.SRE_FLAG_IGNORECASE
        LOCALE = sre_compile.SRE_FLAG_LOCALE
        UNICODE = sre_compile.SRE_FLAG_UNICODE
        MULTILINE = sre_compile.SRE_FLAG_MULTILINE
        DOTALL = sre_compile.SRE_FLAG_DOTALL
        VERBOSE = sre_compile.SRE_FLAG_VERBOSE
        A = ASCII
        I = IGNORECASE
        L = LOCALE
        U = UNICODE
        M = MULTILINE
        S = DOTALL
        X = VERBOSE
        TEMPLATE = sre_compile.SRE_FLAG_TEMPLATE
        T = TEMPLATE
        DEBUG = sre_compile.SRE_FLAG_DEBUG
        """
        )

    astroid.register_module_extender(astroid.MANAGER, "re", _re_transform)

View File

@@ -0,0 +1,200 @@
# Copyright (c) 2014-2016, 2018 Claudiu Popa <pcmanticore@gmail.com>
# Copyright (c) 2015-2016 Ceridwen <ceridwenv@gmail.com>
# Copyright (c) 2018 Bryce Guinta <bryce.paul.guinta@gmail.com>
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
"""Astroid hooks for six module."""
from textwrap import dedent
from astroid import MANAGER, register_module_extender
from astroid.builder import AstroidBuilder
from astroid.exceptions import (
AstroidBuildingError,
InferenceError,
AttributeInferenceError,
)
from astroid import nodes
SIX_ADD_METACLASS = "six.add_metaclass"
def _indent(text, prefix, predicate=None):
"""Adds 'prefix' to the beginning of selected lines in 'text'.
If 'predicate' is provided, 'prefix' will only be added to the lines
where 'predicate(line)' is True. If 'predicate' is not provided,
it will default to adding 'prefix' to all non-empty lines that do not
consist solely of whitespace characters.
"""
if predicate is None:
predicate = lambda line: line.strip()
def prefixed_lines():
for line in text.splitlines(True):
yield prefix + line if predicate(line) else line
return "".join(prefixed_lines())
_IMPORTS = """
import _io
cStringIO = _io.StringIO
filter = filter
from itertools import filterfalse
input = input
from sys import intern
map = map
range = range
from imp import reload as reload_module
from functools import reduce
from shlex import quote as shlex_quote
from io import StringIO
from collections import UserDict, UserList, UserString
xrange = range
zip = zip
from itertools import zip_longest
import builtins
import configparser
import copyreg
import _dummy_thread
import http.cookiejar as http_cookiejar
import http.cookies as http_cookies
import html.entities as html_entities
import html.parser as html_parser
import http.client as http_client
import http.server as http_server
BaseHTTPServer = CGIHTTPServer = SimpleHTTPServer = http.server
import pickle as cPickle
import queue
import reprlib
import socketserver
import _thread
import winreg
import xmlrpc.server as xmlrpc_server
import xmlrpc.client as xmlrpc_client
import urllib.robotparser as urllib_robotparser
import email.mime.multipart as email_mime_multipart
import email.mime.nonmultipart as email_mime_nonmultipart
import email.mime.text as email_mime_text
import email.mime.base as email_mime_base
import urllib.parse as urllib_parse
import urllib.error as urllib_error
import tkinter
import tkinter.dialog as tkinter_dialog
import tkinter.filedialog as tkinter_filedialog
import tkinter.scrolledtext as tkinter_scrolledtext
import tkinter.simpledialog as tkinder_simpledialog
import tkinter.tix as tkinter_tix
import tkinter.ttk as tkinter_ttk
import tkinter.constants as tkinter_constants
import tkinter.dnd as tkinter_dnd
import tkinter.colorchooser as tkinter_colorchooser
import tkinter.commondialog as tkinter_commondialog
import tkinter.filedialog as tkinter_tkfiledialog
import tkinter.font as tkinter_font
import tkinter.messagebox as tkinter_messagebox
import urllib
import urllib.request as urllib_request
import urllib.robotparser as urllib_robotparser
import urllib.parse as urllib_parse
import urllib.error as urllib_error
"""
def six_moves_transform():
    """Build a pseudo-module in which ``six.moves`` is an instance of a
    ``Moves`` class whose attributes are the imports from ``_IMPORTS``."""
    code = dedent(
        """
    class Moves(object):
    {}
    moves = Moves()
    """
    ).format(_indent(_IMPORTS, " "))
    module = AstroidBuilder(MANAGER).string_build(code)
    # Present the synthetic module under the name callers import.
    module.name = "six.moves"
    return module
def _six_fail_hook(modname):
    """Fix six.moves imports due to the dynamic nature of this
    class.
    Construct a pseudo-module which contains all the necessary imports
    for six
    :param modname: Name of failed module
    :type modname: str
    :return: An astroid module
    :rtype: nodes.Module
    """
    # True for "six.moves.<something>", False for "six.moves" itself.
    attribute_of = modname != "six.moves" and modname.startswith("six.moves")
    if modname != "six.moves" and not attribute_of:
        raise AstroidBuildingError(modname=modname)
    module = AstroidBuilder(MANAGER).string_build(_IMPORTS)
    module.name = "six.moves"
    if attribute_of:
        # Facilitate import of submodules in Moves
        start_index = len(module.name)
        # "six.moves.http_client" -> "http_client" (dots become underscores
        # to match the aliases declared in _IMPORTS).
        attribute = modname[start_index:].lstrip(".").replace(".", "_")
        try:
            import_attr = module.getattr(attribute)[0]
        except AttributeInferenceError:
            raise AstroidBuildingError(modname=modname)
        if isinstance(import_attr, nodes.Import):
            submodule = MANAGER.ast_from_module_name(import_attr.names[0][0])
            return submodule
    # Let dummy submodule imports pass through
    # This will cause an Uninferable result, which is okay
    return module
def _looks_like_decorated_with_six_add_metaclass(node):
    """Return True when any decorator on *node* is a call whose target
    spells ``six.add_metaclass``."""
    decorators = node.decorators
    if not decorators:
        return False
    return any(
        isinstance(decorator, nodes.Call)
        and decorator.func.as_string() == SIX_ADD_METACLASS
        for decorator in decorators.nodes
    )
def transform_six_add_metaclass(node):
    """Check if the given class node is decorated with *six.add_metaclass*
    If so, inject its argument as the metaclass of the underlying class.
    """
    if not node.decorators:
        return
    for decorator in node.decorators.nodes:
        if not isinstance(decorator, nodes.Call):
            continue
        try:
            # Resolve the decorator to confirm it really is six.add_metaclass
            # (the predicate only checked its textual form).
            func = next(decorator.func.infer())
        except InferenceError:
            continue
        if func.qname() == SIX_ADD_METACLASS and decorator.args:
            # The decorator's first argument is the metaclass.
            metaclass = decorator.args[0]
            node._metaclass = metaclass
            return node
register_module_extender(MANAGER, "six", six_moves_transform)
# requests vendors its own copy of six; extend it the same way.
register_module_extender(
    MANAGER, "requests.packages.urllib3.packages.six", six_moves_transform
)
# six.moves submodules do not exist on disk; resolve them via the hook.
MANAGER.register_failed_import_hook(_six_fail_hook)
MANAGER.register_transform(
    nodes.ClassDef,
    transform_six_add_metaclass,
    _looks_like_decorated_with_six_add_metaclass,
)

View File

@@ -0,0 +1,74 @@
# Copyright (c) 2016 Claudiu Popa <pcmanticore@gmail.com>
# Copyright (c) 2016 Ceridwen <ceridwenv@gmail.com>
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
"""Astroid hooks for the ssl library."""
from astroid import MANAGER, register_module_extender
from astroid.builder import AstroidBuilder
from astroid import nodes
from astroid import parse
def ssl_transform():
    """Return a stub extending ``ssl`` with the names it re-exports from
    the C ``_ssl`` module, so astroid can infer ``ssl.PROTOCOL_*``,
    ``ssl.OP_*``, error classes, etc."""
    return parse(
        """
    from _ssl import OPENSSL_VERSION_NUMBER, OPENSSL_VERSION_INFO, OPENSSL_VERSION
    from _ssl import _SSLContext, MemoryBIO
    from _ssl import (
        SSLError, SSLZeroReturnError, SSLWantReadError, SSLWantWriteError,
        SSLSyscallError, SSLEOFError,
        )
    from _ssl import CERT_NONE, CERT_OPTIONAL, CERT_REQUIRED
    from _ssl import txt2obj as _txt2obj, nid2obj as _nid2obj
    from _ssl import RAND_status, RAND_add, RAND_bytes, RAND_pseudo_bytes
    try:
        from _ssl import RAND_egd
    except ImportError:
        # LibreSSL does not provide RAND_egd
        pass
    from _ssl import (OP_ALL, OP_CIPHER_SERVER_PREFERENCE,
                      OP_NO_COMPRESSION, OP_NO_SSLv2, OP_NO_SSLv3,
                      OP_NO_TLSv1, OP_NO_TLSv1_1, OP_NO_TLSv1_2,
                      OP_SINGLE_DH_USE, OP_SINGLE_ECDH_USE)

    from _ssl import (ALERT_DESCRIPTION_ACCESS_DENIED, ALERT_DESCRIPTION_BAD_CERTIFICATE,
                      ALERT_DESCRIPTION_BAD_CERTIFICATE_HASH_VALUE,
                      ALERT_DESCRIPTION_BAD_CERTIFICATE_STATUS_RESPONSE,
                      ALERT_DESCRIPTION_BAD_RECORD_MAC,
                      ALERT_DESCRIPTION_CERTIFICATE_EXPIRED,
                      ALERT_DESCRIPTION_CERTIFICATE_REVOKED,
                      ALERT_DESCRIPTION_CERTIFICATE_UNKNOWN,
                      ALERT_DESCRIPTION_CERTIFICATE_UNOBTAINABLE,
                      ALERT_DESCRIPTION_CLOSE_NOTIFY, ALERT_DESCRIPTION_DECODE_ERROR,
                      ALERT_DESCRIPTION_DECOMPRESSION_FAILURE,
                      ALERT_DESCRIPTION_DECRYPT_ERROR,
                      ALERT_DESCRIPTION_HANDSHAKE_FAILURE,
                      ALERT_DESCRIPTION_ILLEGAL_PARAMETER,
                      ALERT_DESCRIPTION_INSUFFICIENT_SECURITY,
                      ALERT_DESCRIPTION_INTERNAL_ERROR,
                      ALERT_DESCRIPTION_NO_RENEGOTIATION,
                      ALERT_DESCRIPTION_PROTOCOL_VERSION,
                      ALERT_DESCRIPTION_RECORD_OVERFLOW,
                      ALERT_DESCRIPTION_UNEXPECTED_MESSAGE,
                      ALERT_DESCRIPTION_UNKNOWN_CA,
                      ALERT_DESCRIPTION_UNKNOWN_PSK_IDENTITY,
                      ALERT_DESCRIPTION_UNRECOGNIZED_NAME,
                      ALERT_DESCRIPTION_UNSUPPORTED_CERTIFICATE,
                      ALERT_DESCRIPTION_UNSUPPORTED_EXTENSION,
                      ALERT_DESCRIPTION_USER_CANCELLED)
    from _ssl import (SSL_ERROR_EOF, SSL_ERROR_INVALID_ERROR_CODE, SSL_ERROR_SSL,
                      SSL_ERROR_SYSCALL, SSL_ERROR_WANT_CONNECT, SSL_ERROR_WANT_READ,
                      SSL_ERROR_WANT_WRITE, SSL_ERROR_WANT_X509_LOOKUP, SSL_ERROR_ZERO_RETURN)
    from _ssl import VERIFY_CRL_CHECK_CHAIN, VERIFY_CRL_CHECK_LEAF, VERIFY_DEFAULT, VERIFY_X509_STRICT
    from _ssl import HAS_SNI, HAS_ECDH, HAS_NPN, HAS_ALPN
    from _ssl import _OPENSSL_API_VERSION
    from _ssl import PROTOCOL_SSLv23, PROTOCOL_TLSv1, PROTOCOL_TLSv1_1, PROTOCOL_TLSv1_2
    from _ssl import PROTOCOL_TLS, PROTOCOL_TLS_CLIENT, PROTOCOL_TLS_SERVER
    """
    )


register_module_extender(MANAGER, "ssl", ssl_transform)

View File

@@ -0,0 +1,119 @@
# Copyright (c) 2016-2017 Claudiu Popa <pcmanticore@gmail.com>
# Copyright (c) 2017 Hugo <hugovk@users.noreply.github.com>
# Copyright (c) 2018 Bryce Guinta <bryce.paul.guinta@gmail.com>
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
import sys
import textwrap
import six
import astroid
# Interpreter feature gates; they select the matching Popen __init__ /
# communicate / wait signatures in _subprocess_transform below.
PY37 = sys.version_info >= (3, 7)
PY36 = sys.version_info >= (3, 6)
PY34 = sys.version_info >= (3, 4)
PY33 = sys.version_info >= (3, 3)
# Build a fake subprocess.Popen class whose signatures match the running
# interpreter version, so calls against Popen infer correctly.
def _subprocess_transform():
if six.PY3:
communicate = (bytes("string", "ascii"), bytes("string", "ascii"))
communicate_signature = "def communicate(self, input=None, timeout=None)"
if PY37:
init = """
def __init__(self, args, bufsize=0, executable=None,
stdin=None, stdout=None, stderr=None,
preexec_fn=None, close_fds=False, shell=False,
cwd=None, env=None, universal_newlines=False,
startupinfo=None, creationflags=0, restore_signals=True,
start_new_session=False, pass_fds=(), *,
encoding=None, errors=None, text=None):
pass
"""
elif PY36:
init = """
def __init__(self, args, bufsize=0, executable=None,
stdin=None, stdout=None, stderr=None,
preexec_fn=None, close_fds=False, shell=False,
cwd=None, env=None, universal_newlines=False,
startupinfo=None, creationflags=0, restore_signals=True,
start_new_session=False, pass_fds=(), *,
encoding=None, errors=None):
pass
"""
else:
init = """
def __init__(self, args, bufsize=0, executable=None,
stdin=None, stdout=None, stderr=None,
preexec_fn=None, close_fds=False, shell=False,
cwd=None, env=None, universal_newlines=False,
startupinfo=None, creationflags=0, restore_signals=True,
start_new_session=False, pass_fds=()):
pass
"""
else:
communicate = ("string", "string")
communicate_signature = "def communicate(self, input=None)"
init = """
def __init__(self, args, bufsize=0, executable=None,
stdin=None, stdout=None, stderr=None,
preexec_fn=None, close_fds=False, shell=False,
cwd=None, env=None, universal_newlines=False,
startupinfo=None, creationflags=0):
pass
"""
if PY34:
wait_signature = "def wait(self, timeout=None)"
else:
wait_signature = "def wait(self)"
if six.PY3:
ctx_manager = """
def __enter__(self): return self
def __exit__(self, *args): pass
"""
else:
ctx_manager = ""
py3_args = ""
if PY33:
py3_args = "args = []"
# Assemble the fake Popen class from the version-specific fragments.
code = textwrap.dedent(
"""
class Popen(object):
returncode = pid = 0
stdin = stdout = stderr = file()
%(py3_args)s
%(communicate_signature)s:
return %(communicate)r
%(wait_signature)s:
return self.returncode
def poll(self):
return self.returncode
def send_signal(self, signal):
pass
def terminate(self):
pass
def kill(self):
pass
%(ctx_manager)s
"""
% {
"communicate": communicate,
"communicate_signature": communicate_signature,
"wait_signature": wait_signature,
"ctx_manager": ctx_manager,
"py3_args": py3_args,
}
)
init_lines = textwrap.dedent(init).splitlines()
indented_init = "\n".join(" " * 4 + line for line in init_lines)
code += indented_init
return astroid.parse(code)
# Replace the subprocess module contents during inference.
astroid.register_module_extender(astroid.MANAGER, "subprocess", _subprocess_transform)

View File

@@ -0,0 +1,29 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2016 Claudiu Popa <pcmanticore@gmail.com>
# Copyright (c) 2017 Łukasz Rogalski <rogalski.91@gmail.com>
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
import astroid
# Fake implementation of threading's lock type so "with Lock():" and the
# acquire/release protocol infer cleanly.
def _thread_transform():
return astroid.parse(
"""
class lock(object):
def acquire(self, blocking=True, timeout=-1):
pass
def release(self):
pass
def __enter__(self):
return True
def __exit__(self, *args):
pass
def Lock():
return lock()
"""
)
# The fake body above is appended to the real "threading" module.
astroid.register_module_extender(astroid.MANAGER, "threading", _thread_transform)

View File

@@ -0,0 +1,96 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2017-2018 Claudiu Popa <pcmanticore@gmail.com>
# Copyright (c) 2017 Łukasz Rogalski <rogalski.91@gmail.com>
# Copyright (c) 2017 David Euresti <github@euresti.com>
# Copyright (c) 2018 Bryce Guinta <bryce.paul.guinta@gmail.com>
"""Astroid hooks for typing.py support."""
import typing
from astroid import (
MANAGER,
UseInferenceDefault,
extract_node,
inference_tip,
nodes,
InferenceError,
)
# Names / qualified names the typing brain special-cases.
TYPING_NAMEDTUPLE_BASENAMES = {"NamedTuple", "typing.NamedTuple"}
TYPING_TYPEVARS = {"TypeVar", "NewType"}
TYPING_TYPEVARS_QUALIFIED = {"typing.TypeVar", "typing.NewType"}
# Class template substituted for inferred typing constructs; subscripting
# an instance of Meta returns the class itself, so chained X[...] keeps
# inferring to the same synthetic class.
TYPING_TYPE_TEMPLATE = """
class Meta(type):
def __getitem__(self, item):
return self
@property
def __args__(self):
return ()
class {0}(metaclass=Meta):
pass
"""
# Every public name exported by the typing module of the running interpreter.
TYPING_MEMBERS = set(typing.__all__)
def looks_like_typing_typevar_or_newtype(node):
    """Return True when the call node's callee is named TypeVar or NewType."""
    callee = node.func
    if isinstance(callee, nodes.Attribute):
        return callee.attrname in TYPING_TYPEVARS
    if isinstance(callee, nodes.Name):
        return callee.name in TYPING_TYPEVARS
    return False
def infer_typing_typevar_or_newtype(node, context=None):
    """Infer a typing.TypeVar(...) or typing.NewType(...) call.

    The call is replaced with a synthetic class built from
    TYPING_TYPE_TEMPLATE so that subscripting the result keeps inferring.
    """
    try:
        callee = next(node.func.infer(context=context))
    except InferenceError as exc:
        raise UseInferenceDefault from exc
    # Not one of the two typing factories, or called without the name
    # argument: fall back to default inference.
    if callee.qname() not in TYPING_TYPEVARS_QUALIFIED or not node.args:
        raise UseInferenceDefault
    type_name = node.args[0].as_string().strip("'")
    stub = extract_node(TYPING_TYPE_TEMPLATE.format(type_name))
    return stub.infer(context=context)
def _looks_like_typing_subscript(node):
    """Guess whether a Subscript node *might* be a typing-related subscript."""
    # Walk down nested subscripts (e.g. Dict[str, List[int]]) to the base
    # expression, then check its name against the typing exports.
    current = node
    while isinstance(current, nodes.Subscript):
        current = current.value
    if isinstance(current, nodes.Name):
        return current.name in TYPING_MEMBERS
    if isinstance(current, nodes.Attribute):
        return current.attrname in TYPING_MEMBERS
    return False
def infer_typing_attr(node, context=None):
    """Infer a typing.X[...] subscript as a synthetic subscriptable class."""
    try:
        value = next(node.value.infer())
    except InferenceError as exc:
        raise UseInferenceDefault from exc
    qualified = value.qname()
    # Only names that actually live in the typing module are handled here.
    if not qualified.startswith("typing."):
        raise UseInferenceDefault
    short_name = qualified.split(".")[-1]
    stub = extract_node(TYPING_TYPE_TEMPLATE.format(short_name))
    return stub.infer(context=context)
# Hook the inference tips into astroid's transform pipeline: calls that
# look like TypeVar/NewType, and subscripts that look typing-related.
MANAGER.register_transform(
nodes.Call,
inference_tip(infer_typing_typevar_or_newtype),
looks_like_typing_typevar_or_newtype,
)
MANAGER.register_transform(
nodes.Subscript, inference_tip(infer_typing_attr), _looks_like_typing_subscript
)

View File

@@ -0,0 +1,20 @@
# Copyright (c) 2017 Claudiu Popa <pcmanticore@gmail.com>
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
"""Astroid hooks for the UUID module."""
from astroid import MANAGER
from astroid import nodes
# Give uuid.UUID a synthetic "int" member so attribute access infers.
def _patch_uuid_class(node):
# The .int member is patched using __dict__
node.locals["int"] = [nodes.Const(0, parent=node)]
# Apply only to the uuid.UUID class definition itself.
MANAGER.register_transform(
nodes.ClassDef, _patch_uuid_class, lambda node: node.qname() == "uuid.UUID"
)

View File

@@ -0,0 +1,435 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2006-2011, 2013-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
# Copyright (c) 2013 Phil Schaf <flying-sheep@web.de>
# Copyright (c) 2014-2018 Claudiu Popa <pcmanticore@gmail.com>
# Copyright (c) 2014-2015 Google, Inc.
# Copyright (c) 2014 Alexander Presnyakov <flagist0@gmail.com>
# Copyright (c) 2015-2016 Ceridwen <ceridwenv@gmail.com>
# Copyright (c) 2016 Derek Gustafson <degustaf@gmail.com>
# Copyright (c) 2017 Łukasz Rogalski <rogalski.91@gmail.com>
# Copyright (c) 2018 Anthony Sottile <asottile@umich.edu>
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
"""The AstroidBuilder makes astroid from living object and / or from _ast
The builder is not thread safe and can't be used to parse different sources
at the same time.
"""
import os
import textwrap
from tokenize import detect_encoding
from astroid._ast import _parse
from astroid import bases
from astroid import exceptions
from astroid import manager
from astroid import modutils
from astroid import raw_building
from astroid import rebuilder
from astroid import nodes
from astroid import util
# The name of the transient function that is used to
# wrap expressions to be extracted when calling
# extract_node.
_TRANSIENT_FUNCTION = "__"
# The comment used to select a statement to be extracted
# when calling extract_node.
_STATEMENT_SELECTOR = "#@"
# Shared manager instance used by the module-level parse()/extract_node()
# helpers and as the default manager of AstroidBuilder.
MANAGER = manager.AstroidManager()
def open_source_file(filename):
    """Open a Python source file honoring its declared encoding.

    Returns a ``(stream, encoding, data)`` triple: a text stream opened
    with the detected encoding (left positioned at EOF; the caller must
    close it), the encoding name, and the fully decoded file contents.
    """
    # Peek at the raw bytes first: the PEP 263 coding cookie or a BOM
    # decides which codec the text-mode open must use.
    with open(filename, "rb") as raw_stream:
        source_encoding = detect_encoding(raw_stream.readline)[0]
    text_stream = open(filename, "r", newline=None, encoding=source_encoding)
    contents = text_stream.read()
    return text_stream, source_encoding, contents
def _can_assign_attr(node, attrname):
try:
slots = node.slots()
except NotImplementedError:
pass
else:
if slots and attrname not in {slot.value for slot in slots}:
return False
return True
class AstroidBuilder(raw_building.InspectBuilder):
"""Class for building an astroid tree from source code or from a live module.
The param *manager* specifies the manager class which should be used.
If no manager is given, then the default one will be used. The
param *apply_transforms* determines if the transforms should be
applied after the tree was built from source or from a live object,
by default being True.
"""
# pylint: disable=redefined-outer-name
def __init__(self, manager=None, apply_transforms=True):
super(AstroidBuilder, self).__init__()
self._manager = manager or MANAGER
self._apply_transforms = apply_transforms
# Prefer rebuilding from the module's .py source when one exists next to
# the compiled file; otherwise fall back to runtime introspection.
def module_build(self, module, modname=None):
"""Build an astroid from a living module instance."""
node = None
path = getattr(module, "__file__", None)
if path is not None:
path_, ext = os.path.splitext(modutils._path_from_filename(path))
if ext in (".py", ".pyc", ".pyo") and os.path.exists(path_ + ".py"):
node = self.file_build(path_ + ".py", modname)
if node is None:
# this is a built-in module
# get a partial representation by introspection
node = self.inspect_build(module, modname=modname, path=path)
if self._apply_transforms:
# We have to handle transformation by ourselves since the
# rebuilder isn't called for builtin nodes
node = self._manager.visit_transforms(node)
return node
# Each failure mode of open_source_file maps to a structured astroid
# exception carrying modname/path/error for lazy message formatting.
def file_build(self, path, modname=None):
"""Build astroid from a source code file (i.e. from an ast)
*path* is expected to be a python source file
"""
try:
stream, encoding, data = open_source_file(path)
except IOError as exc:
raise exceptions.AstroidBuildingError(
"Unable to load file {path}:\n{error}",
modname=modname,
path=path,
error=exc,
) from exc
except (SyntaxError, LookupError) as exc:
raise exceptions.AstroidSyntaxError(
"Python 3 encoding specification error or unknown encoding:\n"
"{error}",
modname=modname,
path=path,
error=exc,
) from exc
except UnicodeError as exc: # wrong encoding
# detect_encoding returns utf-8 if no encoding specified
raise exceptions.AstroidBuildingError(
"Wrong or no encoding specified for (unknown).", filename=path
) from exc
with stream:
# get module name if necessary
if modname is None:
try:
modname = ".".join(modutils.modpath_from_file(path))
except ImportError:
modname = os.path.splitext(os.path.basename(path))[0]
# build astroid representation
module = self._data_build(data, modname, path)
return self._post_build(module, encoding)
def string_build(self, data, modname="", path=None):
"""Build astroid from source code string."""
module = self._data_build(data, modname, path)
module.file_bytes = data.encode("utf-8")
return self._post_build(module, "utf-8")
def _post_build(self, module, encoding):
"""Handles encoding and delayed nodes after a module has been built"""
module.file_encoding = encoding
self._manager.cache_module(module)
# post tree building steps after we stored the module in the cache:
for from_node in module._import_from_nodes:
if from_node.modname == "__future__":
for symbol, _ in from_node.names:
module.future_imports.add(symbol)
self.add_from_names_to_locals(from_node)
# handle delayed assattr nodes
for delayed in module._delayed_assattr:
self.delayed_assattr(delayed)
# Visit the transforms
if self._apply_transforms:
module = self._manager.visit_transforms(module)
return module
def _data_build(self, data, modname, path):
"""Build tree node from data and add some informations"""
try:
node = _parse(data + "\n")
except (TypeError, ValueError, SyntaxError) as exc:
raise exceptions.AstroidSyntaxError(
"Parsing Python code failed:\n{error}",
source=data,
modname=modname,
path=path,
error=exc,
) from exc
if path is not None:
node_file = os.path.abspath(path)
else:
node_file = "<?>"
# A trailing ".__init__" means the module is really its package.
if modname.endswith(".__init__"):
modname = modname[:-9]
package = True
else:
package = (
path is not None
and os.path.splitext(os.path.basename(path))[0] == "__init__"
)
builder = rebuilder.TreeRebuilder(self._manager)
module = builder.visit_module(node, modname, node_file, package)
module._import_from_nodes = builder._import_from_nodes
module._delayed_assattr = builder._delayed_assattr
return module
def add_from_names_to_locals(self, node):
"""Store imported names to the locals
Resort the locals if coming from a delayed node
"""
_key_func = lambda node: node.fromlineno
def sort_locals(my_list):
my_list.sort(key=_key_func)
for (name, asname) in node.names:
if name == "*":
try:
imported = node.do_import_module()
except exceptions.AstroidBuildingError:
continue
for name in imported.public_names():
node.parent.set_local(name, node)
sort_locals(node.parent.scope().locals[name])
else:
node.parent.set_local(asname or name, node)
sort_locals(node.parent.scope().locals[asname or name])
def delayed_assattr(self, node):
"""Visit a AssAttr node
This adds name to locals and handle members definition.
"""
try:
frame = node.frame()
for inferred in node.expr.infer():
if inferred is util.Uninferable:
continue
try:
if inferred.__class__ is bases.Instance:
inferred = inferred._proxied
iattrs = inferred.instance_attrs
if not _can_assign_attr(inferred, node.attrname):
continue
elif isinstance(inferred, bases.Instance):
# Const, Tuple, ... we may be wrong, may be not, but
# anyway we don't want to pollute builtin's namespace
continue
elif inferred.is_function:
iattrs = inferred.instance_attrs
else:
iattrs = inferred.locals
except AttributeError:
# XXX log error
continue
values = iattrs.setdefault(node.attrname, [])
if node in values:
continue
# get assign in __init__ first XXX useful ?
if (
frame.name == "__init__"
and values
and values[0].frame().name != "__init__"
):
values.insert(0, node)
else:
values.append(node)
except exceptions.InferenceError:
pass
# Factory for implicit namespace-package modules: no doc, flagged as a
# package, with the namespace search path attached.
def build_namespace_package_module(name, path):
return nodes.Module(name, doc="", path=path, package=True)
def parse(code, module_name="", path=None, apply_transforms=True):
    """Parse a source string and return the astroid AST built from it.

    :param str code: The code for the module.
    :param str module_name: The name for the module, if any
    :param str path: The path for the module
    :param bool apply_transforms: Apply the default transforms to the
        freshly built tree; pass False to skip them.
    """
    dedented = textwrap.dedent(code)
    tree_builder = AstroidBuilder(manager=MANAGER, apply_transforms=apply_transforms)
    return tree_builder.string_build(dedented, modname=module_name, path=path)
def _extract_expressions(node):
"""Find expressions in a call to _TRANSIENT_FUNCTION and extract them.
The function walks the AST recursively to search for expressions that
are wrapped into a call to _TRANSIENT_FUNCTION. If it finds such an
expression, it completely removes the function call node from the tree,
replacing it by the wrapped expression inside the parent.
:param node: An astroid node.
:type node: astroid.bases.NodeNG
:yields: The sequence of wrapped expressions on the modified tree
expression can be found.
"""
if (
isinstance(node, nodes.Call)
and isinstance(node.func, nodes.Name)
and node.func.name == _TRANSIENT_FUNCTION
):
real_expr = node.args[0]
# Re-parent the wrapped expression before splicing it in.
real_expr.parent = node.parent
# Search for node in all _astng_fields (the fields checked when
# get_children is called) of its parent. Some of those fields may
# be lists or tuples, in which case the elements need to be checked.
# When we find it, replace it by real_expr, so that the AST looks
# like no call to _TRANSIENT_FUNCTION ever took place.
for name in node.parent._astroid_fields:
child = getattr(node.parent, name)
if isinstance(child, (list, tuple)):
for idx, compound_child in enumerate(child):
if compound_child is node:
child[idx] = real_expr
elif child is node:
setattr(node.parent, name, real_expr)
yield real_expr
else:
# Not a transient call: recurse into the children.
for child in node.get_children():
yield from _extract_expressions(child)
def _find_statement_by_line(node, line):
    """Extract the statement on a specific line from an AST.

    If the line number of *node* matches *line* it is returned; otherwise
    its children are searched depth-first, recursively.

    :param node: An astroid node.
    :type node: astroid.bases.NodeNG
    :param line: The line number of the statement to extract.
    :type line: int
    :returns: The statement on the line, or None if no statement for the
        line can be found.
    :rtype: astroid.bases.NodeNG or None
    """
    # Decorated class/function definitions don't carry the line of the
    # actual class/def keyword explicitly; .fromlineno is close enough.
    own_line = (
        node.fromlineno
        if isinstance(node, (nodes.ClassDef, nodes.FunctionDef))
        else node.lineno
    )
    if own_line == line:
        return node
    for child in node.get_children():
        found = _find_statement_by_line(child, line)
        if found:
            return found
    return None
def extract_node(code, module_name=""):
"""Parses some Python code as a module and extracts a designated AST node.
Statements:
To extract one or more statement nodes, append #@ to the end of the line
Examples:
>>> def x():
>>> def y():
>>> return 1 #@
The return statement will be extracted.
>>> class X(object):
>>> def meth(self): #@
>>> pass
The function object 'meth' will be extracted.
Expressions:
To extract arbitrary expressions, surround them with the fake
function call __(...). After parsing, the surrounded expression
will be returned and the whole AST (accessible via the returned
node's parent attribute) will look like the function call was
never there in the first place.
Examples:
>>> a = __(1)
The const node will be extracted.
>>> def x(d=__(foo.bar)): pass
The node containing the default argument will be extracted.
>>> def foo(a, b):
>>> return 0 < __(len(a)) < b
The node containing the function call 'len' will be extracted.
If no statements or expressions are selected, the last toplevel
statement will be returned.
If the selected statement is a discard statement, (i.e. an expression
turned into a statement), the wrapped expression is returned instead.
For convenience, singleton lists are unpacked.
:param str code: A piece of Python code that is parsed as
a module. Will be passed through textwrap.dedent first.
:param str module_name: The name of the module.
:returns: The designated node from the parse tree, or a list of nodes.
:rtype: astroid.bases.NodeNG, or a list of nodes.
"""
# Unwrap Expr statements so the caller gets the underlying expression.
def _extract(node):
if isinstance(node, nodes.Expr):
return node.value
return node
# Collect 1-based line numbers of lines ending with the #@ selector.
requested_lines = []
for idx, line in enumerate(code.splitlines()):
if line.strip().endswith(_STATEMENT_SELECTOR):
requested_lines.append(idx + 1)
tree = parse(code, module_name=module_name)
if not tree.body:
raise ValueError("Empty tree, cannot extract from it")
extracted = []
if requested_lines:
extracted = [_find_statement_by_line(tree, line) for line in requested_lines]
# Modifies the tree.
extracted.extend(_extract_expressions(tree))
# Default to the last top-level statement when nothing was selected.
if not extracted:
extracted.append(tree.body[-1])
extracted = [_extract(node) for node in extracted]
if len(extracted) == 1:
return extracted[0]
return extracted

View File

@@ -0,0 +1,179 @@
# Copyright (c) 2015-2016, 2018 Claudiu Popa <pcmanticore@gmail.com>
# Copyright (c) 2015-2016 Ceridwen <ceridwenv@gmail.com>
# Copyright (c) 2018 Bryce Guinta <bryce.paul.guinta@gmail.com>
# Copyright (c) 2018 Nick Drozd <nicholasdrozd@gmail.com>
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
"""Various context related utilities, including inference and call contexts."""
import contextlib
import pprint
from typing import Optional
class InferenceContext:
"""Provide context for inference
Store already inferred nodes to save time
Account for already visited nodes to stop infinite recursion
"""
__slots__ = (
"path",
"lookupname",
"callcontext",
"boundnode",
"inferred",
"extra_context",
)
def __init__(self, path=None, inferred=None):
self.path = path or set()
"""
:type: set(tuple(NodeNG, optional(str)))
Path of visited nodes and their lookupname
Currently this key is ``(node, context.lookupname)``
"""
self.lookupname = None
"""
:type: optional[str]
The original name of the node
e.g.
foo = 1
The inference of 'foo' is nodes.Const(1) but the lookup name is 'foo'
"""
self.callcontext = None
"""
:type: optional[CallContext]
The call arguments and keywords for the given context
"""
self.boundnode = None
"""
:type: optional[NodeNG]
The bound node of the given context
e.g. the bound node of object.__new__(cls) is the object node
"""
self.inferred = inferred or {}
"""
:type: dict(seq, seq)
Inferred node contexts to their mapped results
Currently the key is ``(node, lookupname, callcontext, boundnode)``
and the value is tuple of the inferred results
"""
self.extra_context = {}
"""
:type: dict(NodeNG, Context)
Context that needs to be passed down through call stacks
for call arguments
"""
def push(self, node):
"""Push node into inference path
:return: True if node is already in context path else False
:rtype: bool
Allows one to see if the given node has already
been looked at for this inference context"""
name = self.lookupname
if (node, name) in self.path:
return True
self.path.add((node, name))
return False
def clone(self):
"""Clone inference path
For example, each side of a binary operation (BinOp)
starts with the same context but diverge as each side is inferred
so the InferenceContext will need be cloned"""
# XXX copy lookupname/callcontext ?
clone = InferenceContext(self.path, inferred=self.inferred)
clone.callcontext = self.callcontext
clone.boundnode = self.boundnode
clone.extra_context = self.extra_context
return clone
def cache_generator(self, key, generator):
"""Cache result of generator into dictionary
Used to cache inference results"""
results = []
for result in generator:
results.append(result)
yield result
# Only stored once the generator is fully consumed.
self.inferred[key] = tuple(results)
@contextlib.contextmanager
def restore_path(self):
# Snapshot the path and restore it after the with-block finishes.
path = set(self.path)
yield
self.path = path
def __str__(self):
# Debug representation: every slot pretty-printed on its own line.
state = (
"%s=%s"
% (field, pprint.pformat(getattr(self, field), width=80 - len(field)))
for field in self.__slots__
)
return "%s(%s)" % (type(self).__name__, ",\n ".join(state))
class CallContext:
    """Holds information for a call site."""

    __slots__ = ("args", "keywords")

    def __init__(self, args, keywords=None):
        """
        :param List[NodeNG] args: Call positional arguments
        :param Union[List[nodes.Keyword], None] keywords: Call keywords
        """
        self.args = args
        # Normalize keyword nodes into (name, value-node) pairs; a missing
        # or empty keyword list becomes an empty list.
        self.keywords = [(kw.arg, kw.value) for kw in keywords] if keywords else []
def copy_context(context: Optional[InferenceContext]) -> InferenceContext:
    """Clone *context* when one is given, otherwise return a fresh context."""
    if context is None:
        return InferenceContext()
    return context.clone()
def bind_context_to_node(context, node):
    """Return a copy of *context* with its boundnode set to *node*.

    The bound node lets further inference retrieve the correct function
    name or attribute value. A fresh context is used on purpose: reusing
    an existing one could incorrectly propagate the bound node higher up
    in the call stack.

    :param context: Context to use
    :type context: Optional(context)
    :param node: Node to do name lookups from
    :type node NodeNG:
    :returns: A new context
    :rtype: InferenceContext
    """
    bound = copy_context(context)
    bound.boundnode = node
    return bound

View File

@@ -0,0 +1,141 @@
# Copyright (c) 2015-2016, 2018 Claudiu Popa <pcmanticore@gmail.com>
# Copyright (c) 2015-2016 Ceridwen <ceridwenv@gmail.com>
# Copyright (c) 2015 Florian Bruhin <me@the-compiler.org>
# Copyright (c) 2016 Derek Gustafson <degustaf@gmail.com>
# Copyright (c) 2018 Nick Drozd <nicholasdrozd@gmail.com>
# Copyright (c) 2018 Ashley Whetter <ashley@awhetter.co.uk>
# Copyright (c) 2018 HoverHell <hoverhell@gmail.com>
# Copyright (c) 2018 Bryce Guinta <bryce.paul.guinta@gmail.com>
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
""" A few useful function/method decorators."""
import functools
import wrapt
from astroid import context as contextmod
from astroid import exceptions
from astroid import util
@wrapt.decorator
def cached(func, instance, args, kwargs):
"""Simple decorator to cache result of method calls without args."""
# Per-instance cache dict, created lazily and keyed by the wrapped
# function object; arguments are ignored on purpose.
cache = getattr(instance, "__cache", None)
if cache is None:
instance.__cache = cache = {}
try:
return cache[func]
except KeyError:
cache[func] = result = func(*args, **kwargs)
return result
class cachedproperty:
    """Provide a cached property, equivalent to stacking @cached and
    @property, but more efficient.

    After the first access the computed value is stored on the instance
    under the property's name, so later reads bypass this descriptor
    entirely.  Doing ``del obj.<property_name>`` empties the cache.

    Idea taken from the pyramid_ framework and the mercurial_ project.
    .. _pyramid: http://pypi.python.org/pypi/pyramid
    .. _mercurial: http://pypi.python.org/pypi/Mercurial
    """

    __slots__ = ("wrapped",)

    def __init__(self, wrapped):
        # The wrapped callable must expose __name__: that is the instance
        # attribute the computed value will be cached under.
        try:
            wrapped.__name__
        except AttributeError as exc:
            raise TypeError("%s must have a __name__ attribute" % wrapped) from exc
        self.wrapped = wrapped

    @property
    def __doc__(self):
        original_doc = getattr(self.wrapped, "__doc__", None)
        header = "<wrapped by the cachedproperty decorator>"
        if original_doc:
            return header + "\n" + original_doc
        return header

    def __get__(self, inst, objtype=None):
        # Class-level access returns the descriptor itself.
        if inst is None:
            return self
        computed = self.wrapped(inst)
        # Shadow the descriptor with the computed value on the instance;
        # being a non-data descriptor, the instance attribute wins next time.
        setattr(inst, self.wrapped.__name__, computed)
        return computed
def path_wrapper(func):
    """Return the given infer function wrapped to handle the path.

    Inference stops when the node has already been looked at for the given
    `InferenceContext`, which prevents infinite recursion.
    """

    @functools.wraps(func)
    def wrapped(node, context=None, _func=func, **kwargs):
        """Wrapper generator handling the context bookkeeping."""
        if context is None:
            context = contextmod.InferenceContext()
        # Node already visited for this context: stop right away.
        if context.push(node):
            return None
        seen = set()
        results = _func(node, context, **kwargs)
        try:
            while True:
                result = next(results)
                # Unproxy only true instances (not const, tuple, dict...)
                # so duplicates are detected on the underlying object.
                key = (
                    result._proxied
                    if result.__class__.__name__ == "Instance"
                    else result
                )
                if key not in seen:
                    seen.add(key)
                    yield result
        except StopIteration as error:
            # Propagate the inner generator's return value, if any.
            return error.args[0] if error.args else None

    return wrapped
@wrapt.decorator
def yes_if_nothing_inferred(func, instance, args, kwargs):
# Delegate to the wrapped inference generator, but when it yields
# nothing at all, yield Uninferable instead of being empty.
generator = func(*args, **kwargs)
try:
yield next(generator)
except StopIteration:
# generator is empty
yield util.Uninferable
return
yield from generator
@wrapt.decorator
def raise_if_nothing_inferred(func, instance, args, kwargs):
# Delegate to the wrapped inference generator, but when it yields
# nothing at all, raise InferenceError instead of being empty.
generator = func(*args, **kwargs)
try:
yield next(generator)
except StopIteration as error:
# generator is empty
if error.args:
# pylint: disable=not-a-mapping
raise exceptions.InferenceError(**error.args[0])
raise exceptions.InferenceError(
"StopIteration raised without any error information."
)
yield from generator

View File

@@ -0,0 +1,230 @@
# Copyright (c) 2007, 2009-2010, 2013 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
# Copyright (c) 2014 Google, Inc.
# Copyright (c) 2015-2018 Claudiu Popa <pcmanticore@gmail.com>
# Copyright (c) 2015-2016 Ceridwen <ceridwenv@gmail.com>
# Copyright (c) 2016 Derek Gustafson <degustaf@gmail.com>
# Copyright (c) 2018 Bryce Guinta <bryce.paul.guinta@gmail.com>
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
"""this module contains exceptions used in the astroid library
"""
from astroid import util
class AstroidError(Exception):
    """Base exception class for all astroid related exceptions.

    AstroidError and its subclasses are structured: state at the time the
    exception is thrown is attached via keyword-only constructor arguments.
    Each subclass has its own set of standard fields, but use your best
    judgment to decide whether a specific exception instance needs more or
    fewer fields for debugging. Field values may be used to lazily build
    the error message: ``self.message.format()`` is called with them when
    the exception is rendered.
    """

    def __init__(self, message="", **kws):
        super().__init__(message)
        self.message = message
        # Attach every extra keyword as an attribute for later formatting.
        for field, value in kws.items():
            setattr(self, field, value)

    def __str__(self):
        # Lazy message construction from the stored fields.
        return self.message.format(**vars(self))
class AstroidBuildingError(AstroidError):
    """Raised when an astroid representation cannot be built.

    Standard attributes:
    modname: Name of the module that AST construction failed for.
    error: Exception raised during construction.
    """

    def __init__(self, message="Failed to import module {modname}.", **kws):
        super().__init__(message, **kws)
class AstroidImportError(AstroidBuildingError):
"""Exception class used when a module can't be imported by astroid."""
class TooManyLevelsError(AstroidImportError):
    """Raised when a relative import goes beyond the top-level package.

    Standard attributes:
    level: The level which was attempted.
    name: the name of the module on which the relative import was attempted.
    """

    level = None
    name = None

    def __init__(
        self,
        message="Relative import with too many levels ({level}) for module {name!r}",
        **kws
    ):
        super().__init__(message, **kws)
class AstroidSyntaxError(AstroidBuildingError):
"""Exception class used when a module can't be parsed."""
class NoDefault(AstroidError):
    """Raised by a function's `default_value` method when an argument has
    no default value.

    Standard attributes:
    func: Function node.
    name: Name of argument without a default.
    """

    func = None
    name = None

    def __init__(self, message="{func!r} has no default for {name!r}.", **kws):
        super().__init__(message, **kws)
class ResolveError(AstroidError):
    """Base class of astroid resolution/inference error.
    ResolveError is not intended to be raised directly; raise a subclass.
    Standard attributes:
    context: InferenceContext object.
    """
    # Default so the attribute exists even when not passed as a keyword.
    context = None
class MroError(ResolveError):
    """Error raised when there is a problem with method resolution of a class.
    Standard attributes:
    mros: A sequence of sequences containing ClassDef nodes.
    cls: ClassDef node whose MRO resolution failed.
    context: InferenceContext object.
    """
    mros = ()
    cls = None
    def __str__(self):
        # Render each candidate MRO as "(A, B, C)" before formatting.
        rendered = []
        for candidate_mro in self.mros:
            names = ", ".join(base.name for base in candidate_mro)
            rendered.append("({})".format(names))
        return self.message.format(mros=", ".join(rendered), cls=self.cls)
class DuplicateBasesError(MroError):
    """Error raised when there are duplicate bases in the same class bases.
    Inherits the ``mros``/``cls`` fields and message formatting from MroError.
    """
class InconsistentMroError(MroError):
    """Error raised when a class's MRO is inconsistent.
    Inherits the ``mros``/``cls`` fields and message formatting from MroError.
    """
class SuperError(ResolveError):
    """Error raised when there is a problem with a *super* call.
    Standard attributes:
    *super_*: The Super instance that raised the exception.
    context: InferenceContext object.
    """
    super_ = None
    def __str__(self):
        # Format the message with the Super instance's attributes.
        return self.message.format(**vars(self.super_))
class InferenceError(ResolveError):
    """Raised when astroid is unable to infer a node.
    Standard attributes:
    node: The node inference was called on.
    context: InferenceContext object.
    """
    # Class-level defaults so the fields always exist.
    node = None
    context = None
    def __init__(self, message="Inference failed for {node!r}.", **kws):
        super().__init__(message, **kws)
# Why does this inherit from InferenceError rather than ResolveError?
# Changing it causes some inference tests to fail.
class NameInferenceError(InferenceError):
    """Raised when looking up a name fails; the astroid analogue of NameError.
    Standard attributes:
    name: The name for which lookup failed, as a string.
    scope: The node representing the scope in which the lookup occurred.
    context: InferenceContext object.
    """
    # Class-level defaults so the fields always exist.
    name = None
    scope = None
    def __init__(self, message="{name!r} not found in {scope!r}.", **kws):
        super().__init__(message, **kws)
class AttributeInferenceError(ResolveError):
    """Raised when an attribute lookup fails; the analogue of AttributeError.
    Standard attributes:
    target: The node for which lookup failed.
    attribute: The attribute for which lookup failed, as a string.
    context: InferenceContext object.
    """
    # Class-level defaults so the fields always exist.
    target = None
    attribute = None
    def __init__(self, message="{attribute!r} not found on {target!r}.", **kws):
        super().__init__(message, **kws)
class UseInferenceDefault(Exception):
    """Raised in a custom inference function to indicate that inference
    should fall back to the default behaviour.
    """
# Internal control-flow exception; deliberately not part of the AstroidError tree.
class _NonDeducibleTypeHierarchy(Exception):
    """Raised when is_subtype / is_supertype can't deduce the relation between two types."""
class AstroidIndexError(AstroidError):
    """Raised when an Indexable / Mapping does not have an index / key.
    The astroid analogue of IndexError / KeyError.
    """
class AstroidTypeError(AstroidError):
    """Raised when a TypeError would be expected in Python code."""
class InferenceOverwriteError(AstroidError):
    """Raised when an inference tip is overwritten.
    Currently only used for debugging.
    """
# Backwards-compatibility aliases.
# NOTE(review): kept so client code written against older astroid releases
# keeps importing the historical names; do not remove lightly.
OperationError = util.BadOperationMessage
UnaryOperationError = util.BadUnaryOperationMessage
BinaryOperationError = util.BadBinaryOperationMessage
SuperArgumentTypeError = SuperError
UnresolvableName = NameInferenceError
NotFoundError = AttributeInferenceError
AstroidBuildingException = AstroidBuildingError

View File

@@ -0,0 +1,272 @@
# Copyright (c) 2015-2018 Claudiu Popa <pcmanticore@gmail.com>
# Copyright (c) 2015-2016 Ceridwen <ceridwenv@gmail.com>
# Copyright (c) 2018 Bryce Guinta <bryce.paul.guinta@gmail.com>
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
"""
Various helper utilities.
"""
import builtins as builtins_mod
from astroid import bases
from astroid import context as contextmod
from astroid import exceptions
from astroid import manager
from astroid import nodes
from astroid import raw_building
from astroid import scoped_nodes
from astroid import util
BUILTINS = builtins_mod.__name__
def _build_proxy_class(cls_name, builtins):
    """Create a stub ClassDef named *cls_name* parented to the builtins module."""
    stub = raw_building.build_class(cls_name)
    stub.parent = builtins
    return stub
def _function_type(function, builtins):
    """Return a proxy class for the runtime type of *function*.
    NOTE(review): assumes *function* is a Lambda, BoundMethod or
    UnboundMethod; anything else would leave ``cls_name`` unbound — confirm
    callers uphold this.
    """
    if isinstance(function, scoped_nodes.Lambda):
        # Functions living in the builtins module get CPython's type name.
        defined_in_builtins = function.root().name == BUILTINS
        cls_name = "builtin_function_or_method" if defined_in_builtins else "function"
    elif isinstance(function, bases.BoundMethod):
        cls_name = "method"
    elif isinstance(function, bases.UnboundMethod):
        cls_name = "function"
    return _build_proxy_class(cls_name, builtins)
def _object_type(node, context=None):
    """Yield candidate type nodes for *node* (generator used by object_type)."""
    astroid_manager = manager.AstroidManager()
    builtins = astroid_manager.builtins_module
    context = context or contextmod.InferenceContext()
    for inferred in node.infer(context=context):
        if isinstance(inferred, scoped_nodes.ClassDef):
            # A new-style class's type is its metaclass when one is set,
            # otherwise the builtin ``type``.
            if inferred.newstyle:
                metaclass = inferred.metaclass()
                if metaclass:
                    yield metaclass
                    continue
            yield builtins.getattr("type")[0]
        elif isinstance(inferred, (scoped_nodes.Lambda, bases.UnboundMethod)):
            yield _function_type(inferred, builtins)
        elif isinstance(inferred, scoped_nodes.Module):
            yield _build_proxy_class("module", builtins)
        else:
            # Anything else (instances, proxies) reports its proxied class.
            yield inferred._proxied
def object_type(node, context=None):
    """Obtain the type of the given node.
    This is used to implement the ``type`` builtin, which means that it's
    used for inferring type calls, as well as used in a couple of other places
    in the inference.
    The node will be inferred first, so this function can support all
    sorts of objects, as long as they support inference.
    """
    try:
        candidate_types = set(_object_type(node, context))
    except exceptions.InferenceError:
        return util.Uninferable
    # Ambiguous (several candidate types) or empty results are Uninferable.
    if len(candidate_types) != 1:
        return util.Uninferable
    return next(iter(candidate_types))
def _object_type_is_subclass(obj_type, class_or_seq, context=None):
    """Return True if *obj_type*'s MRO contains any class in *class_or_seq*.
    Mirrors ``issubclass`` semantics, including raising AstroidTypeError for
    non-type entries; returns Uninferable when *obj_type* is unknown.
    """
    if not isinstance(class_or_seq, (tuple, list)):
        class_seq = (class_or_seq,)
    else:
        class_seq = class_or_seq
    if obj_type is util.Uninferable:
        return util.Uninferable
    # Instances are not types; mark them so the loop below rejects them.
    class_seq = [
        item if not isinstance(item, bases.Instance) else util.Uninferable
        for item in class_seq
    ]
    # strict compatibility with issubclass
    # issubclass(type, (object, 1)) evaluates to true
    # issubclass(object, (1, type)) raises TypeError
    for klass in class_seq:
        if klass is util.Uninferable:
            raise exceptions.AstroidTypeError("arg 2 must be a type or tuple of types")
        for obj_subclass in obj_type.mro():
            if obj_subclass == klass:
                return True
    return False
def object_isinstance(node, class_or_seq, context=None):
    """Check if a node 'isinstance' any node in class_or_seq.
    :param node: A given node
    :param class_or_seq: Union[nodes.NodeNG, Sequence[nodes.NodeNG]]
    :rtype: bool
    :raises AstroidTypeError: if the given ``classes_or_seq`` are not types
    """
    inferred_type = object_type(node, context)
    if inferred_type is util.Uninferable:
        return util.Uninferable
    return _object_type_is_subclass(inferred_type, class_or_seq, context=context)
def object_issubclass(node, class_or_seq, context=None):
    """Check if a type is a subclass of any node in class_or_seq.
    :param node: A given node
    :param class_or_seq: Union[Nodes.NodeNG, Sequence[nodes.NodeNG]]
    :rtype: bool
    :raises AstroidTypeError: if the given ``classes_or_seq`` are not types
    :raises AstroidError: if the type of the given node cannot be inferred
        or its type's mro doesn't work
    """
    if isinstance(node, nodes.ClassDef):
        return _object_type_is_subclass(node, class_or_seq, context=context)
    raise TypeError("{node} needs to be a ClassDef node".format(node=node))
def safe_infer(node, context=None):
    """Return the inferred value for the given node.
    Return None if inference failed or if there is some ambiguity (more than
    one node has been inferred).
    """
    try:
        results = node.infer(context=context)
        first = next(results)
    except exceptions.InferenceError:
        return None
    # A second result means the inference is ambiguous.
    try:
        next(results)
    except StopIteration:
        return first
    except exceptions.InferenceError:
        return None  # there is some kind of ambiguity
    return None
def has_known_bases(klass, context=None):
    """Return true if all base classes of a class could be inferred."""
    try:
        # Cached by a previous call on this class.
        return klass._all_bases_known
    except AttributeError:
        pass
    for base in klass.bases:
        inferred_base = safe_infer(base, context=context)
        # TODO: check for A->B->A->B pattern in class structure too?
        base_is_known = (
            isinstance(inferred_base, scoped_nodes.ClassDef)
            and inferred_base is not klass
            and has_known_bases(inferred_base, context=context)
        )
        if not base_is_known:
            klass._all_bases_known = False
            return False
    klass._all_bases_known = True
    return True
def _type_check(type1, type2):
    """Return True if *type1* appears in *type2*'s MRO (last entry excluded).
    :raises _NonDeducibleTypeHierarchy: when either type's bases are not
        fully known or the MRO cannot be computed.
    """
    if not (has_known_bases(type1) and has_known_bases(type2)):
        raise exceptions._NonDeducibleTypeHierarchy
    newstyle_flags = [type1.newstyle, type2.newstyle]
    if not all(newstyle_flags):
        return False
    try:
        return type1 in type2.mro()[:-1]
    except exceptions.MroError:
        # The MRO is invalid.
        raise exceptions._NonDeducibleTypeHierarchy
def is_subtype(type1, type2):
    """Check if *type1* is a subtype of *type2* (type2 is in type1's MRO)."""
    return _type_check(type2, type1)
def is_supertype(type1, type2):
    """Check if *type2* is a supertype of *type1* (type1 is in type2's MRO)."""
    return _type_check(type1, type2)
def class_instance_as_index(node):
    """Get the value as an index for the given instance.
    If an instance provides an __index__ method, then it can
    be used in some scenarios where an integer is expected,
    for instance when multiplying or subscripting a list.
    :returns: the first int Const produced by calling ``__index__``,
        or None when it is missing, uninferable, or not an int.
    """
    context = contextmod.InferenceContext()
    context.callcontext = contextmod.CallContext(args=[node])
    try:
        for inferred in node.igetattr("__index__", context=context):
            # Only bound methods can be meaningfully called here.
            if not isinstance(inferred, bases.BoundMethod):
                continue
            for result in inferred.infer_call_result(node, context=context):
                if isinstance(result, nodes.Const) and isinstance(result.value, int):
                    return result
    except exceptions.InferenceError:
        pass
    return None
def object_len(node, context=None):
    """Infer length of given node object
    :param Union[nodes.ClassDef, nodes.Instance] node:
    :param node: Node to infer length of
    :raises AstroidTypeError: If an invalid node is returned
        from __len__ method or no __len__ method exists
    :raises InferenceError: If the given node cannot be inferred
        or if multiple nodes are inferred
    :rtype int: Integer length of node
    """
    # Local import to avoid a circular dependency with astroid.objects.
    from astroid.objects import FrozenSet
    inferred_node = safe_infer(node, context=context)
    if inferred_node is None or inferred_node is util.Uninferable:
        raise exceptions.InferenceError(node=node)
    # Fast paths: literals whose length is directly available.
    if isinstance(inferred_node, nodes.Const) and isinstance(
        inferred_node.value, (bytes, str)
    ):
        return len(inferred_node.value)
    if isinstance(inferred_node, (nodes.List, nodes.Set, nodes.Tuple, FrozenSet)):
        return len(inferred_node.elts)
    if isinstance(inferred_node, nodes.Dict):
        return len(inferred_node.items)
    # Fall back to calling the type's __len__.
    node_type = object_type(inferred_node, context=context)
    try:
        len_call = next(node_type.igetattr("__len__", context=context))
    except exceptions.AttributeInferenceError:
        # BUG FIX: the original formatted ``len_call.pytype()`` here, but
        # ``len_call`` is unbound when ``igetattr`` raises; report the type
        # whose ``__len__`` lookup failed instead.
        raise exceptions.AstroidTypeError(
            "object of type '{}' has no len()".format(node_type.pytype())
        )
    result_of_len = next(len_call.infer_call_result(node, context))
    if (
        isinstance(result_of_len, nodes.Const)
        and result_of_len.pytype() == "builtins.int"
    ):
        return result_of_len.value
    raise exceptions.AstroidTypeError(
        "'{}' object cannot be interpreted as an integer".format(result_of_len)
    )

View File

@@ -0,0 +1,899 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2006-2011, 2013-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
# Copyright (c) 2012 FELD Boris <lothiraldan@gmail.com>
# Copyright (c) 2013-2014 Google, Inc.
# Copyright (c) 2014-2018 Claudiu Popa <pcmanticore@gmail.com>
# Copyright (c) 2014 Eevee (Alex Munroe) <amunroe@yelp.com>
# Copyright (c) 2015-2016 Ceridwen <ceridwenv@gmail.com>
# Copyright (c) 2015 Dmitry Pribysh <dmand@yandex.ru>
# Copyright (c) 2016 Jakub Wilk <jwilk@jwilk.net>
# Copyright (c) 2017 Michał Masłowski <m.maslowski@clearcode.cc>
# Copyright (c) 2017 Calen Pennington <cale@edx.org>
# Copyright (c) 2017 Łukasz Rogalski <rogalski.91@gmail.com>
# Copyright (c) 2018 Bryce Guinta <bryce.paul.guinta@gmail.com>
# Copyright (c) 2018 Nick Drozd <nicholasdrozd@gmail.com>
# Copyright (c) 2018 Ashley Whetter <ashley@awhetter.co.uk>
# Copyright (c) 2018 HoverHell <hoverhell@gmail.com>
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
"""this module contains a set of functions to handle inference on astroid trees
"""
import functools
import itertools
import operator
from astroid import bases
from astroid import context as contextmod
from astroid import exceptions
from astroid import decorators
from astroid import helpers
from astroid import manager
from astroid import nodes
from astroid.interpreter import dunder_lookup
from astroid import protocols
from astroid import util
MANAGER = manager.AstroidManager()
# .infer method ###############################################################
def infer_end(self, context=None):
    """Inference terminator for self-representing nodes (Module, ClassDef,
    FunctionDef, Const, ...): the node infers to itself.
    """
    yield self
# These node types are their own inference result.
nodes.Module._infer = infer_end
nodes.ClassDef._infer = infer_end
nodes.FunctionDef._infer = infer_end
nodes.Lambda._infer = infer_end
nodes.Const._infer = infer_end
nodes.Slice._infer = infer_end
def _infer_sequence_helper(node, context=None):
    """Flatten a container's elts, expanding any Starred elements."""
    flattened = []
    for element in node.elts:
        if not isinstance(element, nodes.Starred):
            flattened.append(element)
            continue
        # A *starred element must infer to something with elts to unpack.
        unpacked = helpers.safe_infer(element.value, context)
        if not unpacked or not hasattr(unpacked, "elts"):
            raise exceptions.InferenceError(node=node, context=context)
        flattened.extend(_infer_sequence_helper(unpacked))
    return flattened
@decorators.raise_if_nothing_inferred
def infer_sequence(self, context=None):
    """Infer list/tuple/set literals, flattening starred elements."""
    if not any(isinstance(elt, nodes.Starred) for elt in self.elts):
        # Nothing to unpack: the literal is its own inference result.
        yield self
        return
    flattened = _infer_sequence_helper(self, context)
    rebuilt = type(self)(
        lineno=self.lineno, col_offset=self.col_offset, parent=self.parent
    )
    rebuilt.postinit(flattened)
    yield rebuilt
# Sequence literals share the starred-aware inference path.
nodes.List._infer = infer_sequence
nodes.Tuple._infer = infer_sequence
nodes.Set._infer = infer_sequence
def infer_map(self, context=None):
    """Infer dict literals, merging any ``**`` (DictUnpack) entries."""
    if not any(isinstance(key, nodes.DictUnpack) for key, _ in self.items):
        # No unpacking: the literal infers to itself.
        yield self
        return
    merged = _infer_map(self, context)
    rebuilt = type(self)(self.lineno, self.col_offset, self.parent)
    rebuilt.postinit(list(merged.items()))
    yield rebuilt
def _update_with_replacement(lhs_dict, rhs_dict):
"""Delete nodes that equate to duplicate keys
Since an astroid node doesn't 'equal' another node with the same value,
this function uses the as_string method to make sure duplicate keys
don't get through
Note that both the key and the value are astroid nodes
Fixes issue with DictUnpack causing duplicte keys
in inferred Dict items
:param dict(nodes.NodeNG, nodes.NodeNG) lhs_dict: Dictionary to 'merge' nodes into
:param dict(nodes.NodeNG, nodes.NodeNG) rhs_dict: Dictionary with nodes to pull from
:return dict(nodes.NodeNG, nodes.NodeNG): merged dictionary of nodes
"""
combined_dict = itertools.chain(lhs_dict.items(), rhs_dict.items())
# Overwrite keys which have the same string values
string_map = {key.as_string(): (key, value) for key, value in combined_dict}
# Return to dictionary
return dict(string_map.values())
def _infer_map(node, context):
    """Infer all values based on Dict.items"""
    values = {}
    for name, value in node.items:
        if isinstance(name, nodes.DictUnpack):
            # ``{**other}`` entry: the value must infer to a Dict whose
            # items are merged in, overriding earlier duplicate keys.
            double_starred = helpers.safe_infer(value, context)
            if not double_starred:
                raise exceptions.InferenceError
            if not isinstance(double_starred, nodes.Dict):
                raise exceptions.InferenceError(node=node, context=context)
            unpack_items = _infer_map(double_starred, context)
            values = _update_with_replacement(values, unpack_items)
        else:
            # Plain ``key: value`` entry; both sides must be inferable.
            key = helpers.safe_infer(name, context=context)
            value = helpers.safe_infer(value, context=context)
            if any(not elem for elem in (key, value)):
                raise exceptions.InferenceError(node=node, context=context)
            values = _update_with_replacement(values, {key: value})
    return values
# Dict literals go through the DictUnpack-aware inference path.
nodes.Dict._infer = infer_map
def _higher_function_scope(node):
    """Search for the first function which encloses the given
    scope. This can be used for looking up in that function's
    scope, in case looking up in a lower scope for a particular
    name fails.
    :param node: A scope node.
    :returns:
        ``None``, if no parent function scope was found,
        otherwise an instance of :class:`astroid.scoped_nodes.Function`,
        which encloses the given node.
    """
    cursor = node
    # Climb until the immediate parent is a function (or we run out).
    while True:
        enclosing = cursor.parent
        if not enclosing or isinstance(enclosing, nodes.FunctionDef):
            break
        cursor = enclosing
    return cursor.parent if cursor and cursor.parent else None
def infer_name(self, context=None):
    """infer a Name: use name lookup rules"""
    frame, stmts = self.lookup(self.name)
    if not stmts:
        # Name absent from the nearest scope: retry in the first enclosing
        # function scope (handles nested-function lookups).
        enclosing = _higher_function_scope(self.scope())
        if enclosing:
            _, stmts = enclosing.lookup(self.name)
        if not stmts:
            raise exceptions.NameInferenceError(
                name=self.name, scope=self.scope(), context=context
            )
    context = contextmod.copy_context(context)
    context.lookupname = self.name
    return bases._infer_stmts(stmts, context, frame)
# pylint: disable=no-value-for-parameter
nodes.Name._infer = decorators.raise_if_nothing_inferred(
    decorators.path_wrapper(infer_name)
)
nodes.AssignName.infer_lhs = infer_name  # won't work with a path wrapper
@decorators.raise_if_nothing_inferred
@decorators.path_wrapper
def infer_call(self, context=None):
    """infer a Call node by trying to guess what the function returns"""
    callcontext = contextmod.copy_context(context)
    callcontext.callcontext = contextmod.CallContext(
        args=self.args, keywords=self.keywords
    )
    callcontext.boundnode = None
    if context is not None:
        # Attach per-argument lookup info (built by _populate_context_lookup).
        callcontext.extra_context = _populate_context_lookup(self, context.clone())
    for callee in self.func.infer(context):
        if callee is util.Uninferable:
            yield callee
            continue
        try:
            if hasattr(callee, "infer_call_result"):
                yield from callee.infer_call_result(caller=self, context=callcontext)
        except exceptions.InferenceError:
            # One candidate callee failing doesn't stop the others.
            continue
    return dict(node=self, context=context)
nodes.Call._infer = infer_call
@decorators.raise_if_nothing_inferred
@decorators.path_wrapper
def infer_import(self, context=None, asname=True):
    """infer an Import node: return the imported module/object"""
    lookup_name = context.lookupname
    if lookup_name is None:
        raise exceptions.InferenceError(node=self, context=context)
    try:
        if asname:
            # Resolve aliases ("import x as y") back to the real module name.
            yield self.do_import_module(self.real_name(lookup_name))
        else:
            yield self.do_import_module(lookup_name)
    except exceptions.AstroidBuildingError as exc:
        raise exceptions.InferenceError(node=self, context=context) from exc
nodes.Import._infer = infer_import
@decorators.raise_if_nothing_inferred
@decorators.path_wrapper
def infer_import_from(self, context=None, asname=True):
    """infer a ImportFrom node: return the imported module/object"""
    name = context.lookupname
    if name is None:
        raise exceptions.InferenceError(node=self, context=context)
    if asname:
        # Resolve "from m import x as y" back to the real name.
        name = self.real_name(name)
    try:
        module = self.do_import_module()
    except exceptions.AstroidBuildingError as exc:
        raise exceptions.InferenceError(node=self, context=context) from exc
    try:
        context = contextmod.copy_context(context)
        context.lookupname = name
        # getattr ignores locals when the import refers back to this module.
        stmts = module.getattr(name, ignore_locals=module is self.root())
        return bases._infer_stmts(stmts, context)
    except exceptions.AttributeInferenceError as error:
        raise exceptions.InferenceError(
            error.message, target=self, attribute=name, context=context
        ) from error
nodes.ImportFrom._infer = infer_import_from
def infer_attribute(self, context=None):
    """infer an Attribute node by using getattr on the associated object"""
    for owner in self.expr.infer(context):
        if owner is util.Uninferable:
            yield owner
            continue
        if context and context.boundnode:
            # This handles the situation where the attribute is accessed through a subclass
            # of a base class and the attribute is defined at the base class's level,
            # by taking in consideration a redefinition in the subclass.
            if isinstance(owner, bases.Instance) and isinstance(
                context.boundnode, bases.Instance
            ):
                try:
                    if helpers.is_subtype(
                        helpers.object_type(context.boundnode),
                        helpers.object_type(owner),
                    ):
                        owner = context.boundnode
                except exceptions._NonDeducibleTypeHierarchy:
                    # Can't determine anything useful.
                    pass
        try:
            # boundnode is reset after igetattr so subsequent owners are not
            # resolved against this one.
            context.boundnode = owner
            yield from owner.igetattr(self.attrname, context)
            context.boundnode = None
        except (exceptions.AttributeInferenceError, exceptions.InferenceError):
            context.boundnode = None
        except AttributeError:
            # XXX method / function
            context.boundnode = None
    return dict(node=self, context=context)
nodes.Attribute._infer = decorators.raise_if_nothing_inferred(
    decorators.path_wrapper(infer_attribute)
)
# won't work with a path wrapper
nodes.AssignAttr.infer_lhs = decorators.raise_if_nothing_inferred(infer_attribute)
@decorators.raise_if_nothing_inferred
@decorators.path_wrapper
def infer_global(self, context=None):
    """Infer a Global node by resolving the name at module level."""
    target_name = context.lookupname
    if target_name is None:
        raise exceptions.InferenceError(node=self, context=context)
    try:
        module_scope = self.root()
        return bases._infer_stmts(module_scope.getattr(target_name), context)
    except exceptions.AttributeInferenceError as error:
        raise exceptions.InferenceError(
            error.message, target=self, attribute=target_name, context=context
        ) from error
nodes.Global._infer = infer_global
# Sentinel distinguishing "no index deduced" from any real index value.
_SUBSCRIPT_SENTINEL = object()
@decorators.raise_if_nothing_inferred
def infer_subscript(self, context=None):
    """Inference for subscripts
    We're understanding if the index is a Const
    or a slice, passing the result of inference
    to the value's `getitem` method, which should
    handle each supported index type accordingly.
    """
    found_one = False
    for value in self.value.infer(context):
        if value is util.Uninferable:
            yield util.Uninferable
            return None
        for index in self.slice.infer(context):
            if index is util.Uninferable:
                yield util.Uninferable
                return None
            # Try to deduce the index value.
            index_value = _SUBSCRIPT_SENTINEL
            if value.__class__ == bases.Instance:
                index_value = index
            else:
                if index.__class__ == bases.Instance:
                    # An instance used as an index goes through __index__.
                    instance_as_index = helpers.class_instance_as_index(index)
                    if instance_as_index:
                        index_value = instance_as_index
                else:
                    index_value = index
            if index_value is _SUBSCRIPT_SENTINEL:
                raise exceptions.InferenceError(node=self, context=context)
            try:
                assigned = value.getitem(index_value, context)
            except (
                exceptions.AstroidTypeError,
                exceptions.AstroidIndexError,
                exceptions.AttributeInferenceError,
                AttributeError,
            ) as exc:
                raise exceptions.InferenceError(node=self, context=context) from exc
            # Prevent inferring if the inferred subscript
            # is the same as the original subscripted object.
            if self is assigned or assigned is util.Uninferable:
                yield util.Uninferable
                return None
            yield from assigned.infer(context)
            found_one = True
    if found_one:
        return dict(node=self, context=context)
    return None
nodes.Subscript._infer = decorators.path_wrapper(infer_subscript)
# infer_lhs deliberately skips the path wrapper.
nodes.Subscript.infer_lhs = infer_subscript
@decorators.raise_if_nothing_inferred
@decorators.path_wrapper
def _infer_boolop(self, context=None):
    """Infer a boolean operation (and / or / not).
    The function will calculate the boolean operation
    for all pairs generated through inference for each component
    node.
    """
    values = self.values
    # "or" short-circuits on the first truthy value, "and" on the first falsy.
    if self.op == "or":
        predicate = operator.truth
    else:
        predicate = operator.not_
    try:
        values = [value.infer(context=context) for value in values]
    except exceptions.InferenceError:
        yield util.Uninferable
        return None
    for pair in itertools.product(*values):
        if any(item is util.Uninferable for item in pair):
            # Can't infer the final result, just yield Uninferable.
            yield util.Uninferable
            continue
        bool_values = [item.bool_value() for item in pair]
        if any(item is util.Uninferable for item in bool_values):
            # Can't infer the final result, just yield Uninferable.
            yield util.Uninferable
            continue
        # Since the boolean operations are short circuited operations,
        # this code yields the first value for which the predicate is True
        # and if no value respected the predicate, then the last value will
        # be returned (or Uninferable if there was no last value).
        # This is conforming to the semantics of `and` and `or`:
        # 1 and 0 -> 0
        # 0 and 1 -> 0
        # 1 or 0 -> 1
        # 0 or 1 -> 1
        value = util.Uninferable
        for value, bool_value in zip(pair, bool_values):
            if predicate(bool_value):
                yield value
                break
        else:
            # No short-circuit hit: the last evaluated value is the result.
            yield value
    return dict(node=self, context=context)
nodes.BoolOp._infer = _infer_boolop
# UnaryOp, BinOp and AugAssign inferences
def _filter_operation_errors(self, infer_callable, context, error):
for result in infer_callable(self, context):
if isinstance(result, error):
# For the sake of .infer(), we don't care about operation
# errors, which is the job of pylint. So return something
# which shows that we can't infer the result.
yield util.Uninferable
else:
yield result
def _infer_unaryop(self, context=None):
    """Infer what an UnaryOp should return when evaluated.
    Yields inference results or BadUnaryOperationMessage markers; the
    public wrapper (infer_unaryop) filters the markers out.
    """
    for operand in self.operand.infer(context):
        try:
            yield operand.infer_unary_op(self.op)
        except TypeError as exc:
            # The operand doesn't support this operation.
            yield util.BadUnaryOperationMessage(operand, self.op, exc)
        except AttributeError as exc:
            meth = protocols.UNARY_OP_METHOD[self.op]
            if meth is None:
                # `not node`. Determine node's boolean
                # value and negate its result, unless it is
                # Uninferable, which will be returned as is.
                bool_value = operand.bool_value()
                if bool_value is not util.Uninferable:
                    yield nodes.const_factory(not bool_value)
                else:
                    yield util.Uninferable
            else:
                if not isinstance(operand, (bases.Instance, nodes.ClassDef)):
                    # The operation was used on something which
                    # doesn't support it.
                    yield util.BadUnaryOperationMessage(operand, self.op, exc)
                    continue
                try:
                    try:
                        methods = dunder_lookup.lookup(operand, meth)
                    except exceptions.AttributeInferenceError:
                        yield util.BadUnaryOperationMessage(operand, self.op, exc)
                        continue
                    meth = methods[0]
                    inferred = next(meth.infer(context=context))
                    if inferred is util.Uninferable or not inferred.callable():
                        continue
                    # Call the dunder (e.g. __neg__) and yield its result.
                    context = contextmod.copy_context(context)
                    context.callcontext = contextmod.CallContext(args=[operand])
                    call_results = inferred.infer_call_result(self, context=context)
                    result = next(call_results, None)
                    if result is None:
                        # Failed to infer, return the same type.
                        yield operand
                    else:
                        yield result
                except exceptions.AttributeInferenceError as exc:
                    # The unary operation special method was not found.
                    yield util.BadUnaryOperationMessage(operand, self.op, exc)
                except exceptions.InferenceError:
                    yield util.Uninferable
@decorators.raise_if_nothing_inferred
@decorators.path_wrapper
def infer_unaryop(self, context=None):
    """Infer what an UnaryOp should return when evaluated.
    Public wrapper: masks BadUnaryOperationMessage results as Uninferable.
    """
    yield from _filter_operation_errors(
        self, _infer_unaryop, context, util.BadUnaryOperationMessage
    )
    return dict(node=self, context=context)
# _infer_unaryop keeps the raw results (including error markers) for pylint.
nodes.UnaryOp._infer_unaryop = _infer_unaryop
nodes.UnaryOp._infer = infer_unaryop
def _is_not_implemented(const):
    """Check if the given const node is NotImplemented."""
    if not isinstance(const, nodes.Const):
        return False
    return const.value is NotImplemented
def _invoke_binop_inference(instance, opnode, op, other, context, method_name):
    """Invoke binary operation inference on the given instance."""
    candidates = dunder_lookup.lookup(instance, method_name)
    bound_context = contextmod.bind_context_to_node(context, instance)
    dunder_method = candidates[0]
    inferred = next(dunder_method.infer(context=bound_context))
    if inferred is util.Uninferable:
        raise exceptions.InferenceError
    return instance.infer_binary_op(opnode, op, other, bound_context, inferred)
def _aug_op(instance, opnode, op, other, context, reverse=False):
    """Get an inference callable for an augmented binary operation."""
    return functools.partial(
        _invoke_binop_inference,
        instance=instance,
        op=op,
        opnode=opnode,
        other=other,
        context=context,
        method_name=protocols.AUGMENTED_OP_METHOD[op],
    )
def _bin_op(instance, opnode, op, other, context, reverse=False):
    """Get an inference callable for a normal binary operation.
    If *reverse* is True, then the reflected method will be used instead.
    """
    lookup_table = (
        protocols.REFLECTED_BIN_OP_METHOD if reverse else protocols.BIN_OP_METHOD
    )
    return functools.partial(
        _invoke_binop_inference,
        instance=instance,
        op=op,
        opnode=opnode,
        other=other,
        context=context,
        method_name=lookup_table[op],
    )
def _get_binop_contexts(context, left, right):
    """Get contexts for binary operations.
    Yields two inference contexts: the first for x.__op__(y) (argument
    *right*), the second for y.__rop__(x) (argument *left*).
    """
    # The order is important: the first context must be left.__op__(right).
    for call_arg in (right, left):
        fresh = context.clone()
        fresh.callcontext = contextmod.CallContext(args=[call_arg])
        fresh.boundnode = None
        yield fresh
def _same_type(type1, type2):
"""Check if type1 is the same as type2."""
return type1.qname() == type2.qname()
def _get_binop_flow(
    left, left_type, binary_opnode, right, right_type, context, reverse_context
):
    """Get the flow for binary operations.
    The rules are a bit messy:
    * if left and right have the same type, then only one
    method will be called, left.__op__(right)
    * if left and right are unrelated typewise, then first
    left.__op__(right) is tried and if this does not exist
    or returns NotImplemented, then right.__rop__(left) is tried.
    * if left is a subtype of right, then only left.__op__(right)
    is tried.
    * if left is a supertype of right, then right.__rop__(left)
    is first tried and then left.__op__(right)
    """
    op = binary_opnode.op
    # The list order below encodes the attempt order described above.
    if _same_type(left_type, right_type):
        methods = [_bin_op(left, binary_opnode, op, right, context)]
    elif helpers.is_subtype(left_type, right_type):
        methods = [_bin_op(left, binary_opnode, op, right, context)]
    elif helpers.is_supertype(left_type, right_type):
        methods = [
            _bin_op(right, binary_opnode, op, left, reverse_context, reverse=True),
            _bin_op(left, binary_opnode, op, right, context),
        ]
    else:
        methods = [
            _bin_op(left, binary_opnode, op, right, context),
            _bin_op(right, binary_opnode, op, left, reverse_context, reverse=True),
        ]
    return methods
def _get_aug_flow(
    left, left_type, aug_opnode, right, right_type, context, reverse_context
):
    """Get the flow for augmented binary operations.
    The rules are a bit messy:
    * if left and right have the same type, then left.__augop__(right)
    is first tried and then left.__op__(right).
    * if left and right are unrelated typewise, then
    left.__augop__(right) is tried, then left.__op__(right)
    is tried and then right.__rop__(left) is tried.
    * if left is a subtype of right, then left.__augop__(right)
    is tried and then left.__op__(right).
    * if left is a supertype of right, then left.__augop__(right)
    is tried, then right.__rop__(left) and then
    left.__op__(right)
    """
    # "+=" -> binary op "+", augmented op "+=".
    bin_op = aug_opnode.op.strip("=")
    aug_op = aug_opnode.op
    # The list order below encodes the attempt order described above.
    if _same_type(left_type, right_type):
        methods = [
            _aug_op(left, aug_opnode, aug_op, right, context),
            _bin_op(left, aug_opnode, bin_op, right, context),
        ]
    elif helpers.is_subtype(left_type, right_type):
        methods = [
            _aug_op(left, aug_opnode, aug_op, right, context),
            _bin_op(left, aug_opnode, bin_op, right, context),
        ]
    elif helpers.is_supertype(left_type, right_type):
        methods = [
            _aug_op(left, aug_opnode, aug_op, right, context),
            _bin_op(right, aug_opnode, bin_op, left, reverse_context, reverse=True),
            _bin_op(left, aug_opnode, bin_op, right, context),
        ]
    else:
        methods = [
            _aug_op(left, aug_opnode, aug_op, right, context),
            _bin_op(left, aug_opnode, bin_op, right, context),
            _bin_op(right, aug_opnode, bin_op, left, reverse_context, reverse=True),
        ]
    return methods
def _infer_binary_operation(left, right, binary_opnode, context, flow_factory):
    """Infer a binary operation between a left operand and a right operand.

    This is used by both normal binary operations and augmented binary
    operations, the only difference is the flow factory used.

    :param left: inferred node for the left-hand operand
    :param right: inferred node for the right-hand operand
    :param binary_opnode: the BinOp/AugAssign node being inferred
    :param context: the inference context
    :param flow_factory: _get_binop_flow or _get_aug_flow; returns the
        ordered list of dunder-method invocations to attempt
    """
    context, reverse_context = _get_binop_contexts(context, left, right)
    left_type = helpers.object_type(left)
    right_type = helpers.object_type(right)
    methods = flow_factory(
        left, left_type, binary_opnode, right, right_type, context, reverse_context
    )
    # Try each candidate dunder method in order; the first one producing a
    # usable result (i.e. not all NotImplemented) wins.
    for method in methods:
        try:
            results = list(method())
        except AttributeError:
            continue
        except exceptions.AttributeInferenceError:
            # The operand does not define this dunder method; try the next.
            continue
        except exceptions.InferenceError:
            yield util.Uninferable
            return
        else:
            if any(result is util.Uninferable for result in results):
                yield util.Uninferable
                return
            if all(map(_is_not_implemented, results)):
                # Every inferred result was NotImplemented; fall through to
                # the next candidate method.
                continue
            not_implemented = sum(
                1 for result in results if _is_not_implemented(result)
            )
            if not_implemented and not_implemented != len(results):
                # Can't infer yet what this is.
                yield util.Uninferable
                return
            yield from results
            return
    # The operation doesn't seem to be supported so let the caller know about it
    yield util.BadBinaryOperationMessage(left_type, binary_opnode.op, right_type)
def _infer_binop(self, context):
    """Binary operation inference logic."""
    context = context or contextmod.InferenceContext()
    # Infer each operand under its own copy of the context: walking the
    # left-hand side can leave entries in context.path that would block
    # inference of the right-hand side.
    lhs_results = self.left.infer(context=contextmod.copy_context(context))
    rhs_results = self.right.infer(context=contextmod.copy_context(context))
    for lhs, rhs in itertools.product(lhs_results, rhs_results):
        if lhs is util.Uninferable or rhs is util.Uninferable:
            # Don't know how to process this.
            yield util.Uninferable
            return
        try:
            yield from _infer_binary_operation(lhs, rhs, self, context, _get_binop_flow)
        except exceptions._NonDeducibleTypeHierarchy:
            yield util.Uninferable
@decorators.yes_if_nothing_inferred
@decorators.path_wrapper
def infer_binop(self, context=None):
    # Public BinOp inference entry point: wrap _infer_binop so that
    # BadBinaryOperationMessage results are filtered into proper errors.
    return _filter_operation_errors(
        self, _infer_binop, context, util.BadBinaryOperationMessage
    )
# Attach the implementations to the BinOp node class.
nodes.BinOp._infer_binop = _infer_binop
nodes.BinOp._infer = infer_binop
def _infer_augassign(self, context=None):
    """Inference logic for augmented binary operations (e.g. ``+=``)."""
    if context is None:
        context = contextmod.InferenceContext()
    # The right-hand side gets a cloned context so that inferring it cannot
    # interfere with inference of the assignment target.
    rhs_context = context.clone()
    lhs_iter = self.target.infer_lhs(context=context)
    rhs_iter = self.value.infer(context=rhs_context)
    for lhs, rhs in itertools.product(lhs_iter, rhs_iter):
        if lhs is util.Uninferable or rhs is util.Uninferable:
            # Don't know how to process this.
            yield util.Uninferable
            return
        try:
            yield from _infer_binary_operation(
                lhs, rhs, self, context, _get_aug_flow
            )
        except exceptions._NonDeducibleTypeHierarchy:
            yield util.Uninferable
@decorators.raise_if_nothing_inferred
@decorators.path_wrapper
def infer_augassign(self, context=None):
    # Public AugAssign inference entry point, mirroring infer_binop.
    return _filter_operation_errors(
        self, _infer_augassign, context, util.BadBinaryOperationMessage
    )
# Attach the implementations to the AugAssign node class.
nodes.AugAssign._infer_augassign = _infer_augassign
nodes.AugAssign._infer = infer_augassign
# End of binary operation inference.
@decorators.raise_if_nothing_inferred
def infer_arguments(self, context=None):
    # Infer the value of a single function parameter; the name being looked
    # up must be supplied through the context.
    name = context.lookupname
    if name is None:
        raise exceptions.InferenceError(node=self, context=context)
    return protocols._arguments_infer_argname(self, name, context)
nodes.Arguments._infer = infer_arguments
@decorators.raise_if_nothing_inferred
@decorators.path_wrapper
def infer_assign(self, context=None):
    """infer a AssignName/AssignAttr: need to inspect the RHS part of the
    assign node
    """
    statement = self.statement()
    if isinstance(statement, nodes.AugAssign):
        # Augmented assignments have a dedicated inference path.
        return statement.infer(context)
    assigned = list(self.assigned_stmts(context=context))
    return bases._infer_stmts(assigned, context)
nodes.AssignName._infer = infer_assign
nodes.AssignAttr._infer = infer_assign
@decorators.raise_if_nothing_inferred
@decorators.path_wrapper
def infer_empty_node(self, context=None):
    """Infer an EmptyNode by delegating to its wrapped runtime object."""
    if not self.has_underlying_object():
        yield util.Uninferable
        return
    try:
        yield from MANAGER.infer_ast_from_something(self.object, context=context)
    except exceptions.AstroidError:
        yield util.Uninferable
nodes.EmptyNode._infer = infer_empty_node
@decorators.raise_if_nothing_inferred
def infer_index(self, context=None):
    # An Index node merely wraps its value; inference passes straight through.
    return self.value.infer(context)
nodes.Index._infer = infer_index
# TODO: move directly into bases.Instance when the dependency hell
# will be solved.
def instance_getitem(self, index, context=None):
    """Infer ``instance[index]`` by calling the instance's ``__getitem__``."""
    # Rewrap index to Const for this case
    new_context = contextmod.bind_context_to_node(context, self)
    if not context:
        context = new_context
    # Create a new callcontext for providing index as an argument.
    new_context.callcontext = contextmod.CallContext(args=[index])
    method = next(self.igetattr("__getitem__", context=context), None)
    if not isinstance(method, bases.BoundMethod):
        raise exceptions.InferenceError(
            "Could not find __getitem__ for {node!r}.", node=self, context=context
        )
    # Infer the result of the (single) __getitem__ call.
    return next(method.infer_call_result(self, new_context))
bases.Instance.getitem = instance_getitem
def _populate_context_lookup(call, context):
# Allows context to be saved for later
# for inference inside a function
context_lookup = {}
if context is None:
return context_lookup
for arg in call.args:
if isinstance(arg, nodes.Starred):
context_lookup[arg.value] = context
else:
context_lookup[arg] = context
keywords = call.keywords if call.keywords is not None else []
for keyword in keywords:
context_lookup[keyword.value] = context
return context_lookup

View File

@@ -0,0 +1,337 @@
# Copyright (c) 2016-2018 Claudiu Popa <pcmanticore@gmail.com>
# Copyright (c) 2016 Derek Gustafson <degustaf@gmail.com>
# Copyright (c) 2017 Chris Philip <chrisp533@gmail.com>
# Copyright (c) 2017 Hugo <hugovk@users.noreply.github.com>
# Copyright (c) 2017 ioanatia <ioanatia@users.noreply.github.com>
# Copyright (c) 2017 Calen Pennington <cale@edx.org>
# Copyright (c) 2018 Nick Drozd <nicholasdrozd@gmail.com>
import abc
import collections
import enum
import imp
import os
import sys
import zipimport
try:
import importlib.machinery
_HAS_MACHINERY = True
except ImportError:
_HAS_MACHINERY = False
try:
from functools import lru_cache
except ImportError:
from backports.functools_lru_cache import lru_cache
from . import util
# Enumeration of every module kind the finders can report.
ModuleType = enum.Enum(
    "ModuleType",
    "C_BUILTIN C_EXTENSION PKG_DIRECTORY "
    "PY_CODERESOURCE PY_COMPILED PY_FROZEN PY_RESOURCE "
    "PY_SOURCE PY_ZIPMODULE PY_NAMESPACE",
)
# Translation table from the (deprecated) imp module's type constants.
_ImpTypes = {
    imp.C_BUILTIN: ModuleType.C_BUILTIN,
    imp.C_EXTENSION: ModuleType.C_EXTENSION,
    imp.PKG_DIRECTORY: ModuleType.PKG_DIRECTORY,
    imp.PY_COMPILED: ModuleType.PY_COMPILED,
    imp.PY_FROZEN: ModuleType.PY_FROZEN,
    imp.PY_SOURCE: ModuleType.PY_SOURCE,
}
# Resource-based kinds only exist on some platforms/versions.
if hasattr(imp, "PY_RESOURCE"):
    _ImpTypes[imp.PY_RESOURCE] = ModuleType.PY_RESOURCE
if hasattr(imp, "PY_CODERESOURCE"):
    _ImpTypes[imp.PY_CODERESOURCE] = ModuleType.PY_CODERESOURCE
def _imp_type_to_module_type(imp_type):
    """Map an imp.find_module type constant to the ModuleType enum."""
    return _ImpTypes[imp_type]
# Backing namedtuple for ModuleSpec below.
_ModuleSpec = collections.namedtuple(
    "_ModuleSpec", "name type location " "origin submodule_search_locations"
)
class ModuleSpec(_ModuleSpec):
    """Defines a class similar to PEP 420's ModuleSpec.

    A module spec defines a name of a module, its type, location
    and where submodules can be found, if the module is a package.
    """

    def __new__(
        cls,
        name,
        module_type,
        location=None,
        origin=None,
        submodule_search_locations=None,
    ):
        # Thin wrapper over the namedtuple constructor that renames
        # *module_type* to the ``type`` field and defaults the rest.
        return _ModuleSpec.__new__(
            cls,
            name=name,
            type=module_type,
            location=location,
            origin=origin,
            submodule_search_locations=submodule_search_locations,
        )
class Finder:
    """A finder is a class which knows how to find a particular module."""

    def __init__(self, path=None):
        # Fall back to sys.path when no explicit search path is given.
        self._path = path or sys.path

    # NOTE(review): @abstractmethod has no effect here because the class does
    # not use ABCMeta; subclasses are not actually forced to override this.
    @abc.abstractmethod
    def find_module(self, modname, module_parts, processed, submodule_path):
        """Find the given module

        Each finder is responsible for each protocol of finding, as long as
        they all return a ModuleSpec.

        :param str modname: The module which needs to be searched.
        :param list module_parts: It should be a list of strings,
                                  where each part contributes to the module's
                                  namespace.
        :param list processed: What parts from the module parts were processed
                               so far.
        :param list submodule_path: A list of paths where the module
                                    can be looked into.
        :returns: A ModuleSpec, describing how and where the module was found,
                  None, otherwise.
        """

    def contribute_to_path(self, spec, processed):
        """Get a list of extra paths where this finder can search."""
class ImpFinder(Finder):
    """A finder based on the (deprecated) imp module."""

    def find_module(self, modname, module_parts, processed, submodule_path):
        """Locate *modname* through ``imp.find_module``; None when absent."""
        if submodule_path is not None:
            submodule_path = list(submodule_path)
        try:
            stream, found_path, description = imp.find_module(modname, submodule_path)
        except ImportError:
            return None
        # imp.find_module opens source/compiled files; only metadata is needed.
        if stream:
            stream.close()
        return ModuleSpec(
            name=modname,
            location=found_path,
            module_type=_imp_type_to_module_type(description[2]),
        )

    def contribute_to_path(self, spec, processed):
        """Return the submodule search path contributed by *spec*."""
        if spec.location is None:
            # Builtin modules live nowhere on disk.
            return None
        if not _is_setuptools_namespace(spec.location):
            return [spec.location]
        # extend_path is called, search sys.path for module/packages
        # of this name see pkgutil.extend_path documentation
        return [
            os.path.join(entry, *processed)
            for entry in sys.path
            if os.path.isdir(os.path.join(entry, *processed))
        ]
class ExplicitNamespacePackageFinder(ImpFinder):
    """A finder for the explicit namespace packages, generated through pkg_resources."""

    def find_module(self, modname, module_parts, processed, submodule_path):
        """Return a namespace-package spec for *modname*, or None."""
        if processed:
            # Resolve against the already-processed parent components.
            modname = ".".join(processed + [modname])
        if not (util.is_namespace(modname) and modname in sys.modules):
            return None
        return ModuleSpec(
            name=modname,
            location="",
            origin="namespace",
            module_type=ModuleType.PY_NAMESPACE,
            submodule_search_locations=sys.modules[modname].__path__,
        )

    def contribute_to_path(self, spec, processed):
        # Namespace packages search exactly their registered submodule paths.
        return spec.submodule_search_locations
class ZipFinder(Finder):
    """Finder that knows how to find a module inside zip files."""

    def __init__(self, path):
        super(ZipFinder, self).__init__(path)
        # Cache a zipimporter for every archive entry on *path* up front.
        self._zipimporters = _precache_zipimporters(path)

    def find_module(self, modname, module_parts, processed, submodule_path):
        # Search by the full dotted path rather than just *modname*, since a
        # zip archive stores the whole package hierarchy.
        try:
            file_type, filename, path = _search_zip(module_parts, self._zipimporters)
        except ImportError:
            return None
        return ModuleSpec(
            name=modname,
            location=filename,
            origin="egg",
            module_type=file_type,
            submodule_search_locations=path,
        )
class PathSpecFinder(Finder):
    """Finder based on importlib.machinery.PathFinder."""

    def find_module(self, modname, module_parts, processed, submodule_path):
        spec = importlib.machinery.PathFinder.find_spec(modname, path=submodule_path)
        if spec:
            # origin can be either a string on older Python versions
            # or None in case it is a namespace package:
            # https://github.com/python/cpython/pull/5481
            is_namespace_pkg = spec.origin in ("namespace", None)
            location = spec.origin if not is_namespace_pkg else None
            module_type = ModuleType.PY_NAMESPACE if is_namespace_pkg else None
            # Re-wrap importlib's spec into astroid's own ModuleSpec type.
            spec = ModuleSpec(
                name=spec.name,
                location=location,
                origin=spec.origin,
                module_type=module_type,
                submodule_search_locations=list(spec.submodule_search_locations or []),
            )
        return spec

    def contribute_to_path(self, spec, processed):
        # Only namespace packages contribute extra search locations here.
        if spec.type == ModuleType.PY_NAMESPACE:
            return spec.submodule_search_locations
        return None
# Finder precedence: imp-based lookup first, then zip archives, then (when
# importlib.machinery is available) PEP 451 path finders, and finally
# pkg_resources-style explicit namespace packages.
_SPEC_FINDERS = (ImpFinder, ZipFinder)
if _HAS_MACHINERY and sys.version_info[:2] >= (3, 4):
    _SPEC_FINDERS += (PathSpecFinder,)
_SPEC_FINDERS += (ExplicitNamespacePackageFinder,)
def _is_setuptools_namespace(location):
try:
with open(os.path.join(location, "__init__.py"), "rb") as stream:
data = stream.read(4096)
except IOError:
pass
else:
extend_path = b"pkgutil" in data and b"extend_path" in data
declare_namespace = (
b"pkg_resources" in data and b"declare_namespace(__name__)" in data
)
return extend_path or declare_namespace
@lru_cache()
def _cached_set_diff(left, right):
result = set(left)
result.difference_update(right)
return result
def _precache_zipimporters(path=None):
    """Populate sys.path_importer_cache with zipimporters for *path* entries.

    Returns the (shared, mutated) sys.path_importer_cache mapping.
    """
    importer_cache = sys.path_importer_cache
    # When measured, despite having the same complexity (O(n)),
    # converting to tuples and then caching the conversion to sets
    # and the set difference is faster than converting to sets
    # and then only caching the set difference.
    requested = tuple(path or sys.path)
    already_cached = tuple(importer_cache)
    for entry in _cached_set_diff(requested, already_cached):
        try:
            importer_cache[entry] = zipimport.zipimporter(entry)
        except zipimport.ZipImportError:
            # Not a zip archive; leave the entry uncached.
            continue
    return importer_cache
def _search_zip(modpath, pic):
    """Search the cached zipimporters for the module given by *modpath*.

    :param modpath: dotted module name, already split into its parts
    :param pic: mapping of path entry -> zipimporter (or None)
    :returns: (ModuleType.PY_ZIPMODULE, pseudo-filename, archive path)
    :raises ImportError: when no archive contains the module
    """
    for filepath, importer in list(pic.items()):
        if importer is not None:
            found = importer.find_module(modpath[0])
            if found:
                # The top-level package lives in this archive; the full
                # dotted path must resolve inside the same archive too.
                if not importer.find_module(os.path.sep.join(modpath)):
                    raise ImportError(
                        "No module named %s in %s/%s"
                        % (".".join(modpath[1:]), filepath, modpath)
                    )
                # import code; code.interact(local=locals())
                return (
                    ModuleType.PY_ZIPMODULE,
                    os.path.abspath(filepath) + os.path.sep + os.path.sep.join(modpath),
                    filepath,
                )
    raise ImportError("No module named %s" % ".".join(modpath))
def _find_spec_with_path(search_path, modname, module_parts, processed, submodule_path):
    """Run every registered finder over *modname*; return (finder, spec).

    :raises ImportError: when no finder produced a spec.
    """
    # All finders are instantiated eagerly: ZipFinder's constructor primes
    # sys.path_importer_cache as a side effect, regardless of which finder
    # ultimately succeeds.
    finders = [finder_cls(search_path) for finder_cls in _SPEC_FINDERS]
    for finder in finders:
        spec = finder.find_module(modname, module_parts, processed, submodule_path)
        if spec is not None:
            return finder, spec
    raise ImportError("No module named %s" % ".".join(module_parts))
def find_spec(modpath, path=None):
    """Find a spec for the given module.

    :type modpath: list or tuple
    :param modpath:
      split module's name (i.e name of a module or package split
      on '.'), with leading empty strings for explicit relative import

    :type path: list or None
    :param path:
      optional list of path where the module or package should be
      searched (use sys.path if nothing or None is given)

    :rtype: ModuleSpec
    :return: A module spec, which describes how the module was
             found and where.
    """
    _path = path or sys.path
    # Need a copy for not mutating the argument.
    modpath = modpath[:]
    submodule_path = None
    module_parts = modpath[:]
    processed = []
    # Walk the dotted path one component at a time; each resolved parent
    # narrows the search path used for the next component.
    while modpath:
        modname = modpath.pop(0)
        finder, spec = _find_spec_with_path(
            _path, modname, module_parts, processed, submodule_path or path
        )
        processed.append(modname)
        if modpath:
            # More components remain: ask the finder where to look next.
            submodule_path = finder.contribute_to_path(spec, processed)
        if spec.type == ModuleType.PKG_DIRECTORY:
            # Package directories record where their submodules live.
            spec = spec._replace(submodule_search_locations=submodule_path)
    return spec

View File

@@ -0,0 +1,10 @@
# Copyright (c) 2016, 2018 Claudiu Popa <pcmanticore@gmail.com>
try:
import pkg_resources
except ImportError:
pkg_resources = None
def is_namespace(modname):
    """Return whether *modname* is a pkg_resources-registered namespace package."""
    if pkg_resources is None:
        # pkg_resources could not be imported; nothing can be a namespace.
        return False
    return modname in pkg_resources._namespace_packages

View File

@@ -0,0 +1,66 @@
# Copyright (c) 2016-2018 Claudiu Popa <pcmanticore@gmail.com>
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
"""Contains logic for retrieving special methods.
This implementation does not rely on the dot attribute access
logic, found in ``.getattr()``. The difference between these two
is that the dunder methods are looked with the type slots
(you can find more about these here
http://lucumr.pocoo.org/2014/8/16/the-python-i-would-like-to-see/)
As such, the lookup for the special methods is actually simpler than
the dot attribute access.
"""
import itertools
import astroid
from astroid import exceptions
def _lookup_in_mro(node, name):
attrs = node.locals.get(name, [])
nodes = itertools.chain.from_iterable(
ancestor.locals.get(name, []) for ancestor in node.ancestors(recurs=True)
)
values = list(itertools.chain(attrs, nodes))
if not values:
raise exceptions.AttributeInferenceError(attribute=name, target=node)
return values
def lookup(node, name):
    """Lookup the given special method name in the given *node*

    If the special method was found, then a list of attributes
    will be returned. Otherwise, `astroid.AttributeInferenceError`
    is going to be raised.
    """
    builtin_nodes = (
        astroid.List,
        astroid.Tuple,
        astroid.Const,
        astroid.Dict,
        astroid.Set,
    )
    if isinstance(node, builtin_nodes):
        return _builtin_lookup(node, name)
    if isinstance(node, astroid.Instance):
        return _lookup_in_mro(node, name)
    if isinstance(node, astroid.ClassDef):
        return _class_lookup(node, name)
    raise exceptions.AttributeInferenceError(attribute=name, target=node)
def _class_lookup(node, name):
    """Look up *name* on the metaclass of the class *node*."""
    metaclass = node.metaclass()
    if metaclass is not None:
        return _lookup_in_mro(metaclass, name)
    # No metaclass: the special method cannot exist on the class itself.
    raise exceptions.AttributeInferenceError(attribute=name, target=node)
def _builtin_lookup(node, name):
values = node.locals.get(name, [])
if not values:
raise exceptions.AttributeInferenceError(attribute=name, target=node)
return values

View File

@@ -0,0 +1,728 @@
# Copyright (c) 2016-2018 Claudiu Popa <pcmanticore@gmail.com>
# Copyright (c) 2016 Derek Gustafson <degustaf@gmail.com>
# Copyright (c) 2017-2018 Bryce Guinta <bryce.paul.guinta@gmail.com>
# Copyright (c) 2017 Ceridwen <ceridwenv@gmail.com>
# Copyright (c) 2017 Calen Pennington <cale@edx.org>
# Copyright (c) 2018 Nick Drozd <nicholasdrozd@gmail.com>
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
"""
Data object model, as per https://docs.python.org/3/reference/datamodel.html.
This module describes, at least partially, a data object model for some
of astroid's nodes. The model contains special attributes that nodes such
as functions, classes, modules etc have, such as __doc__, __class__,
__module__ etc, being used when doing attribute lookups over nodes.
For instance, inferring `obj.__class__` will first trigger an inference
of the `obj` variable. If it was successfully inferred, then an attribute
`__class__` will be looked for in the inferred object. This is the part
where the data model occurs. The model is attached to those nodes
and the lookup mechanism will try to see if attributes such as
`__class__` are defined by the model or not. If they are defined,
the model will be requested to return the corresponding value of that
attribute. Thus the model can be viewed as a special part of the lookup
mechanism.
"""
import itertools
import pprint
import os
import types
from functools import lru_cache
import astroid
from astroid import context as contextmod
from astroid import exceptions
from astroid import node_classes
# Prefix marking the members of a model that implement special attributes.
IMPL_PREFIX = "attr_"
def _dunder_dict(instance, attributes):
    """Build a Dict node modelling *attributes* for a ``__dict__`` lookup."""
    obj = node_classes.Dict(parent=instance)
    # Keys become Const string nodes; every attribute keeps only its most
    # recent assignment (the last element of each value list).
    items = [
        (node_classes.Const(value=key, parent=obj), assignments[-1])
        for key, assignments in attributes.items()
    ]
    obj.postinit(items)
    return obj
class ObjectModel:
    """Base class for the special-attribute models attached to nodes.

    A model maps special attribute names (``__dict__``, ``__class__``, ...)
    to implementations: one ``attr_``-prefixed member per attribute.
    Subclasses add ``attr_<name>`` members and :meth:`lookup` resolves a
    name against them, raising ``AttributeInferenceError`` when absent.
    """

    def __init__(self):
        # The node through which the model was accessed; bound lazily via
        # __get__/__call__ rather than at construction time.
        self._instance = None

    def __repr__(self):
        # Fix: the previous version also pre-formatted every field with
        # pprint into `lines`/`inner` locals that were never used — dead
        # work removed; the produced string is unchanged.
        cname = type(self).__name__
        alignment = len(cname) + 1
        fields = (",\n" + " " * alignment).join(sorted(self.attributes()))
        return "%s(%s)" % (cname, fields)

    def __call__(self, instance):
        self._instance = instance
        return self

    def __get__(self, instance, cls=None):
        # ObjectModel needs to be a descriptor so that just doing
        # `special_attributes = SomeObjectModel` should be enough in the body of a node.
        # But at the same time, node.special_attributes should return an object
        # which can be used for manipulating the special attributes. That's the reason
        # we pass the instance through which it got accessed to ObjectModel.__call__,
        # returning itself afterwards, so we can still have access to the
        # underlying data model and to the instance for which it got accessed.
        return self(instance)

    def __contains__(self, name):
        return name in self.attributes()

    # NOTE(review): lru_cache on an instance method keys on *self* and keeps
    # every model instance alive for the cache's lifetime; kept as-is to
    # preserve existing behavior.
    @lru_cache(maxsize=None)
    def attributes(self):
        """Get the attributes which are exported by this object model."""
        return [
            obj[len(IMPL_PREFIX) :] for obj in dir(self) if obj.startswith(IMPL_PREFIX)
        ]

    def lookup(self, name):
        """Look up the given *name* in the current model

        It should return an AST or an interpreter object,
        but if the name is not found, then an AttributeInferenceError will be raised.
        """
        if name in self.attributes():
            return getattr(self, IMPL_PREFIX + name)
        raise exceptions.AttributeInferenceError(target=self._instance, attribute=name)
class ModuleModel(ObjectModel):
    """Special-attribute model for Module nodes."""

    def _builtins(self):
        # The builtins module's own __dict__, reused for the *builtins* attr.
        builtins_ast_module = astroid.MANAGER.builtins_module
        return builtins_ast_module.special_attributes.lookup("__dict__")

    @property
    def attr_builtins(self):
        return self._builtins()

    @property
    def attr___path__(self):
        # Only packages carry a __path__; plain modules raise.
        if not self._instance.package:
            raise exceptions.AttributeInferenceError(
                target=self._instance, attribute="__path__"
            )
        # A package's __path__ lists the directories of its __init__.py files.
        path_objs = [
            node_classes.Const(
                value=path
                if not path.endswith("__init__.py")
                else os.path.dirname(path),
                parent=self._instance,
            )
            for path in self._instance.path
        ]
        container = node_classes.List(parent=self._instance)
        container.postinit(path_objs)
        return container

    @property
    def attr___name__(self):
        return node_classes.Const(value=self._instance.name, parent=self._instance)

    @property
    def attr___doc__(self):
        return node_classes.Const(value=self._instance.doc, parent=self._instance)

    @property
    def attr___file__(self):
        return node_classes.Const(value=self._instance.file, parent=self._instance)

    @property
    def attr___dict__(self):
        return _dunder_dict(self._instance, self._instance.globals)

    @property
    def attr___package__(self):
        # Packages report their own name; plain modules the empty string.
        if not self._instance.package:
            value = ""
        else:
            value = self._instance.name
        return node_classes.Const(value=value, parent=self._instance)

    # These are related to the Python 3 implementation of the
    # import system,
    # https://docs.python.org/3/reference/import.html#import-related-module-attributes

    @property
    def attr___spec__(self):
        # No handling for now.
        return node_classes.Unknown()

    @property
    def attr___loader__(self):
        # No handling for now.
        return node_classes.Unknown()

    @property
    def attr___cached__(self):
        # No handling for now.
        return node_classes.Unknown()
class FunctionModel(ObjectModel):
    """Special-attribute model for functions."""

    @property
    def attr___name__(self):
        return node_classes.Const(value=self._instance.name, parent=self._instance)

    @property
    def attr___doc__(self):
        return node_classes.Const(value=self._instance.doc, parent=self._instance)

    @property
    def attr___qualname__(self):
        return node_classes.Const(value=self._instance.qname(), parent=self._instance)

    @property
    def attr___defaults__(self):
        # __defaults__ is None when no parameter carries a default.
        func = self._instance
        if not func.args.defaults:
            return node_classes.Const(value=None, parent=func)
        defaults_obj = node_classes.Tuple(parent=func)
        defaults_obj.postinit(func.args.defaults)
        return defaults_obj

    @property
    def attr___annotations__(self):
        # Build the annotations dict from positional-or-keyword args,
        # keyword-only args, *args/**kwargs and the return annotation.
        obj = node_classes.Dict(parent=self._instance)
        if not self._instance.returns:
            returns = None
        else:
            returns = self._instance.returns
        args = self._instance.args
        pair_annotations = itertools.chain(
            zip(args.args or [], args.annotations),
            zip(args.kwonlyargs, args.kwonlyargs_annotations),
        )
        annotations = {
            arg.name: annotation for (arg, annotation) in pair_annotations if annotation
        }
        if args.varargannotation:
            annotations[args.vararg] = args.varargannotation
        if args.kwargannotation:
            annotations[args.kwarg] = args.kwargannotation
        if returns:
            annotations["return"] = returns
        items = [
            (node_classes.Const(key, parent=obj), value)
            for (key, value) in annotations.items()
        ]
        obj.postinit(items)
        return obj

    @property
    def attr___dict__(self):
        return node_classes.Dict(parent=self._instance)

    attr___globals__ = attr___dict__

    @property
    def attr___kwdefaults__(self):
        def _default_args(args, parent):
            # Yield (name, default) pairs for keyword-only parameters that
            # actually carry a default value.
            for arg in args.kwonlyargs:
                try:
                    default = args.default_value(arg.name)
                except exceptions.NoDefault:
                    continue
                name = node_classes.Const(arg.name, parent=parent)
                yield name, default

        args = self._instance.args
        obj = node_classes.Dict(parent=self._instance)
        defaults = dict(_default_args(args, obj))
        obj.postinit(list(defaults.items()))
        return obj

    @property
    def attr___module__(self):
        return node_classes.Const(self._instance.root().qname())

    @property
    def attr___get__(self):
        # Imported here to avoid a circular import at module load time.
        from astroid import bases

        func = self._instance

        class DescriptorBoundMethod(bases.BoundMethod):
            """Bound method which knows how to understand calling descriptor binding."""

            def implicit_parameters(self):
                # Different than BoundMethod since the signature
                # is different.
                return 0

            def infer_call_result(self, caller, context=None):
                # __get__ takes exactly two arguments: (instance, owner).
                if len(caller.args) != 2:
                    raise exceptions.InferenceError(
                        "Invalid arguments for descriptor binding",
                        target=self,
                        context=context,
                    )
                context = contextmod.copy_context(context)
                cls = next(caller.args[0].infer(context=context))
                if cls is astroid.Uninferable:
                    raise exceptions.InferenceError(
                        "Invalid class inferred", target=self, context=context
                    )
                # For some reason func is a Node that the below
                # code is not expecting
                if isinstance(func, bases.BoundMethod):
                    yield func
                    return
                # Rebuild the original value, but with the parent set as the
                # class where it will be bound.
                new_func = func.__class__(
                    name=func.name,
                    doc=func.doc,
                    lineno=func.lineno,
                    col_offset=func.col_offset,
                    parent=cls,
                )
                # pylint: disable=no-member
                new_func.postinit(func.args, func.body, func.decorators, func.returns)
                # Build a proper bound method that points to our newly built function.
                proxy = bases.UnboundMethod(new_func)
                yield bases.BoundMethod(proxy=proxy, bound=cls)

            @property
            def args(self):
                """Overwrite the underlying args to match those of the underlying func

                Usually the underlying *func* is a function/method, as in:

                    def test(self):
                        pass

                This has only the *self* parameter but when we access test.__get__
                we get a new object which has two parameters, *self* and *type*.
                """
                nonlocal func
                params = func.args.args.copy()
                params.append(astroid.AssignName(name="type"))
                arguments = astroid.Arguments(parent=func.args.parent)
                arguments.postinit(
                    args=params,
                    defaults=[],
                    kwonlyargs=[],
                    kw_defaults=[],
                    annotations=[],
                )
                return arguments

        return DescriptorBoundMethod(proxy=self._instance, bound=self._instance)

    # These are here just for completion.
    @property
    def attr___ne__(self):
        return node_classes.Unknown()

    attr___subclasshook__ = attr___ne__
    attr___str__ = attr___ne__
    attr___sizeof__ = attr___ne__
    attr___setattr___ = attr___ne__
    attr___repr__ = attr___ne__
    attr___reduce__ = attr___ne__
    attr___reduce_ex__ = attr___ne__
    attr___new__ = attr___ne__
    attr___lt__ = attr___ne__
    attr___eq__ = attr___ne__
    attr___gt__ = attr___ne__
    attr___format__ = attr___ne__
    attr___delattr___ = attr___ne__
    attr___getattribute__ = attr___ne__
    attr___hash__ = attr___ne__
    attr___init__ = attr___ne__
    attr___dir__ = attr___ne__
    attr___call__ = attr___ne__
    attr___class__ = attr___ne__
    attr___closure__ = attr___ne__
    attr___code__ = attr___ne__
class ClassModel(ObjectModel):
    """Special-attribute model for classes."""

    @property
    def attr___module__(self):
        return node_classes.Const(self._instance.root().qname())

    @property
    def attr___name__(self):
        return node_classes.Const(self._instance.name)

    @property
    def attr___qualname__(self):
        return node_classes.Const(self._instance.qname())

    @property
    def attr___doc__(self):
        return node_classes.Const(self._instance.doc)

    @property
    def attr___mro__(self):
        # Only new-style classes have a method resolution order.
        if not self._instance.newstyle:
            raise exceptions.AttributeInferenceError(
                target=self._instance, attribute="__mro__"
            )
        mro = self._instance.mro()
        obj = node_classes.Tuple(parent=self._instance)
        obj.postinit(mro)
        return obj

    @property
    def attr_mro(self):
        if not self._instance.newstyle:
            raise exceptions.AttributeInferenceError(
                target=self._instance, attribute="mro"
            )
        # Imported here to avoid a circular import.
        from astroid import bases

        other_self = self

        # Cls.mro is a method and we need to return one in order to have a proper inference.
        # The method we're returning is capable of inferring the underlying MRO though.
        class MroBoundMethod(bases.BoundMethod):
            def infer_call_result(self, caller, context=None):
                yield other_self.attr___mro__

        implicit_metaclass = self._instance.implicit_metaclass()
        mro_method = implicit_metaclass.locals["mro"][0]
        return MroBoundMethod(proxy=mro_method, bound=implicit_metaclass)

    @property
    def attr___bases__(self):
        obj = node_classes.Tuple()
        context = contextmod.InferenceContext()
        elts = list(self._instance._inferred_bases(context))
        obj.postinit(elts=elts)
        return obj

    @property
    def attr___class__(self):
        # Imported here to avoid a circular import.
        from astroid import helpers

        return helpers.object_type(self._instance)

    @property
    def attr___subclasses__(self):
        """Get the subclasses of the underlying class

        This looks only in the current module for retrieving the subclasses,
        thus it might miss a couple of them.
        """
        # Imported here to avoid a circular import.
        from astroid import bases
        from astroid import scoped_nodes

        if not self._instance.newstyle:
            raise exceptions.AttributeInferenceError(
                target=self._instance, attribute="__subclasses__"
            )
        qname = self._instance.qname()
        root = self._instance.root()
        # Every class in the same module that subclasses this one.
        classes = [
            cls
            for cls in root.nodes_of_class(scoped_nodes.ClassDef)
            if cls != self._instance and cls.is_subtype_of(qname)
        ]
        obj = node_classes.List(parent=self._instance)
        obj.postinit(classes)

        class SubclassesBoundMethod(bases.BoundMethod):
            def infer_call_result(self, caller, context=None):
                yield obj

        implicit_metaclass = self._instance.implicit_metaclass()
        subclasses_method = implicit_metaclass.locals["__subclasses__"][0]
        return SubclassesBoundMethod(proxy=subclasses_method, bound=implicit_metaclass)

    @property
    def attr___dict__(self):
        return node_classes.Dict(parent=self._instance)
class SuperModel(ObjectModel):
    """Special-attribute model for super() objects."""

    @property
    def attr___thisclass__(self):
        # The class from which the MRO search starts.
        return self._instance.mro_pointer

    @property
    def attr___self_class__(self):
        return self._instance._self_class

    @property
    def attr___self__(self):
        return self._instance.type

    @property
    def attr___class__(self):
        return self._instance._proxied
class UnboundMethodModel(ObjectModel):
    """Special-attribute model for unbound methods."""

    @property
    def attr___class__(self):
        # Imported here to avoid a circular import.
        from astroid import helpers

        return helpers.object_type(self._instance)

    @property
    def attr___func__(self):
        return self._instance._proxied

    @property
    def attr___self__(self):
        # Unbound methods have no bound instance.
        return node_classes.Const(value=None, parent=self._instance)

    # Python 2 era aliases for the same attributes.
    attr_im_func = attr___func__
    attr_im_class = attr___class__
    attr_im_self = attr___self__
class BoundMethodModel(FunctionModel):
    """Special-attribute model for bound methods."""

    @property
    def attr___func__(self):
        # Unwrap twice: BoundMethod proxies UnboundMethod, which proxies
        # the underlying function definition.
        return self._instance._proxied._proxied

    @property
    def attr___self__(self):
        return self._instance.bound
class GeneratorModel(FunctionModel):
    """Special-attribute model for generator objects."""

    def __new__(cls, *args, **kwargs):
        # Append the values from the GeneratorType unto this object.
        ret = super(GeneratorModel, cls).__new__(cls, *args, **kwargs)
        generator = astroid.MANAGER.builtins_module["generator"]
        for name, values in generator.locals.items():
            method = values[0]
            # The default argument pins *method* per iteration, avoiding the
            # late-binding closure trap.
            patched = lambda cls, meth=method: meth
            setattr(type(ret), IMPL_PREFIX + name, property(patched))
        return ret

    @property
    def attr___name__(self):
        # A generator's __name__/__doc__ come from its defining function.
        return node_classes.Const(
            value=self._instance.parent.name, parent=self._instance
        )

    @property
    def attr___doc__(self):
        return node_classes.Const(
            value=self._instance.parent.doc, parent=self._instance
        )
class AsyncGeneratorModel(GeneratorModel):
    """Special-attribute model for async generator objects."""

    def __new__(cls, *args, **kwargs):
        # Append the values from the AGeneratorType unto this object.
        ret = super().__new__(cls, *args, **kwargs)
        astroid_builtins = astroid.MANAGER.builtins_module
        generator = astroid_builtins.get("async_generator")
        if generator is None:
            # Make it backward compatible.
            generator = astroid_builtins.get("generator")
        for name, values in generator.locals.items():
            method = values[0]
            # Default argument pins *method* per iteration (late binding).
            patched = lambda cls, meth=method: meth
            setattr(type(ret), IMPL_PREFIX + name, property(patched))
        return ret
class InstanceModel(ObjectModel):
    """Special-attribute model for class instances."""

    @property
    def attr___class__(self):
        return self._instance._proxied

    @property
    def attr___module__(self):
        return node_classes.Const(self._instance.root().qname())

    @property
    def attr___doc__(self):
        return node_classes.Const(self._instance.doc)

    @property
    def attr___dict__(self):
        # Instance attributes only; class attributes live on the type.
        return _dunder_dict(self._instance, self._instance.instance_attrs)
# Exception instances
class ExceptionInstanceModel(InstanceModel):
    """Model the special attributes of exception instances."""

    @property
    def attr_args(self):
        # ``args`` is modelled as a 1-tuple holding an empty string constant.
        message = node_classes.Const("")
        args = node_classes.Tuple(parent=self._instance)
        args.postinit((message,))
        return args

    @property
    def attr___traceback__(self):
        # Instantiate the builtins ``TracebackType`` class node.
        builtins_ast_module = astroid.MANAGER.builtins_module
        traceback_type = builtins_ast_module[types.TracebackType.__name__]
        return traceback_type.instantiate_class()
class SyntaxErrorInstanceModel(ExceptionInstanceModel):
    """SyntaxError instances additionally expose ``text``."""

    @property
    def attr_text(self):
        return node_classes.Const("")
class OSErrorInstanceModel(ExceptionInstanceModel):
    """OSError instances expose ``filename``, ``errno`` and ``strerror``."""

    @property
    def attr_filename(self):
        return node_classes.Const("")

    @property
    def attr_errno(self):
        return node_classes.Const(0)

    @property
    def attr_strerror(self):
        return node_classes.Const("")

    # ``filename2`` (second path of two-path OSErrors) shares the same model.
    attr_filename2 = attr_filename
class ImportErrorInstanceModel(ExceptionInstanceModel):
    """ImportError instances expose ``name`` and ``path``."""

    @property
    def attr_name(self):
        return node_classes.Const("")

    @property
    def attr_path(self):
        return node_classes.Const("")
# Map qualified builtin exception names to the instance model that supplies
# their special attributes.
BUILTIN_EXCEPTIONS = {
    "builtins.SyntaxError": SyntaxErrorInstanceModel,
    "builtins.ImportError": ImportErrorInstanceModel,
    # These are all similar to OSError in terms of attributes
    "builtins.OSError": OSErrorInstanceModel,
    "builtins.BlockingIOError": OSErrorInstanceModel,
    "builtins.BrokenPipeError": OSErrorInstanceModel,
    "builtins.ChildProcessError": OSErrorInstanceModel,
    "builtins.ConnectionAbortedError": OSErrorInstanceModel,
    "builtins.ConnectionError": OSErrorInstanceModel,
    "builtins.ConnectionRefusedError": OSErrorInstanceModel,
    "builtins.ConnectionResetError": OSErrorInstanceModel,
    "builtins.FileExistsError": OSErrorInstanceModel,
    "builtins.FileNotFoundError": OSErrorInstanceModel,
    "builtins.InterruptedError": OSErrorInstanceModel,
    "builtins.IsADirectoryError": OSErrorInstanceModel,
    "builtins.NotADirectoryError": OSErrorInstanceModel,
    "builtins.PermissionError": OSErrorInstanceModel,
    "builtins.ProcessLookupError": OSErrorInstanceModel,
    "builtins.TimeoutError": OSErrorInstanceModel,
}
class DictModel(ObjectModel):
    """Model the special attributes of ``dict`` objects."""

    @property
    def attr___class__(self):
        return self._instance._proxied

    def _generic_dict_attribute(self, obj, name):
        """Generate a bound method that can infer the given *obj*."""

        class DictMethodBoundMethod(astroid.BoundMethod):
            def infer_call_result(self, caller, context=None):
                yield obj

        meth = next(self._instance._proxied.igetattr(name))
        return DictMethodBoundMethod(proxy=meth, bound=self._instance)

    @property
    def attr_items(self):
        """Model ``dict.items`` as a method inferring a ``DictItems`` view."""
        elems = []
        obj = node_classes.List(parent=self._instance)
        for key, value in self._instance.items:
            elem = node_classes.Tuple(parent=obj)
            elem.postinit((key, value))
            elems.append(elem)
        obj.postinit(elts=elems)

        # Imported locally; presumably avoids a circular import — TODO confirm.
        from astroid import objects

        obj = objects.DictItems(obj)
        return self._generic_dict_attribute(obj, "items")

    @property
    def attr_keys(self):
        """Model ``dict.keys`` as a method inferring a ``DictKeys`` view."""
        keys = [key for (key, _) in self._instance.items]
        obj = node_classes.List(parent=self._instance)
        obj.postinit(elts=keys)

        from astroid import objects

        obj = objects.DictKeys(obj)
        return self._generic_dict_attribute(obj, "keys")

    @property
    def attr_values(self):
        """Model ``dict.values`` as a method inferring a ``DictValues`` view."""
        values = [value for (_, value) in self._instance.items]
        obj = node_classes.List(parent=self._instance)
        obj.postinit(values)

        from astroid import objects

        obj = objects.DictValues(obj)
        return self._generic_dict_attribute(obj, "values")

View File

@@ -0,0 +1,327 @@
# Copyright (c) 2006-2011, 2013-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
# Copyright (c) 2014-2018 Claudiu Popa <pcmanticore@gmail.com>
# Copyright (c) 2014 BioGeek <jeroen.vangoey@gmail.com>
# Copyright (c) 2014 Google, Inc.
# Copyright (c) 2014 Eevee (Alex Munroe) <amunroe@yelp.com>
# Copyright (c) 2015-2016 Ceridwen <ceridwenv@gmail.com>
# Copyright (c) 2016 Derek Gustafson <degustaf@gmail.com>
# Copyright (c) 2017 Iva Miholic <ivamiho@gmail.com>
# Copyright (c) 2018 Bryce Guinta <bryce.paul.guinta@gmail.com>
# Copyright (c) 2018 Nick Drozd <nicholasdrozd@gmail.com>
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
"""astroid manager: avoid multiple astroid build of a same module when
possible by providing a class responsible to get astroid representation
from various source and using a cache of built modules)
"""
import os
import zipimport
from astroid import exceptions
from astroid.interpreter._import import spec
from astroid import modutils
from astroid import transforms
def safe_repr(obj):
    """Return ``repr(obj)``, or a ``"???"`` placeholder when repr raises."""
    try:
        result = repr(obj)
    except Exception:  # pylint: disable=broad-except
        # A broken __repr__ must not break error reporting.
        result = "???"
    return result
class AstroidManager:
    """the astroid manager, responsible to build astroid from files
    or modules.

    Use the Borg pattern.
    """

    name = "astroid loader"
    # Borg state: every instance shares this dict as its ``__dict__``.
    brain = {}

    def __init__(self):
        # Borg pattern: all instances share the same state, so the
        # attributes below are initialised only once per process.
        self.__dict__ = AstroidManager.brain
        if not self.__dict__:
            # NOTE: cache entries are added by the [re]builder
            self.astroid_cache = {}
            self._mod_file_cache = {}
            self._failed_import_hooks = []
            self.always_load_extensions = False
            self.optimize_ast = False
            self.extension_package_whitelist = set()
            self._transform = transforms.TransformVisitor()

            # Export these APIs for convenience
            self.register_transform = self._transform.register_transform
            self.unregister_transform = self._transform.unregister_transform
            self.max_inferable_values = 100

    @property
    def builtins_module(self):
        # AST of the ``builtins`` module; present once bootstrap() has run.
        return self.astroid_cache["builtins"]

    def visit_transforms(self, node):
        """Visit the transforms and apply them to the given *node*."""
        return self._transform.visit(node)

    def ast_from_file(self, filepath, modname=None, fallback=True, source=False):
        """given a module name, return the astroid object"""
        try:
            # Prefer the .py source over a compiled file when one exists.
            filepath = modutils.get_source_file(filepath, include_no_ext=True)
            source = True
        except modutils.NoSourceFile:
            pass
        if modname is None:
            try:
                modname = ".".join(modutils.modpath_from_file(filepath))
            except ImportError:
                modname = filepath
        if (
            modname in self.astroid_cache
            and self.astroid_cache[modname].file == filepath
        ):
            return self.astroid_cache[modname]
        if source:
            # Imported locally; presumably avoids a circular import — TODO confirm.
            from astroid.builder import AstroidBuilder

            return AstroidBuilder(self).file_build(filepath, modname)
        if fallback and modname:
            return self.ast_from_module_name(modname)
        raise exceptions.AstroidBuildingError(
            "Unable to build an AST for {path}.", path=filepath
        )

    def _build_stub_module(self, modname):
        """Return an empty module AST named *modname* (a stub)."""
        from astroid.builder import AstroidBuilder

        return AstroidBuilder(self).string_build("", modname)

    def _build_namespace_module(self, modname, path):
        """Return a module AST for the namespace package *modname*."""
        from astroid.builder import build_namespace_package_module

        return build_namespace_package_module(modname, path)

    def _can_load_extension(self, modname):
        """Tell whether importing the C extension *modname* is allowed."""
        if self.always_load_extensions:
            return True
        if modutils.is_standard_module(modname):
            return True
        # Allowed when any parent package of modname is whitelisted.
        parts = modname.split(".")
        return any(
            ".".join(parts[:x]) in self.extension_package_whitelist
            for x in range(1, len(parts) + 1)
        )

    def ast_from_module_name(self, modname, context_file=None):
        """given a module name, return the astroid object"""
        if modname in self.astroid_cache:
            return self.astroid_cache[modname]
        if modname == "__main__":
            # __main__ has no importable file; hand back an empty stub.
            return self._build_stub_module(modname)
        old_cwd = os.getcwd()
        if context_file:
            # Resolve relative lookups from the context file's directory;
            # restored in the finally clause below.
            os.chdir(os.path.dirname(context_file))
        try:
            found_spec = self.file_from_module_name(modname, context_file)
            if found_spec.type == spec.ModuleType.PY_ZIPMODULE:
                module = self.zip_import_data(found_spec.location)
                if module is not None:
                    return module

            elif found_spec.type in (
                spec.ModuleType.C_BUILTIN,
                spec.ModuleType.C_EXTENSION,
            ):
                if (
                    found_spec.type == spec.ModuleType.C_EXTENSION
                    and not self._can_load_extension(modname)
                ):
                    # Refuse to import the binary; fake an empty module.
                    return self._build_stub_module(modname)
                try:
                    module = modutils.load_module_from_name(modname)
                except Exception as ex:
                    raise exceptions.AstroidImportError(
                        "Loading {modname} failed with:\n{error}",
                        modname=modname,
                        path=found_spec.location,
                    ) from ex
                return self.ast_from_module(module, modname)

            elif found_spec.type == spec.ModuleType.PY_COMPILED:
                raise exceptions.AstroidImportError(
                    "Unable to load compiled module {modname}.",
                    modname=modname,
                    path=found_spec.location,
                )

            elif found_spec.type == spec.ModuleType.PY_NAMESPACE:
                return self._build_namespace_module(
                    modname, found_spec.submodule_search_locations
                )

            if found_spec.location is None:
                raise exceptions.AstroidImportError(
                    "Can't find a file for module {modname}.", modname=modname
                )

            return self.ast_from_file(found_spec.location, modname, fallback=False)
        except exceptions.AstroidBuildingError as e:
            # Give registered failed-import hooks a chance to resolve it.
            for hook in self._failed_import_hooks:
                try:
                    return hook(modname)
                except exceptions.AstroidBuildingError:
                    pass
            raise e
        finally:
            os.chdir(old_cwd)

    def zip_import_data(self, filepath):
        """Build a module AST from a path inside a .zip/.egg, else None."""
        if zipimport is None:
            # NOTE(review): zipimport is imported unconditionally at module
            # level, so this guard looks vestigial — confirm before removing.
            return None
        from astroid.builder import AstroidBuilder

        builder = AstroidBuilder(self)
        for ext in (".zip", ".egg"):
            try:
                eggpath, resource = filepath.rsplit(ext + os.path.sep, 1)
            except ValueError:
                continue
            try:
                importer = zipimport.zipimporter(eggpath + ext)
                zmodname = resource.replace(os.path.sep, ".")
                if importer.is_package(resource):
                    zmodname = zmodname + ".__init__"
                module = builder.string_build(
                    importer.get_source(resource), zmodname, filepath
                )
                return module
            except Exception:  # pylint: disable=broad-except
                continue
        return None

    def file_from_module_name(self, modname, contextfile):
        """Return the module spec for *modname*, memoised per call pair.

        Failed lookups are cached too (as exception instances) and
        re-raised on every later call with the same arguments.
        """
        try:
            value = self._mod_file_cache[(modname, contextfile)]
        except KeyError:
            try:
                value = modutils.file_info_from_modpath(
                    modname.split("."), context_file=contextfile
                )
            except ImportError as ex:
                value = exceptions.AstroidImportError(
                    "Failed to import module {modname} with error:\n{error}.",
                    modname=modname,
                    error=ex,
                )
            self._mod_file_cache[(modname, contextfile)] = value
        if isinstance(value, exceptions.AstroidBuildingError):
            raise value
        return value

    def ast_from_module(self, module, modname=None):
        """given an imported module, return the astroid object"""
        modname = modname or module.__name__
        if modname in self.astroid_cache:
            return self.astroid_cache[modname]
        try:
            # some builtin modules don't have __file__ attribute
            filepath = module.__file__
            if modutils.is_python_source(filepath):
                # Building from source gives a richer AST than introspection.
                return self.ast_from_file(filepath, modname)
        except AttributeError:
            pass
        from astroid.builder import AstroidBuilder

        return AstroidBuilder(self).module_build(module, modname)

    def ast_from_class(self, klass, modname=None):
        """get astroid for the given class"""
        if modname is None:
            try:
                modname = klass.__module__
            except AttributeError as exc:
                raise exceptions.AstroidBuildingError(
                    "Unable to get module for class {class_name}.",
                    cls=klass,
                    class_repr=safe_repr(klass),
                    modname=modname,
                ) from exc

        modastroid = self.ast_from_module_name(modname)
        return modastroid.getattr(klass.__name__)[0]  # XXX

    def infer_ast_from_something(self, obj, context=None):
        """infer astroid for the given class"""
        if hasattr(obj, "__class__") and not isinstance(obj, type):
            # An instance was passed: infer from its class.
            klass = obj.__class__
        else:
            klass = obj
        try:
            modname = klass.__module__
        except AttributeError as exc:
            raise exceptions.AstroidBuildingError(
                "Unable to get module for {class_repr}.",
                cls=klass,
                class_repr=safe_repr(klass),
            ) from exc
        except Exception as exc:
            raise exceptions.AstroidImportError(
                "Unexpected error while retrieving module for {class_repr}:\n"
                "{error}",
                cls=klass,
                class_repr=safe_repr(klass),
            ) from exc
        try:
            name = klass.__name__
        except AttributeError as exc:
            raise exceptions.AstroidBuildingError(
                "Unable to get name for {class_repr}:\n",
                cls=klass,
                class_repr=safe_repr(klass),
            ) from exc
        except Exception as exc:
            raise exceptions.AstroidImportError(
                "Unexpected error while retrieving name for {class_repr}:\n" "{error}",
                cls=klass,
                class_repr=safe_repr(klass),
            ) from exc
        # take care, on living object __module__ is regularly wrong :(
        modastroid = self.ast_from_module_name(modname)
        if klass is obj:
            # Asked about the class itself: yield class nodes.
            for inferred in modastroid.igetattr(name, context):
                yield inferred
        else:
            # Asked about an instance: yield instantiated class nodes.
            for inferred in modastroid.igetattr(name, context):
                yield inferred.instantiate_class()

    def register_failed_import_hook(self, hook):
        """Registers a hook to resolve imports that cannot be found otherwise.

        `hook` must be a function that accepts a single argument `modname` which
        contains the name of the module or package that could not be imported.
        If `hook` can resolve the import, must return a node of type `astroid.Module`,
        otherwise, it must raise `AstroidBuildingError`.
        """
        self._failed_import_hooks.append(hook)

    def cache_module(self, module):
        """Cache a module if no module with the same name is known yet."""
        self.astroid_cache.setdefault(module.name, module)

    def bootstrap(self):
        """Bootstrap the required AST modules needed for the manager to work

        The bootstrap usually involves building the AST for the builtins
        module, which is required by the rest of astroid to work correctly.
        """
        # Imported locally; presumably avoids a circular import — TODO confirm.
        import astroid.raw_building

        astroid.raw_building._astroid_bootstrapping()

    def clear_cache(self):
        """Clear the underlying cache. Also bootstraps the builtins module."""
        self.astroid_cache.clear()
        self.bootstrap()

View File

@@ -0,0 +1,160 @@
# Copyright (c) 2010-2011, 2013-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
# Copyright (c) 2014-2016, 2018 Claudiu Popa <pcmanticore@gmail.com>
# Copyright (c) 2014 Google, Inc.
# Copyright (c) 2014 Eevee (Alex Munroe) <amunroe@yelp.com>
# Copyright (c) 2015-2016 Ceridwen <ceridwenv@gmail.com>
# Copyright (c) 2015 Florian Bruhin <me@the-compiler.org>
# Copyright (c) 2016 Jakub Wilk <jwilk@jwilk.net>
# Copyright (c) 2018 Nick Drozd <nicholasdrozd@gmail.com>
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
"""This module contains some mixins for the different nodes.
"""
import itertools
from astroid import decorators
from astroid import exceptions
class BlockRangeMixIn:
    """override block range """

    @decorators.cachedproperty
    def blockstart_tolineno(self):
        # By default a block starts on the statement's own line.
        return self.lineno

    def _elsed_block_range(self, lineno, orelse, last=None):
        """handle block line numbers range for try/finally, for, if and while
        statements
        """
        if lineno == self.fromlineno:
            return lineno, lineno
        if orelse:
            if lineno >= orelse[0].fromlineno:
                # *lineno* falls inside the else part.
                return lineno, orelse[-1].tolineno
            return lineno, orelse[0].fromlineno - 1
        return lineno, last or self.tolineno
class FilterStmtsMixin:
    """Mixin for statement filtering and assignment type"""

    def _get_filtered_stmts(self, _, node, _stmts, mystmt):
        """method used in _filter_stmts to get statements and trigger break"""
        if self.statement() is mystmt:
            # original node's statement is the assignment, only keep
            # current node (gen exp, list comp)
            return [node], True
        return _stmts, False

    def assign_type(self):
        # Nodes of this kind are their own assignment type.
        return self
class AssignTypeMixin:
    """Mixin for nodes that are their own assignment type."""

    def assign_type(self):
        return self

    def _get_filtered_stmts(self, lookup_node, node, _stmts, mystmt):
        """method used in filter_stmts"""
        if self is mystmt:
            return _stmts, True
        if self.statement() is mystmt:
            # original node's statement is the assignment, only keep
            # current node (gen exp, list comp)
            return [node], True
        return _stmts, False
class ParentAssignTypeMixin(AssignTypeMixin):
    """Mixin delegating the assignment type to the parent node."""

    def assign_type(self):
        return self.parent.assign_type()
class ImportFromMixin(FilterStmtsMixin):
    """MixIn for From and Import Nodes"""

    def _infer_name(self, frame, name):
        # Imported names infer to themselves.
        return name

    def do_import_module(self, modname=None):
        """return the ast for a module whose name is <modname> imported by <self>
        """
        # handle special case where we are on a package node importing a module
        # using the same name as the package, which may end in an infinite loop
        # on relative imports
        # XXX: no more needed ?
        mymodule = self.root()
        level = getattr(self, "level", None)  # Import as no level
        if modname is None:
            modname = self.modname
        # XXX we should investigate deeper if we really want to check
        # importing itself: modname and mymodule.name be relative or absolute
        if mymodule.relative_to_absolute_name(modname, level) == mymodule.name:
            # FIXME: we used to raise InferenceError here, but why ?
            return mymodule

        return mymodule.import_module(
            modname, level=level, relative_only=level and level >= 1
        )

    def real_name(self, asname):
        """get name from 'as' name"""
        for name, _asname in self.names:
            if name == "*":
                # Star imports expose the requested name directly.
                return asname
            if not _asname:
                # ``import a.b`` binds ``a``: compare on the first component.
                name = name.split(".", 1)[0]
                _asname = name
            if asname == _asname:
                return name
        raise exceptions.AttributeInferenceError(
            "Could not find original name for {attribute} in {target!r}",
            target=self,
            attribute=asname,
        )
class MultiLineBlockMixin:
    """Mixin for nodes with multi-line blocks, e.g. For and FunctionDef.

    Note that this does not apply to every node with a `body` field.
    For instance, an If node has a multi-line body, but the body of an
    IfExpr is not multi-line, and hence cannot contain Return nodes,
    Assign nodes, etc.
    """

    @decorators.cachedproperty
    def _multi_line_blocks(self):
        # The concrete node class declares which fields hold blocks.
        return tuple(getattr(self, field) for field in self._multi_line_block_fields)

    def _get_return_nodes_skip_functions(self):
        """Yield Return nodes in this node's blocks, not descending into functions."""
        for block in self._multi_line_blocks:
            for child_node in block:
                if child_node.is_function:
                    continue
                yield from child_node._get_return_nodes_skip_functions()

    def _get_yield_nodes_skip_lambdas(self):
        """Yield Yield nodes in this node's blocks, not descending into lambdas."""
        for block in self._multi_line_blocks:
            for child_node in block:
                if child_node.is_lambda:
                    continue
                yield from child_node._get_yield_nodes_skip_lambdas()

    @decorators.cached
    def _get_assign_nodes(self):
        """Return all Assign nodes found recursively in this node's blocks."""
        children_assign_nodes = (
            child_node._get_assign_nodes()
            for block in self._multi_line_blocks
            for child_node in block
        )
        return list(itertools.chain.from_iterable(children_assign_nodes))
class NoChildrenMixin:
    """Mixin for nodes with no children, e.g. Pass."""

    def get_children(self):
        """Return an empty generator: such nodes are leaves."""
        return
        yield  # unreachable; keeps this a generator function

View File

@@ -0,0 +1,704 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2014-2018 Claudiu Popa <pcmanticore@gmail.com>
# Copyright (c) 2014 Google, Inc.
# Copyright (c) 2014 Denis Laxalde <denis.laxalde@logilab.fr>
# Copyright (c) 2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
# Copyright (c) 2014 Eevee (Alex Munroe) <amunroe@yelp.com>
# Copyright (c) 2015 Florian Bruhin <me@the-compiler.org>
# Copyright (c) 2015 Radosław Ganczarek <radoslaw@ganczarek.in>
# Copyright (c) 2016 Derek Gustafson <degustaf@gmail.com>
# Copyright (c) 2016 Jakub Wilk <jwilk@jwilk.net>
# Copyright (c) 2016 Ceridwen <ceridwenv@gmail.com>
# Copyright (c) 2018 Mario Corchero <mcorcherojim@bloomberg.net>
# Copyright (c) 2018 Mario Corchero <mariocj89@gmail.com>
# Copyright (c) 2018 Anthony Sottile <asottile@umich.edu>
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
"""Python modules manipulation utility functions.
:type PY_SOURCE_EXTS: tuple(str)
:var PY_SOURCE_EXTS: list of possible python source file extension
:type STD_LIB_DIRS: set of str
:var STD_LIB_DIRS: directories where standard modules are located
:type BUILTIN_MODULES: dict
:var BUILTIN_MODULES: dictionary with builtin module names as keys
"""
import imp
import os
import platform
import sys
import itertools
from distutils.sysconfig import get_python_lib # pylint: disable=import-error
# pylint: disable=import-error, no-name-in-module
from distutils.errors import DistutilsPlatformError
# distutils is replaced by virtualenv with a module that does
# weird path manipulations in order to get to the
# real distutils module.
from .interpreter._import import spec
from .interpreter._import import util
# Platform-specific tables of source / compiled-extension suffixes.
if sys.platform.startswith("win"):
    PY_SOURCE_EXTS = ("py", "pyw")
    PY_COMPILED_EXTS = ("dll", "pyd")
else:
    PY_SOURCE_EXTS = ("py",)
    PY_COMPILED_EXTS = ("so",)


try:
    # The explicit sys.prefix is to work around a patch in virtualenv that
    # replaces the 'real' sys.prefix (i.e. the location of the binary)
    # with the prefix from which the virtualenv was created. This throws
    # off the detection logic for standard library modules, thus the
    # workaround.
    STD_LIB_DIRS = {
        get_python_lib(standard_lib=True, prefix=sys.prefix),
        # Take care of installations where exec_prefix != prefix.
        get_python_lib(standard_lib=True, prefix=sys.exec_prefix),
        get_python_lib(standard_lib=True),
    }
# get_python_lib(standard_lib=1) is not available on pypy, set STD_LIB_DIR to
# non-valid path, see https://bugs.pypy.org/issue1164
except DistutilsPlatformError:
    STD_LIB_DIRS = set()

if os.name == "nt":
    STD_LIB_DIRS.add(os.path.join(sys.prefix, "dlls"))
    try:
        # real_prefix is defined when running inside virtual environments,
        # created with the **virtualenv** library.
        STD_LIB_DIRS.add(os.path.join(sys.real_prefix, "dlls"))
    except AttributeError:
        # sys.base_exec_prefix is always defined, but in a virtual environment
        # created with the stdlib **venv** module, it points to the original
        # installation, if the virtual env is activated.
        try:
            STD_LIB_DIRS.add(os.path.join(sys.base_exec_prefix, "dlls"))
        except AttributeError:
            pass

if platform.python_implementation() == "PyPy":
    _root = os.path.join(sys.prefix, "lib_pypy")
    STD_LIB_DIRS.add(_root)
    try:
        # real_prefix is defined when running inside virtualenv.
        STD_LIB_DIRS.add(os.path.join(sys.real_prefix, "lib_pypy"))
    except AttributeError:
        pass
    del _root
if os.name == "posix":
    # Need the real prefix if we're under a virtualenv, otherwise
    # the usual one will do.
    try:
        prefix = sys.real_prefix
    except AttributeError:
        prefix = sys.prefix

    def _posix_path(path):
        # Join <prefix>/<path>/pythonX.Y for the running interpreter version.
        base_python = "python%d.%d" % sys.version_info[:2]
        return os.path.join(prefix, path, base_python)

    STD_LIB_DIRS.add(_posix_path("lib"))
    if sys.maxsize > 2 ** 32:
        # This tries to fix a problem with /usr/lib64 builds,
        # where systems are running both 32-bit and 64-bit code
        # on the same machine, which reflects into the places where
        # standard library could be found. More details can be found
        # here http://bugs.python.org/issue1294959.
        # An easy reproducing case would be
        # https://github.com/PyCQA/pylint/issues/712#issuecomment-163178753
        STD_LIB_DIRS.add(_posix_path("lib64"))

# Site-packages directory (third-party installs).
EXT_LIB_DIR = get_python_lib()
IS_JYTHON = platform.python_implementation() == "Jython"
BUILTIN_MODULES = dict.fromkeys(sys.builtin_module_names, True)
class NoSourceFile(Exception):
    """exception raised when we are not able to get a python
    source file for a precompiled file (raised by get_source_file)
    """
def _normalize_path(path):
return os.path.normcase(os.path.abspath(path))
def _canonicalize_path(path):
return os.path.realpath(os.path.expanduser(path))
def _path_from_filename(filename, is_jython=IS_JYTHON):
    """Map a compiled-module file name back to its source-file name."""
    if is_jython:
        # Jython compiles modules to ``<name>$py.class``; map back to .py.
        head, marker, _tail = filename.partition("$py.class")
        return head + ".py" if marker else filename
    if sys.version_info > (3, 0):
        # On Python 3 the name is returned untouched.
        return filename
    # Python 2 only: strip the trailing 'c' of a ``.pyc`` path.
    return filename[:-1] if filename.endswith(".pyc") else filename
def _handle_blacklist(blacklist, dirnames, filenames):
"""remove files/directories in the black list
dirnames/filenames are usually from os.walk
"""
for norecurs in blacklist:
if norecurs in dirnames:
dirnames.remove(norecurs)
elif norecurs in filenames:
filenames.remove(norecurs)
# Memoised results of _normalize_path, keyed by the raw path string.
_NORM_PATH_CACHE = {}


def _cache_normalize_path(path):
    """abspath with caching"""
    # _module_file calls abspath on every path in sys.path every time it's
    # called; on a larger codebase this easily adds up to half a second just
    # assembling path components. This cache alleviates that.
    try:
        return _NORM_PATH_CACHE[path]
    except KeyError:
        if not path:  # don't cache result for ''
            return _normalize_path(path)
        result = _NORM_PATH_CACHE[path] = _normalize_path(path)
        return result
def load_module_from_name(dotted_name, path=None, use_sys=True):
    """Load a Python module from its name.

    Thin wrapper splitting the dotted name for load_module_from_modpath.

    :type dotted_name: str
    :param dotted_name: python name of a module or package

    :type path: list or None
    :param path:
      optional list of path where the module or package should be
      searched (use sys.path if nothing or None is given)

    :type use_sys: bool
    :param use_sys:
      boolean indicating whether the sys.modules dictionary should be
      used or not

    :raise ImportError: if the module or package is not found

    :rtype: module
    :return: the loaded module
    """
    return load_module_from_modpath(dotted_name.split("."), path, use_sys)
def load_module_from_modpath(parts, path=None, use_sys=True):
    """Load a python module from its split name.

    The default for ``use_sys`` is now ``True`` (a real bool) instead of
    ``1``, matching the documented type and load_module_from_name.

    :type parts: list(str) or tuple(str)
    :param parts:
          python name of a module or package split on '.'

    :type path: list or None
    :param path:
      optional list of path where the module or package should be
      searched (use sys.path if nothing or None is given)

    :type use_sys: bool
    :param use_sys:
      boolean indicating whether the sys.modules dictionary should be used or not

    :raise ImportError: if the module or package is not found

    :rtype: module
    :return: the loaded module
    """
    if use_sys:
        try:
            # Fast path: the full dotted module is already imported.
            return sys.modules[".".join(parts)]
        except KeyError:
            pass
    modpath = []
    prevmodule = None
    # Import each dotted component in turn, binding it on its parent.
    for part in parts:
        modpath.append(part)
        curname = ".".join(modpath)
        module = None
        if len(modpath) != len(parts):
            # even with use_sys=False, should try to get outer packages from sys.modules
            module = sys.modules.get(curname)
        elif use_sys:
            # because it may have been indirectly loaded through a parent
            module = sys.modules.get(curname)
        if module is None:
            mp_file, mp_filename, mp_desc = imp.find_module(part, path)
            module = imp.load_module(curname, mp_file, mp_filename, mp_desc)
            # mp_file still needs to be closed.
            if mp_file:
                mp_file.close()
        if prevmodule:
            setattr(prevmodule, part, module)
        _file = getattr(module, "__file__", "")
        prevmodule = module
        if not _file and util.is_namespace(curname):
            # Namespace packages have no __file__; keep walking.
            continue
        if not _file and len(modpath) != len(parts):
            raise ImportError("no module in %s" % ".".join(parts[len(modpath) :]))
        # Continue the search relative to this package's directory.
        path = [os.path.dirname(_file)]
    return module
def load_module_from_file(filepath, path=None, use_sys=True, extrapath=None):
    """Load a Python module from it's path.

    :type filepath: str
    :param filepath: path to the python module or package

    :type path: list or None
    :param path:
      optional list of path where the module or package should be
      searched (use sys.path if nothing or None is given)

    :type use_sys: bool
    :param use_sys:
      boolean indicating whether the sys.modules dictionary should be
      used or not

    :type extrapath: dict or None
    :param extrapath:
      optional extra search path mapping, forwarded to modpath_from_file

    :raise ImportError: if the module or package is not found

    :rtype: module
    :return: the loaded module
    """
    modpath = modpath_from_file(filepath, extrapath)
    return load_module_from_modpath(modpath, path, use_sys)
def check_modpath_has_init(path, mod_path):
    """check there are some __init__.py all along the way"""
    modpath = []
    for part in mod_path:
        modpath.append(part)
        path = os.path.join(path, part)
        if not _has_init(path):
            # No __init__.py: still acceptable if this is a namespace package.
            old_namespace = util.is_namespace(".".join(modpath))
            if not old_namespace:
                return False
    return True
def _get_relative_base_path(filename, path_to_check):
"""Extracts the relative mod path of the file to import from
Check if a file is within the passed in path and if so, returns the
relative mod path from the one passed in.
If the filename is no in path_to_check, returns None
Note this function will look for both abs and realpath of the file,
this allows to find the relative base path even if the file is a
symlink of a file in the passed in path
Examples:
_get_relative_base_path("/a/b/c/d.py", "/a/b") -> ["c","d"]
_get_relative_base_path("/a/b/c/d.py", "/dev") -> None
"""
importable_path = None
path_to_check = os.path.normcase(path_to_check)
abs_filename = os.path.abspath(filename)
if os.path.normcase(abs_filename).startswith(path_to_check):
importable_path = abs_filename
real_filename = os.path.realpath(filename)
if os.path.normcase(real_filename).startswith(path_to_check):
importable_path = real_filename
if importable_path:
base_path = os.path.splitext(importable_path)[0]
relative_base_path = base_path[len(path_to_check) :]
return [pkg for pkg in relative_base_path.split(os.sep) if pkg]
return None
def modpath_from_file_with_callback(filename, extrapath=None, is_package_cb=None):
    """Return the split module path for *filename*.

    *is_package_cb* is called as ``is_package_cb(path, module_parts)`` to
    decide whether a candidate directory chain is an importable package.
    *extrapath* is an optional {path: package_name} mapping searched before
    sys.path.

    :raise ImportError: when no search root yields a package path
    """
    filename = os.path.expanduser(_path_from_filename(filename))
    if extrapath is not None:
        # Try the user-supplied roots first, canonicalized then as given.
        for path_ in itertools.chain(map(_canonicalize_path, extrapath), extrapath):
            path = os.path.abspath(path_)
            if not path:
                continue
            submodpath = _get_relative_base_path(filename, path)
            if not submodpath:
                continue
            if is_package_cb(path, submodpath[:-1]):
                # NOTE(review): ``extrapath[path_]`` raises KeyError when the
                # match comes from a *canonicalized* path, since only the
                # original strings are keys of the dict — confirm whether
                # this is intended.
                return extrapath[path_].split(".") + submodpath

    for path in itertools.chain(map(_canonicalize_path, sys.path), sys.path):
        path = _cache_normalize_path(path)
        if not path:
            continue
        modpath = _get_relative_base_path(filename, path)
        if not modpath:
            continue
        if is_package_cb(path, modpath[:-1]):
            return modpath

    raise ImportError(
        "Unable to find module for %s in %s" % (filename, ", \n".join(sys.path))
    )
def modpath_from_file(filename, extrapath=None):
    """given a file path return the corresponding split module's name
    (i.e name of a module or package split on '.')

    Packages are validated with check_modpath_has_init (__init__.py files
    or namespace packages all along the way).

    :type filename: str
    :param filename: file's path for which we want the module's name

    :type extrapath: dict
    :param extrapath:
      optional extra search path, with path as key and package name for the path
      as value. This is usually useful to handle package split in multiple
      directories using __path__ trick.

    :raise ImportError:
      if the corresponding module's name has not been found

    :rtype: list(str)
    :return: the corresponding split module's name
    """
    return modpath_from_file_with_callback(filename, extrapath, check_modpath_has_init)
def file_from_modpath(modpath, path=None, context_file=None):
    """Shortcut for file_info_from_modpath returning only the file location."""
    return file_info_from_modpath(modpath, path, context_file).location
def file_info_from_modpath(modpath, path=None, context_file=None):
    """given a mod path (i.e. split module / package name), return the
    corresponding file, giving priority to source file over precompiled
    file if it exists

    :type modpath: list or tuple
    :param modpath:
      split module's name (i.e name of a module or package split
      on '.')
      (this means explicit relative imports that start with dots have
      empty strings in this list!)

    :type path: list or None
    :param path:
      optional list of path where the module or package should be
      searched (use sys.path if nothing or None is given)

    :type context_file: str or None
    :param context_file:
      context file to consider, necessary if the identifier has been
      introduced using a relative import unresolvable in the actual
      context (i.e. modutils)

    :raise ImportError: if there is no such module in the directory

    :rtype: (str or None, import type)
    :return:
      the path to the module's file or None if it's an integrated
      builtin module such as 'sys'
    """
    if context_file is not None:
        # Relative lookups are resolved from the context file's directory.
        context = os.path.dirname(context_file)
    else:
        context = context_file
    if modpath[0] == "xml":
        # handle _xmlplus
        try:
            return _spec_from_modpath(["_xmlplus"] + modpath[1:], path, context)
        except ImportError:
            return _spec_from_modpath(modpath, path, context)
    elif modpath == ["os", "path"]:
        # FIXME: currently ignoring search_path...
        return spec.ModuleSpec(
            name="os.path", location=os.path.__file__, module_type=imp.PY_SOURCE
        )

    return _spec_from_modpath(modpath, path, context)
def get_module_part(dotted_name, context_file=None):
    """given a dotted name return the module part of the name :

    >>> get_module_part('astroid.as_string.dump')
    'astroid.as_string'

    :type dotted_name: str
    :param dotted_name: full name of the identifier we are interested in

    :type context_file: str or None
    :param context_file:
      context file to consider, necessary if the identifier has been
      introduced using a relative import unresolvable in the actual
      context (i.e. modutils)

    :raise ImportError: if there is no such module in the directory

    :rtype: str or None
    :return:
      the module part of the name or None if we have not been able at
      all to import the given name

    XXX: deprecated, since it doesn't handle package precedence over module
    (see #10066)
    """
    # os.path trick
    if dotted_name.startswith("os.path"):
        return "os.path"
    parts = dotted_name.split(".")
    if context_file is not None:
        # first check for builtin module which won't be considered latter
        # in that case (path != None)
        if parts[0] in BUILTIN_MODULES:
            if len(parts) > 2:
                raise ImportError(dotted_name)
            return parts[0]
        # don't use += or insert, we want a new list to be created !
    path = None
    starti = 0
    if parts[0] == "":
        # Explicit relative import: the leading dots produce empty strings.
        assert (
            context_file is not None
        ), "explicit relative import, but no context_file?"
        path = []  # prevent resolving the import non-relatively
        starti = 1
    while parts[starti] == "":  # for all further dots: change context
        starti += 1
        context_file = os.path.dirname(context_file)
    for i in range(starti, len(parts)):
        try:
            # Grow the candidate module path until it stops being importable.
            file_from_modpath(
                parts[starti : i + 1], path=path, context_file=context_file
            )
        except ImportError:
            if i < max(1, len(parts) - 2):
                raise
            return ".".join(parts[:i])
    return dotted_name
def get_module_files(src_directory, blacklist, list_all=False):
    """Return the paths of every available Python module file found under
    *src_directory* and its subpackages.

    :param src_directory: path of the directory corresponding to the package
    :param blacklist: iterable of file or directory names to ignore
    :param list_all: when true, also collect files from directories that do
        not contain an ``__init__.py``
    :return: list of paths of all available Python module files
    """
    collected = []
    for dirpath, subdirs, entries in os.walk(src_directory):
        if dirpath in blacklist:
            continue
        _handle_blacklist(blacklist, subdirs, entries)
        if not list_all and "__init__.py" not in entries:
            # Not a regular package: prune the walk so that its
            # subdirectories are skipped as well.
            subdirs[:] = ()
            continue
        collected.extend(
            os.path.join(dirpath, entry)
            for entry in entries
            if _is_python_file(entry)
        )
    return collected
def get_source_file(filename, include_no_ext=False):
    """given a python module's file name return the matching source file
    name (the filename will be returned identically if it's already an
    absolute path to a python source file)

    :param filename: python module's file name
    :param include_no_ext: also accept an existing extension-less file
    :raise NoSourceFile: if no source file exists on the file system
    :return: the absolute path of the source file if it exists
    """
    filename = os.path.abspath(_path_from_filename(filename))
    base, orig_ext = os.path.splitext(filename)
    # Probe every known source extension next to the given file.
    match = next(
        (
            "%s.%s" % (base, extension)
            for extension in PY_SOURCE_EXTS
            if os.path.exists("%s.%s" % (base, extension))
        ),
        None,
    )
    if match is not None:
        return match
    if include_no_ext and not orig_ext and os.path.exists(base):
        return base
    raise NoSourceFile(filename)
def is_python_source(filename):
    """
    rtype: bool
    return: True if the filename is a python source file
    """
    _root, extension = os.path.splitext(filename)
    stripped = extension.lstrip(".")
    return stripped in PY_SOURCE_EXTS
def is_standard_module(modname, std_path=None):
    """try to guess if a module is a standard python module (by default,
    see `std_path` parameter's description)

    :type modname: str
    :param modname: name of the module we are interested in

    :type std_path: list(str) or tuple(str)
    :param std_path: list of path considered as standard

    :rtype: bool
    :return:
        true if the module:
        - is located on the path listed in one of the directory in `std_path`
        - is a built-in module
    """
    # only the top-level package matters for locating the module
    modname = modname.split(".")[0]
    try:
        filename = file_from_modpath([modname])
    except ImportError:
        # import failed, i'm probably not so wrong by supposing it's
        # not standard...
        return False
    # modules which are not living in a file are considered standard
    # (sys and __builtin__ for instance)
    if filename is None:
        # we assume there are no namespaces in stdlib
        return not util.is_namespace(modname)
    filename = _normalize_path(filename)
    # NOTE(review): EXT_LIB_DIR presumably points at the external-packages
    # directory (e.g. site-packages) -- anything there is not stdlib.
    if filename.startswith(_cache_normalize_path(EXT_LIB_DIR)):
        return False
    if std_path is None:
        std_path = STD_LIB_DIRS
    for path in std_path:
        if filename.startswith(_cache_normalize_path(path)):
            return True
    return False
def is_relative(modname, from_file):
    """return true if the given module name is relative to the given
    file name

    :type modname: str
    :param modname: name of the module we are interested in

    :type from_file: str
    :param from_file:
        path of the module from which modname has been imported

    :rtype: bool
    :return:
        true if the module has been imported relatively to `from_file`
    """
    if not os.path.isdir(from_file):
        from_file = os.path.dirname(from_file)
    if from_file in sys.path:
        # The directory is importable absolutely, so the import is not
        # considered relative.
        return False
    try:
        # NOTE(review): the `imp` module is deprecated (removed in 3.12);
        # porting to importlib would subtly change lookup semantics --
        # confirm the supported Python range before migrating.
        stream, _, _ = imp.find_module(modname.split(".")[0], [from_file])
        # Close the stream to avoid ResourceWarnings.
        if stream:
            stream.close()
        return True
    except ImportError:
        return False
# internal only functions #####################################################
def _spec_from_modpath(modpath, path=None, context=None):
    """given a mod path (i.e. split module / package name), return the
    corresponding spec

    this function is used internally, see `file_from_modpath`'s
    documentation for more information
    """
    assert modpath
    location = None
    if context is not None:
        # Resolve relative to the context directory first, falling back to
        # the regular search path when that fails.
        try:
            found_spec = spec.find_spec(modpath, [context])
            location = found_spec.location
        except ImportError:
            found_spec = spec.find_spec(modpath, path)
            location = found_spec.location
    else:
        found_spec = spec.find_spec(modpath, path)
    if found_spec.type == spec.ModuleType.PY_COMPILED:
        # Prefer the matching .py source over the compiled file when one
        # exists on disk.
        try:
            location = get_source_file(found_spec.location)
            return found_spec._replace(
                location=location, type=spec.ModuleType.PY_SOURCE
            )
        except NoSourceFile:
            return found_spec._replace(location=location)
    elif found_spec.type == spec.ModuleType.C_BUILTIN:
        # integrated builtin module
        return found_spec._replace(location=None)
    elif found_spec.type == spec.ModuleType.PKG_DIRECTORY:
        # Packages are represented by their __init__ file.
        location = _has_init(found_spec.location)
        return found_spec._replace(location=location, type=spec.ModuleType.PY_SOURCE)
    return found_spec
def _is_python_file(filename):
"""return true if the given filename should be considered as a python file
.pyc and .pyo are ignored
"""
for ext in (".py", ".so", ".pyd", ".pyw"):
if filename.endswith(ext):
return True
return False
def _has_init(directory):
    """if the given directory has a valid __init__ file, return its path,
    else return None
    """
    base = os.path.join(directory, "__init__")
    candidates = (
        base + "." + extension for extension in PY_SOURCE_EXTS + ("pyc", "pyo")
    )
    return next(
        (candidate for candidate in candidates if os.path.exists(candidate)), None
    )
def is_namespace(specobj):
    """Return True if the given module spec describes a namespace package."""
    return specobj.type == spec.ModuleType.PY_NAMESPACE
def is_directory(specobj):
    """Return True if the given module spec describes a package directory."""
    return specobj.type == spec.ModuleType.PKG_DIRECTORY

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,173 @@
# Copyright (c) 2006-2011, 2013 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
# Copyright (c) 2010 Daniel Harding <dharding@gmail.com>
# Copyright (c) 2014-2018 Claudiu Popa <pcmanticore@gmail.com>
# Copyright (c) 2014 Google, Inc.
# Copyright (c) 2015-2016 Ceridwen <ceridwenv@gmail.com>
# Copyright (c) 2016 Jared Garst <jgarst@users.noreply.github.com>
# Copyright (c) 2017 Ashley Whetter <ashley@awhetter.co.uk>
# Copyright (c) 2017 rr- <rr-@sakuya.pl>
# Copyright (c) 2018 Bryce Guinta <bryce.paul.guinta@gmail.com>
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
"""Every available node class.
.. seealso::
:doc:`ast documentation <green_tree_snakes:nodes>`
All nodes inherit from :class:`~astroid.node_classes.NodeNG`.
"""
# pylint: disable=unused-import,redefined-builtin
from astroid.node_classes import (
Arguments,
AssignAttr,
Assert,
Assign,
AnnAssign,
AssignName,
AugAssign,
Repr,
BinOp,
BoolOp,
Break,
Call,
Compare,
Comprehension,
Const,
Continue,
Decorators,
DelAttr,
DelName,
Delete,
Dict,
Expr,
Ellipsis,
EmptyNode,
ExceptHandler,
Exec,
ExtSlice,
For,
ImportFrom,
Attribute,
Global,
If,
IfExp,
Import,
Index,
Keyword,
List,
Name,
Nonlocal,
Pass,
Print,
Raise,
Return,
Set,
Slice,
Starred,
Subscript,
TryExcept,
TryFinally,
Tuple,
UnaryOp,
While,
With,
Yield,
YieldFrom,
const_factory,
AsyncFor,
Await,
AsyncWith,
FormattedValue,
JoinedStr,
# Node not present in the builtin ast module.
DictUnpack,
Unknown,
)
from astroid.scoped_nodes import (
Module,
GeneratorExp,
Lambda,
DictComp,
ListComp,
SetComp,
FunctionDef,
ClassDef,
AsyncFunctionDef,
)
# Flat tuple of every node class re-exported above; convenient for code that
# needs to register behaviour for all node types at once.
ALL_NODE_CLASSES = (
    AsyncFunctionDef,
    AsyncFor,
    AsyncWith,
    Await,
    Arguments,
    AssignAttr,
    Assert,
    Assign,
    AnnAssign,
    AssignName,
    AugAssign,
    Repr,
    BinOp,
    BoolOp,
    Break,
    Call,
    ClassDef,
    Compare,
    Comprehension,
    Const,
    Continue,
    Decorators,
    DelAttr,
    DelName,
    Delete,
    Dict,
    DictComp,
    DictUnpack,
    Expr,
    Ellipsis,
    EmptyNode,
    ExceptHandler,
    Exec,
    ExtSlice,
    For,
    ImportFrom,
    FunctionDef,
    Attribute,
    GeneratorExp,
    Global,
    If,
    IfExp,
    Import,
    Index,
    Keyword,
    Lambda,
    List,
    ListComp,
    Name,
    Nonlocal,
    Module,
    Pass,
    Print,
    Raise,
    Return,
    Set,
    SetComp,
    Slice,
    Starred,
    Subscript,
    TryExcept,
    TryFinally,
    Tuple,
    UnaryOp,
    While,
    With,
    Yield,
    YieldFrom,
    FormattedValue,
    JoinedStr,
)

View File

@@ -0,0 +1,282 @@
# Copyright (c) 2015-2016, 2018 Claudiu Popa <pcmanticore@gmail.com>
# Copyright (c) 2015-2016 Ceridwen <ceridwenv@gmail.com>
# Copyright (c) 2015 Florian Bruhin <me@the-compiler.org>
# Copyright (c) 2016 Derek Gustafson <degustaf@gmail.com>
# Copyright (c) 2018 Bryce Guinta <bryce.paul.guinta@gmail.com>
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
"""
Inference objects are a way to represent composite AST nodes,
which are used only as inference results, so they can't be found in the
original AST tree. For instance, inferring the following frozenset use,
leads to an inferred FrozenSet:
Call(func=Name('frozenset'), args=Tuple(...))
"""
import builtins
from astroid import bases
from astroid import decorators
from astroid import exceptions
from astroid import MANAGER
from astroid import node_classes
from astroid import scoped_nodes
from astroid import util
BUILTINS = builtins.__name__
objectmodel = util.lazy_import("interpreter.objectmodel")
class FrozenSet(node_classes._BaseContainer):
    """class representing a FrozenSet composite node"""

    def pytype(self):
        # e.g. "builtins.frozenset"
        return "%s.frozenset" % BUILTINS

    def _infer(self, context=None):
        # A FrozenSet only ever infers to itself.
        yield self

    @decorators.cachedproperty
    def _proxied(self):  # pylint: disable=method-hidden
        # Proxy attribute lookups to the builtins `frozenset` class.
        ast_builtins = MANAGER.builtins_module
        return ast_builtins.getattr("frozenset")[0]
class Super(node_classes.NodeNG):
    """Proxy class over a super call.

    This class offers almost the same behaviour as Python's super,
    which is MRO lookups for retrieving attributes from the parents.

    The *mro_pointer* is the place in the MRO from where we should
    start looking, not counting it. *mro_type* is the object which
    provides the MRO, it can be both a type or an instance.
    *self_class* is the class where the super call is, while
    *scope* is the function where the super call is.
    """

    # pylint: disable=unnecessary-lambda
    special_attributes = util.lazy_descriptor(lambda: objectmodel.SuperModel())

    # pylint: disable=super-init-not-called
    def __init__(self, mro_pointer, mro_type, self_class, scope):
        self.type = mro_type
        self.mro_pointer = mro_pointer
        # Set to True by super_mro() for the `super(type, type)` form.
        self._class_based = False
        self._self_class = self_class
        self._scope = scope

    def _infer(self, context=None):
        yield self

    def super_mro(self):
        """Get the MRO which will be used to lookup attributes in this super."""
        if not isinstance(self.mro_pointer, scoped_nodes.ClassDef):
            raise exceptions.SuperError(
                "The first argument to super must be a subtype of "
                "type, not {mro_pointer}.",
                super_=self,
            )
        if isinstance(self.type, scoped_nodes.ClassDef):
            # `super(type, type)`, most likely in a class method.
            self._class_based = True
            mro_type = self.type
        else:
            mro_type = getattr(self.type, "_proxied", None)
            if not isinstance(mro_type, (bases.Instance, scoped_nodes.ClassDef)):
                raise exceptions.SuperError(
                    "The second argument to super must be an "
                    "instance or subtype of type, not {type}.",
                    super_=self,
                )
        if not mro_type.newstyle:
            raise exceptions.SuperError(
                "Unable to call super on old-style classes.", super_=self
            )
        mro = mro_type.mro()
        if self.mro_pointer not in mro:
            raise exceptions.SuperError(
                "The second argument to super must be an "
                "instance or subtype of type, not {type}.",
                super_=self,
            )
        # Attribute lookup starts *after* the mro_pointer class.
        index = mro.index(self.mro_pointer)
        return mro[index + 1 :]

    @decorators.cachedproperty
    def _proxied(self):
        # Proxy attribute lookups to the builtins `super` class.
        ast_builtins = MANAGER.builtins_module
        return ast_builtins.getattr("super")[0]

    def pytype(self):
        return "%s.super" % BUILTINS

    def display_type(self):
        return "Super of"

    @property
    def name(self):
        """Get the name of the MRO pointer."""
        return self.mro_pointer.name

    def qname(self):
        return "super"

    def igetattr(self, name, context=None):
        """Retrieve the inferred values of the given attribute name."""
        if name in self.special_attributes:
            yield self.special_attributes.lookup(name)
            return
        try:
            mro = self.super_mro()
        # Don't let invalid MROs or invalid super calls
        # leak out as is from this function.
        except exceptions.SuperError as exc:
            raise exceptions.AttributeInferenceError(
                (
                    "Lookup for {name} on {target!r} because super call {super!r} "
                    "is invalid."
                ),
                target=self,
                attribute=name,
                context=context,
                super_=exc.super_,
            ) from exc
        except exceptions.MroError as exc:
            raise exceptions.AttributeInferenceError(
                (
                    "Lookup for {name} on {target!r} failed because {cls!r} has an "
                    "invalid MRO."
                ),
                target=self,
                attribute=name,
                context=context,
                mros=exc.mros,
                cls=exc.cls,
            ) from exc
        found = False
        # Walk the remaining MRO, yielding every matching definition.
        for cls in mro:
            if name not in cls.locals:
                continue
            found = True
            for inferred in bases._infer_stmts([cls[name]], context, frame=self):
                if not isinstance(inferred, scoped_nodes.FunctionDef):
                    yield inferred
                    continue
                # We can obtain different descriptors from a super depending
                # on what we are accessing and where the super call is.
                if inferred.type == "classmethod":
                    yield bases.BoundMethod(inferred, cls)
                elif self._scope.type == "classmethod" and inferred.type == "method":
                    yield inferred
                elif self._class_based or inferred.type == "staticmethod":
                    yield inferred
                elif bases._is_property(inferred):
                    # TODO: support other descriptors as well.
                    try:
                        yield from inferred.infer_call_result(self, context)
                    except exceptions.InferenceError:
                        yield util.Uninferable
                else:
                    yield bases.BoundMethod(inferred, cls)
        if not found:
            raise exceptions.AttributeInferenceError(
                target=self, attribute=name, context=context
            )

    def getattr(self, name, context=None):
        # Eager variant of igetattr().
        return list(self.igetattr(name, context=context))
class ExceptionInstance(bases.Instance):
    """Class for instances of exceptions

    It has special treatment for some of the exceptions's attributes,
    which are transformed at runtime into certain concrete objects, such as
    the case of .args.
    """

    @decorators.cachedproperty
    def special_attributes(self):
        # Pick the object model matching this exception's qualified name,
        # falling back to the generic exception model.
        qname = self.qname()
        instance = objectmodel.BUILTIN_EXCEPTIONS.get(
            qname, objectmodel.ExceptionInstanceModel
        )
        return instance()(self)
class DictInstance(bases.Instance):
    """Special kind of instances for dictionaries

    This instance knows the underlying object model of the dictionaries, which means
    that methods such as .values or .items can be properly inferred.
    """

    # pylint: disable=unnecessary-lambda
    special_attributes = util.lazy_descriptor(lambda: objectmodel.DictModel())
# Custom objects tailored for dictionaries, which are used to
# disambiguate between the types of Python 2 dict's method returns
# and Python 3 (where they return set like objects).
class DictItems(bases.Proxy):
    """Proxy marking inference results that stand for ``dict.items()``."""

    __str__ = node_classes.NodeNG.__str__
    __repr__ = node_classes.NodeNG.__repr__
class DictKeys(bases.Proxy):
    """Proxy marking inference results that stand for ``dict.keys()``."""

    __str__ = node_classes.NodeNG.__str__
    __repr__ = node_classes.NodeNG.__repr__
class DictValues(bases.Proxy):
    """Proxy marking inference results that stand for ``dict.values()``."""

    __str__ = node_classes.NodeNG.__str__
    __repr__ = node_classes.NodeNG.__repr__
class PartialFunction(scoped_nodes.FunctionDef):
    """A class representing partial function obtained via functools.partial"""

    def __init__(
        self, call, name=None, doc=None, lineno=None, col_offset=None, parent=None
    ):
        super().__init__(name, doc, lineno, col_offset, parent)
        # Arguments frozen at partial() time; the first positional argument
        # of the call is the wrapped function itself, hence the [1:] slices.
        self.filled_positionals = len(call.positional_arguments[1:])
        self.filled_args = call.positional_arguments[1:]
        self.filled_keywords = call.keyword_arguments

    def infer_call_result(self, caller=None, context=None):
        # Merge the frozen arguments into the current call context;
        # keywords explicitly passed at the call site take precedence.
        if context:
            current_passed_keywords = {
                keyword for (keyword, _) in context.callcontext.keywords
            }
            for keyword, value in self.filled_keywords.items():
                if keyword not in current_passed_keywords:
                    context.callcontext.keywords.append((keyword, value))
            call_context_args = context.callcontext.args or []
            context.callcontext.args = self.filled_args + call_context_args
        return super().infer_call_result(caller=caller, context=context)

    def qname(self):
        # A partial has no dotted path of its own.
        return self.__class__.__name__
# TODO: Hack to solve the circular import problem between node_classes and objects
# This is not needed in 2.0, which has a cleaner design overall
# Re-parenting Dict onto DictInstance gives dict nodes the dictionary object
# model (see DictInstance above) without node_classes importing this module.
node_classes.Dict.__bases__ = (node_classes.NodeNG, DictInstance)

View File

@@ -0,0 +1,755 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2009-2011, 2013-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
# Copyright (c) 2014-2018 Claudiu Popa <pcmanticore@gmail.com>
# Copyright (c) 2014 Google, Inc.
# Copyright (c) 2014 Eevee (Alex Munroe) <amunroe@yelp.com>
# Copyright (c) 2015-2016 Ceridwen <ceridwenv@gmail.com>
# Copyright (c) 2015 Dmitry Pribysh <dmand@yandex.ru>
# Copyright (c) 2016 Derek Gustafson <degustaf@gmail.com>
# Copyright (c) 2017-2018 Ashley Whetter <ashley@awhetter.co.uk>
# Copyright (c) 2017 Łukasz Rogalski <rogalski.91@gmail.com>
# Copyright (c) 2017 rr- <rr-@sakuya.pl>
# Copyright (c) 2018 Bryce Guinta <bryce.paul.guinta@gmail.com>
# Copyright (c) 2018 Nick Drozd <nicholasdrozd@gmail.com>
# Copyright (c) 2018 HoverHell <hoverhell@gmail.com>
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
"""this module contains a set of functions to handle python protocols for nodes
where it makes sense.
"""
import collections
import operator as operator_mod
import sys
import itertools
from astroid import Store
from astroid import arguments
from astroid import bases
from astroid import context as contextmod
from astroid import exceptions
from astroid import decorators
from astroid import node_classes
from astroid import helpers
from astroid import nodes
from astroid import util
raw_building = util.lazy_import("raw_building")
objects = util.lazy_import("objects")
def _reflected_name(name):
return "__r" + name[2:]
def _augmented_name(name):
return "__i" + name[2:]
# Qualified name used to recognise contextlib.contextmanager-decorated
# generators when inferring `with` statements.
_CONTEXTLIB_MGR = "contextlib.contextmanager"

# Maps a binary operator symbol to the dunder method implementing it.
BIN_OP_METHOD = {
    "+": "__add__",
    "-": "__sub__",
    "/": "__truediv__",
    "//": "__floordiv__",
    "*": "__mul__",
    "**": "__pow__",
    "%": "__mod__",
    "&": "__and__",
    "|": "__or__",
    "^": "__xor__",
    "<<": "__lshift__",
    ">>": "__rshift__",
    "@": "__matmul__",
}

# Same operators, mapped to their reflected (__r*__) dunder methods.
REFLECTED_BIN_OP_METHOD = {
    key: _reflected_name(value) for (key, value) in BIN_OP_METHOD.items()
}
# Augmented-assignment operators ("+=", ...) mapped to __i*__ methods.
AUGMENTED_OP_METHOD = {
    key + "=": _augmented_name(value) for (key, value) in BIN_OP_METHOD.items()
}

# Unary operator symbol -> dunder method name.
UNARY_OP_METHOD = {
    "+": "__pos__",
    "-": "__neg__",
    "~": "__invert__",
    "not": None,  # XXX not '__nonzero__'
}
# Unary operator symbol -> callable evaluating it on a plain python value.
_UNARY_OPERATORS = {
    "+": operator_mod.pos,
    "-": operator_mod.neg,
    "~": operator_mod.invert,
    "not": operator_mod.not_,
}
def _infer_unary_op(obj, op):
    """Evaluate unary operator *op* on the plain python value *obj* and
    wrap the result in a fresh const node."""
    operation = _UNARY_OPERATORS[op]
    return nodes.const_factory(operation(obj))
# Attach unary-operator inference to container/const nodes by evaluating the
# operator on the equivalent plain python value.
nodes.Tuple.infer_unary_op = lambda self, op: _infer_unary_op(tuple(self.elts), op)
nodes.List.infer_unary_op = lambda self, op: _infer_unary_op(self.elts, op)
nodes.Set.infer_unary_op = lambda self, op: _infer_unary_op(set(self.elts), op)
nodes.Const.infer_unary_op = lambda self, op: _infer_unary_op(self.value, op)
nodes.Dict.infer_unary_op = lambda self, op: _infer_unary_op(dict(self.items), op)
# Binary operations
# Maps an operator symbol to a callable evaluating it on two plain python
# values; used by const_infer_binary_op below.
BIN_OP_IMPL = {
    "+": lambda a, b: a + b,
    "-": lambda a, b: a - b,
    "/": lambda a, b: a / b,
    "//": lambda a, b: a // b,
    "*": lambda a, b: a * b,
    "**": lambda a, b: a ** b,
    "%": lambda a, b: a % b,
    "&": lambda a, b: a & b,
    "|": lambda a, b: a | b,
    "^": lambda a, b: a ^ b,
    "<<": lambda a, b: a << b,
    ">>": lambda a, b: a >> b,
}
if sys.version_info >= (3, 5):
    # MatMult is available since Python 3.5+.
    BIN_OP_IMPL["@"] = operator_mod.matmul

# Register the augmented ("+=", "-=", ...) variants with the same callables.
for _KEY, _IMPL in list(BIN_OP_IMPL.items()):
    BIN_OP_IMPL[_KEY + "="] = _IMPL
@decorators.yes_if_nothing_inferred
def const_infer_binary_op(self, opnode, operator, other, context, _):
    """Infer `<Const> <operator> <other>`.

    Yields a new const node with the computed value, ``Const(NotImplemented)``
    when the operation does not apply, or Uninferable when evaluation fails
    unexpectedly.
    """
    not_implemented = nodes.Const(NotImplemented)
    if isinstance(other, nodes.Const):
        try:
            impl = BIN_OP_IMPL[operator]
            try:
                yield nodes.const_factory(impl(self.value, other.value))
            except TypeError:
                # ArithmeticError is not enough: float >> float is a TypeError
                yield not_implemented
            except Exception:  # pylint: disable=broad-except
                yield util.Uninferable
        except TypeError:
            # unhashable operator key
            yield not_implemented
    elif isinstance(self.value, str) and operator == "%":
        # TODO(cpopa): implement string interpolation later on.
        yield util.Uninferable
    else:
        yield not_implemented


nodes.Const.infer_binary_op = const_infer_binary_op
def _multiply_seq_by_int(self, opnode, other, context):
    """Implement `<sequence> * <int>` by repeating the inferred elements.

    Elements whose inference fails are replaced by Uninferable placeholders
    so the resulting sequence keeps the expected length.
    """
    node = self.__class__(parent=opnode)
    filtered_elts = (
        helpers.safe_infer(elt, context) or util.Uninferable
        for elt in self.elts
        if elt is not util.Uninferable
    )
    node.elts = list(filtered_elts) * other.value
    return node
def _filter_uninferable_nodes(elts, context):
    """Yield the inference results of each element, substituting an Unknown
    node wherever an element (or one of its inferred values) is Uninferable."""
    for element in elts:
        if element is util.Uninferable:
            yield nodes.Unknown()
            continue
        for inferred in element.infer(context):
            yield inferred if inferred is not util.Uninferable else nodes.Unknown()
@decorators.yes_if_nothing_inferred
def tl_infer_binary_op(self, opnode, operator, other, context, method):
    """Infer `<tuple-or-list> <operator> <other>`.

    Handles concatenation (`+`) with a sequence of the same type and
    repetition (`*`) by an integer constant or by an instance exposing
    ``__index__``.
    """
    not_implemented = nodes.Const(NotImplemented)
    if isinstance(other, self.__class__) and operator == "+":
        node = self.__class__(parent=opnode)
        node.elts = list(
            itertools.chain(
                _filter_uninferable_nodes(self.elts, context),
                _filter_uninferable_nodes(other.elts, context),
            )
        )
        yield node
    elif isinstance(other, nodes.Const) and operator == "*":
        if not isinstance(other.value, int):
            yield not_implemented
            return
        yield _multiply_seq_by_int(self, opnode, other, context)
    elif isinstance(other, bases.Instance) and operator == "*":
        # Verify if the instance supports __index__.
        as_index = helpers.class_instance_as_index(other)
        if not as_index:
            yield util.Uninferable
        else:
            yield _multiply_seq_by_int(self, opnode, as_index, context)
    else:
        yield not_implemented


nodes.Tuple.infer_binary_op = tl_infer_binary_op
nodes.List.infer_binary_op = tl_infer_binary_op
@decorators.yes_if_nothing_inferred
def instance_class_infer_binary_op(self, opnode, operator, other, context, method):
    # Instances and classes delegate binary operations to the dunder
    # *method* already resolved by the caller.
    return method.infer_call_result(self, context)


bases.Instance.infer_binary_op = instance_class_infer_binary_op
nodes.ClassDef.infer_binary_op = instance_class_infer_binary_op
# assignment ##################################################################
"""the assigned_stmts method is responsible to return the assigned statement
(e.g. not inferred) according to the assignment type.
The `assign_path` argument is used to record the lhs path of the original node.
For instance if we want assigned statements for 'c' in 'a, (b,c)', assign_path
will be [1, 1] once arrived to the Assign node.
The `context` argument is the current inference context which should be given
to any intermediary inference necessary.
"""
def _resolve_looppart(parts, assign_path, context):
    """recursive function to resolve multiple assignments on loops

    *parts* are the inferred values of the iterable; *assign_path* is the
    list of indices locating the target inside a nested unpacking pattern.
    """
    assign_path = assign_path[:]
    index = assign_path.pop(0)
    for part in parts:
        if part is util.Uninferable:
            continue
        if not hasattr(part, "itered"):
            continue
        try:
            itered = part.itered()
        except TypeError:
            continue
        for stmt in itered:
            index_node = nodes.Const(index)
            try:
                assigned = stmt.getitem(index_node, context)
            except (
                AttributeError,
                exceptions.AstroidTypeError,
                exceptions.AstroidIndexError,
            ):
                continue
            if not assign_path:
                # we managed to resolve the assignment path,
                # don't infer the last part
                yield assigned
            elif assigned is util.Uninferable:
                break
            else:
                # we are not yet on the last part of the path
                # search on each possibly inferred value
                try:
                    yield from _resolve_looppart(
                        assigned.infer(context), assign_path, context
                    )
                except exceptions.InferenceError:
                    break
@decorators.raise_if_nothing_inferred
def for_assigned_stmts(self, node=None, context=None, assign_path=None):
    """Infer the values bound by a For/Comprehension assignment target."""
    if isinstance(self, nodes.AsyncFor) or getattr(self, "is_async", False):
        # Skip inferring of async code for now
        return dict(node=self, unknown=node, assign_path=assign_path, context=context)
    if assign_path is None:
        # Simple target: each element of the iterable is a candidate value.
        for lst in self.iter.infer(context):
            if isinstance(lst, (nodes.Tuple, nodes.List)):
                yield from lst.elts
    else:
        yield from _resolve_looppart(self.iter.infer(context), assign_path, context)
    return dict(node=self, unknown=node, assign_path=assign_path, context=context)


nodes.For.assigned_stmts = for_assigned_stmts
nodes.Comprehension.assigned_stmts = for_assigned_stmts
def sequence_assigned_stmts(self, node=None, context=None, assign_path=None):
    """Infer values for element *node* of a Tuple/List assignment target."""
    if assign_path is None:
        assign_path = []
    try:
        index = self.elts.index(node)
    except ValueError as exc:
        raise exceptions.InferenceError(
            "Tried to retrieve a node {node!r} which does not exist",
            node=self,
            assign_path=assign_path,
            context=context,
        ) from exc
    # Record this element's position and let the parent resolve the rhs.
    assign_path.insert(0, index)
    return self.parent.assigned_stmts(
        node=self, context=context, assign_path=assign_path
    )


nodes.Tuple.assigned_stmts = sequence_assigned_stmts
nodes.List.assigned_stmts = sequence_assigned_stmts
def assend_assigned_stmts(self, node=None, context=None, assign_path=None):
    # AssignName/AssignAttr targets delegate to their parent statement.
    return self.parent.assigned_stmts(node=self, context=context)


nodes.AssignName.assigned_stmts = assend_assigned_stmts
nodes.AssignAttr.assigned_stmts = assend_assigned_stmts
def _arguments_infer_argname(self, name, context):
    """Infer the possible values of argument *name* without a call site."""
    # arguments information may be missing, in which case we can't do anything
    # more
    if not (self.args or self.vararg or self.kwarg):
        yield util.Uninferable
        return
    # first argument of instance/class method
    if self.args and getattr(self.args[0], "name", None) == name:
        functype = self.parent.type
        cls = self.parent.parent.scope()
        is_metaclass = isinstance(cls, nodes.ClassDef) and cls.type == "metaclass"
        # If this is a metaclass, then the first argument will always
        # be the class, not an instance.
        if is_metaclass or functype == "classmethod":
            yield cls
            return
        if functype == "method":
            yield bases.Instance(cls)
            return
    if context and context.callcontext:
        # A call context is available: infer the argument from the call site.
        call_site = arguments.CallSite(context.callcontext, context.extra_context)
        yield from call_site.infer_argument(self.parent, name, context)
        return
    if name == self.vararg:
        # *args defaults to an empty tuple
        vararg = nodes.const_factory(())
        vararg.parent = self
        yield vararg
        return
    if name == self.kwarg:
        # **kwargs defaults to an empty dict
        kwarg = nodes.const_factory({})
        kwarg.parent = self
        yield kwarg
        return
    # if there is a default value, yield it. And then yield Uninferable to reflect
    # we can't guess given argument value
    try:
        context = contextmod.copy_context(context)
        yield from self.default_value(name).infer(context)
        yield util.Uninferable
    except exceptions.NoDefault:
        yield util.Uninferable
def arguments_assigned_stmts(self, node=None, context=None, assign_path=None):
    """Infer the value bound to function argument *node*."""
    if context.callcontext:
        # reset call context/name
        callcontext = context.callcontext
        context = contextmod.copy_context(context)
        context.callcontext = None
        args = arguments.CallSite(callcontext)
        return args.infer_argument(self.parent, node.name, context)
    return _arguments_infer_argname(self, node.name, context)


nodes.Arguments.assigned_stmts = arguments_assigned_stmts
@decorators.raise_if_nothing_inferred
def assign_assigned_stmts(self, node=None, context=None, assign_path=None):
    """Yield the rhs of an Assign/AugAssign, unpacked along *assign_path*."""
    if not assign_path:
        # Simple target: the whole rhs is the assigned statement.
        yield self.value
        return None
    yield from _resolve_assignment_parts(
        self.value.infer(context), assign_path, context
    )
    return dict(node=self, unknown=node, assign_path=assign_path, context=context)
def assign_annassigned_stmts(self, node=None, context=None, assign_path=None):
    """Like Assign inference, but an annotation with no value yields
    Uninferable instead of None."""
    for inferred in assign_assigned_stmts(self, node, context, assign_path):
        if inferred is None:
            yield util.Uninferable
        else:
            yield inferred


nodes.Assign.assigned_stmts = assign_assigned_stmts
nodes.AnnAssign.assigned_stmts = assign_annassigned_stmts
nodes.AugAssign.assigned_stmts = assign_assigned_stmts
def _resolve_assignment_parts(parts, assign_path, context):
    """recursive function to resolve multiple assignments

    *parts* are the inferred values of the rhs; *assign_path* is the list of
    indices locating the target inside a nested unpacking pattern.
    """
    assign_path = assign_path[:]
    index = assign_path.pop(0)
    for part in parts:
        assigned = None
        if isinstance(part, nodes.Dict):
            # A dictionary in an iterating context
            try:
                assigned, _ = part.items[index]
            except IndexError:
                return
        elif hasattr(part, "getitem"):
            index_node = nodes.Const(index)
            try:
                assigned = part.getitem(index_node, context)
            except (exceptions.AstroidTypeError, exceptions.AstroidIndexError):
                return
        if not assigned:
            return
        if not assign_path:
            # we managed to resolve the assignment path, don't infer the
            # last part
            yield assigned
        elif assigned is util.Uninferable:
            return
        else:
            # we are not yet on the last part of the path search on each
            # possibly inferred value
            try:
                yield from _resolve_assignment_parts(
                    assigned.infer(context), assign_path, context
                )
            except exceptions.InferenceError:
                return
@decorators.raise_if_nothing_inferred
def excepthandler_assigned_stmts(self, node=None, context=None, assign_path=None):
    """Infer the value(s) bound by ``except <type> as <name>``."""
    for assigned in node_classes.unpack_infer(self.type):
        if isinstance(assigned, nodes.ClassDef):
            # Bind an instance of the exception class, mirroring runtime.
            assigned = objects.ExceptionInstance(assigned)
        yield assigned
    return dict(node=self, unknown=node, assign_path=assign_path, context=context)


nodes.ExceptHandler.assigned_stmts = excepthandler_assigned_stmts
def _infer_context_manager(self, mgr, context):
    """Infer the value bound by context manager *mgr* in a ``with`` statement.

    Two manager shapes are understood:

    * a generator produced by a ``contextlib.contextmanager``-decorated
      function -- the value at its first yield point is inferred;
    * an instance whose ``__enter__`` is a bound method -- the result of
      calling ``__enter__`` is inferred.

    :param self: the With node being inferred (used as the call site)
    :param mgr: node of the context-manager expression
    :param context: inference context
    :raise InferenceError: when the manager's shape cannot be understood
    """
    inferred = next(mgr.infer(context=context))
    if isinstance(inferred, bases.Generator):
        # Check if it is decorated with contextlib.contextmanager.
        func = inferred.parent
        if not func.decorators:
            raise exceptions.InferenceError(
                "No decorators found on inferred generator %s", node=func
            )
        for decorator_node in func.decorators.nodes:
            decorator = next(decorator_node.infer(context))
            if isinstance(decorator, nodes.FunctionDef):
                if decorator.qname() == _CONTEXTLIB_MGR:
                    break
        else:
            # It doesn't interest us.
            raise exceptions.InferenceError(node=func)

        # Get the first yield point. If it has multiple yields,
        # then a RuntimeError will be raised.
        possible_yield_points = func.nodes_of_class(nodes.Yield)
        # Ignore yields in nested functions
        yield_point = next(
            (node for node in possible_yield_points if node.scope() == func), None
        )
        if yield_point:
            if not yield_point.value:
                # A bare `yield` enters the block with None bound.
                const = nodes.Const(None)
                const.parent = yield_point
                const.lineno = yield_point.lineno
                yield const
            else:
                yield from yield_point.value.infer(context=context)
    elif isinstance(inferred, bases.Instance):
        try:
            enter = next(inferred.igetattr("__enter__", context=context))
        except (exceptions.InferenceError, exceptions.AttributeInferenceError) as exc:
            # Bug fix: chain the original failure (`from exc`) instead of
            # discarding it, so the cause of the failed __enter__ lookup is
            # preserved in tracebacks.
            raise exceptions.InferenceError(node=inferred) from exc
        if not isinstance(enter, bases.BoundMethod):
            raise exceptions.InferenceError(node=enter)
        if not context.callcontext:
            context.callcontext = contextmod.CallContext(args=[inferred])
        yield from enter.infer_call_result(self, context)
    else:
        raise exceptions.InferenceError(node=mgr)
@decorators.raise_if_nothing_inferred
def with_assigned_stmts(self, node=None, context=None, assign_path=None):
    """Infer names and other nodes from a *with* statement.

    This enables only inference for name binding in a *with* statement.
    For instance, in the following code, inferring `func` will return
    the `ContextManager` class, not whatever ``__enter__`` returns.
    We are doing this intentionally, because we consider that the context
    manager result is whatever __enter__ returns and what it is binded
    using the ``as`` keyword.

        class ContextManager(object):
            def __enter__(self):
                return 42
        with ContextManager() as f:
            pass
        # ContextManager().infer() will return ContextManager
        # f.infer() will return 42.

    Arguments:
        self: nodes.With
        node: The target of the assignment, `as (a, b)` in `with foo as (a, b)`.
        context: Inference context used for caching already inferred objects
        assign_path:
            A list of indices, where each index specifies what item to fetch from
            the inference results.
    """
    try:
        # Find the context-manager expression this target belongs to.
        mgr = next(mgr for (mgr, vars) in self.items if vars == node)
    except StopIteration:
        return None
    if assign_path is None:
        yield from _infer_context_manager(self, mgr, context)
    else:
        for result in _infer_context_manager(self, mgr, context):
            # Walk the assign_path and get the item at the final index.
            obj = result
            for index in assign_path:
                if not hasattr(obj, "elts"):
                    raise exceptions.InferenceError(
                        "Wrong type ({targets!r}) for {node!r} assignment",
                        node=self,
                        targets=node,
                        assign_path=assign_path,
                        context=context,
                    )
                try:
                    obj = obj.elts[index]
                except IndexError as exc:
                    raise exceptions.InferenceError(
                        "Tried to infer a nonexistent target with index {index} "
                        "in {node!r}.",
                        node=self,
                        targets=node,
                        assign_path=assign_path,
                        context=context,
                    ) from exc
                except TypeError as exc:
                    raise exceptions.InferenceError(
                        "Tried to unpack a non-iterable value " "in {node!r}.",
                        node=self,
                        targets=node,
                        assign_path=assign_path,
                        context=context,
                    ) from exc
            yield obj
    return dict(node=self, unknown=node, assign_path=assign_path, context=context)


nodes.With.assigned_stmts = with_assigned_stmts
@decorators.yes_if_nothing_inferred
def starred_assigned_stmts(self, node=None, context=None, assign_path=None):
    """Infer the value(s) a ``*starred`` target receives in an assignment or
    a ``for`` loop.

    Arguments:
        self: nodes.Starred
        node: a node related to the current underlying Node.
        context: Inference context used for caching already inferred objects
        assign_path:
            A list of indices, where each index specifies what item to fetch from
            the inference results.
    """
    # pylint: disable=too-many-locals,too-many-branches,too-many-statements
    def _determine_starred_iteration_lookups(starred, target, lookups):
        # Determine the lookups for the rhs of the iteration
        itered = target.itered()
        for index, element in enumerate(itered):
            if (
                isinstance(element, nodes.Starred)
                and element.value.name == starred.value.name
            ):
                lookups.append((index, len(itered)))
                break
            if isinstance(element, nodes.Tuple):
                # Nested tuple target: record its position and recurse.
                lookups.append((index, len(element.itered())))
                _determine_starred_iteration_lookups(starred, element, lookups)

    stmt = self.statement()
    if not isinstance(stmt, (nodes.Assign, nodes.For)):
        raise exceptions.InferenceError(
            "Statement {stmt!r} enclosing {node!r} " "must be an Assign or For node.",
            node=self,
            stmt=stmt,
            unknown=node,
            context=context,
        )

    if context is None:
        context = contextmod.InferenceContext()

    if isinstance(stmt, nodes.Assign):
        value = stmt.value
        lhs = stmt.targets[0]

        # Python itself only allows one starred target per assignment.
        if sum(1 for _ in lhs.nodes_of_class(nodes.Starred)) > 1:
            raise exceptions.InferenceError(
                "Too many starred arguments in the " " assignment targets {lhs!r}.",
                node=self,
                targets=lhs,
                unknown=node,
                context=context,
            )

        try:
            rhs = next(value.infer(context))
        except exceptions.InferenceError:
            yield util.Uninferable
            return
        if rhs is util.Uninferable or not hasattr(rhs, "itered"):
            yield util.Uninferable
            return

        try:
            elts = collections.deque(rhs.itered())
        except TypeError:
            yield util.Uninferable
            return

        # Unpack iteratively the values from the rhs of the assignment,
        # until the find the starred node. What will remain will
        # be the list of values which the Starred node will represent
        # This is done in two steps, from left to right to remove
        # anything before the starred node and from right to left
        # to remove anything after the starred node.
        for index, left_node in enumerate(lhs.elts):
            if not isinstance(left_node, nodes.Starred):
                if not elts:
                    break
                elts.popleft()
                continue
            lhs_elts = collections.deque(reversed(lhs.elts[index:]))
            for right_node in lhs_elts:
                if not isinstance(right_node, nodes.Starred):
                    if not elts:
                        break
                    elts.pop()
                    continue
                # We're done: whatever is left in ``elts`` belongs to the star.
                packed = nodes.List(
                    ctx=Store, parent=self, lineno=lhs.lineno, col_offset=lhs.col_offset
                )
                packed.postinit(elts=elts)
                yield packed
                break

    if isinstance(stmt, nodes.For):
        try:
            inferred_iterable = next(stmt.iter.infer(context=context))
        except exceptions.InferenceError:
            yield util.Uninferable
            return
        if inferred_iterable is util.Uninferable or not hasattr(
            inferred_iterable, "itered"
        ):
            yield util.Uninferable
            return
        try:
            itered = inferred_iterable.itered()
        except TypeError:
            yield util.Uninferable
            return

        target = stmt.target

        if not isinstance(target, nodes.Tuple):
            raise exceptions.InferenceError(
                "Could not make sense of this, the target must be a tuple",
                context=context,
            )

        lookups = []
        _determine_starred_iteration_lookups(self, target, lookups)
        if not lookups:
            raise exceptions.InferenceError(
                "Could not make sense of this, needs at least a lookup", context=context
            )

        # Make the last lookup a slice, since that what we want for a Starred node
        last_element_index, last_element_length = lookups[-1]
        is_starred_last = last_element_index == (last_element_length - 1)

        lookup_slice = slice(
            last_element_index,
            None if is_starred_last else (last_element_length - last_element_index),
        )
        lookups[-1] = lookup_slice

        for element in itered:

            # We probably want to infer the potential values *for each* element in an
            # iterable, but we can't infer a list of all values, when only a list of
            # step values are expected:
            #
            # for a, *b in [...]:
            #   b
            #
            # *b* should now point to just the elements at that particular iteration step,
            # which astroid can't know about.

            found_element = None
            for lookup in lookups:
                if not hasattr(element, "itered"):
                    break
                if not isinstance(lookup, slice):
                    # Grab just the index, not the whole length
                    lookup = lookup[0]
                try:
                    itered_inner_element = element.itered()
                    element = itered_inner_element[lookup]
                except IndexError:
                    break
                except TypeError:
                    # Most likely the itered() call failed, cannot make sense of this
                    yield util.Uninferable
                    return
            else:
                # All lookups succeeded: this is the star's slice for this step.
                found_element = element

            unpacked = nodes.List(
                ctx=Store, parent=self, lineno=self.lineno, col_offset=self.col_offset
            )
            unpacked.postinit(elts=found_element or [])
            yield unpacked
            return

        yield util.Uninferable


nodes.Starred.assigned_stmts = starred_assigned_stmts

View File

@@ -0,0 +1,449 @@
# Copyright (c) 2006-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
# Copyright (c) 2012 FELD Boris <lothiraldan@gmail.com>
# Copyright (c) 2014-2018 Claudiu Popa <pcmanticore@gmail.com>
# Copyright (c) 2014 Google, Inc.
# Copyright (c) 2015-2016 Ceridwen <ceridwenv@gmail.com>
# Copyright (c) 2015 Florian Bruhin <me@the-compiler.org>
# Copyright (c) 2015 Ovidiu Sabou <ovidiu@sabou.org>
# Copyright (c) 2016 Derek Gustafson <degustaf@gmail.com>
# Copyright (c) 2016 Jakub Wilk <jwilk@jwilk.net>
# Copyright (c) 2018 Nick Drozd <nicholasdrozd@gmail.com>
# Copyright (c) 2018 Bryce Guinta <bryce.paul.guinta@gmail.com>
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
"""this module contains a set of functions to create astroid trees from scratch
(build_* functions) or from living object (object_build_* functions)
"""
import builtins
import inspect
import logging
import os
import sys
import types
from astroid import bases
from astroid import manager
from astroid import node_classes
from astroid import nodes
# Shared AstroidManager instance: provides access to the module cache.
MANAGER = manager.AstroidManager()
# the keys of CONST_CLS eg python builtin types
_CONSTANTS = tuple(node_classes.CONST_CLS)
# Snapshot of the builtins namespace, used when resolving builtin members.
_BUILTINS = vars(builtins)
# Module-level logger for errors hit while introspecting live objects.
_LOG = logging.getLogger(__name__)
def _io_discrepancy(member):
    """Tell whether *member* is a builtin bound to the ``_io`` module while
    advertising ``io`` as its ``__module__``.

    The C-level ``_io`` module names itself ``io``
    (http://bugs.python.org/issue18602), which would otherwise trip the
    imported-member detection.
    """
    bound_to = getattr(member, "__self__", None)
    if not bound_to:
        # Preserve the original chained-``and`` result (None/falsy object).
        return bound_to
    if not inspect.ismodule(bound_to):
        return False
    return bound_to.__name__ == "_io" and member.__module__ == "io"
def _attach_local_node(parent, node, name):
    """Name *node* and register it in *parent*'s locals under *name*.

    ``add_local_node`` reads ``node.name``, so the name is assigned first.
    """
    node.name = name
    parent.add_local_node(node)
def _add_dunder_class(func, member):
    """Attach a ``__class__`` entry to *func*'s instance attributes when the
    runtime class of *member* can be determined.
    """
    runtime_cls = member.__class__
    class_name = getattr(runtime_cls, "__name__", None)
    if not class_name:
        return
    base_names = [ancestor.__name__ for ancestor in runtime_cls.__bases__]
    func.instance_attrs["__class__"] = [
        build_class(class_name, base_names, runtime_cls.__doc__)
    ]
# Sentinel distinguishing "no runtime object supplied" from a literal None.
_marker = object()
def attach_dummy_node(node, name, runtime_object=_marker):
    """Register an EmptyNode placeholder named *name* in *node*'s locals,
    optionally remembering the runtime object it stands for.
    """
    placeholder = nodes.EmptyNode()
    placeholder.object = runtime_object
    _attach_local_node(node, placeholder, name)
def _has_underlying_object(self):
    # True only when a real runtime object — neither None nor the "missing"
    # sentinel — is attached to this EmptyNode.
    wrapped = self.object
    return wrapped is not None and wrapped is not _marker


nodes.EmptyNode.has_underlying_object = _has_underlying_object
def attach_const_node(node, name, value):
    """Register a Const node for *value* in *node*'s locals, unless *name*
    is one of the node's special attributes.
    """
    if name in node.special_attributes:
        return
    _attach_local_node(node, nodes.const_factory(value), name)
def attach_import_node(node, modname, membername):
    """Register an ImportFrom node importing *membername* from *modname*
    into *node*'s locals.
    """
    import_node = nodes.ImportFrom(modname, [(membername, None)])
    _attach_local_node(node, import_node, membername)
def build_module(name, doc=None):
    """Create and initialize an astroid Module node for a non-Python module."""
    module = nodes.Module(name, doc, pure_python=False)
    module.package = False
    module.parent = None
    return module
def build_class(name, basenames=(), doc=None):
    """Create and initialize an astroid ClassDef node with Name nodes for
    each of *basenames*.
    """
    klass = nodes.ClassDef(name, doc)
    for base_name in basenames:
        base_node = nodes.Name()
        base_node.name = base_name
        base_node.parent = klass
        klass.bases.append(base_node)
    return klass
def build_function(name, args=None, defaults=None, doc=None):
    """Create and initialize an astroid FunctionDef node.

    *args* is a list of parameter names; *defaults* a list of default values.
    """
    func = nodes.FunctionDef(name, doc)
    arguments = nodes.Arguments()
    arguments.parent = func
    func.args = arguments
    arguments.args = []
    for arg_name in args or []:
        arg_node = nodes.Name()
        arg_node.name = arg_name
        arg_node.parent = arguments
        arguments.args.append(arg_node)
    arguments.defaults = []
    for default in defaults or []:
        default_node = nodes.const_factory(default)
        default_node.parent = arguments
        arguments.defaults.append(default_node)
    arguments.kwarg = None
    arguments.vararg = None
    if args:
        register_arguments(func)
    return func
def build_from_import(fromname, names):
    """Create an astroid ImportFrom statement importing *names* from *fromname*."""
    aliases = [(name, None) for name in names]
    return nodes.ImportFrom(fromname, aliases)
def register_arguments(func, args=None):
    """add given arguments to local
    args is a list that may contains nested lists
    (i.e. def func(a, (b, c, d)): ...)
    """
    if args is None:
        # Start from the function's own argument list.
        args = func.args.args
    if func.args.vararg:
        func.set_local(func.args.vararg, func.args)
    if func.args.kwarg:
        func.set_local(func.args.kwarg, func.args)
    for arg in args:
        if isinstance(arg, nodes.Name):
            func.set_local(arg.name, arg)
        else:
            # Tuple parameter (Python 2 syntax): recurse into its elements.
            # NOTE(review): the recursion re-runs the vararg/kwarg checks
            # above — confirm set_local tolerates duplicate registration.
            register_arguments(func, arg.elts)
def object_build_class(node, member, localname):
    """Create astroid for a living class object."""
    ancestor_names = [base.__name__ for base in member.__bases__]
    return _base_class_object_build(node, member, ancestor_names, localname=localname)
def object_build_function(node, member, localname):
    """Create astroid for a living function object and register it in *node*.

    :param node: parent astroid node (e.g. a Module) to attach to
    :param member: the runtime function object being mirrored
    :param localname: name under which the function is registered
    """
    # inspect.getargspec() was deprecated since 3.0 and removed in Python
    # 3.11, and it raises on functions using keyword-only arguments;
    # getfullargspec() is the supported replacement.
    spec = inspect.getfullargspec(member)
    args = list(spec.args)
    # Historical behavior: varargs/kwargs are appended as plain argument
    # names rather than modeled separately.
    if spec.varargs is not None:
        args.append(spec.varargs)
    if spec.varkw is not None:
        args.append(spec.varkw)
    func = build_function(
        getattr(member, "__name__", None) or localname,
        args,
        spec.defaults,
        member.__doc__,
    )
    node.add_local_node(func, localname)
def object_build_datadescriptor(node, member, name):
    """create astroid for a living data descriptor object

    :param node: parent astroid node the class proxy is attached to
    :param member: the runtime descriptor being mirrored
    :param name: name under which it is registered in *node*'s locals
    """
    return _base_class_object_build(node, member, [], name)
def object_build_methoddescriptor(node, member, localname):
    """Create astroid for a living method descriptor object and register it
    in *node*'s locals.
    """
    # FIXME get arguments ?
    descriptor_name = getattr(member, "__name__", None) or localname
    func = build_function(descriptor_name, doc=member.__doc__)
    # ``args = None`` signals "no signature information available", which is
    # different from an explicitly empty argument list.
    func.args.args = None
    node.add_local_node(func, localname)
    _add_dunder_class(func, member)
def _base_class_object_build(node, member, basenames, name=None, localname=None):
    """create astroid for a living class object, with a given set of base names
    (e.g. ancestors)
    """
    klass = build_class(
        name or getattr(member, "__name__", None) or localname,
        basenames,
        member.__doc__,
    )
    # New-style iff the live object is an actual type.
    klass._newstyle = isinstance(member, type)
    node.add_local_node(klass, localname)
    try:
        # limit the instantiation trick since it's too dangerous
        # (such as infinite test execution...)
        # this at least resolves common case such as Exception.args,
        # OSError.errno
        if issubclass(member, Exception):
            instdict = member().__dict__
        else:
            raise TypeError
    except:  # pylint: disable=bare-except
        # Instantiation can fail for any reason; deliberately swallow it and
        # skip the instance-attribute harvesting below.
        pass
    else:
        # Mirror instance attributes discovered on the throwaway instance.
        for item_name, obj in instdict.items():
            valnode = nodes.EmptyNode()
            valnode.object = obj
            valnode.parent = klass
            valnode.lineno = 1
            klass.instance_attrs[item_name] = [valnode]
    return klass
def _build_from_function(node, name, member, module):
    """Build the most precise node possible for function *member*.

    Functions whose code lives in a different file than *module* (i.e.
    imported functions) become dummy placeholders instead of full
    FunctionDef nodes.
    """
    # Some implementations (such as Jython) don't provide code objects.
    code = getattr(member, "__code__", None)
    filename = getattr(code, "co_filename", None)
    if filename is None:
        assert isinstance(member, object)
        object_build_methoddescriptor(node, member, name)
    elif filename != getattr(module, "__file__", None):
        attach_dummy_node(node, name, member)
    else:
        object_build_function(node, member, name)
class InspectBuilder:
    """class for building nodes from living object
    this is actually a really minimal representation, including only Module,
    FunctionDef and ClassDef nodes and some others as guessed.
    """

    def __init__(self):
        # Maps already-visited runtime objects to their astroid node.
        self._done = {}
        # The live module currently being introspected.
        self._module = None

    def inspect_build(self, module, modname=None, path=None):
        """build astroid from a living module (i.e. using inspect)
        this is used when there is no python source code available (either
        because it's a built-in module or because the .py is not available)
        """
        self._module = module
        if modname is None:
            modname = module.__name__
        try:
            node = build_module(modname, module.__doc__)
        except AttributeError:
            # in jython, java modules have no __doc__ (see #109562)
            node = build_module(modname)
        node.file = node.path = os.path.abspath(path) if path else path
        node.name = modname
        MANAGER.cache_module(node)
        node.package = hasattr(module, "__path__")
        self._done = {}
        self.object_build(node, module)
        return node

    def object_build(self, node, obj):
        """recursive method which create a partial ast from real objects
        (only function, class, and method are handled)
        """
        if obj in self._done:
            return self._done[obj]
        self._done[obj] = node
        for name in dir(obj):
            try:
                member = getattr(obj, name)
            except AttributeError:
                # damned ExtensionClass.Base, I know you're there !
                attach_dummy_node(node, name)
                continue
            if inspect.ismethod(member):
                # Unwrap bound methods down to the underlying function.
                member = member.__func__
            if inspect.isfunction(member):
                _build_from_function(node, name, member, self._module)
            elif inspect.isbuiltin(member):
                # _io members lie about their module (issue 18602), so they
                # must not be treated as imported members.
                if not _io_discrepancy(member) and self.imported_member(
                    node, member, name
                ):
                    continue
                object_build_methoddescriptor(node, member, name)
            elif inspect.isclass(member):
                if self.imported_member(node, member, name):
                    continue
                if member in self._done:
                    # Already built: just alias it under this name if needed.
                    class_node = self._done[member]
                    if class_node not in node.locals.get(name, ()):
                        node.add_local_node(class_node, name)
                else:
                    class_node = object_build_class(node, member, name)
                    # recursion
                    self.object_build(class_node, member)
                if name == "__class__" and class_node.parent is None:
                    class_node.parent = self._done[self._module]
            elif inspect.ismethoddescriptor(member):
                assert isinstance(member, object)
                object_build_methoddescriptor(node, member, name)
            elif inspect.isdatadescriptor(member):
                assert isinstance(member, object)
                object_build_datadescriptor(node, member, name)
            elif isinstance(member, _CONSTANTS):
                attach_const_node(node, name, member)
            elif inspect.isroutine(member):
                # This should be called for Jython, where some builtin
                # methods aren't caught by isbuiltin branch.
                _build_from_function(node, name, member, self._module)
            else:
                # create an empty node so that the name is actually defined
                attach_dummy_node(node, name, member)
        return None

    def imported_member(self, node, member, name):
        """verify this is not an imported class or handle it"""
        # /!\ some classes like ExtensionClass doesn't have a __module__
        # attribute ! Also, this may trigger an exception on badly built module
        # (see http://www.logilab.org/ticket/57299 for instance)
        try:
            modname = getattr(member, "__module__", None)
        except:  # pylint: disable=bare-except
            _LOG.exception(
                "unexpected error while building " "astroid from living object"
            )
            modname = None
        if modname is None:
            if name in ("__new__", "__subclasshook__"):
                # Python 2.5.1 (r251:54863, Sep  1 2010, 22:03:14)
                # >>> print object.__new__.__module__
                # None
                modname = builtins.__name__
            else:
                attach_dummy_node(node, name, member)
                return True

        # Normalize modules known to misreport their name.
        real_name = {"gtk": "gtk_gtk", "_io": "io"}.get(modname, modname)

        if real_name != self._module.__name__:
            # check if it sounds valid and then add an import node, else use a
            # dummy node
            try:
                getattr(sys.modules[modname], name)
            except (KeyError, AttributeError):
                attach_dummy_node(node, name, member)
            else:
                attach_import_node(node, modname, name)
            return True
        return False
### astroid bootstrapping ######################################################

# Maps builtin constant types (int, str, ...) to their ClassDef proxy;
# populated by _astroid_bootstrapping().
_CONST_PROXY = {}

# TODO : find a nicer way to handle this situation;
def _set_proxied(const):
    # Property getter for Const._proxied: resolve the class proxy lazily
    # from the Const node's runtime value type.
    return _CONST_PROXY[const.value.__class__]
def _astroid_bootstrapping():
    """astroid bootstrapping the builtins module"""
    # this boot strapping is necessary since we need the Const nodes to
    # inspect_build builtins, and then we can proxy Const
    builder = InspectBuilder()
    astroid_builtin = builder.inspect_build(builtins)

    # pylint: disable=redefined-outer-name
    for cls, node_cls in node_classes.CONST_CLS.items():
        if cls is type(None):
            # NoneType is not exposed in the builtins module: fake it.
            proxy = build_class("NoneType")
            proxy.parent = astroid_builtin
        elif cls is type(NotImplemented):
            # Same for NotImplementedType.
            proxy = build_class("NotImplementedType")
            proxy.parent = astroid_builtin
        else:
            proxy = astroid_builtin.getattr(cls.__name__)[0]
        if cls in (dict, list, set, tuple):
            node_cls._proxied = proxy
        else:
            _CONST_PROXY[cls] = proxy

    # Set the builtin module as parent for some builtins.
    nodes.Const._proxied = property(_set_proxied)

    _GeneratorType = nodes.ClassDef(
        types.GeneratorType.__name__, types.GeneratorType.__doc__
    )
    _GeneratorType.parent = astroid_builtin
    bases.Generator._proxied = _GeneratorType
    builder.object_build(bases.Generator._proxied, types.GeneratorType)

    if hasattr(types, "AsyncGeneratorType"):
        # pylint: disable=no-member; AsyncGeneratorType
        _AsyncGeneratorType = nodes.ClassDef(
            types.AsyncGeneratorType.__name__, types.AsyncGeneratorType.__doc__
        )
        _AsyncGeneratorType.parent = astroid_builtin
        bases.AsyncGenerator._proxied = _AsyncGeneratorType
        builder.object_build(bases.AsyncGenerator._proxied, types.AsyncGeneratorType)

    # Ensure the remaining interpreter-level types are represented too.
    builtin_types = (
        types.GetSetDescriptorType,
        types.GeneratorType,
        types.MemberDescriptorType,
        type(None),
        type(NotImplemented),
        types.FunctionType,
        types.MethodType,
        types.BuiltinFunctionType,
        types.ModuleType,
        types.TracebackType,
    )
    for _type in builtin_types:
        if _type.__name__ not in astroid_builtin:
            cls = nodes.ClassDef(_type.__name__, _type.__doc__)
            cls.parent = astroid_builtin
            builder.object_build(cls, _type)
            astroid_builtin[_type.__name__] = cls


_astroid_bootstrapping()

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,71 @@
# Copyright (c) 2013-2014 Google, Inc.
# Copyright (c) 2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
# Copyright (c) 2015-2016, 2018 Claudiu Popa <pcmanticore@gmail.com>
# Copyright (c) 2015-2016 Ceridwen <ceridwenv@gmail.com>
# Copyright (c) 2016 Jakub Wilk <jwilk@jwilk.net>
# Copyright (c) 2018 Anthony Sottile <asottile@umich.edu>
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
"""Utility functions for test code that uses astroid ASTs as input."""
import contextlib
import functools
import sys
import warnings
from astroid import nodes
def require_version(minver=None, maxver=None):
    """Return a decorator comparing the interpreter version to the given
    bounds; the decorated test is skipped when the version is outside
    ``(minver, maxver]``.
    """

    def parse(version_string, default=None):
        version_string = version_string or default
        try:
            return tuple(int(part) for part in version_string.split("."))
        except ValueError as exc:
            raise ValueError(
                "{string} is not a correct version : should be X.Y[.Z].".format(
                    string=version_string
                )
            ) from exc

    def check_require_version(f):
        current = sys.version_info[:3]
        # Missing bounds default to "0" (always below) and "4" (always above).
        if parse(minver, "0") < current <= parse(maxver, "4"):
            return f

        str_version = ".".join(str(v) for v in sys.version_info)

        @functools.wraps(f)
        def new_f(self, *args, **kwargs):
            if minver is not None:
                self.skipTest(
                    "Needs Python > %s. Current version is %s." % (minver, str_version)
                )
            elif maxver is not None:
                self.skipTest(
                    "Needs Python <= %s. Current version is %s." % (maxver, str_version)
                )

        return new_f

    return check_require_version
def get_name_node(start_from, name, index=0):
    """Return the *index*-th Name node called *name* found under *start_from*."""
    matches = [n for n in start_from.nodes_of_class(nodes.Name) if n.name == name]
    return matches[index]
@contextlib.contextmanager
def enable_warning(warning):
    """Context manager forcing *warning* to always be emitted inside the
    ``with`` block, then restoring the default filter.
    """
    warnings.simplefilter("always", warning)
    try:
        yield
    finally:
        # Reset it to default value, so it will take
        # into account the values from the -W flag.
        warnings.simplefilter("default", warning)

View File

@@ -0,0 +1,90 @@
# Copyright (c) 2015-2016, 2018 Claudiu Popa <pcmanticore@gmail.com>
# Copyright (c) 2016 Ceridwen <ceridwenv@gmail.com>
# Copyright (c) 2018 Nick Drozd <nicholasdrozd@gmail.com>
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
import collections
from functools import lru_cache
class TransformVisitor:
    """A visitor for handling transforms.
    The standard approach of using it is to call
    :meth:`~visit` with an *astroid* module and the class
    will take care of the rest, walking the tree and running the
    transforms for each encountered node.
    """

    # Upper bound for the memoized results of _transform.
    TRANSFORM_MAX_CACHE_SIZE = 10000

    def __init__(self):
        # node class -> list of (transform_func, predicate) pairs
        self.transforms = collections.defaultdict(list)

    # NOTE(review): lru_cache on a bound method keys the cache on
    # (self, node) and keeps both alive for the cache's lifetime — confirm
    # the bounded size makes that acceptable here.
    @lru_cache(maxsize=TRANSFORM_MAX_CACHE_SIZE)
    def _transform(self, node):
        """Call matching transforms for the given node if any and return the
        transformed node.
        """
        cls = node.__class__
        if cls not in self.transforms:
            # no transform registered for this class of node
            return node

        transforms = self.transforms[cls]
        for transform_func, predicate in transforms:
            if predicate is None or predicate(node):
                ret = transform_func(node)
                # if the transformation function returns something, it's
                # expected to be a replacement for the node
                if ret is not None:
                    node = ret
                    if ret.__class__ != cls:
                        # Can no longer apply the rest of the transforms.
                        break
        return node

    def _visit(self, node):
        # Transform children first (bottom-up), then the node itself.
        if hasattr(node, "_astroid_fields"):
            for name in node._astroid_fields:
                value = getattr(node, name)
                visited = self._visit_generic(value)
                if visited != value:
                    setattr(node, name, visited)
        return self._transform(node)

    def _visit_generic(self, node):
        # Dispatch on the container shape: lists/tuples are visited
        # element-wise; falsy values and strings pass through untouched.
        if isinstance(node, list):
            return [self._visit_generic(child) for child in node]
        if isinstance(node, tuple):
            return tuple(self._visit_generic(child) for child in node)
        if not node or isinstance(node, str):
            return node
        return self._visit(node)

    def register_transform(self, node_class, transform, predicate=None):
        """Register `transform(node)` function to be applied on the given
        astroid's `node_class` if `predicate` is None or returns true
        when called with the node as argument.
        The transform function may return a value which is then used to
        substitute the original node in the tree.
        """
        self.transforms[node_class].append((transform, predicate))

    def unregister_transform(self, node_class, transform, predicate=None):
        """Unregister the given transform."""
        self.transforms[node_class].remove((transform, predicate))

    def visit(self, module):
        """Walk the given astroid *tree* and transform each encountered node
        Only the nodes which have transforms registered will actually
        be replaced or changed.
        """
        module.body = [self._visit(child) for child in module.body]
        return self._transform(module)

View File

@@ -0,0 +1,164 @@
# Copyright (c) 2015-2018 Claudiu Popa <pcmanticore@gmail.com>
# Copyright (c) 2015-2016 Ceridwen <ceridwenv@gmail.com>
# Copyright (c) 2018 Bryce Guinta <bryce.paul.guinta@gmail.com>
# Copyright (c) 2018 Nick Drozd <nicholasdrozd@gmail.com>
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
import warnings
from itertools import islice
import importlib
import lazy_object_proxy
def lazy_descriptor(obj):
    """Wrap *obj* in a proxy whose descriptor ``__get__`` binds lazily."""
    class DescriptorProxy(lazy_object_proxy.Proxy):
        def __get__(self, instance, owner=None):
            # Delegate to the proxy class's own __get__ for binding.
            return self.__class__.__get__(self, instance)

    return DescriptorProxy(obj)
def lazy_import(module_name):
    """Return a proxy that imports ``astroid.<module_name>`` on first use."""
    def _do_import():
        return importlib.import_module("." + module_name, "astroid")

    return lazy_object_proxy.Proxy(_do_import)
@object.__new__
class Uninferable:
    """Special inference object, which is returned when inference fails."""
    # Decorating the class with object.__new__ replaces the class statement's
    # result with a single instance: ``Uninferable`` is a module singleton.

    def __repr__(self):
        return "Uninferable"

    __str__ = __repr__

    def __getattribute__(self, name):
        # Absorb nearly every attribute access by returning the singleton
        # itself, so chained lookups on a failed inference stay Uninferable.
        if name == "next":
            raise AttributeError("next method should not be called")
        if name.startswith("__") and name.endswith("__"):
            return object.__getattribute__(self, name)
        if name == "accept":
            return object.__getattribute__(self, name)
        return self

    def __call__(self, *args, **kwargs):
        return self

    def __bool__(self):
        # Uninferable is always falsy.
        return False

    __nonzero__ = __bool__

    def accept(self, visitor):
        # Visitor-protocol entry point.
        func = getattr(visitor, "visit_uninferable")
        return func(self)
class BadOperationMessage:
    """Object which describes a TypeError that occurred somewhere in the
    inference chain.

    This is not an exception, but a container object which holds the types and
    the error which occurred.
    """
class BadUnaryOperationMessage(BadOperationMessage):
    """Object which describes operational failures on UnaryOps."""

    def __init__(self, operand, op, error):
        self.operand = operand
        self.op = op
        self.error = error

    @property
    def _object_type_helper(self):
        # Imported lazily to avoid a circular import with astroid.helpers.
        helpers = lazy_import("helpers")
        return helpers.object_type

    def _object_type(self, obj):
        # pylint: disable=not-callable; can't infer lazy_import
        objtype = self._object_type_helper(obj)
        if objtype is Uninferable:
            return None

        return objtype

    def __str__(self):
        if hasattr(self.operand, "name"):
            operand_type = self.operand.name
        else:
            object_type = self._object_type(self.operand)
            if hasattr(object_type, "name"):
                operand_type = object_type.name
            else:
                # Just fallback to as_string
                operand_type = object_type.as_string()

        msg = "bad operand type for unary {}: {}"
        return msg.format(self.op, operand_type)
class BadBinaryOperationMessage(BadOperationMessage):
    """Object which describes type errors for BinOps."""

    def __init__(self, left_type, op, right_type):
        self.op = op
        self.left_type = left_type
        self.right_type = right_type

    def __str__(self):
        template = "unsupported operand type(s) for {}: {!r} and {!r}"
        return template.format(self.op, self.left_type.name, self.right_type.name)
def _instancecheck(cls, other):
    """``isinstance`` hook for proxy aliases: check *other* against the
    wrapped class while warning that the alias itself is deprecated.
    """
    wrapped = cls.__wrapped__
    other_cls = other.__class__
    matches = wrapped is other_cls or issubclass(other_cls, wrapped)
    warnings.warn(
        "%r is deprecated and slated for removal in astroid "
        "2.0, use %r instead" % (cls.__class__.__name__, wrapped.__name__),
        PendingDeprecationWarning,
        stacklevel=2,
    )
    return matches
def proxy_alias(alias_name, node_type):
    """Get a Proxy from the given name to the given node type."""
    namespace = {
        "__class__": object.__dict__["__class__"],
        "__instancecheck__": _instancecheck,
    }
    proxy = type(alias_name, (lazy_object_proxy.Proxy,), namespace)
    return proxy(lambda: node_type)
def limit_inference(iterator, size):
    """Yield at most *size* results from *iterator*.

    Helps with performance issues caused by exponentially exploding
    inference results; when the limit is hit, a trailing ``Uninferable``
    marks the truncation.

    :param iterator: Inference generator to limit
    :type iterator: Iterator(NodeNG)
    :param size: Maximum amount of nodes yielded plus an
        Uninferable at the end if limit reached
    :type size: int
    :yields: A possibly modified generator
    :rtype param: Iterable
    """
    yield from islice(iterator, size)
    if next(iterator, False) is not False:
        # There were more than *size* results: flag the truncation.
        yield Uninferable

View File

@@ -0,0 +1,8 @@
# -*- coding: utf-8 -*-
try:
from ._version import version as __version__
except ImportError:
__version__ = 'unknown'
__all__ = ['easter', 'parser', 'relativedelta', 'rrule', 'tz',
'utils', 'zoneinfo']

View File

@@ -0,0 +1,43 @@
"""
Common code used in multiple modules.
"""
class weekday(object):
    """Weekday constant, optionally qualified by an occurrence count *n*
    (e.g. ``MO(+2)`` reads as "the second Monday").
    """

    __slots__ = ["weekday", "n"]

    def __init__(self, weekday, n=None):
        self.weekday = weekday
        self.n = n

    def __call__(self, n):
        # Calling an instance produces a qualified copy; reuse self when the
        # qualifier is unchanged.
        if n == self.n:
            return self
        return self.__class__(self.weekday, n)

    def __eq__(self, other):
        try:
            same = self.weekday == other.weekday and self.n == other.n
        except AttributeError:
            # Not weekday-like at all.
            return False
        return bool(same)

    def __hash__(self):
        return hash((self.weekday, self.n))

    def __ne__(self, other):
        return not (self == other)

    def __repr__(self):
        name = ("MO", "TU", "WE", "TH", "FR", "SA", "SU")[self.weekday]
        if self.n:
            return "%s(%+d)" % (name, self.n)
        return name

# vim:ts=4:sw=4:et
# vim:ts=4:sw=4:et

View File

@@ -0,0 +1,4 @@
# coding: utf-8
# file generated by setuptools_scm
# don't change, don't track in version control
version = '2.8.0'

View File

@@ -0,0 +1,89 @@
# -*- coding: utf-8 -*-
"""
This module offers a generic easter computing method for any given year, using
Western, Orthodox or Julian algorithms.
"""
import datetime
__all__ = ["easter", "EASTER_JULIAN", "EASTER_ORTHODOX", "EASTER_WESTERN"]
EASTER_JULIAN = 1
EASTER_ORTHODOX = 2
EASTER_WESTERN = 3


def easter(year, method=EASTER_WESTERN):
    """
    Calculate the date of Easter for *year*.

    Ported from the work done by GM Arts, on top of the algorithm by Claus
    Tondering, which was based in part on the algorithm of Ouding (1940), as
    quoted in "Explanatory Supplement to the Astronomical Almanac",
    P. Kenneth Seidelmann, editor.

    Three calculation methods are supported:

    * ``EASTER_JULIAN = 1`` -- original calculation in the Julian calendar,
      valid for dates after 326 AD
    * ``EASTER_ORTHODOX = 2`` -- original method, with the date converted to
      the Gregorian calendar, valid for years 1583 to 4099
    * ``EASTER_WESTERN = 3`` -- revised method, in the Gregorian calendar,
      valid for years 1583 to 4099 as well (the default)

    :raises ValueError: if *method* is not 1, 2 or 3.

    More about the algorithm may be found at
    `GM Arts: Easter Algorithms <http://www.gmarts.org/index.php?go=415>`_ and
    `The Calendar FAQ: Easter <https://www.tondering.dk/claus/cal/easter.php>`_
    """
    if not (1 <= method <= 3):
        raise ValueError("invalid method")

    golden = year % 19       # position in the 19-year Metonic cycle
    extra_days = 0           # Julian -> Gregorian correction (method 2 only)

    if method < 3:
        # Original (Julian) computation, shared by methods 1 and 2.
        pfm = (19 * golden + 15) % 30                # days from Mar 21 to Paschal Full Moon
        pfm_weekday = (year + year // 4 + pfm) % 7   # weekday of the PFM (0=Sunday)
        if method == 2:
            # Extra days to convert the Julian date to a Gregorian one.
            extra_days = 10
            if year > 1600:
                extra_days += year // 100 - 16 - (year // 100 - 16) // 4
    else:
        # Revised method, directly in the Gregorian calendar.
        century = year // 100
        epact = (century - century // 4 - (8 * century + 13) // 25
                 + 19 * golden + 15) % 30
        pfm = epact - (epact // 28) * (
            1 - (epact // 28) * (29 // (epact + 1)) * ((21 - golden) // 11)
        )
        pfm_weekday = (year + year // 4 + pfm + 2 - century + century // 4) % 7

    # Days from March 21 to the Sunday on or before the Paschal Full Moon
    # (-6 to 28 for methods 1 & 3, up to 56 for method 2).
    offset = pfm - pfm_weekday + extra_days
    day = 1 + (offset + 27 + (offset + 6) // 40) % 31
    month = 3 + (offset + 26) // 30
    return datetime.date(int(year), int(month), int(day))

View File

@@ -0,0 +1,60 @@
# -*- coding: utf-8 -*-
from ._parser import parse, parser, parserinfo
from ._parser import DEFAULTPARSER, DEFAULTTZPARSER
from ._parser import UnknownTimezoneWarning
from ._parser import __doc__
from .isoparser import isoparser, isoparse
__all__ = ['parse', 'parser', 'parserinfo',
'isoparse', 'isoparser',
'UnknownTimezoneWarning']
###
# Deprecate portions of the private interface so that downstream code that
# is improperly relying on it is given *some* notice.
def __deprecated_private_func(f):
    """Wrap private function *f* so every call emits a DeprecationWarning."""
    from functools import wraps
    import warnings

    warning_message = (
        '{name} is a private function and may break without warning, '
        'it will be moved and or renamed in future versions.'
    ).format(name=f.__name__)

    @wraps(f)
    def deprecated_func(*args, **kwargs):
        warnings.warn(warning_message, DeprecationWarning)
        return f(*args, **kwargs)

    return deprecated_func
def __deprecate_private_class(c):
    """Subclass private class *c* so instantiating it emits a
    DeprecationWarning, while keeping its name and docstring.
    """
    import warnings

    warning_message = (
        '{name} is a private class and may break without warning, '
        'it will be moved and or renamed in future versions.'
    ).format(name=c.__name__)

    class private_class(c):
        __doc__ = c.__doc__

        def __init__(self, *args, **kwargs):
            warnings.warn(warning_message, DeprecationWarning)
            super(private_class, self).__init__(*args, **kwargs)

    private_class.__name__ = c.__name__
    return private_class
# Keep re-exporting the private parser internals, but wrapped so downstream
# code that improperly relies on them gets a DeprecationWarning.
from ._parser import _timelex, _resultbase
from ._parser import _tzparser, _parsetz

_timelex = __deprecate_private_class(_timelex)
_tzparser = __deprecate_private_class(_tzparser)
_resultbase = __deprecate_private_class(_resultbase)
_parsetz = __deprecated_private_func(_parsetz)

File diff suppressed because it is too large Load Diff

Some files were not shown because too many files have changed in this diff Show More