*.pyc | |||||
__pycache__ | |||||
myvenv | |||||
db.sqlite3 | |||||
.DS_Store |
{ | |||||
"python.pythonPath": "${workspaceFolder}/thesisenv/bin/python" | |||||
} |
from django.contrib import admin | |||||
# Register your models here. |
from django.apps import AppConfig | |||||
class ApplicationConfig(AppConfig):
    """Django app configuration for the ``application`` app."""
    # Dotted label Django uses to register this app (see INSTALLED_APPS).
    name = 'application'
from django.db import models | |||||
from django.utils import timezone | |||||
class Post(models.Model):
    """A blog post authored by a Django auth user.

    A post is created unpublished (``published_date`` empty) and becomes
    public once :meth:`publish` stamps it with the current time.
    """
    # Deleting the user cascades to (deletes) all of their posts.
    author = models.ForeignKey('auth.User', on_delete=models.CASCADE)
    title = models.CharField(max_length=200)
    text = models.TextField()
    # Callable default: evaluated per instance at creation time.
    created_date = models.DateTimeField(
        default=timezone.now)
    # Stays empty (NULL) until publish() is called.
    published_date = models.DateTimeField(
        blank=True, null=True)
    def publish(self):
        """Mark the post as published now and persist the change."""
        self.published_date = timezone.now()
        self.save()
    def __str__(self):
        # Admin and shell display a post by its title.
        return self.title
from django.test import TestCase | |||||
# Create your tests here. |
from django.shortcuts import render | |||||
# Create your views here. |
#!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys


def main():
    """Point Django at the project settings, then run the requested command."""
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "mysite.settings")
    try:
        from django.core.management import execute_from_command_line
    except ImportError as exc:
        raise ImportError(
            "Couldn't import Django. Are you sure it's installed and "
            "available on your PYTHONPATH environment variable? Did you "
            "forget to activate a virtual environment?"
        ) from exc
    execute_from_command_line(sys.argv)


if __name__ == "__main__":
    main()
""" | |||||
Django settings for mysite project. | |||||
Generated by 'django-admin startproject' using Django 2.0.6. | |||||
For more information on this file, see | |||||
https://docs.djangoproject.com/en/2.0/topics/settings/ | |||||
For the full list of settings and their values, see | |||||
https://docs.djangoproject.com/en/2.0/ref/settings/ | |||||
""" | |||||
import os | |||||
import re | |||||
import socket | |||||
# Build paths inside the project like this: os.path.join(BASE_DIR, ...) | |||||
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) | |||||
# Quick-start development settings - unsuitable for production | |||||
# See https://docs.djangoproject.com/en/2.0/howto/deployment/checklist/ | |||||
# SECURITY WARNING: keep the secret key used in production secret! | |||||
SECRET_KEY = 'rh2cynsps7=3fb-bmb!+6g(!a(j5i3dq54ps08y2^py8z*49ct' | |||||
# SECURITY WARNING: don't run with debug turned on in production! | |||||
DEBUG = True | |||||
ALLOWED_HOSTS = [] | |||||
# Development or Production | |||||
r = re.search(r'^172.17', socket.gethostbyname(socket.gethostname())) | |||||
DEVELOPMENT = (r == None) | |||||
# Application definition | |||||
INSTALLED_APPS = [ | |||||
'django.contrib.admin', | |||||
'django.contrib.auth', | |||||
'django.contrib.contenttypes', | |||||
'django.contrib.sessions', | |||||
'django.contrib.messages', | |||||
'django.contrib.staticfiles', | |||||
'application', | |||||
] | |||||
MIDDLEWARE = [ | |||||
'django.middleware.security.SecurityMiddleware', | |||||
'django.contrib.sessions.middleware.SessionMiddleware', | |||||
'django.middleware.common.CommonMiddleware', | |||||
'django.middleware.csrf.CsrfViewMiddleware', | |||||
'django.contrib.auth.middleware.AuthenticationMiddleware', | |||||
'django.contrib.messages.middleware.MessageMiddleware', | |||||
'django.middleware.clickjacking.XFrameOptionsMiddleware', | |||||
] | |||||
ROOT_URLCONF = 'mysite.urls' | |||||
TEMPLATES = [ | |||||
{ | |||||
'BACKEND': 'django.template.backends.django.DjangoTemplates', | |||||
'DIRS': [], | |||||
'APP_DIRS': True, | |||||
'OPTIONS': { | |||||
'context_processors': [ | |||||
'django.template.context_processors.debug', | |||||
'django.template.context_processors.request', | |||||
'django.contrib.auth.context_processors.auth', | |||||
'django.contrib.messages.context_processors.messages', | |||||
], | |||||
}, | |||||
}, | |||||
] | |||||
WSGI_APPLICATION = 'mysite.wsgi.application' | |||||
# Database | |||||
# https://docs.djangoproject.com/en/2.0/ref/settings/#databases | |||||
if DEVELOPMENT: | |||||
DATABASES = { | |||||
'default': { | |||||
'ENGINE': 'django.db.backends.sqlite3', | |||||
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'), | |||||
} | |||||
} | |||||
else: | |||||
DATABASES = { | |||||
'default': { | |||||
'ENGINE': 'django.db.backends.mysql', | |||||
'NAME': 'django-app', | |||||
'USER': 'django-app', | |||||
'PASSWORD': '*******', | |||||
'HOST': 'mysql', | |||||
'PORT': '3306', | |||||
'OPTIONS': { | |||||
'init_command': "SET sql_mode='STRICT_TRANS_TABLES'" | |||||
}, | |||||
} | |||||
} | |||||
# Password validation | |||||
# https://docs.djangoproject.com/en/2.0/ref/settings/#auth-password-validators | |||||
AUTH_PASSWORD_VALIDATORS = [ | |||||
{ | |||||
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', | |||||
}, | |||||
{ | |||||
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', | |||||
}, | |||||
{ | |||||
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', | |||||
}, | |||||
{ | |||||
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', | |||||
}, | |||||
] | |||||
# Internationalization | |||||
# https://docs.djangoproject.com/en/2.0/topics/i18n/ | |||||
LANGUAGE_CODE = 'en-us' | |||||
TIME_ZONE = 'Europe/Berlin' | |||||
USE_I18N = True | |||||
USE_L10N = True | |||||
USE_TZ = True | |||||
# Static files (CSS, JavaScript, Images) | |||||
# https://docs.djangoproject.com/en/2.0/howto/static-files/ | |||||
STATIC_URL = '/static/' | |||||
STATIC_ROOT = os.path.join(BASE_DIR, 'static') | |||||
# Konfiguration des Auth-Systems | |||||
LDAP_DOMAIN = 'ADS1' | |||||
LDAP_SERVER = 'gso1.ads1.fh-nuernberg.de' | |||||
if DEVELOPMENT: | |||||
LOGIN_REDIRECT_URL = '/' | |||||
LOGOUT_REDIRECT_URL = '/' | |||||
LOGIN_URL = "/accounts/login/" | |||||
else: | |||||
LOGIN_REDIRECT_URL = '/app/' | |||||
LOGOUT_REDIRECT_URL = '/app/' | |||||
LOGIN_URL = "/app/accounts/login/" | |||||
if DEVELOPMENT: | |||||
AUTHENTICATION_BACKENDS = [ | |||||
'django.contrib.auth.backends.ModelBackend', | |||||
] | |||||
else: | |||||
AUTHENTICATION_BACKENDS = [ | |||||
'django.contrib.auth.backends.ModelBackend', | |||||
'medinf.ldap_backend.LdapBackend', | |||||
] |
"""mysite URL Configuration | |||||
The `urlpatterns` list routes URLs to views. For more information please see: | |||||
https://docs.djangoproject.com/en/2.0/topics/http/urls/ | |||||
Examples: | |||||
Function views | |||||
1. Add an import: from my_app import views | |||||
2. Add a URL to urlpatterns: path('', views.home, name='home') | |||||
Class-based views | |||||
1. Add an import: from other_app.views import Home | |||||
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home') | |||||
Including another URLconf | |||||
1. Import the include() function: from django.urls import include, path | |||||
2. Add a URL to urlpatterns: path('blog/', include('blog.urls')) | |||||
""" | |||||
from django.contrib import admin | |||||
from django.urls import path | |||||
# Route /admin/ to Django's built-in admin site; project routes are added here.
urlpatterns = [
    path('admin/', admin.site.urls),
]
""" | |||||
WSGI config for mysite project. | |||||
It exposes the WSGI callable as a module-level variable named ``application``. | |||||
For more information on this file, see | |||||
https://docs.djangoproject.com/en/2.0/howto/deployment/wsgi/ | |||||
""" | |||||
import os | |||||
from django.core.wsgi import get_wsgi_application | |||||
# Point Django at the project settings before building the WSGI callable.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "mysite.settings")
# Module-level ``application`` is the entry point WSGI servers look up.
application = get_wsgi_application()
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly
deactivate () {
    # reset old environment variables
    if [ -n "${_OLD_VIRTUAL_PATH:-}" ] ; then
        PATH="${_OLD_VIRTUAL_PATH:-}"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if [ -n "${_OLD_VIRTUAL_PYTHONHOME:-}" ] ; then
        PYTHONHOME="${_OLD_VIRTUAL_PYTHONHOME:-}"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi
    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands. Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then
        hash -r
    fi
    if [ -n "${_OLD_VIRTUAL_PS1:-}" ] ; then
        PS1="${_OLD_VIRTUAL_PS1:-}"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi
    unset VIRTUAL_ENV
    if [ ! "$1" = "nondestructive" ] ; then
        # Self destruct!
        unset -f deactivate
    fi
}
# unset irrelevant variables
# (called with "nondestructive" so a re-source does not remove deactivate itself)
deactivate nondestructive
# Absolute path of this virtualenv; saved so deactivate can undo the changes.
VIRTUAL_ENV="/Users/Esthi/thesis_ek/thesisenv"
export VIRTUAL_ENV
# Prepend the venv's bin directory so its python/pip shadow the system ones.
_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH
# unset PYTHONHOME if set
# this will fail if PYTHONHOME is set to the empty string (which is bad anyway)
# could use `if (set -u; : $PYTHONHOME) ;` in bash
if [ -n "${PYTHONHOME:-}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="${PYTHONHOME:-}"
    unset PYTHONHOME
fi
# Prefix the prompt with the venv name unless the user opted out.
if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT:-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1:-}"
    # "(thesisenv) " was baked in at venv-creation time, so this branch is taken.
    if [ "x(thesisenv) " != x ] ; then
        PS1="(thesisenv) ${PS1:-}"
    else
        if [ "`basename \"$VIRTUAL_ENV\"`" = "__" ] ; then
            # special case for Aspen magic directories
            # see http://www.zetadev.com/software/aspen/
            PS1="[`basename \`dirname \"$VIRTUAL_ENV\"\``] $PS1"
        else
            PS1="(`basename \"$VIRTUAL_ENV\"`)$PS1"
        fi
    fi
    export PS1
fi
# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands. Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then
    hash -r
fi
# This file must be used with "source bin/activate.csh" *from csh*.
# You cannot run it directly.
# Created by Davide Di Blasi <davidedb@gmail.com>.
# Ported to Python 3.3 venv by Andrew Svetlov <andrew.svetlov@gmail.com>

# Restore PATH and prompt, drop the venv variables, and (unless called
# with "nondestructive") remove this alias itself.
alias deactivate 'test $?_OLD_VIRTUAL_PATH != 0 && setenv PATH "$_OLD_VIRTUAL_PATH" && unset _OLD_VIRTUAL_PATH; rehash; test $?_OLD_VIRTUAL_PROMPT != 0 && set prompt="$_OLD_VIRTUAL_PROMPT" && unset _OLD_VIRTUAL_PROMPT; unsetenv VIRTUAL_ENV; test "\!:*" != "nondestructive" && unalias deactivate'

# Unset irrelevant variables.
deactivate nondestructive

setenv VIRTUAL_ENV "/Users/Esthi/thesis_ek/thesisenv"

# Prepend the venv's bin directory so its python/pip shadow the system ones.
set _OLD_VIRTUAL_PATH="$PATH"
setenv PATH "$VIRTUAL_ENV/bin:$PATH"

set _OLD_VIRTUAL_PROMPT="$prompt"

if (! "$?VIRTUAL_ENV_DISABLE_PROMPT") then
    if ("thesisenv" != "") then
        set env_name = "thesisenv"
    else
        # BUGFIX: the original tested `basename "VIRTUAL_ENV"` (the literal
        # string), which can never equal "__"; the variable must be expanded.
        if (`basename "$VIRTUAL_ENV"` == "__") then
            # special case for Aspen magic directories
            # see http://www.zetadev.com/software/aspen/
            set env_name = `basename \`dirname "$VIRTUAL_ENV"\``
        else
            set env_name = `basename "$VIRTUAL_ENV"`
        endif
    endif
    set prompt = "[$env_name] $prompt"
    unset env_name
endif

alias pydoc python -m pydoc

rehash
# This file must be used with ". bin/activate.fish" *from fish* (http://fishshell.org)
# you cannot run it directly
function deactivate -d "Exit virtualenv and return to normal shell environment"
    # reset old environment variables
    if test -n "$_OLD_VIRTUAL_PATH"
        set -gx PATH $_OLD_VIRTUAL_PATH
        set -e _OLD_VIRTUAL_PATH
    end
    if test -n "$_OLD_VIRTUAL_PYTHONHOME"
        set -gx PYTHONHOME $_OLD_VIRTUAL_PYTHONHOME
        set -e _OLD_VIRTUAL_PYTHONHOME
    end
    if test -n "$_OLD_FISH_PROMPT_OVERRIDE"
        # Remove our override and restore the saved prompt function.
        functions -e fish_prompt
        set -e _OLD_FISH_PROMPT_OVERRIDE
        functions -c _old_fish_prompt fish_prompt
        functions -e _old_fish_prompt
    end
    set -e VIRTUAL_ENV
    if test "$argv[1]" != "nondestructive"
        # Self destruct!
        functions -e deactivate
    end
end
# unset irrelevant variables
deactivate nondestructive
set -gx VIRTUAL_ENV "/Users/Esthi/thesis_ek/thesisenv"
# Prepend the venv's bin directory so its python/pip shadow the system ones.
set -gx _OLD_VIRTUAL_PATH $PATH
set -gx PATH "$VIRTUAL_ENV/bin" $PATH
# unset PYTHONHOME if set
if set -q PYTHONHOME
    set -gx _OLD_VIRTUAL_PYTHONHOME $PYTHONHOME
    set -e PYTHONHOME
end
if test -z "$VIRTUAL_ENV_DISABLE_PROMPT"
    # fish uses a function instead of an env var to generate the prompt.
    # save the current fish_prompt function as the function _old_fish_prompt
    functions -c fish_prompt _old_fish_prompt
    # with the original prompt function renamed, we can override with our own.
    function fish_prompt
        # Save the return status of the last command
        set -l old_status $status
        # Prompt override?
        # ("(thesisenv) " was baked in at venv creation, so this branch is taken)
        if test -n "(thesisenv) "
            printf "%s%s" "(thesisenv) " (set_color normal)
        else
            # ...Otherwise, prepend env
            set -l _checkbase (basename "$VIRTUAL_ENV")
            if test $_checkbase = "__"
                # special case for Aspen magic directories
                # see http://www.zetadev.com/software/aspen/
                printf "%s[%s]%s " (set_color -b blue white) (basename (dirname "$VIRTUAL_ENV")) (set_color normal)
            else
                printf "%s(%s)%s" (set_color -b blue white) (basename "$VIRTUAL_ENV") (set_color normal)
            end
        end
        # Restore the return status of the previous command.
        echo "exit $old_status" | .
        _old_fish_prompt
    end
    set -gx _OLD_FISH_PROMPT_OVERRIDE "$VIRTUAL_ENV"
end
#!/Users/Esthi/thesis_ek/thesisenv/bin/python3
# -*- coding: utf-8 -*-
# Installer-generated console-script wrapper for Django's ``django-admin``.
import re
import sys
from django.core.management import execute_from_command_line
if __name__ == '__main__':
    # Strip the Windows "-script.py"/".exe" suffix so argv[0] is the bare command name.
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(execute_from_command_line())
#!/Users/Esthi/thesis_ek/thesisenv/bin/python3
# Legacy ``django-admin.py`` entry point; delegates to Django's command line.
from django.core import management
if __name__ == "__main__":
    management.execute_from_command_line()
#!/Users/Esthi/thesis_ek/thesisenv/bin/python3
# -*- coding: utf-8 -*-
# Installer-generated console-script wrapper for setuptools' ``easy_install``.
import re
import sys
from setuptools.command.easy_install import main
if __name__ == '__main__':
    # Strip the Windows "-script.py"/".exe" suffix so argv[0] is the bare command name.
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
#!/Users/Esthi/thesis_ek/thesisenv/bin/python3
# -*- coding: utf-8 -*-
# Version-suffixed duplicate of the ``easy_install`` console-script wrapper.
import re
import sys
from setuptools.command.easy_install import main
if __name__ == '__main__':
    # Strip the Windows "-script.py"/".exe" suffix so argv[0] is the bare command name.
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
#!/Users/Esthi/thesis_ek/thesisenv/bin/python
# -*- coding: utf-8 -*-
# Installer-generated console-script wrapper for pylint's ``epylint`` (Emacs mode).
import re
import sys
from pylint import run_epylint
if __name__ == '__main__':
    # Strip the Windows "-script.py"/".exe" suffix so argv[0] is the bare command name.
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(run_epylint())
#!/Users/Esthi/thesis_ek/thesisenv/bin/python
# -*- coding: utf-8 -*-
# Installer-generated console-script wrapper for the ``isort`` import sorter.
import re
import sys
from isort.main import main
if __name__ == '__main__':
    # Strip the Windows "-script.py"/".exe" suffix so argv[0] is the bare command name.
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
#!/Users/Esthi/thesis_ek/thesisenv/bin/python3
# -*- coding: utf-8 -*-
# Installer-generated console-script wrapper for ``pip``.
import re
import sys
from pip._internal import main
if __name__ == '__main__':
    # Strip the Windows "-script.py"/".exe" suffix so argv[0] is the bare command name.
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
#!/Users/Esthi/thesis_ek/thesisenv/bin/python3
# -*- coding: utf-8 -*-
# Version-suffixed duplicate (``pip3``) of the pip console-script wrapper.
import re
import sys
from pip._internal import main
if __name__ == '__main__':
    # Strip the Windows "-script.py"/".exe" suffix so argv[0] is the bare command name.
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
#!/Users/Esthi/thesis_ek/thesisenv/bin/python3
# -*- coding: utf-8 -*-
# Minor-version duplicate (e.g. ``pip3.6``) of the pip console-script wrapper.
import re
import sys
from pip._internal import main
if __name__ == '__main__':
    # Strip the Windows "-script.py"/".exe" suffix so argv[0] is the bare command name.
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
#!/Users/Esthi/thesis_ek/thesisenv/bin/python
# -*- coding: utf-8 -*-
# Installer-generated console-script wrapper for the ``pylint`` linter.
import re
import sys
from pylint import run_pylint
if __name__ == '__main__':
    # Strip the Windows "-script.py"/".exe" suffix so argv[0] is the bare command name.
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(run_pylint())
#!/Users/Esthi/thesis_ek/thesisenv/bin/python
# -*- coding: utf-8 -*-
# Installer-generated console-script wrapper for pylint's ``pyreverse`` (UML diagrams).
import re
import sys
from pylint import run_pyreverse
if __name__ == '__main__':
    # Strip the Windows "-script.py"/".exe" suffix so argv[0] is the bare command name.
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(run_pyreverse())
python3 |
/usr/local/bin/python3 |
#!/Users/Esthi/thesis_ek/thesisenv/bin/python
# -*- coding: utf-8 -*-
# Installer-generated console-script wrapper for pylint's ``symilar`` (duplicate-code checker).
import re
import sys
from pylint import run_symilar
if __name__ == '__main__':
    # Strip the Windows "-script.py"/".exe" suffix so argv[0] is the bare command name.
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(run_symilar())
Django is a high-level Python Web framework that encourages rapid development | |||||
and clean, pragmatic design. Thanks for checking it out. | |||||
All documentation is in the "``docs``" directory and online at | |||||
https://docs.djangoproject.com/en/stable/. If you're just getting started, | |||||
here's how we recommend you read the docs: | |||||
* First, read ``docs/intro/install.txt`` for instructions on installing Django. | |||||
* Next, work through the tutorials in order (``docs/intro/tutorial01.txt``, | |||||
``docs/intro/tutorial02.txt``, etc.). | |||||
* If you want to set up an actual deployment server, read | |||||
``docs/howto/deployment/index.txt`` for instructions. | |||||
* You'll probably want to read through the topical guides (in ``docs/topics``) | |||||
next; from there you can jump to the HOWTOs (in ``docs/howto``) for specific | |||||
problems, and check out the reference (``docs/ref``) for gory details. | |||||
* See ``docs/README`` for instructions on building an HTML version of the docs. | |||||
Docs are updated rigorously. If you find any problems in the docs, or think | |||||
they should be clarified in any way, please take 30 seconds to fill out a | |||||
ticket here: https://code.djangoproject.com/newticket | |||||
To get more help: | |||||
* Join the ``#django`` channel on irc.freenode.net. Lots of helpful people hang out | |||||
there. Read the archives at https://botbot.me/freenode/django/. | |||||
* Join the django-users mailing list, or read the archives, at | |||||
https://groups.google.com/group/django-users. | |||||
To contribute to Django: | |||||
* Check out https://docs.djangoproject.com/en/dev/internals/contributing/ for | |||||
information about getting involved. | |||||
To run Django's test suite: | |||||
* Follow the instructions in the "Unit tests" section of | |||||
``docs/internals/contributing/writing-code/unit-tests.txt``, published online at | |||||
https://docs.djangoproject.com/en/dev/internals/contributing/writing-code/unit-tests/#running-the-unit-tests | |||||
pip |
Copyright (c) Django Software Foundation and individual contributors. | |||||
All rights reserved. | |||||
Redistribution and use in source and binary forms, with or without modification, | |||||
are permitted provided that the following conditions are met: | |||||
1. Redistributions of source code must retain the above copyright notice, | |||||
this list of conditions and the following disclaimer. | |||||
2. Redistributions in binary form must reproduce the above copyright | |||||
notice, this list of conditions and the following disclaimer in the | |||||
documentation and/or other materials provided with the distribution. | |||||
3. Neither the name of Django nor the names of its contributors may be used | |||||
to endorse or promote products derived from this software without | |||||
specific prior written permission. | |||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND | |||||
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED | |||||
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE | |||||
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR | |||||
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES | |||||
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; | |||||
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON | |||||
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | |||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS | |||||
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
Metadata-Version: 2.0 | |||||
Name: Django | |||||
Version: 2.0.6 | |||||
Summary: A high-level Python Web framework that encourages rapid development and clean, pragmatic design. | |||||
Home-page: https://www.djangoproject.com/ | |||||
Author: Django Software Foundation | |||||
Author-email: foundation@djangoproject.com | |||||
License: BSD | |||||
Project-URL: Documentation, https://docs.djangoproject.com/ | |||||
Project-URL: Funding, https://www.djangoproject.com/fundraising/ | |||||
Project-URL: Source, https://github.com/django/django | |||||
Project-URL: Tracker, https://code.djangoproject.com/ | |||||
Description-Content-Type: UNKNOWN | |||||
Platform: UNKNOWN | |||||
Classifier: Development Status :: 5 - Production/Stable | |||||
Classifier: Environment :: Web Environment | |||||
Classifier: Framework :: Django | |||||
Classifier: Intended Audience :: Developers | |||||
Classifier: License :: OSI Approved :: BSD License | |||||
Classifier: Operating System :: OS Independent | |||||
Classifier: Programming Language :: Python | |||||
Classifier: Programming Language :: Python :: 3 | |||||
Classifier: Programming Language :: Python :: 3.4 | |||||
Classifier: Programming Language :: Python :: 3.5 | |||||
Classifier: Programming Language :: Python :: 3.6 | |||||
Classifier: Programming Language :: Python :: 3 :: Only | |||||
Classifier: Topic :: Internet :: WWW/HTTP | |||||
Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content | |||||
Classifier: Topic :: Internet :: WWW/HTTP :: WSGI | |||||
Classifier: Topic :: Software Development :: Libraries :: Application Frameworks | |||||
Classifier: Topic :: Software Development :: Libraries :: Python Modules | |||||
Requires-Python: >=3.4 | |||||
Requires-Dist: pytz | |||||
Provides-Extra: argon2 | |||||
Requires-Dist: argon2-cffi (>=16.1.0); extra == 'argon2' | |||||
Provides-Extra: bcrypt | |||||
Requires-Dist: bcrypt; extra == 'bcrypt' | |||||
Django is a high-level Python Web framework that encourages rapid development | |||||
and clean, pragmatic design. Thanks for checking it out. | |||||
All documentation is in the "``docs``" directory and online at | |||||
https://docs.djangoproject.com/en/stable/. If you're just getting started, | |||||
here's how we recommend you read the docs: | |||||
* First, read ``docs/intro/install.txt`` for instructions on installing Django. | |||||
* Next, work through the tutorials in order (``docs/intro/tutorial01.txt``, | |||||
``docs/intro/tutorial02.txt``, etc.). | |||||
* If you want to set up an actual deployment server, read | |||||
``docs/howto/deployment/index.txt`` for instructions. | |||||
* You'll probably want to read through the topical guides (in ``docs/topics``) | |||||
next; from there you can jump to the HOWTOs (in ``docs/howto``) for specific | |||||
problems, and check out the reference (``docs/ref``) for gory details. | |||||
* See ``docs/README`` for instructions on building an HTML version of the docs. | |||||
Docs are updated rigorously. If you find any problems in the docs, or think | |||||
they should be clarified in any way, please take 30 seconds to fill out a | |||||
ticket here: https://code.djangoproject.com/newticket | |||||
To get more help: | |||||
* Join the ``#django`` channel on irc.freenode.net. Lots of helpful people hang out | |||||
there. Read the archives at https://botbot.me/freenode/django/. | |||||
* Join the django-users mailing list, or read the archives, at | |||||
https://groups.google.com/group/django-users. | |||||
To contribute to Django: | |||||
* Check out https://docs.djangoproject.com/en/dev/internals/contributing/ for | |||||
information about getting involved. | |||||
To run Django's test suite: | |||||
* Follow the instructions in the "Unit tests" section of | |||||
``docs/internals/contributing/writing-code/unit-tests.txt``, published online at | |||||
https://docs.djangoproject.com/en/dev/internals/contributing/writing-code/unit-tests/#running-the-unit-tests | |||||
Wheel-Version: 1.0 | |||||
Generator: bdist_wheel (0.30.0) | |||||
Root-Is-Purelib: true | |||||
Tag: py3-none-any | |||||
[console_scripts] | |||||
django-admin = django.core.management:execute_from_command_line | |||||
{"classifiers": ["Development Status :: 5 - Production/Stable", "Environment :: Web Environment", "Framework :: Django", "Intended Audience :: Developers", "License :: OSI Approved :: BSD License", "Operating System :: OS Independent", "Programming Language :: Python", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3 :: Only", "Topic :: Internet :: WWW/HTTP", "Topic :: Internet :: WWW/HTTP :: Dynamic Content", "Topic :: Internet :: WWW/HTTP :: WSGI", "Topic :: Software Development :: Libraries :: Application Frameworks", "Topic :: Software Development :: Libraries :: Python Modules"], "description_content_type": "UNKNOWN", "extensions": {"python.commands": {"wrap_console": {"django-admin": "django.core.management:execute_from_command_line"}}, "python.details": {"contacts": [{"email": "foundation@djangoproject.com", "name": "Django Software Foundation", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst", "license": "LICENSE.txt"}, "project_urls": {"Home": "https://www.djangoproject.com/"}}, "python.exports": {"console_scripts": {"django-admin": "django.core.management:execute_from_command_line"}}}, "extras": ["argon2", "bcrypt"], "generator": "bdist_wheel (0.30.0)", "license": "BSD", "metadata_version": "2.0", "name": "Django", "project_url": "Documentation, https://docs.djangoproject.com/", "requires_python": ">=3.4", "run_requires": [{"extra": "argon2", "requires": ["argon2-cffi (>=16.1.0)"]}, {"extra": "bcrypt", "requires": ["bcrypt"]}, {"requires": ["pytz"]}], "summary": "A high-level Python Web framework that encourages rapid development and clean, pragmatic design.", "version": "2.0.6"} |
django |
Astroid | |||||
======= | |||||
.. image:: https://travis-ci.org/PyCQA/astroid.svg?branch=master | |||||
:target: https://travis-ci.org/PyCQA/astroid | |||||
.. image:: https://ci.appveyor.com/api/projects/status/co3u42kunguhbh6l/branch/master?svg=true | |||||
:alt: AppVeyor Build Status | |||||
:target: https://ci.appveyor.com/project/PCManticore/astroid | |||||
.. image:: https://coveralls.io/repos/github/PyCQA/astroid/badge.svg?branch=master | |||||
:target: https://coveralls.io/github/PyCQA/astroid?branch=master | |||||
.. image:: https://readthedocs.org/projects/astroid/badge/?version=latest | |||||
:target: http://astroid.readthedocs.io/en/latest/?badge=latest | |||||
:alt: Documentation Status | |||||
What's this? | |||||
------------ | |||||
The aim of this module is to provide a common base representation of | |||||
python source code for projects such as pychecker, pyreverse, | |||||
pylint... Well, actually the development of this library is essentially | |||||
governed by pylint's needs. It used to be called logilab-astng. | |||||
It provides a compatible representation which comes from the `_ast` | |||||
module. It rebuilds the tree generated by the builtin _ast module by | |||||
recursively walking down the AST and building an extended ast. The new | |||||
node classes have additional methods and attributes for different | |||||
usages. They include some support for static inference and local name | |||||
scopes. Furthermore, astroid builds partial trees by inspecting living | |||||
objects. | |||||
Installation | |||||
------------ | |||||
Extract the tarball, jump into the created directory and run:: | |||||
python setup.py install | |||||
For installation options, see:: | |||||
python setup.py install --help | |||||
If you have any questions, please mail the code-quality@python.org | |||||
mailing list for support. See | |||||
http://mail.python.org/mailman/listinfo/code-quality for subscription | |||||
information and archives. You may find older archives at | |||||
http://lists.logilab.org/mailman/listinfo/python-projects . | |||||
Python Versions | |||||
--------------- | |||||
astroid is compatible with Python 2.7 as well as 3.4 and later. astroid uses | |||||
the same code base for both Python versions, using six. | |||||
Test | |||||
---- | |||||
Tests are in the 'test' subdirectory. To launch the whole tests suite | |||||
at once, you can use unittest discover:: | |||||
python -m unittest discover -p "unittest*.py" | |||||
pip |
Metadata-Version: 2.0 | |||||
Name: astroid | |||||
Version: 1.6.5 | |||||
Summary: An abstract syntax tree for Python with inference support.
Home-page: https://github.com/PyCQA/astroid | |||||
Author: Python Code Quality Authority | |||||
Author-email: code-quality@python.org | |||||
License: LGPL | |||||
Platform: UNKNOWN | |||||
Classifier: Topic :: Software Development :: Libraries :: Python Modules | |||||
Classifier: Topic :: Software Development :: Quality Assurance | |||||
Classifier: Programming Language :: Python | |||||
Classifier: Programming Language :: Python :: 2 | |||||
Classifier: Programming Language :: Python :: 2.7 | |||||
Classifier: Programming Language :: Python :: 3 | |||||
Classifier: Programming Language :: Python :: 3.4 | |||||
Classifier: Programming Language :: Python :: 3.5 | |||||
Classifier: Programming Language :: Python :: 3.6 | |||||
Classifier: Programming Language :: Python :: Implementation :: CPython | |||||
Classifier: Programming Language :: Python :: Implementation :: PyPy | |||||
Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.* | |||||
Requires-Dist: lazy-object-proxy | |||||
Requires-Dist: six | |||||
Requires-Dist: wrapt | |||||
Requires-Dist: enum34 (>=1.1.3); python_version<"3.4" | |||||
Requires-Dist: singledispatch; python_version<"3.4" | |||||
Requires-Dist: backports.functools-lru-cache; python_version<"3.4" | |||||
Astroid | |||||
======= | |||||
.. image:: https://travis-ci.org/PyCQA/astroid.svg?branch=master | |||||
:target: https://travis-ci.org/PyCQA/astroid | |||||
.. image:: https://ci.appveyor.com/api/projects/status/co3u42kunguhbh6l/branch/master?svg=true | |||||
:alt: AppVeyor Build Status | |||||
:target: https://ci.appveyor.com/project/PCManticore/astroid | |||||
.. image:: https://coveralls.io/repos/github/PyCQA/astroid/badge.svg?branch=master | |||||
:target: https://coveralls.io/github/PyCQA/astroid?branch=master | |||||
.. image:: https://readthedocs.org/projects/astroid/badge/?version=latest | |||||
:target: http://astroid.readthedocs.io/en/latest/?badge=latest | |||||
:alt: Documentation Status | |||||
What's this? | |||||
------------ | |||||
The aim of this module is to provide a common base representation of | |||||
python source code for projects such as pychecker, pyreverse, | |||||
pylint... Well, actually the development of this library is essentially | |||||
governed by pylint's needs. It used to be called logilab-astng. | |||||
It provides a compatible representation which comes from the `_ast` | |||||
module. It rebuilds the tree generated by the builtin _ast module by | |||||
recursively walking down the AST and building an extended ast. The new | |||||
node classes have additional methods and attributes for different | |||||
usages. They include some support for static inference and local name | |||||
scopes. Furthermore, astroid builds partial trees by inspecting living | |||||
objects. | |||||
Installation | |||||
------------ | |||||
Extract the tarball, jump into the created directory and run:: | |||||
python setup.py install | |||||
For installation options, see:: | |||||
python setup.py install --help | |||||
If you have any questions, please mail the code-quality@python.org | |||||
mailing list for support. See | |||||
http://mail.python.org/mailman/listinfo/code-quality for subscription | |||||
information and archives. You may find older archives at | |||||
http://lists.logilab.org/mailman/listinfo/python-projects . | |||||
Python Versions | |||||
--------------- | |||||
astroid is compatible with Python 2.7 as well as 3.4 and later. astroid uses | |||||
the same code base for both Python versions, using six. | |||||
Test | |||||
---- | |||||
Tests are in the 'test' subdirectory. To launch the whole tests suite | |||||
at once, you can use unittest discover:: | |||||
python -m unittest discover -p "unittest*.py" | |||||
astroid/__init__.py,sha256=nJa-PtJVjGLGS3llvtbRxCyUVQevswmcDZe8wF8Wndw,5628 | |||||
astroid/__pkginfo__.py,sha256=X5WIIY-hxNeAfmC4zGhyG2dpsbFYDZiA5jllJI67hqg,2603 | |||||
astroid/arguments.py,sha256=47OYPbIe1llGi-bWxuFDpMrhgnjWMTM_1h4KXbLwNwQ,11305 | |||||
astroid/as_string.py,sha256=eGy2-yU4tGYUiUWkfJhnvx8gGDPFifcOdSMqBXggqT4,19917 | |||||
astroid/astpeephole.py,sha256=N-vMldr_kuvu_gJ4gG6vXdr6CYYL2svI1V3ctRX8i0I,2446 | |||||
astroid/bases.py,sha256=2CbjwGgIHYeHkRICtXwVjHkkbeQ8hGQsEHJezDY3hO8,17127 | |||||
astroid/builder.py,sha256=B8x4wix1pcvDL0A1YcxReZJWUQc0zetHHEeGJfFzDxo,16324 | |||||
astroid/context.py,sha256=gquYYshu40royBm4KdFvQEfzsedZY-jkDLRyaRzUjSY,3327 | |||||
astroid/decorators.py,sha256=wsNx_s7YCDgM90cFhgTkwIpl6ZtBQQj_PcIx4p5wHJE,5174 | |||||
astroid/exceptions.py,sha256=aO6KMw78I0RhjlHgy9zCmLnni0_HsTyRvZcGaaEZG5Y,6925 | |||||
astroid/helpers.py,sha256=f-3GWhh9mNXtG1BDa6pCxPoAURHNOTdMORnA2ZIbsXs,5480 | |||||
astroid/inference.py,sha256=f8uK0QTGBi7mHl2Vof5_a6CQi5wedx_qAZVHVU9HSRQ,30515 | |||||
astroid/manager.py,sha256=buJqHYjz3UJ4OlwtsKUoXlHn8T_x9vh7Qi86CeaXaQU,12332 | |||||
astroid/mixins.py,sha256=jSmxJRasbIU_7dX2VXLMYEbEbiIhTvOnq5UJlXH2GJg,4832 | |||||
astroid/modutils.py,sha256=Gz1Apy25EjWAVJe8i9O6ZxuvFuLTVWVyD9rYk50GbyI,21858 | |||||
astroid/node_classes.py,sha256=8fg0A-oU_bERV8IdgwpKJbpWk6oeGgsrXrHFLC-17QM,129539 | |||||
astroid/nodes.py,sha256=2NctPYDrplpV1Iy0Ze2S5YH13B2bZi7f3tuxX-B_t0w,2400 | |||||
astroid/objects.py,sha256=cMOSw957M3l86OBRmnxnLUq8c91dJS5br5LN7nRmxnU,8062 | |||||
astroid/protocols.py,sha256=NMmtzstAJpDI7KYjmZUqZS4ddbnIfW7pGuZX9bJOrxk,22996 | |||||
astroid/raw_building.py,sha256=LAR3Wt5GgNf-9CaMHFNCaUmwY1tHt71HpTgYiLPBhpM,15753 | |||||
astroid/rebuilder.py,sha256=Cij4R5eehuOwV1LOcNLpjYwuG42bPf5wG0YM4wYYdis,39135 | |||||
astroid/scoped_nodes.py,sha256=fyO6aBhyQmCb-9ARipDq_4AyB2blX5CT9URZdwyuGZE,91147 | |||||
astroid/test_utils.py,sha256=MYum03eaauNc1XCJKoFzzj4Z2FeoOjVGzoISYiaISMk,2046 | |||||
astroid/transforms.py,sha256=rOnQae4Zz21Rk5j6tUiBJWoJ2WmGJ5e0iBO9wkMDEe0,3227 | |||||
astroid/util.py,sha256=Yx1qPfK1bf7CCE3I4X8nysRtXv1XyYsIBKEgoia4kyc,4288 | |||||
astroid/brain/brain_attrs.py,sha256=bLgG9gB5mLhvDnj6OZnjnI21gZJjBy2mcUO1_pbAW_U,1766 | |||||
astroid/brain/brain_builtin_inference.py,sha256=nfJqjeaC3zBL_GtuXy2gAf20eTDWtxQ7N0knxEobJ78,17469 | |||||
astroid/brain/brain_collections.py,sha256=kiAyoVhoOLV2gjiX1DNcDn9IRmCKpysekMSe49uHxrI,2227 | |||||
astroid/brain/brain_curses.py,sha256=P9Ay_ZZqCtZND7Q1t3PLuhLGaaHbo6pBYtcL8Pc8U5E,3289 | |||||
astroid/brain/brain_dateutil.py,sha256=ZflUChhczpnxIWeKrWLYXqZdEe_3ktT-Ay_elaexlWg,714 | |||||
astroid/brain/brain_fstrings.py,sha256=_Y0sap2S1J7nrW9YSl6SaXGUvTL0Y1Q6-BJTTODQH_w,1963 | |||||
astroid/brain/brain_functools.py,sha256=CmaRIplk_6G7xbLyyCIXd2ULUm2pDkFRXUpZH5qzHpY,2323 | |||||
astroid/brain/brain_gi.py,sha256=EjyjllMJ3EQP0NPulpIko0Hclr_4U-txjjYkWFMe_0w,6326 | |||||
astroid/brain/brain_hashlib.py,sha256=c230J0Cdnyav341dDxboxCKuwMBUM2f46k4xjGeuT_A,1056 | |||||
astroid/brain/brain_io.py,sha256=veIF0061yjZyAinn7ILkOt7GTPmEzcIRNUybm2e-hsA,1589 | |||||
astroid/brain/brain_mechanize.py,sha256=afG7eL64YvviwRgGWvJqSTgDck2huAD_w4v4s9t3CWQ,712 | |||||
astroid/brain/brain_multiprocessing.py,sha256=XGVujYTqNpttuhhsj_wV-L5ovJRDusRagZtiBo8Jmns,3128 | |||||
astroid/brain/brain_namedtuple_enum.py,sha256=5ZjyxEcoCBtIHfcHviI1ry-L89qmekAotxwl23UzFHQ,10543 | |||||
astroid/brain/brain_nose.py,sha256=NwqOAv_2-eZu11J_jbHN4k_xn1Y6mrQhnbDYjCERM_Q,2179 | |||||
astroid/brain/brain_numpy.py,sha256=W3hQPZx81EcakGZapqX5Wlr5H-UjPrz8Zq1hooYDhQI,7208 | |||||
astroid/brain/brain_pkg_resources.py,sha256=rtSzNUy775IIWONW-Oa3aqr1QNuzD76ew_iKG1NrbxA,2164 | |||||
astroid/brain/brain_pytest.py,sha256=BtfAfrbf4KA1So5N9XEPeA_eQpbJgi38Bf-OkEmRKBE,2224 | |||||
astroid/brain/brain_qt.py,sha256=FwzpsjGGrhCtbW39X9dRvpO0a-z-cvQQBEeBxv0fQGM,1634 | |||||
astroid/brain/brain_random.py,sha256=MM4lhpkad1aEXlser7W2GWFHtQ76C55LAj4vVUcFk4Y,2678 | |||||
astroid/brain/brain_re.py,sha256=v2Ul259C-xKfYcu1Tw5gHCifzbA8Beur6gRwVGWVVwo,1106 | |||||
astroid/brain/brain_six.py,sha256=U4X7Y1JF5dIJtEMdKwTpu4F8fjn19CfFdC2avMyk9sQ,11251 | |||||
astroid/brain/brain_ssl.py,sha256=dYNvkEAIV5TsByufeUyUfy5UW8ledXCYaUY5WJoujPw,3492 | |||||
astroid/brain/brain_subprocess.py,sha256=PxwnCOwQlOQvt6e94AwZBDUEHLPaLhGYlSzOumU3Rf4,3314 | |||||
astroid/brain/brain_threading.py,sha256=sqyPOgIqoFnNfthxzyRsaSaGNFON4I9BM7pY5jvOPCk,656 | |||||
astroid/brain/brain_typing.py,sha256=6CvVNjeriY4NIUz4jgu3h7odqz1l4emEP6IMu0PipJY,2602 | |||||
astroid/brain/brain_uuid.py,sha256=o6_9_TjFSJBbiMX2YU2_cLyNmqoCLgiWsERUH8dghss,572 | |||||
astroid/interpreter/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 | |||||
astroid/interpreter/dunder_lookup.py,sha256=zCbbcMOuFDmTwM9fKbY9Ov31shU4pvUyD5VAZup73v8,2372 | |||||
astroid/interpreter/objectmodel.py,sha256=UmNRaibOIbeYnIjL_OvWFHeHKk2hKp-87ebkr1T0c68,19988 | |||||
astroid/interpreter/_import/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 | |||||
astroid/interpreter/_import/spec.py,sha256=VevLd8YyjUMD_N-nXxdKNlGUWVXh72Zl5iCdidtJ2f4,10402 | |||||
astroid/interpreter/_import/util.py,sha256=oYFwYfp56jeq4HJmvukgIypg7imJqx5fhm2-fePoGjE,346 | |||||
astroid/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 | |||||
astroid/tests/resources.py,sha256=gOCYQkxYk0FhAMjBtvvHXpW1eQhU7uUWw9v1Veh3Q10,1950 | |||||
astroid/tests/unittest_brain.py,sha256=_82F52B-YwO6PhOah5bMUFrofErqOEBZ92cmN2Q27UY,30911 | |||||
astroid/tests/unittest_brain_numpy.py,sha256=qyeciUNKLj9wsXNeatjgo-5AUYyNAP2MklXczJrnIhs,10948 | |||||
astroid/tests/unittest_builder.py,sha256=FvwPTnFJHIAPC-Y4F4yyEUqkDqgg8SxonhbI6PPvTkU,28928 | |||||
astroid/tests/unittest_helpers.py,sha256=Wx1UP8rKjRhdY4GzObKXzSJnK5tYQCBqDKHuMvFL67Y,9201 | |||||
astroid/tests/unittest_inference.py,sha256=UP4zFkvGdYLhri-QzGtqaxlAQt1dSdRuqZlB54oIGoA,147345 | |||||
astroid/tests/unittest_lookup.py,sha256=N51VbkAH6sKgjGeMlWPAZPmfIyAaZHHsoEOXV-40oKI,12521 | |||||
astroid/tests/unittest_manager.py,sha256=v4WsPD49vB7RLJIGBosA9WPB-Sj5B_9B0a98zmo9yOM,11818 | |||||
astroid/tests/unittest_modutils.py,sha256=8-qDLAKj46YOScA6QXqlwYMZx7ZMgc2RBAFm5JdfxjI,10566 | |||||
astroid/tests/unittest_nodes.py,sha256=sL6vlDMHYmCDxQRq3GRzCoM8wmqmDxQLsXibgiWoW8Q,30227 | |||||
astroid/tests/unittest_object_model.py,sha256=vu2gNDiqsaqhe4zehkcEx91xbBdzEBvB-ZaYTuaUbQs,20504 | |||||
astroid/tests/unittest_objects.py,sha256=jkY9rXp66dL1Ox468sgHhqm7N4_DnXnTXznOnlhsGh8,19235 | |||||
astroid/tests/unittest_peephole.py,sha256=asVwyvtf2Gc3CcIjL_lXHJtrp3OdAW1HkrpMNImv5Dg,3276 | |||||
astroid/tests/unittest_protocols.py,sha256=LY2K4NPDFYoJo7zggjU5YZPtZ0ISdOdmehCA5rDTWn8,7587 | |||||
astroid/tests/unittest_python3.py,sha256=uE0firQjt4XFvjZt0bmHddR4jkjPEPTMnu8PCcXkN-w,13502 | |||||
astroid/tests/unittest_raw_building.py,sha256=EoblIBaKFxwEnUCCs83ERpm22MXDTug5IwXrAneFtC0,3425 | |||||
astroid/tests/unittest_regrtest.py,sha256=IXwRCmsbOTE6RQpAZ4m5ZEhIal33YCUpyMvHENXMplE,11079 | |||||
astroid/tests/unittest_scoped_nodes.py,sha256=3KJ9a7hUwqpB8yCY8IIbT03uSzMc25e4me1snkgvOeU,65436 | |||||
astroid/tests/unittest_transforms.py,sha256=V2c5H18k7sKJU_cbVaiCcwvuvN17XSxrS9hM5SevL1w,8122 | |||||
astroid/tests/unittest_utils.py,sha256=kAApZ25LzvXJbVSW4fMjfp--9BnLDzUPyRf_WapcYls,3866 | |||||
astroid/tests/testdata/python2/data/MyPyPa-0.1.0-py2.5.egg,sha256=hZPSxlunnE-5kTg-yII7E10WG-nsV3DELfKv_xYnBeI,1222 | |||||
astroid/tests/testdata/python2/data/MyPyPa-0.1.0-py2.5.zip,sha256=hZPSxlunnE-5kTg-yII7E10WG-nsV3DELfKv_xYnBeI,1222 | |||||
astroid/tests/testdata/python2/data/__init__.py,sha256=UUgQFilI5GXd3tVo42wvC99xr-OWdiFwd4AToVWMKJg,68 | |||||
astroid/tests/testdata/python2/data/absimport.py,sha256=Dl1v3sCTUuy5NjWsvk6xfXDGqG8dJxYky66oH_16y1U,78 | |||||
astroid/tests/testdata/python2/data/all.py,sha256=9hzh93N-w2OoWmuWFFPe4NfLPtN0CcQUWyJU9G2kki8,106 | |||||
astroid/tests/testdata/python2/data/descriptor_crash.py,sha256=c9dmcN0XSB1WiDINWLjfA0SYY87UzMIpETXHBdcgJ0Y,217 | |||||
astroid/tests/testdata/python2/data/email.py,sha256=bA18WU0kAWGxsPlWJjD6LgXj9NK4RDLjqaN5-EievLw,70 | |||||
astroid/tests/testdata/python2/data/foogle_fax-0.12.5-py2.7-nspkg.pth,sha256=kv-1ZyITNtu-FjiACe5_1ZhKcQnVSKROTwzfHyeIwPA,629 | |||||
astroid/tests/testdata/python2/data/format.py,sha256=Se18tU4br95nCnBg7DIYonoRIXAZi3u2RvyoKwupAXk,421 | |||||
astroid/tests/testdata/python2/data/invalid_encoding.py,sha256=m1_U4_CIbs71SvS2kGVVYDdqYGizzXm9yrXjDWZsO2g,22 | |||||
astroid/tests/testdata/python2/data/joined_strings.py,sha256=5nO3HMS9TAB0jZml1cSBv_b-1m4GTJ_12hD8WYMugBw,72168 | |||||
astroid/tests/testdata/python2/data/module.py,sha256=jaS47E_rOtpGIECwWYYl3ZBzBUZt0fvyCs7tG99SxgU,1804 | |||||
astroid/tests/testdata/python2/data/module2.py,sha256=gNaybt93hMTRFCnOh3gjW0niEDP5nVO8TrpixkHWW5o,1960 | |||||
astroid/tests/testdata/python2/data/noendingnewline.py,sha256=cVu_K7C5NnjnEvmMUxVGeeguyFcHBuNFEO3ueF9X9LI,503 | |||||
astroid/tests/testdata/python2/data/nonregr.py,sha256=0M3kW2tiTQdfuIUU9CNZHDBd1qC6Sxms6b_QZLLGtro,1150 | |||||
astroid/tests/testdata/python2/data/notall.py,sha256=lOzkx4qf1Gm6SmTSXnrCT9C13WDF1UkzvStAnTSkjU0,74 | |||||
astroid/tests/testdata/python2/data/recursion.py,sha256=ZuYyd9K4DyZxXg3L-B1Dl7k9q8OpIfVDwN9kJ52xLDk,52 | |||||
astroid/tests/testdata/python2/data/tmp__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 | |||||
astroid/tests/testdata/python2/data/SSL1/Connection1.py,sha256=rOKmOG_JTouiVawzB5kty493I64pBM9WJDinQn-_Y5c,343 | |||||
astroid/tests/testdata/python2/data/SSL1/__init__.py,sha256=ZlvNUty1pEZy7wHMAM83YwYcdE4ypNHh0W2ijB3mqO8,35 | |||||
astroid/tests/testdata/python2/data/absimp/__init__.py,sha256=CTlFm8G4kKecaa0NpFb4X25NNZ9FNorigSG65GAvvYA,89 | |||||
astroid/tests/testdata/python2/data/absimp/string.py,sha256=liyEyorFV0OJFr-HcECPPRfVmLd0lO4YrGFnZz0_T0M,83 | |||||
astroid/tests/testdata/python2/data/absimp/sidepackage/__init__.py,sha256=9E8Vj_jbaQ7tm80sIxyruqZPjzlVLNbd3qQxbvj39rI,42 | |||||
astroid/tests/testdata/python2/data/appl/__init__.py,sha256=9OoDa7y4MPXKZenN5CA2wmwsG7vUqiO4ImtTjsNs6YY,13 | |||||
astroid/tests/testdata/python2/data/appl/myConnection.py,sha256=Zc3RQ_GjoZ91k3LkaIfV4_1SePpwKUU2cOFAzN5Iq6Y,303 | |||||
astroid/tests/testdata/python2/data/contribute_to_namespace/namespace_pep_420/submodule.py,sha256=5rdq57wop6pxdC9TpMnh8L7gNANMMsNFujBhL9rGhBw,8 | |||||
astroid/tests/testdata/python2/data/find_test/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 | |||||
astroid/tests/testdata/python2/data/find_test/module.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 | |||||
astroid/tests/testdata/python2/data/find_test/module2.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 | |||||
astroid/tests/testdata/python2/data/find_test/noendingnewline.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 | |||||
astroid/tests/testdata/python2/data/find_test/nonregr.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 | |||||
astroid/tests/testdata/python2/data/foogle/fax/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 | |||||
astroid/tests/testdata/python2/data/foogle/fax/a.py,sha256=j_Q23vFFEoVZmhsa1wgASTuNyv3ikS4aODRWMwVOTCY,5 | |||||
astroid/tests/testdata/python2/data/lmfp/__init__.py,sha256=JmYecBTypWpPdKNy76pDWYliy-gWg3PPOOMcUdMAvzo,51 | |||||
astroid/tests/testdata/python2/data/lmfp/foo.py,sha256=ePynel7303gG6wq8wb6kRmaV75Q7mR9A_X7SZVP0YWM,170 | |||||
astroid/tests/testdata/python2/data/module1abs/__init__.py,sha256=RTMiBz8OgkD3dy2Sehwv6am35Xzlf6X8SQJcfo-m2sA,113 | |||||
astroid/tests/testdata/python2/data/module1abs/core.py,sha256=xRdXeFHEieRauuJZElbEBASgXG0ZzU1a5_0isAhM7Gw,11 | |||||
astroid/tests/testdata/python2/data/namespace_pep_420/module.py,sha256=1NhbQAf4THdW94o7nrSzPkEJFy0XvISnlF_-OTTiwRk,43 | |||||
astroid/tests/testdata/python2/data/notamodule/file.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 | |||||
astroid/tests/testdata/python2/data/package/__init__.py,sha256=U50oVo2CraRtPYheia534Z0iPVQMDT2C6Qwj2ZWAmO0,57 | |||||
astroid/tests/testdata/python2/data/package/absimport.py,sha256=cTkLoSR4oIJtQ8yVLAgdopJXro0qFsehlMGYLCfiPvo,172 | |||||
astroid/tests/testdata/python2/data/package/hello.py,sha256=sTddKXRfLNAysty0r625S8QysSDOmtF8oXDvbl3Cywk,20 | |||||
astroid/tests/testdata/python2/data/package/import_package_subpackage_module.py,sha256=U6BsMb_ygFb8RqImsTrWEGJihU7nJgELPH6AvWM-zaU,2242 | |||||
astroid/tests/testdata/python2/data/package/subpackage/__init__.py,sha256=XtKilaAqziUI-ImaSw4V6Aic40domt4v_If7lAZYhSE,25 | |||||
astroid/tests/testdata/python2/data/package/subpackage/module.py,sha256=WAtPIk13pW6tYI6rSgNHcCgTu0EXhX6i5CugdHPH8N0,32 | |||||
astroid/tests/testdata/python2/data/path_pkg_resources_1/package/__init__.py,sha256=1kQs82-WyvhNmr6z4VEklhVXMJulr_OO2RXRAqT40Z4,55 | |||||
astroid/tests/testdata/python2/data/path_pkg_resources_1/package/foo.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 | |||||
astroid/tests/testdata/python2/data/path_pkg_resources_2/package/__init__.py,sha256=1kQs82-WyvhNmr6z4VEklhVXMJulr_OO2RXRAqT40Z4,55 | |||||
astroid/tests/testdata/python2/data/path_pkg_resources_2/package/bar.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 | |||||
astroid/tests/testdata/python2/data/path_pkg_resources_3/package/__init__.py,sha256=1kQs82-WyvhNmr6z4VEklhVXMJulr_OO2RXRAqT40Z4,55 | |||||
astroid/tests/testdata/python2/data/path_pkg_resources_3/package/baz.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 | |||||
astroid/tests/testdata/python2/data/path_pkgutil_1/package/__init__.py,sha256=doH9fwYoYwbugpsdT3QT3lEja4mw1-USY24J1TSboCs,74 | |||||
astroid/tests/testdata/python2/data/path_pkgutil_1/package/foo.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 | |||||
astroid/tests/testdata/python2/data/path_pkgutil_2/package/__init__.py,sha256=doH9fwYoYwbugpsdT3QT3lEja4mw1-USY24J1TSboCs,74 | |||||
astroid/tests/testdata/python2/data/path_pkgutil_2/package/bar.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 | |||||
astroid/tests/testdata/python2/data/path_pkgutil_3/package/__init__.py,sha256=doH9fwYoYwbugpsdT3QT3lEja4mw1-USY24J1TSboCs,74 | |||||
astroid/tests/testdata/python2/data/path_pkgutil_3/package/baz.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 | |||||
astroid/tests/testdata/python2/data/unicode_package/__init__.py,sha256=Qq8Rv1-47xfh9UMnDqtU6MYCoZbK2DF1zxAvxlkhCNU,17 | |||||
astroid/tests/testdata/python2/data/unicode_package/core/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 | |||||
astroid/tests/testdata/python3/data/MyPyPa-0.1.0-py2.5.egg,sha256=hZPSxlunnE-5kTg-yII7E10WG-nsV3DELfKv_xYnBeI,1222 | |||||
astroid/tests/testdata/python3/data/MyPyPa-0.1.0-py2.5.zip,sha256=hZPSxlunnE-5kTg-yII7E10WG-nsV3DELfKv_xYnBeI,1222 | |||||
astroid/tests/testdata/python3/data/__init__.py,sha256=UUgQFilI5GXd3tVo42wvC99xr-OWdiFwd4AToVWMKJg,68 | |||||
astroid/tests/testdata/python3/data/absimport.py,sha256=-CKa6uxNJwTox5JoeWFe_hnxPcp1BT_vgPrXjsk4c-w,40 | |||||
astroid/tests/testdata/python3/data/all.py,sha256=96OFTf0wN5cad6Zt4WvJ6OxHTUncQyPyghPMRxGV9B8,107 | |||||
astroid/tests/testdata/python3/data/descriptor_crash.py,sha256=c9dmcN0XSB1WiDINWLjfA0SYY87UzMIpETXHBdcgJ0Y,217 | |||||
astroid/tests/testdata/python3/data/email.py,sha256=bA18WU0kAWGxsPlWJjD6LgXj9NK4RDLjqaN5-EievLw,70 | |||||
astroid/tests/testdata/python3/data/foogle_fax-0.12.5-py2.7-nspkg.pth,sha256=kv-1ZyITNtu-FjiACe5_1ZhKcQnVSKROTwzfHyeIwPA,629 | |||||
astroid/tests/testdata/python3/data/format.py,sha256=Se18tU4br95nCnBg7DIYonoRIXAZi3u2RvyoKwupAXk,421 | |||||
astroid/tests/testdata/python3/data/invalid_encoding.py,sha256=m1_U4_CIbs71SvS2kGVVYDdqYGizzXm9yrXjDWZsO2g,22 | |||||
astroid/tests/testdata/python3/data/joined_strings.py,sha256=5nO3HMS9TAB0jZml1cSBv_b-1m4GTJ_12hD8WYMugBw,72168 | |||||
astroid/tests/testdata/python3/data/module.py,sha256=gmtEr1dRdtYP5oyUwvl-Bmk498D3q9fpPSMcEGeoPPc,1799 | |||||
astroid/tests/testdata/python3/data/module2.py,sha256=VOuXghmJXG0kxFfMufQV55G8vcd-f9qVorvd2CTRjLo,2016 | |||||
astroid/tests/testdata/python3/data/noendingnewline.py,sha256=PaqOTMH1fn703GRn8_lZox2ByExWci0LiXfEKZjKgGU,506 | |||||
astroid/tests/testdata/python3/data/nonregr.py,sha256=oCCrE6UTcDUmFcLnde2N34Fxv1PQ8Ck3WqE0or1Jqqk,1101 | |||||
astroid/tests/testdata/python3/data/notall.py,sha256=DftFceOP1cQfe2imrwTWcsbuxugJx9mDFFM57cCPUnA,75 | |||||
astroid/tests/testdata/python3/data/recursion.py,sha256=ZuYyd9K4DyZxXg3L-B1Dl7k9q8OpIfVDwN9kJ52xLDk,52 | |||||
astroid/tests/testdata/python3/data/tmp__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 | |||||
astroid/tests/testdata/python3/data/SSL1/Connection1.py,sha256=bvnJLQ3Ey3FzNDCR2mEeU8G44-c4iw9vOHBKOXHuGJM,306 | |||||
astroid/tests/testdata/python3/data/SSL1/__init__.py,sha256=3Flw6M01FPCVMhiVC_yk-NQbOaQW6K4H_H9wqx6c1do,36 | |||||
astroid/tests/testdata/python3/data/absimp/__init__.py,sha256=CTlFm8G4kKecaa0NpFb4X25NNZ9FNorigSG65GAvvYA,89 | |||||
astroid/tests/testdata/python3/data/absimp/string.py,sha256=liyEyorFV0OJFr-HcECPPRfVmLd0lO4YrGFnZz0_T0M,83 | |||||
astroid/tests/testdata/python3/data/absimp/sidepackage/__init__.py,sha256=9E8Vj_jbaQ7tm80sIxyruqZPjzlVLNbd3qQxbvj39rI,42 | |||||
astroid/tests/testdata/python3/data/appl/__init__.py,sha256=9OoDa7y4MPXKZenN5CA2wmwsG7vUqiO4ImtTjsNs6YY,13 | |||||
astroid/tests/testdata/python3/data/appl/myConnection.py,sha256=mWi72c6yYuIXoyRXo-uKFwY7NSj-lok_NRlNc9N2hfM,261 | |||||
astroid/tests/testdata/python3/data/contribute_to_namespace/namespace_pep_420/submodule.py,sha256=5rdq57wop6pxdC9TpMnh8L7gNANMMsNFujBhL9rGhBw,8 | |||||
astroid/tests/testdata/python3/data/find_test/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 | |||||
astroid/tests/testdata/python3/data/find_test/module.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 | |||||
astroid/tests/testdata/python3/data/find_test/module2.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 | |||||
astroid/tests/testdata/python3/data/find_test/noendingnewline.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 | |||||
astroid/tests/testdata/python3/data/find_test/nonregr.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 | |||||
astroid/tests/testdata/python3/data/foogle/fax/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 | |||||
astroid/tests/testdata/python3/data/foogle/fax/a.py,sha256=j_Q23vFFEoVZmhsa1wgASTuNyv3ikS4aODRWMwVOTCY,5 | |||||
astroid/tests/testdata/python3/data/lmfp/__init__.py,sha256=JmYecBTypWpPdKNy76pDWYliy-gWg3PPOOMcUdMAvzo,51 | |||||
astroid/tests/testdata/python3/data/lmfp/foo.py,sha256=ePynel7303gG6wq8wb6kRmaV75Q7mR9A_X7SZVP0YWM,170 | |||||
astroid/tests/testdata/python3/data/module1abs/__init__.py,sha256=qeBmkE-gZ07oAuq_fgcaMP8217AdA-FGOR73iB5lltg,59 | |||||
astroid/tests/testdata/python3/data/module1abs/core.py,sha256=xRdXeFHEieRauuJZElbEBASgXG0ZzU1a5_0isAhM7Gw,11 | |||||
astroid/tests/testdata/python3/data/namespace_pep_420/module.py,sha256=1NhbQAf4THdW94o7nrSzPkEJFy0XvISnlF_-OTTiwRk,43 | |||||
astroid/tests/testdata/python3/data/notamodule/file.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 | |||||
astroid/tests/testdata/python3/data/package/__init__.py,sha256=U50oVo2CraRtPYheia534Z0iPVQMDT2C6Qwj2ZWAmO0,57 | |||||
astroid/tests/testdata/python3/data/package/absimport.py,sha256=cTkLoSR4oIJtQ8yVLAgdopJXro0qFsehlMGYLCfiPvo,172 | |||||
astroid/tests/testdata/python3/data/package/hello.py,sha256=sTddKXRfLNAysty0r625S8QysSDOmtF8oXDvbl3Cywk,20 | |||||
astroid/tests/testdata/python3/data/package/import_package_subpackage_module.py,sha256=U6BsMb_ygFb8RqImsTrWEGJihU7nJgELPH6AvWM-zaU,2242 | |||||
astroid/tests/testdata/python3/data/package/subpackage/__init__.py,sha256=XtKilaAqziUI-ImaSw4V6Aic40domt4v_If7lAZYhSE,25 | |||||
astroid/tests/testdata/python3/data/package/subpackage/module.py,sha256=WAtPIk13pW6tYI6rSgNHcCgTu0EXhX6i5CugdHPH8N0,32 | |||||
astroid/tests/testdata/python3/data/path_pkg_resources_1/package/__init__.py,sha256=1kQs82-WyvhNmr6z4VEklhVXMJulr_OO2RXRAqT40Z4,55 | |||||
astroid/tests/testdata/python3/data/path_pkg_resources_1/package/foo.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 | |||||
astroid/tests/testdata/python3/data/path_pkg_resources_2/package/__init__.py,sha256=1kQs82-WyvhNmr6z4VEklhVXMJulr_OO2RXRAqT40Z4,55 | |||||
astroid/tests/testdata/python3/data/path_pkg_resources_2/package/bar.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 | |||||
astroid/tests/testdata/python3/data/path_pkg_resources_3/package/__init__.py,sha256=1kQs82-WyvhNmr6z4VEklhVXMJulr_OO2RXRAqT40Z4,55 | |||||
astroid/tests/testdata/python3/data/path_pkg_resources_3/package/baz.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 | |||||
astroid/tests/testdata/python3/data/path_pkgutil_1/package/__init__.py,sha256=doH9fwYoYwbugpsdT3QT3lEja4mw1-USY24J1TSboCs,74 | |||||
astroid/tests/testdata/python3/data/path_pkgutil_1/package/foo.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 | |||||
astroid/tests/testdata/python3/data/path_pkgutil_2/package/__init__.py,sha256=doH9fwYoYwbugpsdT3QT3lEja4mw1-USY24J1TSboCs,74 | |||||
astroid/tests/testdata/python3/data/path_pkgutil_2/package/bar.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 | |||||
astroid/tests/testdata/python3/data/path_pkgutil_3/package/__init__.py,sha256=doH9fwYoYwbugpsdT3QT3lEja4mw1-USY24J1TSboCs,74 | |||||
astroid/tests/testdata/python3/data/path_pkgutil_3/package/baz.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 | |||||
astroid/tests/testdata/python3/data/unicode_package/__init__.py,sha256=Qq8Rv1-47xfh9UMnDqtU6MYCoZbK2DF1zxAvxlkhCNU,17 | |||||
astroid/tests/testdata/python3/data/unicode_package/core/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 | |||||
astroid-1.6.5.dist-info/DESCRIPTION.rst,sha256=XNH2sQxCrEZ-yMkktkQxOBoIoa2-pk1oWK0TDIDLQxo,2186 | |||||
astroid-1.6.5.dist-info/METADATA,sha256=fXGdzK8FVIH9Lu-EpzGGdxAbm-TNhEFLoIgc6uKPq44,3378 | |||||
astroid-1.6.5.dist-info/RECORD,, | |||||
astroid-1.6.5.dist-info/WHEEL,sha256=kdsN-5OJAZIiHN-iO4Rhl82KyS0bDWf4uBwMbkNafr8,110 | |||||
astroid-1.6.5.dist-info/metadata.json,sha256=Mtualpf3xthECcJ3bCjb3K8RHAWXzHK3xR0i_czO5Wc,1268 | |||||
astroid-1.6.5.dist-info/top_level.txt,sha256=HsdW4O2x7ZXRj6k-agi3RaQybGLobI3VSE-jt4vQUXM,8 | |||||
astroid-1.6.5.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 | |||||
astroid/interpreter/_import/__pycache__/util.cpython-36.pyc,, | |||||
astroid/interpreter/_import/__pycache__/__init__.cpython-36.pyc,, | |||||
astroid/interpreter/_import/__pycache__/spec.cpython-36.pyc,, | |||||
astroid/interpreter/__pycache__/objectmodel.cpython-36.pyc,, | |||||
astroid/interpreter/__pycache__/dunder_lookup.cpython-36.pyc,, | |||||
astroid/interpreter/__pycache__/__init__.cpython-36.pyc,, | |||||
astroid/tests/testdata/python3/data/absimp/__pycache__/__init__.cpython-36.pyc,, | |||||
astroid/tests/testdata/python3/data/absimp/__pycache__/string.cpython-36.pyc,, | |||||
astroid/tests/testdata/python3/data/absimp/sidepackage/__pycache__/__init__.cpython-36.pyc,, | |||||
astroid/tests/testdata/python3/data/path_pkg_resources_1/package/__pycache__/foo.cpython-36.pyc,, | |||||
astroid/tests/testdata/python3/data/path_pkg_resources_1/package/__pycache__/__init__.cpython-36.pyc,, | |||||
astroid/tests/testdata/python3/data/path_pkgutil_3/package/__pycache__/baz.cpython-36.pyc,, | |||||
astroid/tests/testdata/python3/data/path_pkgutil_3/package/__pycache__/__init__.cpython-36.pyc,, | |||||
astroid/tests/testdata/python3/data/path_pkgutil_2/package/__pycache__/bar.cpython-36.pyc,, | |||||
astroid/tests/testdata/python3/data/path_pkgutil_2/package/__pycache__/__init__.cpython-36.pyc,, | |||||
astroid/tests/testdata/python3/data/lmfp/__pycache__/foo.cpython-36.pyc,, | |||||
astroid/tests/testdata/python3/data/lmfp/__pycache__/__init__.cpython-36.pyc,, | |||||
astroid/tests/testdata/python3/data/appl/__pycache__/myConnection.cpython-36.pyc,, | |||||
astroid/tests/testdata/python3/data/appl/__pycache__/__init__.cpython-36.pyc,, | |||||
astroid/tests/testdata/python3/data/foogle/fax/__pycache__/a.cpython-36.pyc,, | |||||
astroid/tests/testdata/python3/data/foogle/fax/__pycache__/__init__.cpython-36.pyc,, | |||||
astroid/tests/testdata/python3/data/__pycache__/tmp__init__.cpython-36.pyc,, | |||||
astroid/tests/testdata/python3/data/__pycache__/noendingnewline.cpython-36.pyc,, | |||||
astroid/tests/testdata/python3/data/__pycache__/joined_strings.cpython-36.pyc,, | |||||
astroid/tests/testdata/python3/data/__pycache__/notall.cpython-36.pyc,, | |||||
astroid/tests/testdata/python3/data/__pycache__/recursion.cpython-36.pyc,, | |||||
astroid/tests/testdata/python3/data/__pycache__/descriptor_crash.cpython-36.pyc,, | |||||
astroid/tests/testdata/python3/data/__pycache__/module2.cpython-36.pyc,, | |||||
astroid/tests/testdata/python3/data/__pycache__/absimport.cpython-36.pyc,, | |||||
astroid/tests/testdata/python3/data/__pycache__/all.cpython-36.pyc,, | |||||
astroid/tests/testdata/python3/data/__pycache__/module.cpython-36.pyc,, | |||||
astroid/tests/testdata/python3/data/__pycache__/nonregr.cpython-36.pyc,, | |||||
astroid/tests/testdata/python3/data/__pycache__/__init__.cpython-36.pyc,, | |||||
astroid/tests/testdata/python3/data/__pycache__/email.cpython-36.pyc,, | |||||
astroid/tests/testdata/python3/data/__pycache__/format.cpython-36.pyc,, | |||||
astroid/tests/testdata/python3/data/unicode_package/core/__pycache__/__init__.cpython-36.pyc,, | |||||
astroid/tests/testdata/python3/data/unicode_package/__pycache__/__init__.cpython-36.pyc,, | |||||
astroid/tests/testdata/python3/data/module1abs/__pycache__/core.cpython-36.pyc,, | |||||
astroid/tests/testdata/python3/data/module1abs/__pycache__/__init__.cpython-36.pyc,, | |||||
astroid/tests/testdata/python3/data/notamodule/__pycache__/file.cpython-36.pyc,, | |||||
astroid/tests/testdata/python3/data/SSL1/__pycache__/Connection1.cpython-36.pyc,, | |||||
astroid/tests/testdata/python3/data/SSL1/__pycache__/__init__.cpython-36.pyc,, | |||||
astroid/tests/testdata/python3/data/namespace_pep_420/__pycache__/module.cpython-36.pyc,, | |||||
astroid/tests/testdata/python3/data/path_pkg_resources_2/package/__pycache__/bar.cpython-36.pyc,, | |||||
astroid/tests/testdata/python3/data/path_pkg_resources_2/package/__pycache__/__init__.cpython-36.pyc,, | |||||
astroid/tests/testdata/python3/data/find_test/__pycache__/noendingnewline.cpython-36.pyc,, | |||||
astroid/tests/testdata/python3/data/find_test/__pycache__/module2.cpython-36.pyc,, | |||||
astroid/tests/testdata/python3/data/find_test/__pycache__/module.cpython-36.pyc,, | |||||
astroid/tests/testdata/python3/data/find_test/__pycache__/nonregr.cpython-36.pyc,, | |||||
astroid/tests/testdata/python3/data/find_test/__pycache__/__init__.cpython-36.pyc,, | |||||
astroid/tests/testdata/python3/data/path_pkgutil_1/package/__pycache__/foo.cpython-36.pyc,, | |||||
astroid/tests/testdata/python3/data/path_pkgutil_1/package/__pycache__/__init__.cpython-36.pyc,, | |||||
astroid/tests/testdata/python3/data/path_pkg_resources_3/package/__pycache__/baz.cpython-36.pyc,, | |||||
astroid/tests/testdata/python3/data/path_pkg_resources_3/package/__pycache__/__init__.cpython-36.pyc,, | |||||
astroid/tests/testdata/python3/data/contribute_to_namespace/namespace_pep_420/__pycache__/submodule.cpython-36.pyc,, | |||||
astroid/tests/testdata/python3/data/package/__pycache__/hello.cpython-36.pyc,, | |||||
astroid/tests/testdata/python3/data/package/__pycache__/import_package_subpackage_module.cpython-36.pyc,, | |||||
astroid/tests/testdata/python3/data/package/__pycache__/absimport.cpython-36.pyc,, | |||||
astroid/tests/testdata/python3/data/package/__pycache__/__init__.cpython-36.pyc,, | |||||
astroid/tests/testdata/python3/data/package/subpackage/__pycache__/module.cpython-36.pyc,, | |||||
astroid/tests/testdata/python3/data/package/subpackage/__pycache__/__init__.cpython-36.pyc,, | |||||
astroid/tests/testdata/python2/data/absimp/__pycache__/__init__.cpython-36.pyc,, | |||||
astroid/tests/testdata/python2/data/absimp/__pycache__/string.cpython-36.pyc,, | |||||
astroid/tests/testdata/python2/data/absimp/sidepackage/__pycache__/__init__.cpython-36.pyc,, | |||||
astroid/tests/testdata/python2/data/path_pkg_resources_1/package/__pycache__/foo.cpython-36.pyc,, | |||||
astroid/tests/testdata/python2/data/path_pkg_resources_1/package/__pycache__/__init__.cpython-36.pyc,, | |||||
astroid/tests/testdata/python2/data/path_pkgutil_3/package/__pycache__/baz.cpython-36.pyc,, | |||||
astroid/tests/testdata/python2/data/path_pkgutil_3/package/__pycache__/__init__.cpython-36.pyc,, | |||||
astroid/tests/testdata/python2/data/path_pkgutil_2/package/__pycache__/bar.cpython-36.pyc,, | |||||
astroid/tests/testdata/python2/data/path_pkgutil_2/package/__pycache__/__init__.cpython-36.pyc,, | |||||
astroid/tests/testdata/python2/data/lmfp/__pycache__/foo.cpython-36.pyc,, | |||||
astroid/tests/testdata/python2/data/lmfp/__pycache__/__init__.cpython-36.pyc,, | |||||
astroid/tests/testdata/python2/data/appl/__pycache__/myConnection.cpython-36.pyc,, | |||||
astroid/tests/testdata/python2/data/appl/__pycache__/__init__.cpython-36.pyc,, | |||||
astroid/tests/testdata/python2/data/foogle/fax/__pycache__/a.cpython-36.pyc,, | |||||
astroid/tests/testdata/python2/data/foogle/fax/__pycache__/__init__.cpython-36.pyc,, | |||||
astroid/tests/testdata/python2/data/__pycache__/tmp__init__.cpython-36.pyc,, | |||||
astroid/tests/testdata/python2/data/__pycache__/joined_strings.cpython-36.pyc,, | |||||
astroid/tests/testdata/python2/data/__pycache__/notall.cpython-36.pyc,, | |||||
astroid/tests/testdata/python2/data/__pycache__/recursion.cpython-36.pyc,, | |||||
astroid/tests/testdata/python2/data/__pycache__/descriptor_crash.cpython-36.pyc,, | |||||
astroid/tests/testdata/python2/data/__pycache__/absimport.cpython-36.pyc,, | |||||
astroid/tests/testdata/python2/data/__pycache__/nonregr.cpython-36.pyc,, | |||||
astroid/tests/testdata/python2/data/__pycache__/__init__.cpython-36.pyc,, | |||||
astroid/tests/testdata/python2/data/__pycache__/email.cpython-36.pyc,, | |||||
astroid/tests/testdata/python2/data/__pycache__/format.cpython-36.pyc,, | |||||
astroid/tests/testdata/python2/data/unicode_package/core/__pycache__/__init__.cpython-36.pyc,, | |||||
astroid/tests/testdata/python2/data/unicode_package/__pycache__/__init__.cpython-36.pyc,, | |||||
astroid/tests/testdata/python2/data/module1abs/__pycache__/core.cpython-36.pyc,, | |||||
astroid/tests/testdata/python2/data/module1abs/__pycache__/__init__.cpython-36.pyc,, | |||||
astroid/tests/testdata/python2/data/notamodule/__pycache__/file.cpython-36.pyc,, | |||||
astroid/tests/testdata/python2/data/SSL1/__pycache__/Connection1.cpython-36.pyc,, | |||||
astroid/tests/testdata/python2/data/SSL1/__pycache__/__init__.cpython-36.pyc,, | |||||
astroid/tests/testdata/python2/data/namespace_pep_420/__pycache__/module.cpython-36.pyc,, | |||||
astroid/tests/testdata/python2/data/path_pkg_resources_2/package/__pycache__/bar.cpython-36.pyc,, | |||||
astroid/tests/testdata/python2/data/path_pkg_resources_2/package/__pycache__/__init__.cpython-36.pyc,, | |||||
astroid/tests/testdata/python2/data/find_test/__pycache__/noendingnewline.cpython-36.pyc,, | |||||
astroid/tests/testdata/python2/data/find_test/__pycache__/module2.cpython-36.pyc,, | |||||
astroid/tests/testdata/python2/data/find_test/__pycache__/module.cpython-36.pyc,, | |||||
astroid/tests/testdata/python2/data/find_test/__pycache__/nonregr.cpython-36.pyc,, | |||||
astroid/tests/testdata/python2/data/find_test/__pycache__/__init__.cpython-36.pyc,, | |||||
astroid/tests/testdata/python2/data/path_pkgutil_1/package/__pycache__/foo.cpython-36.pyc,, | |||||
astroid/tests/testdata/python2/data/path_pkgutil_1/package/__pycache__/__init__.cpython-36.pyc,, | |||||
astroid/tests/testdata/python2/data/path_pkg_resources_3/package/__pycache__/baz.cpython-36.pyc,, | |||||
astroid/tests/testdata/python2/data/path_pkg_resources_3/package/__pycache__/__init__.cpython-36.pyc,, | |||||
astroid/tests/testdata/python2/data/contribute_to_namespace/namespace_pep_420/__pycache__/submodule.cpython-36.pyc,, | |||||
astroid/tests/testdata/python2/data/package/__pycache__/hello.cpython-36.pyc,, | |||||
astroid/tests/testdata/python2/data/package/__pycache__/import_package_subpackage_module.cpython-36.pyc,, | |||||
astroid/tests/testdata/python2/data/package/__pycache__/absimport.cpython-36.pyc,, | |||||
astroid/tests/testdata/python2/data/package/__pycache__/__init__.cpython-36.pyc,, | |||||
astroid/tests/testdata/python2/data/package/subpackage/__pycache__/module.cpython-36.pyc,, | |||||
astroid/tests/testdata/python2/data/package/subpackage/__pycache__/__init__.cpython-36.pyc,, | |||||
astroid/tests/__pycache__/unittest_transforms.cpython-36.pyc,, | |||||
astroid/tests/__pycache__/unittest_brain_numpy.cpython-36.pyc,, | |||||
astroid/tests/__pycache__/unittest_raw_building.cpython-36.pyc,, | |||||
astroid/tests/__pycache__/unittest_regrtest.cpython-36.pyc,, | |||||
astroid/tests/__pycache__/unittest_manager.cpython-36.pyc,, | |||||
astroid/tests/__pycache__/unittest_scoped_nodes.cpython-36.pyc,, | |||||
astroid/tests/__pycache__/resources.cpython-36.pyc,, | |||||
astroid/tests/__pycache__/unittest_protocols.cpython-36.pyc,, | |||||
astroid/tests/__pycache__/unittest_python3.cpython-36.pyc,, | |||||
astroid/tests/__pycache__/unittest_helpers.cpython-36.pyc,, | |||||
astroid/tests/__pycache__/unittest_brain.cpython-36.pyc,, | |||||
astroid/tests/__pycache__/unittest_modutils.cpython-36.pyc,, | |||||
astroid/tests/__pycache__/unittest_peephole.cpython-36.pyc,, | |||||
astroid/tests/__pycache__/unittest_inference.cpython-36.pyc,, | |||||
astroid/tests/__pycache__/unittest_objects.cpython-36.pyc,, | |||||
astroid/tests/__pycache__/unittest_builder.cpython-36.pyc,, | |||||
astroid/tests/__pycache__/__init__.cpython-36.pyc,, | |||||
astroid/tests/__pycache__/unittest_lookup.cpython-36.pyc,, | |||||
astroid/tests/__pycache__/unittest_object_model.cpython-36.pyc,, | |||||
astroid/tests/__pycache__/unittest_nodes.cpython-36.pyc,, | |||||
astroid/tests/__pycache__/unittest_utils.cpython-36.pyc,, | |||||
astroid/__pycache__/inference.cpython-36.pyc,, | |||||
astroid/__pycache__/util.cpython-36.pyc,, | |||||
astroid/__pycache__/exceptions.cpython-36.pyc,, | |||||
astroid/__pycache__/raw_building.cpython-36.pyc,, | |||||
astroid/__pycache__/bases.cpython-36.pyc,, | |||||
astroid/__pycache__/__pkginfo__.cpython-36.pyc,, | |||||
astroid/__pycache__/test_utils.cpython-36.pyc,, | |||||
astroid/__pycache__/transforms.cpython-36.pyc,, | |||||
astroid/__pycache__/astpeephole.cpython-36.pyc,, | |||||
astroid/__pycache__/objects.cpython-36.pyc,, | |||||
astroid/__pycache__/builder.cpython-36.pyc,, | |||||
astroid/__pycache__/mixins.cpython-36.pyc,, | |||||
astroid/__pycache__/scoped_nodes.cpython-36.pyc,, | |||||
astroid/__pycache__/as_string.cpython-36.pyc,, | |||||
astroid/__pycache__/node_classes.cpython-36.pyc,, | |||||
astroid/__pycache__/modutils.cpython-36.pyc,, | |||||
astroid/__pycache__/protocols.cpython-36.pyc,, | |||||
astroid/__pycache__/rebuilder.cpython-36.pyc,, | |||||
astroid/__pycache__/nodes.cpython-36.pyc,, | |||||
astroid/__pycache__/helpers.cpython-36.pyc,, | |||||
astroid/__pycache__/arguments.cpython-36.pyc,, | |||||
astroid/__pycache__/context.cpython-36.pyc,, | |||||
astroid/__pycache__/manager.cpython-36.pyc,, | |||||
astroid/__pycache__/__init__.cpython-36.pyc,, | |||||
astroid/__pycache__/decorators.cpython-36.pyc,, | |||||
astroid/brain/__pycache__/brain_mechanize.cpython-36.pyc,, | |||||
astroid/brain/__pycache__/brain_pkg_resources.cpython-36.pyc,, | |||||
astroid/brain/__pycache__/brain_typing.cpython-36.pyc,, | |||||
astroid/brain/__pycache__/brain_attrs.cpython-36.pyc,, | |||||
astroid/brain/__pycache__/brain_collections.cpython-36.pyc,, | |||||
astroid/brain/__pycache__/brain_pytest.cpython-36.pyc,, | |||||
astroid/brain/__pycache__/brain_random.cpython-36.pyc,, | |||||
astroid/brain/__pycache__/brain_functools.cpython-36.pyc,, | |||||
astroid/brain/__pycache__/brain_subprocess.cpython-36.pyc,, | |||||
astroid/brain/__pycache__/brain_multiprocessing.cpython-36.pyc,, | |||||
astroid/brain/__pycache__/brain_six.cpython-36.pyc,, | |||||
astroid/brain/__pycache__/brain_dateutil.cpython-36.pyc,, | |||||
astroid/brain/__pycache__/brain_uuid.cpython-36.pyc,, | |||||
astroid/brain/__pycache__/brain_fstrings.cpython-36.pyc,, | |||||
astroid/brain/__pycache__/brain_re.cpython-36.pyc,, | |||||
astroid/brain/__pycache__/brain_curses.cpython-36.pyc,, | |||||
astroid/brain/__pycache__/brain_io.cpython-36.pyc,, | |||||
astroid/brain/__pycache__/brain_ssl.cpython-36.pyc,, | |||||
astroid/brain/__pycache__/brain_qt.cpython-36.pyc,, | |||||
astroid/brain/__pycache__/brain_threading.cpython-36.pyc,, | |||||
astroid/brain/__pycache__/brain_hashlib.cpython-36.pyc,, | |||||
astroid/brain/__pycache__/brain_builtin_inference.cpython-36.pyc,, | |||||
astroid/brain/__pycache__/brain_gi.cpython-36.pyc,, | |||||
astroid/brain/__pycache__/brain_nose.cpython-36.pyc,, | |||||
astroid/brain/__pycache__/brain_numpy.cpython-36.pyc,, | |||||
astroid/brain/__pycache__/brain_namedtuple_enum.cpython-36.pyc,, |
Wheel-Version: 1.0 | |||||
Generator: bdist_wheel (0.30.0) | |||||
Root-Is-Purelib: true | |||||
Tag: py2-none-any | |||||
Tag: py3-none-any | |||||
{"classifiers": ["Topic :: Software Development :: Libraries :: Python Modules", "Topic :: Software Development :: Quality Assurance", "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: Implementation :: CPython", "Programming Language :: Python :: Implementation :: PyPy"], "extensions": {"python.details": {"contacts": [{"email": "code-quality@python.org", "name": "Python Code Quality Authority", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst"}, "project_urls": {"Home": "https://github.com/PyCQA/astroid"}}}, "extras": [], "generator": "bdist_wheel (0.30.0)", "license": "LGPL", "metadata_version": "2.0", "name": "astroid", "requires_python": ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*", "run_requires": [{"requires": ["lazy-object-proxy", "six", "wrapt"]}, {"environment": "python_version<\"3.4\"", "requires": ["backports.functools-lru-cache", "enum34 (>=1.1.3)", "singledispatch"]}], "summary": "A abstract syntax tree for Python with inference support.", "version": "1.6.5"} |
astroid |
# Copyright (c) 2006-2013, 2015 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr> | |||||
# Copyright (c) 2014 Google, Inc. | |||||
# Copyright (c) 2015-2016 Claudiu Popa <pcmanticore@gmail.com> | |||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html | |||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER | |||||
"""Python Abstract Syntax Tree New Generation | |||||
The aim of this module is to provide a common base representation of | |||||
python source code for projects such as pychecker, pyreverse, | |||||
pylint... Well, actually the development of this library is essentially | |||||
governed by pylint's needs. | |||||
It extends class defined in the python's _ast module with some | |||||
additional methods and attributes. Instance attributes are added by a | |||||
builder object, which can either generate extended ast (let's call | |||||
them astroid ;) by visiting an existent ast tree or by inspecting living | |||||
object. Methods are added by monkey patching ast classes. | |||||
Main modules are: | |||||
* nodes and scoped_nodes for more information about methods and | |||||
attributes added to different node classes | |||||
* the manager contains a high level object to get astroid trees from | |||||
source files and living objects. It maintains a cache of previously | |||||
constructed tree for quick access | |||||
* builder contains the class responsible to build astroid trees | |||||
""" | |||||
import os | |||||
import sys | |||||
import re | |||||
from operator import attrgetter | |||||
import enum | |||||
# Expression-context constants mirroring the ``ast`` module's Load/Store/Del.
# The enum class itself is deleted below so only the three members remain
# exposed as module-level names (Load, Store, Del).
_Context = enum.Enum('Context', 'Load Store Del')
Load = _Context.Load
Store = _Context.Store
Del = _Context.Del
del _Context
from .__pkginfo__ import version as __version__ | |||||
# WARNING: internal imports order matters ! | |||||
# pylint: disable=redefined-builtin, wildcard-import | |||||
# make all exception classes accessible from astroid package | |||||
from astroid.exceptions import * | |||||
# make all node classes accessible from astroid package | |||||
from astroid.nodes import * | |||||
# trigger extra monkey-patching | |||||
from astroid import inference | |||||
# more stuff available | |||||
from astroid import raw_building | |||||
from astroid.bases import BaseInstance, Instance, BoundMethod, UnboundMethod | |||||
from astroid.node_classes import are_exclusive, unpack_infer | |||||
from astroid.scoped_nodes import builtin_lookup | |||||
from astroid.builder import parse, extract_node | |||||
from astroid.util import Uninferable, YES | |||||
# Make a manager instance (a Borg: all instances share state) accessible
# from the astroid package; the class name is deleted so callers go through
# the shared MANAGER singleton instead of creating their own.
from astroid.manager import AstroidManager
MANAGER = AstroidManager()
del AstroidManager
# transform utilities (filters and decorator) | |||||
class AsStringRegexpPredicate(object):
    """Predicate class usable with `register_transform`.

    The first argument is a regular expression searched against the
    ``as_string`` rendering of the node the predicate is applied to.
    If given, the second argument is an ``attrgetter`` expression applied
    to the node first, selecting the actual node whose ``as_string``
    output should be examined.

    WARNING: this can be fairly slow, as every candidate AST node has to
    be rendered back to Python source; prefer examining the AST directly.
    """
    def __init__(self, regexp, expression=None):
        # Compile once up front: __call__ may be evaluated on many nodes.
        self.regexp = re.compile(regexp)
        self.expression = expression
    def __call__(self, node):
        target = node
        if self.expression is not None:
            target = attrgetter(self.expression)(target)
        # pylint: disable=no-member; github.com/pycqa/astroid/126
        rendered = target.as_string()
        return self.regexp.search(rendered)
def inference_tip(infer_function, raise_on_overwrite=False):
    """Given an instance specific inference function, return a function to be
    given to MANAGER.register_transform to set this inference function.

    :param infer_function: the inference function to attach to matching nodes
        (stored on the node as ``_explicit_inference``).
    :param bool raise_on_overwrite: Raise an `InferenceOverwriteError`
        if the inference tip will overwrite another. Used for debugging.

    Typical usage

    .. sourcecode:: python

       MANAGER.register_transform(Call, inference_tip(infer_named_tuple),
                                  predicate)

    .. Note::

        Using an inference tip will override
        any previously set inference tip for the given
        node. Use a predicate in the transform to prevent
        excess overwrites.
    """
    def transform(node, infer_function=infer_function):
        if (raise_on_overwrite
                and node._explicit_inference is not None
                and node._explicit_inference is not infer_function):
            # BUGFIX: the existing/new keyword arguments were swapped in the
            # original message, misreporting which inference was already set.
            raise InferenceOverwriteError(
                "Inference already set to {existing_inference}. "
                "Trying to overwrite with {new_inference} for {node}"
                .format(existing_inference=node._explicit_inference,
                        new_inference=infer_function,
                        node=node))
        node._explicit_inference = infer_function
        return node
    return transform
def register_module_extender(manager, module_name, get_extension_mod):
    """Register a transform grafting extra definitions onto a module.

    ``get_extension_mod`` is a zero-argument callable returning an astroid
    Module whose locals are copied into the module named *module_name*
    whenever that module is transformed; copied objects are re-parented to
    the target module.
    """
    def transform(node):
        extension = get_extension_mod()
        for local_name, defined in extension.locals.items():
            node.locals[local_name] = defined
            for definition in defined:
                if definition.parent is extension:
                    definition.parent = node
    manager.register_transform(Module, transform,
                               lambda n: n.name == module_name)
# Load brain plugins: every ``brain_*.py`` module in astroid/brain registers
# transforms/inference tips against MANAGER as an import side effect.
BRAIN_MODULES_DIR = os.path.join(os.path.dirname(__file__), 'brain')
if BRAIN_MODULES_DIR not in sys.path:
    # Append (not prepend) so user-supplied paths take precedence.
    sys.path.append(BRAIN_MODULES_DIR)
# Import every plugin module found in the brain directory.
# NOTE(review): os.listdir order is filesystem-dependent, so plugin import
# order is not deterministic across platforms.
for module in os.listdir(BRAIN_MODULES_DIR):
    if module.endswith('.py'):
        __import__(module[:-3])
# Copyright (c) 2006-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr> | |||||
# Copyright (c) 2014-2016 Claudiu Popa <pcmanticore@gmail.com> | |||||
# Copyright (c) 2014 Google, Inc. | |||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html | |||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER | |||||
"""astroid packaging information""" | |||||
from sys import version_info as py_version | |||||
from pkg_resources import parse_version | |||||
from setuptools import __version__ as setuptools_version | |||||
# Distribution name (on PyPI) and importable module name.
distname = 'astroid'
modname = 'astroid'
version = '1.6.5'
# Numeric version tuple derived from the string above, e.g. (1, 6, 5).
numversion = tuple(map(int, version.split('.')))
# Base requirements; conditionally extended below for Python < 3.4.
extras_require = {}
install_requires = ['lazy_object_proxy', 'six', 'wrapt']
def has_environment_marker_range_operators_support():
    """Return True if setuptools supports range operators in markers.

    Code extracted from 'pytest/setup.py'
    https://github.com/pytest-dev/pytest/blob/7538680c/setup.py#L31

    The first release known to support environment markers with range
    operators is 17.1, see:
    https://setuptools.readthedocs.io/en/latest/history.html#id113
    """
    minimum = parse_version('17.1')
    return parse_version(setuptools_version) >= minimum
# Express the Python-2.7 backport dependencies either as an environment
# marker (modern setuptools) or by extending install_requires directly at
# build time (older setuptools, evaluated against the running interpreter).
if has_environment_marker_range_operators_support():
    extras_require[':python_version<"3.4"'] = ['enum34>=1.1.3',
                                               'singledispatch',
                                               'backports.functools_lru_cache']
else:
    if py_version < (3, 4):
        install_requires.extend(['enum34',
                                 'singledispatch',
                                 'backports.functools_lru_cache'])
# pylint: disable=redefined-builtin; why license is a builtin anyway?
license = 'LGPL'
author = 'Python Code Quality Authority'
author_email = 'code-quality@python.org'
mailinglist = "mailto://%s" % author_email
web = 'https://github.com/PyCQA/astroid'
# NOTE(review): "A abstract" should read "An abstract" — left untouched here
# because this string is published package metadata.
description = "A abstract syntax tree for Python with inference support."
# Trove classifiers advertised on PyPI (supported versions/implementations).
classifiers = ["Topic :: Software Development :: Libraries :: Python Modules",
               "Topic :: Software Development :: Quality Assurance",
               "Programming Language :: Python",
               "Programming Language :: Python :: 2",
               "Programming Language :: Python :: 2.7",
               "Programming Language :: Python :: 3",
               "Programming Language :: Python :: 3.4",
               "Programming Language :: Python :: 3.5",
               "Programming Language :: Python :: 3.6",
               "Programming Language :: Python :: Implementation :: CPython",
               "Programming Language :: Python :: Implementation :: PyPy",
              ]
# Copyright (c) 2015-2016 Cara Vinson <ceridwenv@gmail.com> | |||||
# Copyright (c) 2015-2016 Claudiu Popa <pcmanticore@gmail.com> | |||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html | |||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER | |||||
import six | |||||
from astroid import bases | |||||
from astroid import context as contextmod | |||||
from astroid import exceptions | |||||
from astroid import nodes | |||||
from astroid import util | |||||
class CallSite(object):
    """Class for understanding arguments passed into a call site.

    It needs a call context, which contains the arguments and the
    keyword arguments that were passed into a given call site.
    In order to infer what an argument represents, call
    :meth:`infer_argument` with the corresponding function node
    and the argument name.
    """
    def __init__(self, callcontext):
        args = callcontext.args
        keywords = callcontext.keywords
        # Keyword names seen more than once (e.g. via ** unpacking); used by
        # infer_argument to reject ambiguous calls.
        self.duplicated_keywords = set()
        # Raw unpacked values may contain the util.Uninferable sentinel; the
        # public positional_arguments / keyword_arguments views below filter
        # it out, and the has_invalid_* methods compare the two lengths.
        self._unpacked_args = self._unpack_args(args)
        self._unpacked_kwargs = self._unpack_keywords(keywords)
        self.positional_arguments = [
            arg for arg in self._unpacked_args
            if arg is not util.Uninferable
        ]
        self.keyword_arguments = {
            key: value for key, value in self._unpacked_kwargs.items()
            if value is not util.Uninferable
        }
    @classmethod
    def from_call(cls, call_node):
        """Get a CallSite object from the given Call node."""
        callcontext = contextmod.CallContext(call_node.args,
                                             call_node.keywords)
        return cls(callcontext)
    def has_invalid_arguments(self):
        """Check if in the current CallSite were passed *invalid* arguments

        This can mean multiple things. For instance, if an unpacking
        of an invalid object was passed, then this method will return True.
        Other cases can be when the arguments can't be inferred by astroid,
        for example, by passing objects which aren't known statically.
        """
        # True whenever _unpack_args produced at least one Uninferable.
        return len(self.positional_arguments) != len(self._unpacked_args)
    def has_invalid_keywords(self):
        """Check if in the current CallSite were passed *invalid* keyword arguments

        For instance, unpacking a dictionary with integer keys is invalid
        (**{1:2}), because the keys must be strings, which will make this
        method to return True. Other cases where this might return True if
        objects which can't be inferred were passed.
        """
        # True whenever _unpack_keywords produced at least one Uninferable.
        return len(self.keyword_arguments) != len(self._unpacked_kwargs)
    def _unpack_keywords(self, keywords):
        # Expand ``**mapping`` entries (name is None) into individual keyword
        # values; anything that can't be statically resolved to a Dict with
        # string Const keys becomes Uninferable.
        values = {}
        context = contextmod.InferenceContext()
        for name, value in keywords:
            if name is None:
                # Then it's an unpacking operation (**)
                try:
                    inferred = next(value.infer(context=context))
                except exceptions.InferenceError:
                    values[name] = util.Uninferable
                    continue
                if not isinstance(inferred, nodes.Dict):
                    # Not something we can work with.
                    values[name] = util.Uninferable
                    continue
                for dict_key, dict_value in inferred.items:
                    try:
                        dict_key = next(dict_key.infer(context=context))
                    except exceptions.InferenceError:
                        values[name] = util.Uninferable
                        continue
                    if not isinstance(dict_key, nodes.Const):
                        values[name] = util.Uninferable
                        continue
                    if not isinstance(dict_key.value, six.string_types):
                        values[name] = util.Uninferable
                        continue
                    if dict_key.value in values:
                        # The name is already in the dictionary
                        values[dict_key.value] = util.Uninferable
                        self.duplicated_keywords.add(dict_key.value)
                        continue
                    values[dict_key.value] = dict_value
            else:
                values[name] = value
        return values
    @staticmethod
    def _unpack_args(args):
        # Expand ``*iterable`` (Starred) entries into their elements;
        # anything not inferrable to a node with ``elts`` becomes a single
        # Uninferable placeholder.
        values = []
        context = contextmod.InferenceContext()
        for arg in args:
            if isinstance(arg, nodes.Starred):
                try:
                    inferred = next(arg.value.infer(context=context))
                except exceptions.InferenceError:
                    values.append(util.Uninferable)
                    continue
                if inferred is util.Uninferable:
                    values.append(util.Uninferable)
                    continue
                if not hasattr(inferred, 'elts'):
                    values.append(util.Uninferable)
                    continue
                values.extend(inferred.elts)
            else:
                values.append(arg)
        return values
    def infer_argument(self, funcnode, name, context):
        """infer a function argument value according to the call context

        Arguments:
            funcnode: The function being called.
            name: The name of the argument whose value is being inferred.
            context: the inference context in use.

        Raises:
            exceptions.InferenceError: if the argument is duplicated, the
                call passes too many positionals, or no value can be found.
        """
        if name in self.duplicated_keywords:
            raise exceptions.InferenceError('The arguments passed to {func!r} '
                                            ' have duplicate keywords.',
                                            call_site=self, func=funcnode,
                                            arg=name, context=context)
        # Look into the keywords first, maybe it's already there.
        try:
            return self.keyword_arguments[name].infer(context)
        except KeyError:
            pass
        # Too many arguments given and no variable arguments.
        if len(self.positional_arguments) > len(funcnode.args.args):
            if not funcnode.args.vararg:
                raise exceptions.InferenceError('Too many positional arguments '
                                                'passed to {func!r} that does '
                                                'not have *args.',
                                                call_site=self, func=funcnode,
                                                arg=name, context=context)
        # Split the positionals into those consumed by named parameters and
        # the overflow captured by *args (if any).
        positional = self.positional_arguments[:len(funcnode.args.args)]
        vararg = self.positional_arguments[len(funcnode.args.args):]
        argindex = funcnode.args.find_argname(name)[0]
        kwonlyargs = set(arg.name for arg in funcnode.args.kwonlyargs)
        kwargs = {
            key: value for key, value in self.keyword_arguments.items()
            if key not in kwonlyargs
        }
        # If there are too few positionals compared to
        # what the function expects to receive, check to see
        # if the missing positional arguments were passed
        # as keyword arguments and if so, place them into the
        # positional args list.
        if len(positional) < len(funcnode.args.args):
            for func_arg in funcnode.args.args:
                if func_arg.name in kwargs:
                    arg = kwargs.pop(func_arg.name)
                    positional.append(arg)
        if argindex is not None:
            # 2. first argument of instance/class method
            if argindex == 0 and funcnode.type in ('method', 'classmethod'):
                if context.boundnode is not None:
                    boundnode = context.boundnode
                else:
                    # XXX can do better ?
                    boundnode = funcnode.parent.frame()
                if isinstance(boundnode, nodes.ClassDef):
                    # Verify that we're accessing a method
                    # of the metaclass through a class, as in
                    # `cls.metaclass_method`. In this case, the
                    # first argument is always the class.
                    method_scope = funcnode.parent.scope()
                    if method_scope is boundnode.metaclass():
                        return iter((boundnode, ))
                if funcnode.type == 'method':
                    if not isinstance(boundnode, bases.Instance):
                        boundnode = bases.Instance(boundnode)
                    return iter((boundnode,))
                if funcnode.type == 'classmethod':
                    return iter((boundnode,))
            # if we have a method, extract one position
            # from the index, so we'll take in account
            # the extra parameter represented by `self` or `cls`
            if funcnode.type in ('method', 'classmethod'):
                argindex -= 1
            # 2. search arg index
            try:
                return self.positional_arguments[argindex].infer(context)
            except IndexError:
                pass
        if funcnode.args.kwarg == name:
            # It wants all the keywords that were passed into
            # the call site.
            if self.has_invalid_keywords():
                raise exceptions.InferenceError(
                    "Inference failed to find values for all keyword arguments "
                    "to {func!r}: {unpacked_kwargs!r} doesn't correspond to "
                    "{keyword_arguments!r}.",
                    keyword_arguments=self.keyword_arguments,
                    unpacked_kwargs=self._unpacked_kwargs,
                    call_site=self, func=funcnode, arg=name, context=context)
            # Build a synthetic Dict node holding the leftover keywords.
            kwarg = nodes.Dict(lineno=funcnode.args.lineno,
                               col_offset=funcnode.args.col_offset,
                               parent=funcnode.args)
            kwarg.postinit([(nodes.const_factory(key), value)
                            for key, value in kwargs.items()])
            return iter((kwarg, ))
        elif funcnode.args.vararg == name:
            # It wants all the args that were passed into
            # the call site.
            if self.has_invalid_arguments():
                raise exceptions.InferenceError(
                    "Inference failed to find values for all positional "
                    "arguments to {func!r}: {unpacked_args!r} doesn't "
                    "correspond to {positional_arguments!r}.",
                    positional_arguments=self.positional_arguments,
                    unpacked_args=self._unpacked_args,
                    call_site=self, func=funcnode, arg=name, context=context)
            # Build a synthetic Tuple node holding the overflow positionals.
            args = nodes.Tuple(lineno=funcnode.args.lineno,
                               col_offset=funcnode.args.col_offset,
                               parent=funcnode.args)
            args.postinit(vararg)
            return iter((args, ))
        # Check if it's a default parameter.
        try:
            return funcnode.args.default_value(name).infer(context)
        except exceptions.NoDefault:
            pass
        raise exceptions.InferenceError('No value found for argument {name} to '
                                        '{func!r}', call_site=self,
                                        func=funcnode, arg=name, context=context)
# Copyright (c) 2009-2011, 2013-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr> | |||||
# Copyright (c) 2013-2016 Claudiu Popa <pcmanticore@gmail.com> | |||||
# Copyright (c) 2013-2014 Google, Inc. | |||||
# Copyright (c) 2015-2016 Cara Vinson <ceridwenv@gmail.com> | |||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html | |||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER | |||||
"""This module renders Astroid nodes as string: | |||||
* :func:`to_code` function return equivalent (hopefully valid) python string | |||||
* :func:`dump` function return an internal representation of nodes found | |||||
in the tree, useful for debugging or understanding the tree structure | |||||
""" | |||||
import sys | |||||
import six | |||||
# pylint: disable=unused-argument | |||||
class AsStringVisitor(object): | |||||
"""Visitor to render an Astroid node as a valid python code string""" | |||||
    def __init__(self, indent):
        # ``indent`` is one indentation unit (a string); _stmt_list prefixes
        # each line of a nested statement block with it.
        self.indent = indent
    def __call__(self, node):
        """Make the visitor usable as a plain function: render *node*
        by dispatching through ``node.accept``."""
        return node.accept(self)
    def _stmt_list(self, stmts):
        """Render a list of statement nodes as one indented block.

        Nodes whose rendering is empty are skipped; every resulting line
        is prefixed with one level of ``self.indent``.
        """
        stmts = '\n'.join([nstr for nstr in [n.accept(self) for n in stmts] if nstr])
        return self.indent + stmts.replace('\n', '\n'+self.indent)
## visit_<node> methods ########################################### | |||||
    def visit_arguments(self, node):
        """return an astroid.Arguments node as string (the node formats
        its own parameter list)"""
        return node.format_args()
    def visit_assignattr(self, node):
        """return an astroid.AssignAttr node as string (rendered exactly
        like an attribute access)"""
        return self.visit_attribute(node)
    def visit_assert(self, node):
        """return an astroid.Assert node as string"""
        # ``node.fail`` is the optional assertion message.
        if node.fail:
            return 'assert %s, %s' % (node.test.accept(self),
                                      node.fail.accept(self))
        return 'assert %s' % node.test.accept(self)
    def visit_assignname(self, node):
        """return an astroid.AssignName node as string"""
        return node.name
    def visit_assign(self, node):
        """return an astroid.Assign node as string"""
        # Multiple targets render as a chained assignment: a = b = value.
        lhs = ' = '.join([n.accept(self) for n in node.targets])
        return '%s = %s' % (lhs, node.value.accept(self))
    def visit_augassign(self, node):
        """return an astroid.AugAssign node as string (``node.op`` already
        includes the trailing '=', e.g. '+=')"""
        return '%s %s %s' % (node.target.accept(self), node.op, node.value.accept(self))
    def visit_annassign(self, node):
        """Return an astroid.AnnAssign node as string"""
        target = node.target.accept(self)
        annotation = node.annotation.accept(self)
        # A bare annotation (``x: int``) has no value to render.
        if node.value is None:
            return '%s: %s' % (target, annotation)
        return '%s: %s = %s' % (target, annotation, node.value.accept(self))
    def visit_repr(self, node):
        """return an astroid.Repr node as string (Python 2 backtick
        repr syntax)"""
        return '`%s`' % node.value.accept(self)
    def visit_binop(self, node):
        """return an astroid.BinOp node as string; both operands are
        parenthesized so no precedence information is needed"""
        return '(%s) %s (%s)' % (node.left.accept(self), node.op, node.right.accept(self))
    def visit_boolop(self, node):
        """return an astroid.BoolOp node as string; each operand is
        parenthesized and joined with the operator ('and'/'or')"""
        return (' %s ' % node.op).join(['(%s)' % n.accept(self)
                                        for n in node.values])
    def visit_break(self, node):
        """return an astroid.Break node as string"""
        return 'break'
    def visit_call(self, node):
        """return an astroid.Call node as string"""
        expr_str = node.func.accept(self)
        args = [arg.accept(self) for arg in node.args]
        # Keyword arguments follow positionals in the rendered call.
        if node.keywords:
            keywords = [kwarg.accept(self) for kwarg in node.keywords]
        else:
            keywords = []
        args.extend(keywords)
        return '%s(%s)' % (expr_str, ', '.join(args))
    def visit_classdef(self, node):
        """return an astroid.ClassDef node as string"""
        decorate = node.decorators.accept(self) if node.decorators else ''
        bases = ', '.join([n.accept(self) for n in node.bases])
        if sys.version_info[0] == 2:
            # Python 2: no keyword-style metaclass in the class header.
            bases = '(%s)' % bases if bases else ''
        else:
            # Python 3: render an explicit metaclass as a keyword argument,
            # unless it was set through the __metaclass__ assignment hack.
            metaclass = node.metaclass()
            if metaclass and not node.has_metaclass_hack():
                if bases:
                    bases = '(%s, metaclass=%s)' % (bases, metaclass.name)
                else:
                    bases = '(metaclass=%s)' % metaclass.name
            else:
                bases = '(%s)' % bases if bases else ''
        docs = '\n%s"""%s"""' % (self.indent, node.doc) if node.doc else ''
        return '\n\n%sclass %s%s:%s\n%s\n' % (decorate, node.name, bases, docs,
                                              self._stmt_list(node.body))
    def visit_compare(self, node):
        """return an astroid.Compare node as string"""
        # ``node.ops`` pairs each comparison operator with its right operand,
        # supporting chained comparisons like ``a < b <= c``.
        rhs_str = ' '.join(['%s %s' % (op, expr.accept(self))
                            for op, expr in node.ops])
        return '%s %s' % (node.left.accept(self), rhs_str)
    def visit_comprehension(self, node):
        """return an astroid.Comprehension node as string (one
        ``for ... in ...`` clause plus its ``if`` filters)"""
        ifs = ''.join([' if %s' % n.accept(self) for n in node.ifs])
        return 'for %s in %s%s' % (node.target.accept(self),
                                   node.iter.accept(self), ifs)
def visit_const(self, node): | |||||
"""return an astroid.Const node as string""" | |||||
return repr(node.value) | |||||
def visit_continue(self, node): | |||||
"""return an astroid.Continue node as string""" | |||||
return 'continue' | |||||
def visit_delete(self, node): # XXX check if correct | |||||
"""return an astroid.Delete node as string""" | |||||
return 'del %s' % ', '.join([child.accept(self) | |||||
for child in node.targets]) | |||||
    def visit_delattr(self, node):
        """Return an astroid.DelAttr node as string.

        A deleted attribute renders exactly like an attribute access;
        the 'del ' prefix is emitted by visit_delete.
        """
        return self.visit_attribute(node)
def visit_delname(self, node): | |||||
"""return an astroid.DelName node as string""" | |||||
return node.name | |||||
def visit_decorators(self, node): | |||||
"""return an astroid.Decorators node as string""" | |||||
return '@%s\n' % '\n@'.join([item.accept(self) for item in node.nodes]) | |||||
    def visit_dict(self, node):
        """Return an astroid.Dict node as string, with each entry rendered
        by the _visit_dict generator."""
        return '{%s}' % ', '.join(self._visit_dict(node))
    def _visit_dict(self, node):
        # Yield each rendered dict entry as 'key: value'; a key rendered
        # as '**' marks a DictUnpack node, which is emitted with no colon.
        for key, value in node.items:
            key = key.accept(self)
            value = value.accept(self)
            if key == '**':
                # It can only be a DictUnpack node.
                yield key + value
            else:
                yield '%s: %s' % (key, value)
def visit_dictunpack(self, node): | |||||
return '**' | |||||
    def visit_dictcomp(self, node):
        """Return an astroid.DictComp node as string:
        '{<key>: <value> <generators>}'."""
        return '{%s: %s %s}' % (node.key.accept(self), node.value.accept(self),
                                ' '.join([n.accept(self) for n in node.generators]))
    def visit_expr(self, node):
        """Return an astroid.Expr (historically named 'Discard') node as
        string: just its wrapped value."""
        return node.value.accept(self)
def visit_emptynode(self, node): | |||||
"""dummy method for visiting an Empty node""" | |||||
return '' | |||||
    def visit_excepthandler(self, node):
        """Return an astroid.ExceptHandler node as string.

        Uses the Python 2 'except Type, name' spelling; AsStringVisitor3
        overrides this with the Python 3 'except Type as name' form.
        """
        if node.type:
            if node.name:
                excs = 'except %s, %s' % (node.type.accept(self),
                                          node.name.accept(self))
            else:
                excs = 'except %s' % node.type.accept(self)
        else:
            excs = 'except'
        return '%s:\n%s' % (excs, self._stmt_list(node.body))
def visit_ellipsis(self, node): | |||||
"""return an astroid.Ellipsis node as string""" | |||||
return '...' | |||||
def visit_empty(self, node): | |||||
"""return an Empty node as string""" | |||||
return '' | |||||
    def visit_exec(self, node):
        """Return an astroid.Exec node as string.

        Renders the Python 2 'exec expr in globals, locals' statement
        forms, depending on which optional parts are present.
        """
        if node.locals:
            return 'exec %s in %s, %s' % (node.expr.accept(self),
                                          node.locals.accept(self),
                                          node.globals.accept(self))
        if node.globals:
            return 'exec %s in %s' % (node.expr.accept(self),
                                      node.globals.accept(self))
        return 'exec %s' % node.expr.accept(self)
def visit_extslice(self, node): | |||||
"""return an astroid.ExtSlice node as string""" | |||||
return ','.join([dim.accept(self) for dim in node.dims]) | |||||
def visit_for(self, node): | |||||
"""return an astroid.For node as string""" | |||||
fors = 'for %s in %s:\n%s' % (node.target.accept(self), | |||||
node.iter.accept(self), | |||||
self._stmt_list(node.body)) | |||||
if node.orelse: | |||||
fors = '%s\nelse:\n%s' % (fors, self._stmt_list(node.orelse)) | |||||
return fors | |||||
def visit_importfrom(self, node): | |||||
"""return an astroid.ImportFrom node as string""" | |||||
return 'from %s import %s' % ('.' * (node.level or 0) + node.modname, | |||||
_import_string(node.names)) | |||||
    def visit_functiondef(self, node):
        """Return an astroid.FunctionDef node as string.

        On Python 3, a '-><annotation>' return annotation is appended to
        the signature when present.
        """
        decorate = node.decorators.accept(self) if node.decorators else ''
        docs = '\n%s"""%s"""' % (self.indent, node.doc) if node.doc else ''
        return_annotation = ''
        if six.PY3 and node.returns:
            return_annotation = '->' + node.returns.as_string()
            trailer = return_annotation + ":"
        else:
            trailer = ":"
        def_format = "\n%sdef %s(%s)%s%s\n%s"
        return def_format % (decorate, node.name,
                             node.args.accept(self),
                             trailer, docs,
                             self._stmt_list(node.body))
    def visit_generatorexp(self, node):
        """Return an astroid.GeneratorExp node as string:
        '(<element> <generators>)'."""
        return '(%s %s)' % (node.elt.accept(self),
                            ' '.join([n.accept(self) for n in node.generators]))
def visit_attribute(self, node): | |||||
"""return an astroid.Getattr node as string""" | |||||
return '%s.%s' % (node.expr.accept(self), node.attrname) | |||||
def visit_global(self, node): | |||||
"""return an astroid.Global node as string""" | |||||
return 'global %s' % ', '.join(node.names) | |||||
    def visit_if(self, node):
        """Return an astroid.If node as string.

        The orelse branch is always rendered as a plain 'else:'; an
        'elif' in the source therefore round-trips as a nested if.
        """
        ifs = ['if %s:\n%s' % (node.test.accept(self), self._stmt_list(node.body))]
        if node.orelse:# XXX use elif ???
            ifs.append('else:\n%s' % self._stmt_list(node.orelse))
        return '\n'.join(ifs)
def visit_ifexp(self, node): | |||||
"""return an astroid.IfExp node as string""" | |||||
return '%s if %s else %s' % (node.body.accept(self), | |||||
node.test.accept(self), | |||||
node.orelse.accept(self)) | |||||
    def visit_import(self, node):
        """Return an astroid.Import node as string, joining the
        (name, asname) pairs via _import_string."""
        return 'import %s' % _import_string(node.names)
def visit_keyword(self, node): | |||||
"""return an astroid.Keyword node as string""" | |||||
if node.arg is None: | |||||
return '**%s' % node.value.accept(self) | |||||
return '%s=%s' % (node.arg, node.value.accept(self)) | |||||
def visit_lambda(self, node): | |||||
"""return an astroid.Lambda node as string""" | |||||
return 'lambda %s: %s' % (node.args.accept(self), | |||||
node.body.accept(self)) | |||||
def visit_list(self, node): | |||||
"""return an astroid.List node as string""" | |||||
return '[%s]' % ', '.join([child.accept(self) for child in node.elts]) | |||||
    def visit_listcomp(self, node):
        """Return an astroid.ListComp node as string:
        '[<element> <generators>]'."""
        return '[%s %s]' % (node.elt.accept(self),
                            ' '.join([n.accept(self) for n in node.generators]))
def visit_module(self, node): | |||||
"""return an astroid.Module node as string""" | |||||
docs = '"""%s"""\n\n' % node.doc if node.doc else '' | |||||
return docs + '\n'.join([n.accept(self) for n in node.body]) + '\n\n' | |||||
def visit_name(self, node): | |||||
"""return an astroid.Name node as string""" | |||||
return node.name | |||||
def visit_pass(self, node): | |||||
"""return an astroid.Pass node as string""" | |||||
return 'pass' | |||||
    def visit_print(self, node):
        """Return an astroid.Print node as string.

        Renders the Python 2 print statement, including the trailing
        comma (suppressed newline) and the '>> dest' redirection form.
        """
        nodes = ', '.join([n.accept(self) for n in node.values])
        if not node.nl:
            nodes = '%s,' % nodes
        if node.dest:
            return 'print >> %s, %s' % (node.dest.accept(self), nodes)
        return 'print %s' % nodes
    def visit_raise(self, node):
        """Return an astroid.Raise node as string.

        Renders the Python 2 three-part 'raise exc, inst, tback' forms;
        AsStringVisitor3 overrides this with the 'raise ... from ...'
        spelling.
        """
        if node.exc:
            if node.inst:
                if node.tback:
                    return 'raise %s, %s, %s' % (node.exc.accept(self),
                                                 node.inst.accept(self),
                                                 node.tback.accept(self))
                return 'raise %s, %s' % (node.exc.accept(self),
                                         node.inst.accept(self))
            return 'raise %s' % node.exc.accept(self)
        return 'raise'
def visit_return(self, node): | |||||
"""return an astroid.Return node as string""" | |||||
if node.value: | |||||
return 'return %s' % node.value.accept(self) | |||||
return 'return' | |||||
    def visit_index(self, node):
        """Return an astroid.Index node as string: just its wrapped value."""
        return node.value.accept(self)
def visit_set(self, node): | |||||
"""return an astroid.Set node as string""" | |||||
return '{%s}' % ', '.join([child.accept(self) for child in node.elts]) | |||||
    def visit_setcomp(self, node):
        """Return an astroid.SetComp node as string:
        '{<element> <generators>}'."""
        return '{%s %s}' % (node.elt.accept(self),
                            ' '.join([n.accept(self) for n in node.generators]))
def visit_slice(self, node): | |||||
"""return a astroid.Slice node as string""" | |||||
lower = node.lower.accept(self) if node.lower else '' | |||||
upper = node.upper.accept(self) if node.upper else'' | |||||
step = node.step.accept(self) if node.step else '' | |||||
if step: | |||||
return '%s:%s:%s' % (lower, upper, step) | |||||
return '%s:%s' % (lower, upper) | |||||
def visit_subscript(self, node): | |||||
"""return an astroid.Subscript node as string""" | |||||
return '%s[%s]' % (node.value.accept(self), node.slice.accept(self)) | |||||
    def visit_tryexcept(self, node):
        """Return an astroid.TryExcept node as string: the try body, every
        except handler, and an optional else branch."""
        trys = ['try:\n%s' % self._stmt_list(node.body)]
        for handler in node.handlers:
            trys.append(handler.accept(self))
        if node.orelse:
            trys.append('else:\n%s' % self._stmt_list(node.orelse))
        return '\n'.join(trys)
def visit_tryfinally(self, node): | |||||
"""return an astroid.TryFinally node as string""" | |||||
return 'try:\n%s\nfinally:\n%s' % (self._stmt_list(node.body), | |||||
self._stmt_list(node.finalbody)) | |||||
def visit_tuple(self, node): | |||||
"""return an astroid.Tuple node as string""" | |||||
if len(node.elts) == 1: | |||||
return '(%s, )' % node.elts[0].accept(self) | |||||
return '(%s)' % ', '.join([child.accept(self) for child in node.elts]) | |||||
def visit_unaryop(self, node): | |||||
"""return an astroid.UnaryOp node as string""" | |||||
if node.op == 'not': | |||||
operator = 'not ' | |||||
else: | |||||
operator = node.op | |||||
return '%s%s' % (operator, node.operand.accept(self)) | |||||
def visit_while(self, node): | |||||
"""return an astroid.While node as string""" | |||||
whiles = 'while %s:\n%s' % (node.test.accept(self), | |||||
self._stmt_list(node.body)) | |||||
if node.orelse: | |||||
whiles = '%s\nelse:\n%s' % (whiles, self._stmt_list(node.orelse)) | |||||
return whiles | |||||
def visit_with(self, node): # 'with' without 'as' is possible | |||||
"""return an astroid.With node as string""" | |||||
items = ', '.join(('(%s)' % expr.accept(self)) + | |||||
(vars and ' as (%s)' % (vars.accept(self)) or '') | |||||
for expr, vars in node.items) | |||||
return 'with %s:\n%s' % (items, self._stmt_list(node.body)) | |||||
def visit_yield(self, node): | |||||
"""yield an ast.Yield node as string""" | |||||
yi_val = (" " + node.value.accept(self)) if node.value else "" | |||||
expr = 'yield' + yi_val | |||||
if node.parent.is_statement: | |||||
return expr | |||||
return "(%s)" % (expr,) | |||||
def visit_starred(self, node): | |||||
"""return Starred node as string""" | |||||
return "*" + node.value.accept(self) | |||||
# These aren't for real AST nodes, but for inference objects. | |||||
    def visit_frozenset(self, node):
        """Render a frozenset inference object by delegating to its parent
        AST node."""
        return node.parent.accept(self)
    def visit_super(self, node):
        """Render a Super inference object by delegating to its parent
        AST node (the original super() call)."""
        return node.parent.accept(self)
    def visit_uninferable(self, node):
        """Render the Uninferable sentinel via its str() form."""
        return str(node)
class AsStringVisitor3(AsStringVisitor):
    """AsStringVisitor3 overwrites some AsStringVisitor methods with the
    Python 3 spellings and adds visitors for Python-3-only nodes."""
    def visit_excepthandler(self, node):
        # Python 3 spelling: 'except Exc as name' (not 'except Exc, name').
        if node.type:
            if node.name:
                excs = 'except %s as %s' % (node.type.accept(self),
                                            node.name.accept(self))
            else:
                excs = 'except %s' % node.type.accept(self)
        else:
            excs = 'except'
        return '%s:\n%s' % (excs, self._stmt_list(node.body))
    def visit_nonlocal(self, node):
        """return an astroid.Nonlocal node as string"""
        return 'nonlocal %s' % ', '.join(node.names)
    def visit_raise(self, node):
        """return an astroid.Raise node as string (py3 'raise ... from ...')"""
        if node.exc:
            if node.cause:
                return 'raise %s from %s' % (node.exc.accept(self),
                                             node.cause.accept(self))
            return 'raise %s' % node.exc.accept(self)
        return 'raise'
    def visit_yieldfrom(self, node):
        """ Return an astroid.YieldFrom node as string. """
        yi_val = (" " + node.value.accept(self)) if node.value else ""
        expr = 'yield from' + yi_val
        if node.parent.is_statement:
            return expr
        return "(%s)" % (expr,)
    def visit_asyncfunctiondef(self, node):
        # Reuse the plain function rendering and prefix it with 'async'.
        function = super(AsStringVisitor3, self).visit_functiondef(node)
        return 'async ' + function.strip()
    def visit_await(self, node):
        """return an astroid.Await node as string"""
        return 'await %s' % node.value.accept(self)
    def visit_asyncwith(self, node):
        """return an astroid.AsyncWith node as string"""
        return 'async %s' % self.visit_with(node)
    def visit_asyncfor(self, node):
        """return an astroid.AsyncFor node as string"""
        return 'async %s' % self.visit_for(node)
    def visit_joinedstr(self, node):
        # Special treatment for constants,
        # as we want to join literals not reprs
        string = ''.join(
            value.value if type(value).__name__ == 'Const'
            else value.accept(self)
            for value in node.values
        )
        return "f'%s'" % string
    def visit_formattedvalue(self, node):
        """return an astroid.FormattedValue (f-string slot) as string"""
        return '{%s}' % node.value.accept(self)
    def visit_comprehension(self, node):
        """return an astroid.Comprehension node as string"""
        return '%s%s' % ('async ' if node.is_async else '',
                         super(AsStringVisitor3, self).visit_comprehension(node))
def _import_string(names): | |||||
"""return a list of (name, asname) formatted as a string""" | |||||
_names = [] | |||||
for name, asname in names: | |||||
if asname is not None: | |||||
_names.append('%s as %s' % (name, asname)) | |||||
else: | |||||
_names.append(name) | |||||
return ', '.join(_names) | |||||
# On Python 3 the py3-aware subclass replaces the base visitor, so all
# later uses of AsStringVisitor pick up the Python 3 spellings.
if sys.version_info >= (3, 0):
    AsStringVisitor = AsStringVisitor3
# This sets the default indent to 4 spaces.
# NOTE(review): the literal below appears to be a single space, which
# contradicts the comment above — confirm against the repository.
to_code = AsStringVisitor(' ')
# Copyright (c) 2015-2016 Claudiu Popa <pcmanticore@gmail.com> | |||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html | |||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER | |||||
"""Small AST optimizations.""" | |||||
import _ast | |||||
from astroid import nodes | |||||
__all__ = ('ASTPeepholeOptimizer', ) | |||||
try:
    # Python 3 exposes a dedicated _ast.Bytes node for bytes literals.
    _TYPES = (_ast.Str, _ast.Bytes)
except AttributeError:
    # Python 2 has no _ast.Bytes; only str literals can be folded.
    _TYPES = (_ast.Str, )
class ASTPeepholeOptimizer(object):
    """Class for applying small optimizations to generate new AST."""
    def optimize_binop(self, node, parent=None):
        """Optimize BinOps with string Const nodes on the lhs.
        This fixes an infinite recursion crash, where multiple
        strings are joined using the addition operator. With a
        sufficient number of such strings, astroid will fail
        with a maximum recursion limit exceeded. The
        function will return a Const node with all the strings
        already joined.
        Return ``None`` if no AST node can be obtained
        through optimization.
        """
        # Walk down the left spine of the '+' chain, collecting each rhs
        # string; they are appended in reverse source order.
        ast_nodes = []
        current = node
        while isinstance(current, _ast.BinOp):
            # lhs must be a BinOp with the addition operand.
            if not isinstance(current.left, _ast.BinOp):
                return None
            if (not isinstance(current.left.op, _ast.Add)
                    or not isinstance(current.op, _ast.Add)):
                return None
            # rhs must a str / bytes.
            if not isinstance(current.right, _TYPES):
                return None
            ast_nodes.append(current.right.s)
            current = current.left
            if (isinstance(current, _ast.BinOp)
                    and isinstance(current.left, _TYPES)
                    and isinstance(current.right, _TYPES)):
                # Stop early if we are at the last BinOp in
                # the operation
                ast_nodes.append(current.right.s)
                ast_nodes.append(current.left.s)
                break
        if not ast_nodes:
            return None
        # If we have inconsistent types, bail out.
        known = type(ast_nodes[0])
        if any(not isinstance(element, known)
               for element in ast_nodes[1:]):
            return None
        # Join in source order (the walk above collected right-to-left).
        value = known().join(reversed(ast_nodes))
        newnode = nodes.Const(value, node.lineno, node.col_offset, parent)
        return newnode
# Copyright (c) 2009-2011, 2013-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr> | |||||
# Copyright (c) 2014-2016 Claudiu Popa <pcmanticore@gmail.com> | |||||
# Copyright (c) 2014 Google, Inc. | |||||
# Copyright (c) 2015-2016 Cara Vinson <ceridwenv@gmail.com> | |||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html | |||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER | |||||
"""This module contains base classes and functions for the nodes and some | |||||
inference utils. | |||||
""" | |||||
import collections | |||||
import sys | |||||
import six | |||||
from astroid import context as contextmod | |||||
from astroid import exceptions | |||||
from astroid import util | |||||
# Lazy imports break the circular dependencies between astroid modules.
objectmodel = util.lazy_import('interpreter.objectmodel')
# NOTE(review): BUILTINS is unconditionally re-assigned in the version
# check just below, so this six-based value is redundant.
BUILTINS = six.moves.builtins.__name__
manager = util.lazy_import('manager')
MANAGER = manager.AstroidManager()
if sys.version_info >= (3, 0):
    BUILTINS = 'builtins'
    BOOL_SPECIAL_METHOD = '__bool__'
else:
    BUILTINS = '__builtin__'
    BOOL_SPECIAL_METHOD = '__nonzero__'
# Fully-qualified names of the property-like builtin decorators.
PROPERTIES = {BUILTINS + '.property', 'abc.abstractproperty'}
# List of possible property names. We use this list in order
# to see if a method is a property or not. This should be
# pretty reliable and fast, the alternative being to check each
# decorator to see if its a real property-like descriptor, which
# can be too complicated.
# Also, these aren't qualified, because each project can
# define them, we shouldn't expect to know every possible
# property-like decorator!
# TODO(cpopa): just implement descriptors already.
POSSIBLE_PROPERTIES = {"cached_property", "cachedproperty",
                       "lazyproperty", "lazy_property", "reify",
                       "lazyattribute", "lazy_attribute",
                       "LazyProperty", "lazy"}
def _is_property(meth):
    """Return True when *meth* carries a property-like decorator.

    Checks the fully-qualified PROPERTIES names first, then falls back
    to comparing the unqualified last path component of each decorator
    against the POSSIBLE_PROPERTIES heuristic list.
    """
    if PROPERTIES.intersection(meth.decoratornames()):
        return True
    stripped = {name.split(".")[-1] for name in meth.decoratornames()
                if name is not util.Uninferable}
    return bool(stripped.intersection(POSSIBLE_PROPERTIES))
class Proxy(object):
    """a simple proxy object"""
    _proxied = None # proxied object may be set by class or by instance
    def __init__(self, proxied=None):
        # Keep the class-level default when no explicit object is given.
        if proxied is not None:
            self._proxied = proxied
    def __getattr__(self, name):
        if name == '_proxied':
            # Fall back to the class attribute to avoid recursing into
            # __getattr__ while _proxied is still unset.
            return getattr(self.__class__, '_proxied')
        if name in self.__dict__:
            return self.__dict__[name]
        # Everything else is delegated to the proxied object.
        return getattr(self._proxied, name)
    def infer(self, context=None):
        # A proxy infers to itself.
        yield self
def _infer_stmts(stmts, context, frame=None):
    """Return an iterator on statements inferred by each statement in *stmts*.

    Uninferable members are yielded as-is.  A NameInferenceError from a
    statement skips it; an InferenceError yields Uninferable instead.
    Raises InferenceError only when nothing at all could be inferred.
    """
    stmt = None
    # 'inferred' doubles as the loop variable and a did-anything flag.
    inferred = False
    if context is not None:
        name = context.lookupname
        context = context.clone()
    else:
        name = None
        context = contextmod.InferenceContext()
    for stmt in stmts:
        if stmt is util.Uninferable:
            yield stmt
            inferred = True
            continue
        context.lookupname = stmt._infer_name(frame, name)
        try:
            for inferred in stmt.infer(context=context):
                yield inferred
                inferred = True
        except exceptions.NameInferenceError:
            continue
        except exceptions.InferenceError:
            yield util.Uninferable
            inferred = True
    if not inferred:
        raise exceptions.InferenceError(
            'Inference failed for all members of {stmts!r}.',
            stmts=stmts, frame=frame, context=context)
def _infer_method_result_truth(instance, method_name, context):
    # Get the method from the instance and try to infer
    # its return's truth value.
    meth = next(instance.igetattr(method_name, context=context), None)
    if meth and hasattr(meth, 'infer_call_result'):
        if not meth.callable():
            return util.Uninferable
        for value in meth.infer_call_result(instance, context=context):
            if value is util.Uninferable:
                return value
            # Only the first inferred call result is considered.
            inferred = next(value.infer(context=context))
            return inferred.bool_value()
    return util.Uninferable
class BaseInstance(Proxy):
    """An instance base class, which provides lookup methods for potential instances."""
    # Subclasses override this with a lazily-built special-attribute model.
    special_attributes = None
    def display_type(self):
        return 'Instance of'
    def getattr(self, name, context=None, lookupclass=True):
        """Return the list of nodes bound to *name* on this instance,
        optionally including matching class attributes."""
        try:
            values = self._proxied.instance_attr(name, context)
        except exceptions.AttributeInferenceError:
            if self.special_attributes and name in self.special_attributes:
                return [self.special_attributes.lookup(name)]
            if lookupclass:
                # Class attributes not available through the instance
                # unless they are explicitly defined.
                return self._proxied.getattr(name, context,
                                             class_context=False)
            util.reraise(exceptions.AttributeInferenceError(target=self,
                                                            attribute=name,
                                                            context=context))
        # since we've no context information, return matching class members as
        # well
        if lookupclass:
            try:
                return values + self._proxied.getattr(name, context,
                                                      class_context=False)
            except exceptions.AttributeInferenceError:
                pass
        return values
    def igetattr(self, name, context=None):
        """inferred getattr"""
        if not context:
            context = contextmod.InferenceContext()
        try:
            # avoid recursively inferring the same attr on the same class
            if context.push((self._proxied, name)):
                return
            # XXX frame should be self._proxied, or not ?
            get_attr = self.getattr(name, context, lookupclass=False)
            for stmt in _infer_stmts(self._wrap_attr(get_attr, context),
                                     context, frame=self):
                yield stmt
        except exceptions.AttributeInferenceError as error:
            try:
                # fallback to class.igetattr since it has some logic to handle
                # descriptors
                # But only if the _proxied is the Class.
                if self._proxied.__class__.__name__ != 'ClassDef':
                    util.reraise(exceptions.InferenceError(**vars(error)))
                attrs = self._proxied.igetattr(name, context, class_context=False)
                for stmt in self._wrap_attr(attrs, context):
                    yield stmt
            except exceptions.AttributeInferenceError as error:
                util.reraise(exceptions.InferenceError(**vars(error)))
    def _wrap_attr(self, attrs, context=None):
        """wrap bound methods of attrs in a InstanceMethod proxies"""
        for attr in attrs:
            if isinstance(attr, UnboundMethod):
                if _is_property(attr):
                    # Properties yield their inferred value, not the method.
                    for inferred in attr.infer_call_result(self, context):
                        yield inferred
                else:
                    yield BoundMethod(attr, self)
            elif hasattr(attr, 'name') and attr.name == '<lambda>':
                # This is a lambda function defined at class level,
                # since its scope is the underlying _proxied class.
                # Unfortunately, we can't do an isinstance check here,
                # because of the circular dependency between astroid.bases
                # and astroid.scoped_nodes.
                if attr.statement().scope() == self._proxied:
                    if attr.args.args and attr.args.args[0].name == 'self':
                        yield BoundMethod(attr, self)
                        continue
                yield attr
            else:
                yield attr
    def infer_call_result(self, caller, context=None):
        """infer what a class instance is returning when called"""
        inferred = False
        for node in self._proxied.igetattr('__call__', context):
            if node is util.Uninferable or not node.callable():
                continue
            for res in node.infer_call_result(caller, context):
                inferred = True
                yield res
        if not inferred:
            raise exceptions.InferenceError(node=self, caller=caller,
                                            context=context)
class Instance(BaseInstance):
    """A special node representing a class instance."""
    # pylint: disable=unnecessary-lambda
    special_attributes = util.lazy_descriptor(lambda: objectmodel.InstanceModel())
    def __repr__(self):
        # NOTE(review): '0x%s' interpolates id() in decimal despite the
        # hex-looking prefix — confirm whether '%x' was intended.
        return '<Instance of %s.%s at 0x%s>' % (self._proxied.root().name,
                                                self._proxied.name,
                                                id(self))
    def __str__(self):
        return 'Instance of %s.%s' % (self._proxied.root().name,
                                      self._proxied.name)
    def callable(self):
        # An instance is callable iff its class defines __call__.
        try:
            self._proxied.getattr('__call__', class_context=False)
            return True
        except exceptions.AttributeInferenceError:
            return False
    def pytype(self):
        return self._proxied.qname()
    def display_type(self):
        return 'Instance of'
    def bool_value(self):
        """Infer the truth value for an Instance
        The truth value of an instance is determined by these conditions:
        * if it implements __bool__ on Python 3 or __nonzero__
          on Python 2, then its bool value will be determined by
          calling this special method and checking its result.
        * when this method is not defined, __len__() is called, if it
          is defined, and the object is considered true if its result is
          nonzero. If a class defines neither __len__() nor __bool__(),
          all its instances are considered true.
        """
        context = contextmod.InferenceContext()
        context.callcontext = contextmod.CallContext(args=[])
        context.boundnode = self
        try:
            result = _infer_method_result_truth(self, BOOL_SPECIAL_METHOD, context)
        except (exceptions.InferenceError, exceptions.AttributeInferenceError):
            # Fallback to __len__.
            try:
                result = _infer_method_result_truth(self, '__len__', context)
            except (exceptions.AttributeInferenceError, exceptions.InferenceError):
                return True
        return result
    # TODO(cpopa): this is set in inference.py
    # The circular dependency hell goes deeper and deeper.
    def getitem(self, index, context=None):
        pass
class UnboundMethod(Proxy):
    """a special node representing a method not bound to an instance"""
    # pylint: disable=unnecessary-lambda
    special_attributes = util.lazy_descriptor(lambda: objectmodel.UnboundMethodModel())
    def __repr__(self):
        frame = self._proxied.parent.frame()
        # Bug fix: the format string previously lacked the closing '>',
        # producing an unterminated repr like '<UnboundMethod f of m at 0x1'.
        return '<%s %s of %s at 0x%s>' % (self.__class__.__name__,
                                          self._proxied.name,
                                          frame.qname(), id(self))
    def is_bound(self):
        return False
    def getattr(self, name, context=None):
        """Return the nodes for *name*, preferring the special-attribute
        model over the proxied function's own attributes."""
        if name in self.special_attributes:
            return [self.special_attributes.lookup(name)]
        return self._proxied.getattr(name, context)
    def igetattr(self, name, context=None):
        """Inferred getattr; same precedence as getattr()."""
        if name in self.special_attributes:
            return iter((self.special_attributes.lookup(name), ))
        return self._proxied.igetattr(name, context)
    def infer_call_result(self, caller, context):
        # If we're unbound method __new__ of builtin object, the result is an
        # instance of the class given as first argument.
        if (self._proxied.name == '__new__' and
                self._proxied.parent.frame().qname() == '%s.object' % BUILTINS):
            infer = caller.args[0].infer() if caller.args else []
            return (Instance(x) if x is not util.Uninferable else x for x in infer)
        return self._proxied.infer_call_result(caller, context)
    def bool_value(self):
        # A method object is always truthy.
        return True
class BoundMethod(UnboundMethod):
    """a special node representing a method bound to an instance"""
    # pylint: disable=unnecessary-lambda
    special_attributes = util.lazy_descriptor(lambda: objectmodel.BoundMethodModel())
    def __init__(self, proxy, bound):
        UnboundMethod.__init__(self, proxy)
        # The instance (or class) the method is bound to.
        self.bound = bound
    def is_bound(self):
        return True
    def _infer_type_new_call(self, caller, context):
        """Try to infer what type.__new__(mcs, name, bases, attrs) returns.
        In order for such call to be valid, the metaclass needs to be
        a subtype of ``type``, the name needs to be a string, the bases
        needs to be a tuple of classes and the attributes a dictionary
        of strings to values.
        """
        from astroid import node_classes
        # Verify the metaclass
        mcs = next(caller.args[0].infer(context=context))
        if mcs.__class__.__name__ != 'ClassDef':
            # Not a valid first argument.
            return None
        if not mcs.is_subtype_of("%s.type" % BUILTINS):
            # Not a valid metaclass.
            return None
        # Verify the name
        name = next(caller.args[1].infer(context=context))
        if name.__class__.__name__ != 'Const':
            # Not a valid name, needs to be a const.
            return None
        if not isinstance(name.value, str):
            # Needs to be a string.
            return None
        # Verify the bases
        bases = next(caller.args[2].infer(context=context))
        if bases.__class__.__name__ != 'Tuple':
            # Needs to be a tuple.
            return None
        inferred_bases = [next(elt.infer(context=context))
                          for elt in bases.elts]
        if any(base.__class__.__name__ != 'ClassDef'
               for base in inferred_bases):
            # All the bases needs to be Classes
            return None
        # Verify the attributes.
        attrs = next(caller.args[3].infer(context=context))
        if attrs.__class__.__name__ != 'Dict':
            # Needs to be a dictionary.
            return None
        cls_locals = collections.defaultdict(list)
        for key, value in attrs.items:
            key = next(key.infer(context=context))
            value = next(value.infer(context=context))
            if key.__class__.__name__ != 'Const':
                # Something invalid as an attribute.
                return None
            if not isinstance(key.value, str):
                # Not a proper attribute.
                return None
            cls_locals[key.value].append(value)
        # Build the class from now.
        cls = mcs.__class__(name=name.value, lineno=caller.lineno,
                            col_offset=caller.col_offset,
                            parent=caller)
        empty = node_classes.Pass()
        cls.postinit(bases=bases.elts, body=[empty], decorators=[],
                     newstyle=True, metaclass=mcs, keywords=[])
        cls.locals = cls_locals
        return cls
    def infer_call_result(self, caller, context=None):
        """Infer the result of calling this bound method, special-casing
        the dynamic class creation pattern type.__new__(mcs, ...)."""
        if context is None:
            context = contextmod.InferenceContext()
        context = context.clone()
        context.boundnode = self.bound
        if (self.bound.__class__.__name__ == 'ClassDef'
                and self.bound.name == 'type'
                and self.name == '__new__'
                and len(caller.args) == 4
                # TODO(cpopa): this check shouldn't be needed.
                and self._proxied.parent.frame().qname() == '%s.object' % BUILTINS):
            # Check if we have an ``type.__new__(mcs, name, bases, attrs)`` call.
            new_cls = self._infer_type_new_call(caller, context)
            if new_cls:
                return iter((new_cls, ))
        return super(BoundMethod, self).infer_call_result(caller, context)
    def bool_value(self):
        # A bound method object is always truthy.
        return True
class Generator(BaseInstance):
    """a special node representing a generator.
    Proxied class is set once for all in raw_building.
    """
    # pylint: disable=unnecessary-lambda
    special_attributes = util.lazy_descriptor(lambda: objectmodel.GeneratorModel())
    # pylint: disable=super-init-not-called
    def __init__(self, parent=None):
        self.parent = parent
    def callable(self):
        # Generator objects themselves are not callable.
        return False
    def pytype(self):
        return '%s.generator' % BUILTINS
    def display_type(self):
        return 'Generator'
    def bool_value(self):
        # A generator object is always truthy.
        return True
    def __repr__(self):
        # self.lineno is resolved through Proxy delegation to _proxied —
        # presumably the generator function's line; TODO confirm.
        return '<Generator(%s) l.%s at 0x%s>' % (self._proxied.name, self.lineno, id(self))
    def __str__(self):
        return 'Generator(%s)' % (self._proxied.name)
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html | |||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER | |||||
""" | |||||
Astroid hook for the attrs library | |||||
Without this hook pylint reports unsupported-assignment-operation | |||||
for attrs classes
""" | |||||
import astroid | |||||
from astroid import MANAGER | |||||
ATTR_IB = 'attr.ib' | |||||
def is_decorated_with_attrs(
        node, decorator_names=('attr.s', 'attr.attrs', 'attr.attributes')):
    """Return True if a decorated node has
    an attr decorator applied."""
    if not node.decorators:
        # Undecorated definitions cannot be attrs classes.
        return False
    return any(decorator.as_string() in decorator_names
               for decorator in node.decorators.nodes)
def attr_attributes_transform(node):
    """Given that the ClassNode has an attr decorator,
    rewrite class attributes as instance attributes
    """
    # Astroid can't infer this attribute properly
    # Prevents https://github.com/PyCQA/pylint/issues/1884
    # NOTE(review): ``parent`` is set to ``node.body``, which is a list, not
    # a node -- presumably ``node`` itself was intended; confirm upstream.
    node.locals["__attrs_attrs__"] = [astroid.Unknown(parent=node.body)]
    for cdefbodynode in node.body:
        # Only ``<name> = attr.ib(...)`` class-level assignments are rewritten.
        if not isinstance(cdefbodynode, astroid.Assign):
            continue
        if isinstance(cdefbodynode.value, astroid.Call):
            if cdefbodynode.value.func.as_string() != ATTR_IB:
                continue
        else:
            continue
        for target in cdefbodynode.targets:
            # Replace the attr.ib() call with an Unknown node so assignments
            # to the attribute on instances are not flagged.
            rhs_node = astroid.Unknown(
                lineno=cdefbodynode.lineno,
                col_offset=cdefbodynode.col_offset,
                parent=cdefbodynode
            )
            node.locals[target.name] = [rhs_node]
# Register the attrs rewrite for class definitions.  ``astroid.Class`` was
# renamed to ``ClassDef`` in astroid 1.4 and the old alias no longer exists,
# so referencing it raises AttributeError at import time; ``ClassDef`` also
# matches the node names used by the other registrations in this code base
# (e.g. ``astroid.FunctionDef`` for the lru_cache brain).
MANAGER.register_transform(
    astroid.ClassDef,
    attr_attributes_transform,
    is_decorated_with_attrs)
# Copyright (c) 2014-2016 Claudiu Popa <pcmanticore@gmail.com> | |||||
# Copyright (c) 2015-2016 Cara Vinson <ceridwenv@gmail.com> | |||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html | |||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER | |||||
"""Astroid hooks for various builtins.""" | |||||
from functools import partial | |||||
import sys | |||||
from textwrap import dedent | |||||
import six | |||||
from astroid import (MANAGER, UseInferenceDefault, AttributeInferenceError, | |||||
inference_tip, InferenceError, NameInferenceError) | |||||
from astroid import arguments | |||||
from astroid.builder import AstroidBuilder | |||||
from astroid import helpers | |||||
from astroid import nodes | |||||
from astroid import objects | |||||
from astroid import scoped_nodes | |||||
from astroid import util | |||||
OBJECT_DUNDER_NEW = 'object.__new__' | |||||
def _extend_str(class_node, rvalue):
    """function to extend builtin str/unicode class

    Builds a fake class whose methods all return *rvalue* (a literal such
    as ``''``, ``u''`` or ``b''``) and grafts those methods onto
    *class_node*, the astroid node of the real builtin class.
    """
    # TODO(cpopa): this approach will make astroid to believe
    # that some arguments can be passed by keyword, but
    # unfortunately, strings and bytes don't accept keyword arguments.
    code = dedent('''
    class whatever(object):
        def join(self, iterable):
            return {rvalue}
        def replace(self, old, new, count=None):
            return {rvalue}
        def format(self, *args, **kwargs):
            return {rvalue}
        def encode(self, encoding='ascii', errors=None):
            return ''
        def decode(self, encoding='ascii', errors=None):
            return u''
        def capitalize(self):
            return {rvalue}
        def title(self):
            return {rvalue}
        def lower(self):
            return {rvalue}
        def upper(self):
            return {rvalue}
        def swapcase(self):
            return {rvalue}
        def index(self, sub, start=None, end=None):
            return 0
        def find(self, sub, start=None, end=None):
            return 0
        def count(self, sub, start=None, end=None):
            return 0
        def strip(self, chars=None):
            return {rvalue}
        def lstrip(self, chars=None):
            return {rvalue}
        def rstrip(self, chars=None):
            return {rvalue}
        def rjust(self, width, fillchar=None):
            return {rvalue}
        def center(self, width, fillchar=None):
            return {rvalue}
        def ljust(self, width, fillchar=None):
            return {rvalue}
    ''')
    # Substitute the literal every method should appear to return.
    code = code.format(rvalue=rvalue)
    fake = AstroidBuilder(MANAGER).string_build(code)['whatever']
    # Graft the fake methods onto the real builtin class node.
    for method in fake.mymethods():
        class_node.locals[method.name] = [method]
        method.parent = class_node
def extend_builtins(class_transforms):
    """Apply each transform in *class_transforms* to its builtin class node."""
    from astroid.bases import BUILTINS
    cached_builtins = MANAGER.astroid_cache[BUILTINS]
    for class_name, patch in class_transforms.items():
        patch(cached_builtins[class_name])
# Patch the string-like builtins with the fake method stubs above; the
# returned literal matches the type being extended (str/bytes on Python 3,
# str/unicode on Python 2).
if sys.version_info > (3, 0):
    extend_builtins({'bytes': partial(_extend_str, rvalue="b''"),
                     'str': partial(_extend_str, rvalue="''")})
else:
    extend_builtins({'str': partial(_extend_str, rvalue="''"),
                     'unicode': partial(_extend_str, rvalue="u''")})
def register_builtin_transform(transform, builtin_name):
    """Register a new transform function for the given *builtin_name*.
    The transform function must accept two parameters, a node and
    an optional context.
    """
    def _transform_wrapper(node, context=None):
        result = transform(node, context=context)
        if result:
            if not result.parent:
                # Let the transformation function determine
                # the parent for its result. Otherwise,
                # we set it to be the node we transformed from.
                result.parent = node
            result.lineno = node.lineno
            result.col_offset = node.col_offset
        return iter([result])
    def _looks_like_builtin_call(call_node):
        # Only plain ``name(...)`` calls to the given builtin are matched.
        return (isinstance(call_node.func, nodes.Name)
                and call_node.func.name == builtin_name)
    MANAGER.register_transform(nodes.Call,
                               inference_tip(_transform_wrapper),
                               _looks_like_builtin_call)
def _generic_inference(node, context, node_type, transform):
    """Infer a builtin container call, inferring the argument if needed."""
    call_args = node.args
    if not call_args:
        # Zero-argument form: an empty container.
        return node_type()
    if len(call_args) > 1:
        raise UseInferenceDefault()
    argument = call_args[0]
    result = transform(argument)
    if not result:
        # The raw argument node was not convertible; infer it first and
        # retry the conversion on the inferred value.
        try:
            inferred_arg = next(argument.infer(context=context))
        except (InferenceError, StopIteration):
            raise UseInferenceDefault()
        if inferred_arg is util.Uninferable:
            raise UseInferenceDefault()
        result = transform(inferred_arg)
    if not result or result is util.Uninferable:
        raise UseInferenceDefault()
    return result
def _generic_transform(arg, klass, iterables, build_elts):
    """Convert the node *arg* into a *klass* container node, if possible.

    Returns None when the argument is of an unrecognised kind; raises
    UseInferenceDefault when it is recognised but holds elements we
    cannot handle.
    """
    if isinstance(arg, klass):
        # Already the desired container type.
        return arg
    elif isinstance(arg, iterables):
        if not all(isinstance(elt, nodes.Const)
                   for elt in arg.elts):
            # TODO(cpopa): Don't support heterogenous elements.
            # Not yet, though.
            raise UseInferenceDefault()
        elts = [elt.value for elt in arg.elts]
    elif isinstance(arg, nodes.Dict):
        # Iterating a dict yields its keys.
        if not all(isinstance(elt[0], nodes.Const)
                   for elt in arg.items):
            raise UseInferenceDefault()
        elts = [item[0].value for item in arg.items]
    elif (isinstance(arg, nodes.Const) and
          isinstance(arg.value, (six.string_types, six.binary_type))):
        # Strings and bytes iterate over their characters.
        elts = arg.value
    else:
        return
    return klass.from_constants(elts=build_elts(elts))
def _infer_builtin(node, context,
                   klass=None, iterables=None,
                   build_elts=None):
    """Common entry point for inferring builtin container constructor calls."""
    return _generic_inference(
        node,
        context,
        klass,
        partial(_generic_transform,
                klass=klass,
                iterables=iterables,
                build_elts=build_elts))
# pylint: disable=invalid-name
# Pre-configured inference helpers for the builtin container constructors.
# Each partial fixes: the node type produced, the iterable node types
# accepted as argument, and the Python builtin that rebuilds the elements.
infer_tuple = partial(
    _infer_builtin,
    klass=nodes.Tuple,
    iterables=(nodes.List, nodes.Set, objects.FrozenSet,
               objects.DictItems, objects.DictKeys,
               objects.DictValues),
    build_elts=tuple)
infer_list = partial(
    _infer_builtin,
    klass=nodes.List,
    iterables=(nodes.Tuple, nodes.Set, objects.FrozenSet,
               objects.DictItems, objects.DictKeys,
               objects.DictValues),
    build_elts=list)
infer_set = partial(
    _infer_builtin,
    klass=nodes.Set,
    iterables=(nodes.List, nodes.Tuple, objects.FrozenSet,
               objects.DictKeys),
    build_elts=set)
infer_frozenset = partial(
    _infer_builtin,
    klass=objects.FrozenSet,
    iterables=(nodes.List, nodes.Tuple, nodes.Set, objects.FrozenSet,
               objects.DictKeys),
    build_elts=frozenset)
def _get_elts(arg, context):
    """Return the (key, value) pairs inferred from a dict()-style argument."""
    def _is_iterable(candidate):
        return isinstance(candidate, (nodes.List, nodes.Tuple, nodes.Set))
    try:
        inferred = next(arg.infer(context))
    except (InferenceError, NameInferenceError):
        raise UseInferenceDefault()
    if isinstance(inferred, nodes.Dict):
        # A mapping argument: reuse its items directly.
        return inferred.items
    if not _is_iterable(inferred):
        raise UseInferenceDefault()
    pairs = []
    for elt in inferred.elts:
        # Each element must itself be an iterable of exactly two items
        # whose first item is hashable: tuples and consts (Names are
        # accepted as well).  Anything else falls back to the default
        # inference.
        if not _is_iterable(elt):
            raise UseInferenceDefault()
        if len(elt.elts) != 2:
            raise UseInferenceDefault()
        if not isinstance(elt.elts[0],
                          (nodes.Tuple, nodes.Const, nodes.Name)):
            raise UseInferenceDefault()
        pairs.append(tuple(elt.elts))
    return pairs
def infer_dict(node, context=None):
    """Try to infer a dict call to a Dict node.
    The function treats the following cases:
    * dict()
    * dict(mapping)
    * dict(iterable)
    * dict(iterable, **kwargs)
    * dict(mapping, **kwargs)
    * dict(**kwargs)
    If a case can't be inferred, we'll fallback to default inference.
    """
    call = arguments.CallSite.from_call(node)
    if call.has_invalid_arguments() or call.has_invalid_keywords():
        raise UseInferenceDefault
    args = call.positional_arguments
    kwargs = list(call.keyword_arguments.items())
    if not args and not kwargs:
        # dict()
        return nodes.Dict()
    elif kwargs and not args:
        # dict(a=1, b=2, c=4)
        items = [(nodes.Const(key), value) for key, value in kwargs]
    elif len(args) == 1 and kwargs:
        # dict(some_iterable, b=2, c=4)
        # Keyword entries come after the iterable's, matching dict() order.
        elts = _get_elts(args[0], context)
        keys = [(nodes.Const(key), value) for key, value in kwargs]
        items = elts + keys
    elif len(args) == 1:
        # dict(mapping) or dict(iterable); _get_elts handles both.
        items = _get_elts(args[0], context)
    else:
        # More than one positional argument is invalid for dict().
        raise UseInferenceDefault()
    value = nodes.Dict(col_offset=node.col_offset,
                       lineno=node.lineno,
                       parent=node.parent)
    value.postinit(items)
    return value
def infer_super(node, context=None):
    """Understand super calls.
    There are some restrictions for what can be understood:
    * unbounded super (one argument form) is not understood.
    * if the super call is not inside a function (classmethod or method),
      then the default inference will be used.
    * if the super arguments can't be inferred, the default inference
      will be used.
    """
    if len(node.args) == 1:
        # Ignore unbounded super.
        raise UseInferenceDefault
    scope = node.scope()
    if not isinstance(scope, nodes.FunctionDef):
        # Ignore non-method uses of super.
        raise UseInferenceDefault
    if scope.type not in ('classmethod', 'method'):
        # Not interested in staticmethods.
        raise UseInferenceDefault
    cls = scoped_nodes.get_wrapping_class(scope)
    if not len(node.args):
        # Zero-argument super(): use the enclosing class implicitly.
        mro_pointer = cls
        # If we are in a classmethod, the interpreter will fill
        # automatically the class as the second argument, not an instance.
        if scope.type == 'classmethod':
            mro_type = cls
        else:
            mro_type = cls.instantiate_class()
    else:
        # Two-argument super(type, obj_or_type): infer both arguments.
        # TODO(cpopa): support flow control (multiple inference values).
        try:
            mro_pointer = next(node.args[0].infer(context=context))
        except InferenceError:
            raise UseInferenceDefault
        try:
            mro_type = next(node.args[1].infer(context=context))
        except InferenceError:
            raise UseInferenceDefault
    if mro_pointer is util.Uninferable or mro_type is util.Uninferable:
        # No way we could understand this.
        raise UseInferenceDefault
    super_obj = objects.Super(mro_pointer=mro_pointer,
                              mro_type=mro_type,
                              self_class=cls,
                              scope=scope)
    super_obj.parent = node
    return super_obj
def _infer_getattr_args(node, context):
    """Infer the (object, attribute-name) pair of a getattr/hasattr call."""
    if len(node.args) not in (2, 3):
        # Not a valid getattr call.
        raise UseInferenceDefault
    try:
        # TODO(cpopa): follow all the values of the first argument?
        obj = next(node.args[0].infer(context=context))
        attr = next(node.args[1].infer(context=context))
    except InferenceError:
        raise UseInferenceDefault
    if obj is util.Uninferable or attr is util.Uninferable:
        # If one of the arguments is something we can't infer,
        # then also make the result of the getattr call something
        # which is unknown.
        return util.Uninferable, util.Uninferable
    # The attribute name must be a constant string.
    if not isinstance(attr, nodes.Const):
        raise UseInferenceDefault
    if not isinstance(attr.value, six.string_types):
        raise UseInferenceDefault
    return obj, attr.value
def infer_getattr(node, context=None):
    """Understand getattr calls
    If one of the arguments is an Uninferable object, then the
    result will be an Uninferable object. Otherwise, the normal attribute
    lookup will be done.
    """
    obj, attr = _infer_getattr_args(node, context)
    if obj is util.Uninferable or attr is util.Uninferable or not hasattr(obj, 'igetattr'):
        return util.Uninferable
    try:
        return next(obj.igetattr(attr, context=context))
    except (StopIteration, InferenceError, AttributeInferenceError):
        # Lookup failed; a three-argument getattr supplies a default.
        if len(node.args) == 3:
            # Try to infer the default and return it instead.
            try:
                return next(node.args[2].infer(context=context))
            except InferenceError:
                raise UseInferenceDefault
        raise UseInferenceDefault
def infer_hasattr(node, context=None):
    """Understand hasattr calls
    This always guarantees three possible outcomes for calling
    hasattr: Const(False) when we are sure that the object
    doesn't have the intended attribute, Const(True) when
    we know that the object has the attribute and Uninferable
    when we are unsure of the outcome of the function call.
    """
    try:
        obj, attr = _infer_getattr_args(node, context)
        if obj is util.Uninferable or attr is util.Uninferable or not hasattr(obj, 'getattr'):
            return util.Uninferable
        # getattr() raising is the "attribute missing" signal handled below.
        obj.getattr(attr, context=context)
    except UseInferenceDefault:
        # Can't infer something from this function call.
        return util.Uninferable
    except AttributeInferenceError:
        # Doesn't have it.
        return nodes.Const(False)
    return nodes.Const(True)
def infer_callable(node, context=None):
    """Understand callable calls
    This follows Python's semantics, where an object
    is callable if it provides an attribute __call__,
    even though that attribute is something which can't be
    called.
    """
    if len(node.args) != 1:
        # Invalid callable call.
        raise UseInferenceDefault
    try:
        inferred = next(node.args[0].infer(context=context))
    except InferenceError:
        return util.Uninferable
    if inferred is util.Uninferable:
        return util.Uninferable
    return nodes.Const(inferred.callable())
def infer_bool(node, context=None):
    """Understand bool calls."""
    if len(node.args) > 1:
        # Invalid bool call.
        raise UseInferenceDefault
    if not node.args:
        # bool() with no argument is False.
        return nodes.Const(False)
    try:
        inferred = next(node.args[0].infer(context=context))
    except InferenceError:
        return util.Uninferable
    if inferred is util.Uninferable:
        return util.Uninferable
    truthiness = inferred.bool_value()
    if truthiness is util.Uninferable:
        return util.Uninferable
    return nodes.Const(truthiness)
def infer_type(node, context=None):
    """Understand the one-argument form of *type*."""
    if len(node.args) != 1:
        # The three-argument, class-creating form is not handled here.
        raise UseInferenceDefault
    return helpers.object_type(node.args[0], context)
def infer_slice(node, context=None):
    """Understand `slice` calls."""
    args = node.args
    if not 0 < len(args) <= 3:
        # slice() takes one to three arguments.
        raise UseInferenceDefault
    args = list(map(helpers.safe_infer, args))
    for arg in args:
        # Every argument must infer to a constant int or None.
        if not arg or arg is util.Uninferable:
            raise UseInferenceDefault
        if not isinstance(arg, nodes.Const):
            raise UseInferenceDefault
        if not isinstance(arg.value, (type(None), int)):
            raise UseInferenceDefault
    if len(args) < 3:
        # Make sure we have 3 arguments.
        # NOTE(review): pads with plain ``None`` (not Const nodes);
        # presumably Slice.postinit accepts None for absent bounds -- confirm.
        args.extend([None] * (3 - len(args)))
    slice_node = nodes.Slice(lineno=node.lineno,
                             col_offset=node.col_offset,
                             parent=node.parent)
    slice_node.postinit(*args)
    return slice_node
def _infer_object__new__decorator(node, context=None):
    """Infer a class decorated with ``@object.__new__`` as an instance."""
    # Instantiate class immediately
    # since that's what @object.__new__ does
    return iter((node.instantiate_class(),))
def _infer_object__new__decorator_check(node):
    """Predicate before inference_tip
    Check if the given ClassDef has a @object.__new__ decorator
    """
    if not node.decorators:
        return False
    return any(isinstance(decorator, nodes.Attribute)
               and decorator.as_string() == OBJECT_DUNDER_NEW
               for decorator in node.decorators.nodes)
# Builtins inference
# Hook each inference function defined above into astroid's transform
# machinery, keyed on the builtin's name.
register_builtin_transform(infer_bool, 'bool')
register_builtin_transform(infer_super, 'super')
register_builtin_transform(infer_callable, 'callable')
register_builtin_transform(infer_getattr, 'getattr')
register_builtin_transform(infer_hasattr, 'hasattr')
register_builtin_transform(infer_tuple, 'tuple')
register_builtin_transform(infer_set, 'set')
register_builtin_transform(infer_list, 'list')
register_builtin_transform(infer_dict, 'dict')
register_builtin_transform(infer_frozenset, 'frozenset')
register_builtin_transform(infer_type, 'type')
register_builtin_transform(infer_slice, 'slice')
# Infer object.__new__ calls
MANAGER.register_transform(
    nodes.ClassDef,
    inference_tip(_infer_object__new__decorator),
    _infer_object__new__decorator_check
)
# Copyright (c) 2016 Claudiu Popa <pcmanticore@gmail.com> | |||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html | |||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER | |||||
import sys | |||||
import astroid | |||||
# True on Python 3.5+, where deque gained copy/index/insert and the
# arithmetic dunders mocked in _deque_mock below.
PY35 = sys.version_info >= (3, 5)
def _collections_transform():
    """Return a mock module AST for ``collections``.

    Covers defaultdict, deque (via _deque_mock) and OrderedDict.  The
    bodies only need to look right to the inference engine; they are
    never executed (e.g. the bare ``default_factory`` reference below).
    """
    return astroid.parse('''
    class defaultdict(dict):
        default_factory = None
        def __missing__(self, key): pass
        def __getitem__(self, key): return default_factory
    ''' + _deque_mock() + '''
    class OrderedDict(dict):
        def __reversed__(self): return self[::-1]
    ''')
def _deque_mock():
    """Return the source of a mock ``deque`` class for the module stub above.

    On Python 3.5+ the extra methods added to deque in that release are
    appended to the class body.
    """
    base_deque_class = '''
    class deque(object):
        maxlen = 0
        def __init__(self, iterable=None, maxlen=None):
            self.iterable = iterable
        def append(self, x): pass
        def appendleft(self, x): pass
        def clear(self): pass
        def count(self, x): return 0
        def extend(self, iterable): pass
        def extendleft(self, iterable): pass
        def pop(self): pass
        def popleft(self): pass
        def remove(self, value): pass
        def reverse(self): pass
        def rotate(self, n=1): pass
        def __iter__(self): return self
        def __reversed__(self): return self.iterable[::-1]
        def __getitem__(self, index): pass
        def __setitem__(self, index, value): pass
        def __delitem__(self, index): pass
        def __bool__(self): return bool(self.iterable)
        def __nonzero__(self): return bool(self.iterable)
        def __contains__(self, o): return o in self.iterable
        def __len__(self): return len(self.iterable)
        def __copy__(self): return deque(self.iterable)'''
    if PY35:
        base_deque_class += '''
        def copy(self): return deque(self.iterable)
        def index(self, x, start=0, end=0): return 0
        def insert(self, x, i): pass
        def __add__(self, other): pass
        def __iadd__(self, other): pass
        def __mul__(self, other): pass
        def __imul__(self, other): pass
        def __rmul__(self, other): pass'''
    return base_deque_class
# Extend the real ``collections`` module with the mocked classes above.
astroid.register_module_extender(astroid.MANAGER, 'collections', _collections_transform)
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html | |||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER | |||||
import astroid | |||||
def _curses_transform():
    """Return a mock module AST for ``curses``.

    The real module defines these attribute/key/character/color constants
    in C, so astroid cannot see them; the stub declares them all with a
    dummy value of 1 purely so attribute lookups succeed.
    """
    return astroid.parse('''
    A_ALTCHARSET = 1
    A_BLINK = 1
    A_BOLD = 1
    A_DIM = 1
    A_INVIS = 1
    A_ITALIC = 1
    A_NORMAL = 1
    A_PROTECT = 1
    A_REVERSE = 1
    A_STANDOUT = 1
    A_UNDERLINE = 1
    A_HORIZONTAL = 1
    A_LEFT = 1
    A_LOW = 1
    A_RIGHT = 1
    A_TOP = 1
    A_VERTICAL = 1
    A_CHARTEXT = 1
    A_ATTRIBUTES = 1
    A_CHARTEXT = 1
    A_COLOR = 1
    KEY_MIN = 1
    KEY_BREAK = 1
    KEY_DOWN = 1
    KEY_UP = 1
    KEY_LEFT = 1
    KEY_RIGHT = 1
    KEY_HOME = 1
    KEY_BACKSPACE = 1
    KEY_F0 = 1
    KEY_Fn = 1
    KEY_DL = 1
    KEY_IL = 1
    KEY_DC = 1
    KEY_IC = 1
    KEY_EIC = 1
    KEY_CLEAR = 1
    KEY_EOS = 1
    KEY_EOL = 1
    KEY_SF = 1
    KEY_SR = 1
    KEY_NPAGE = 1
    KEY_PPAGE = 1
    KEY_STAB = 1
    KEY_CTAB = 1
    KEY_CATAB = 1
    KEY_ENTER = 1
    KEY_SRESET = 1
    KEY_RESET = 1
    KEY_PRINT = 1
    KEY_LL = 1
    KEY_A1 = 1
    KEY_A3 = 1
    KEY_B2 = 1
    KEY_C1 = 1
    KEY_C3 = 1
    KEY_BTAB = 1
    KEY_BEG = 1
    KEY_CANCEL = 1
    KEY_CLOSE = 1
    KEY_COMMAND = 1
    KEY_COPY = 1
    KEY_CREATE = 1
    KEY_END = 1
    KEY_EXIT = 1
    KEY_FIND = 1
    KEY_HELP = 1
    KEY_MARK = 1
    KEY_MESSAGE = 1
    KEY_MOVE = 1
    KEY_NEXT = 1
    KEY_OPEN = 1
    KEY_OPTIONS = 1
    KEY_PREVIOUS = 1
    KEY_REDO = 1
    KEY_REFERENCE = 1
    KEY_REFRESH = 1
    KEY_REPLACE = 1
    KEY_RESTART = 1
    KEY_RESUME = 1
    KEY_SAVE = 1
    KEY_SBEG = 1
    KEY_SCANCEL = 1
    KEY_SCOMMAND = 1
    KEY_SCOPY = 1
    KEY_SCREATE = 1
    KEY_SDC = 1
    KEY_SDL = 1
    KEY_SELECT = 1
    KEY_SEND = 1
    KEY_SEOL = 1
    KEY_SEXIT = 1
    KEY_SFIND = 1
    KEY_SHELP = 1
    KEY_SHOME = 1
    KEY_SIC = 1
    KEY_SLEFT = 1
    KEY_SMESSAGE = 1
    KEY_SMOVE = 1
    KEY_SNEXT = 1
    KEY_SOPTIONS = 1
    KEY_SPREVIOUS = 1
    KEY_SPRINT = 1
    KEY_SREDO = 1
    KEY_SREPLACE = 1
    KEY_SRIGHT = 1
    KEY_SRSUME = 1
    KEY_SSAVE = 1
    KEY_SSUSPEND = 1
    KEY_SUNDO = 1
    KEY_SUSPEND = 1
    KEY_UNDO = 1
    KEY_MOUSE = 1
    KEY_RESIZE = 1
    KEY_MAX = 1
    ACS_BBSS = 1
    ACS_BLOCK = 1
    ACS_BOARD = 1
    ACS_BSBS = 1
    ACS_BSSB = 1
    ACS_BSSS = 1
    ACS_BTEE = 1
    ACS_BULLET = 1
    ACS_CKBOARD = 1
    ACS_DARROW = 1
    ACS_DEGREE = 1
    ACS_DIAMOND = 1
    ACS_GEQUAL = 1
    ACS_HLINE = 1
    ACS_LANTERN = 1
    ACS_LARROW = 1
    ACS_LEQUAL = 1
    ACS_LLCORNER = 1
    ACS_LRCORNER = 1
    ACS_LTEE = 1
    ACS_NEQUAL = 1
    ACS_PI = 1
    ACS_PLMINUS = 1
    ACS_PLUS = 1
    ACS_RARROW = 1
    ACS_RTEE = 1
    ACS_S1 = 1
    ACS_S3 = 1
    ACS_S7 = 1
    ACS_S9 = 1
    ACS_SBBS = 1
    ACS_SBSB = 1
    ACS_SBSS = 1
    ACS_SSBB = 1
    ACS_SSBS = 1
    ACS_SSSB = 1
    ACS_SSSS = 1
    ACS_STERLING = 1
    ACS_TTEE = 1
    ACS_UARROW = 1
    ACS_ULCORNER = 1
    ACS_URCORNER = 1
    ACS_VLINE = 1
    COLOR_BLACK = 1
    COLOR_BLUE = 1
    COLOR_CYAN = 1
    COLOR_GREEN = 1
    COLOR_MAGENTA = 1
    COLOR_RED = 1
    COLOR_WHITE = 1
    COLOR_YELLOW = 1
    ''')
# Extend the real ``curses`` module with the constants declared above.
astroid.register_module_extender(astroid.MANAGER, 'curses', _curses_transform)
# Copyright (c) 2015-2016 Claudiu Popa <pcmanticore@gmail.com> | |||||
# Copyright (c) 2016 Cara Vinson <ceridwenv@gmail.com> | |||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html | |||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER | |||||
"""Astroid hooks for dateutil""" | |||||
import textwrap | |||||
from astroid import MANAGER, register_module_extender | |||||
from astroid.builder import AstroidBuilder | |||||
def dateutil_transform():
    """Return a stub AST so ``dateutil.parser.parse`` infers to a datetime."""
    return AstroidBuilder(MANAGER).string_build(textwrap.dedent('''
    import datetime
    def parse(timestr, parserinfo=None, **kwargs):
        return datetime.datetime()
    '''))
# Extend the real ``dateutil.parser`` module with the stub above.
register_module_extender(MANAGER, 'dateutil.parser', dateutil_transform)
# Copyright (c) 2017 Claudiu Popa <pcmanticore@gmail.com> | |||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html | |||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER | |||||
import sys | |||||
import astroid | |||||
def _clone_node_with_lineno(node, parent, lineno):
    """Recreate *node* under *parent* with an overridden ``lineno``.

    Constructor fields go through ``__init__``; child-node fields are
    supplied separately through ``postinit``, mirroring how astroid
    builds nodes.
    """
    cls = node.__class__
    other_fields = node._other_fields
    _astroid_fields = node._astroid_fields
    init_params = {
        'lineno': lineno,
        'col_offset': node.col_offset,
        'parent': parent
    }
    # Child nodes are not constructor arguments; they go to postinit().
    postinit_params = {
        param: getattr(node, param)
        for param in _astroid_fields
    }
    if other_fields:
        # Non-child data fields (e.g. names, values) are constructor args.
        init_params.update({
            param: getattr(node, param)
            for param in other_fields
        })
    new_node = cls(**init_params)
    if hasattr(node, 'postinit') and _astroid_fields:
        new_node.postinit(**postinit_params)
    return new_node
def _transform_formatted_value(node):
    """Fix the line number of an f-string's inner value node.

    Returns a replacement FormattedValue whose value child carries the
    outer node's line number, or None (implicitly) to leave the node
    untouched.
    """
    # Only rewrite when the child reports line 1 (the buggy default from
    # http://bugs.python.org/issue29051) and it actually disagrees with
    # the enclosing node's line number.
    if node.value and node.value.lineno == 1:
        if node.lineno != node.value.lineno:
            new_node = astroid.FormattedValue(
                lineno=node.lineno,
                col_offset=node.col_offset,
                parent=node.parent
            )
            new_value = _clone_node_with_lineno(
                node=node.value,
                lineno=node.lineno,
                parent=new_node
            )
            new_node.postinit(value=new_value,
                              format_spec=node.format_spec)
            return new_node
# f-string literals only exist on Python 3.6+.
if sys.version_info[:2] >= (3, 6):
    # TODO: this fix tries to *patch* http://bugs.python.org/issue29051
    # The problem is that FormattedValue.value, which is a Name node,
    # has wrong line numbers, usually 1. This creates problems for pylint,
    # which expects correct line numbers for things such as message control.
    astroid.MANAGER.register_transform(
        astroid.FormattedValue,
        _transform_formatted_value)
# Copyright (c) 2016 Claudiu Popa <pcmanticore@gmail.com> | |||||
"""Astroid hooks for understanding functools library module.""" | |||||
import astroid | |||||
from astroid import BoundMethod | |||||
from astroid import extract_node | |||||
from astroid import helpers | |||||
from astroid.interpreter import objectmodel | |||||
from astroid import MANAGER | |||||
LRU_CACHE = 'functools.lru_cache' | |||||
class LruWrappedModel(objectmodel.FunctionModel):
    """Special attribute model for functions decorated with functools.lru_cache.
    The said decorators patches at decoration time some functions onto
    the decorated function.
    """
    # NOTE(review): the ``py`` prefix on the property names below appears
    # to be the object-model convention for exposing the ``__wrapped__``,
    # ``cache_info`` and ``cache_clear`` attributes -- confirm against
    # astroid.interpreter.objectmodel.
    @property
    def py__wrapped__(self):
        # The wrapped function is the decorated function itself.
        return self._instance
    @property
    def pycache_info(self):
        # cache_info() returns a functools._CacheInfo namedtuple; mock one.
        cache_info = extract_node('''
        from functools import _CacheInfo
        _CacheInfo(0, 0, 0, 0)
        ''')
        class CacheInfoBoundMethod(BoundMethod):
            def infer_call_result(self, caller, context=None):
                yield helpers.safe_infer(cache_info)
        return CacheInfoBoundMethod(proxy=self._instance, bound=self._instance)
    @property
    def pycache_clear(self):
        # cache_clear() returns None; model it with a no-op function.
        node = extract_node('''def cache_clear(self): pass''')
        return BoundMethod(proxy=node, bound=self._instance.parent.scope())
def _transform_lru_cache(node, context=None):
    """Attach the lru_cache attribute model to the decorated function node."""
    # TODO: this is not ideal, since the node should be immutable,
    # but due to https://github.com/PyCQA/astroid/issues/354,
    # there's not much we can do now.
    # Replacing the node would work partially, because,
    # in pylint, the old node would still be available, leading
    # to spurious false positives.
    node.special_attributes = LruWrappedModel()(node)
def _looks_like_lru_cache(node):
    """Check if the given function node is decorated with lru_cache."""
    if not node.decorators:
        return False
    for decorator in node.decorators.nodes:
        # lru_cache is always applied as a call: @lru_cache(maxsize=...).
        if not isinstance(decorator, astroid.Call):
            continue
        inferred = helpers.safe_infer(decorator.func)
        if inferred in (None, astroid.Uninferable):
            continue
        if not isinstance(inferred, astroid.FunctionDef):
            continue
        if inferred.qname() == LRU_CACHE:
            return True
    return False
# Patch every function decorated with functools.lru_cache.
MANAGER.register_transform(astroid.FunctionDef, _transform_lru_cache,
                           _looks_like_lru_cache)
# Copyright (c) 2015-2016 Claudiu Popa <pcmanticore@gmail.com> | |||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html | |||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER | |||||
"""Astroid hooks for the Python 2 GObject introspection bindings. | |||||
Helps with understanding everything imported from 'gi.repository' | |||||
""" | |||||
import inspect | |||||
import itertools | |||||
import sys | |||||
import re | |||||
import warnings | |||||
from astroid import MANAGER, AstroidBuildingError, nodes | |||||
from astroid.builder import AstroidBuilder | |||||
# Cache of module name -> built astroid Module (None records a failed import
# so it is not retried).
_inspected_modules = {}

# Matches names that are valid Python identifiers.
_identifier_re = r'^[A-Za-z_]\w*$'
def _gi_build_stub(parent):
    """
    Inspect the passed module recursively and build stubs for functions,
    classes, etc.

    Returns Python source text mirroring the names found on *parent*;
    the stubs only need to expose names and call shapes, not behaviour.
    """
    classes = {}
    functions = {}
    constants = {}
    methods = {}
    for name in dir(parent):
        if name.startswith("__"):
            continue
        # Check if this is a valid name in python
        if not re.match(_identifier_re, name):
            continue
        try:
            obj = getattr(parent, name)
        except Exception:
            # Fix: this was a bare ``except:``, which also swallowed
            # KeyboardInterrupt/SystemExit. Only skip attributes whose
            # access itself raised.
            continue

        if inspect.isclass(obj):
            classes[name] = obj
        elif inspect.isfunction(obj) or inspect.isbuiltin(obj):
            functions[name] = obj
        elif inspect.ismethod(obj) or inspect.ismethoddescriptor(obj):
            methods[name] = obj
        elif (str(obj).startswith("<flags") or
              str(obj).startswith("<enum ") or
              str(obj).startswith("<GType ") or
              inspect.isdatadescriptor(obj)):
            # GObject flags/enums/GTypes carry no useful static value.
            constants[name] = 0
        elif isinstance(obj, (int, str)):
            constants[name] = obj
        elif callable(obj):
            # Fall back to a function for anything callable
            functions[name] = obj
        else:
            # Assume everything else is some manner of constant
            constants[name] = 0

    ret = ""

    if constants:
        ret += "# %s constants\n\n" % parent.__name__
    for name in sorted(constants):
        if name[0].isdigit():
            # GDK has some busted constant names like
            # Gdk.EventType.2BUTTON_PRESS
            continue
        val = constants[name]
        strval = str(val)
        if isinstance(val, str):
            strval = '"%s"' % str(val).replace("\\", "\\\\")
        ret += "%s = %s\n" % (name, strval)

    if ret:
        ret += "\n\n"
    if functions:
        ret += "# %s functions\n\n" % parent.__name__
    for name in sorted(functions):
        ret += "def %s(*args, **kwargs):\n" % name
        ret += "    pass\n"

    if ret:
        ret += "\n\n"
    if methods:
        ret += "# %s methods\n\n" % parent.__name__
    for name in sorted(methods):
        ret += "def %s(self, *args, **kwargs):\n" % name
        ret += "    pass\n"

    if ret:
        ret += "\n\n"
    if classes:
        ret += "# %s classes\n\n" % parent.__name__
    for name in sorted(classes):
        ret += "class %s(object):\n" % name
        # Recurse so nested names are stubbed too; an empty body needs "pass".
        classret = _gi_build_stub(classes[name])
        if not classret:
            classret = "pass\n"
        for line in classret.splitlines():
            ret += "    " + line + "\n"
        ret += "\n"
    return ret
def _import_gi_module(modname):
    """Build (and cache) an astroid module for a ``gi.repository`` submodule.

    Raises AstroidBuildingError for anything outside gi.repository or when
    the runtime introspection failed.
    """
    # we only consider gi.repository submodules
    if not modname.startswith('gi.repository.'):
        raise AstroidBuildingError(modname=modname)
    # build astroid representation unless we already tried so
    if modname not in _inspected_modules:
        modnames = [modname]
        optional_modnames = []

        # GLib and GObject may have some special case handling
        # in pygobject that we need to cope with. However at
        # least as of pygobject3-3.13.91 the _glib module doesn't
        # exist anymore, so we treat these modules as optional.
        if modname == 'gi.repository.GLib':
            optional_modnames.append('gi._glib')
        elif modname == 'gi.repository.GObject':
            optional_modnames.append('gi._gobject')

        try:
            modcode = ''
            for m in itertools.chain(modnames, optional_modnames):
                try:
                    with warnings.catch_warnings():
                        # Just inspecting the code can raise gi deprecation
                        # warnings, so ignore them.
                        try:
                            from gi import PyGIDeprecationWarning, PyGIWarning
                            warnings.simplefilter("ignore", PyGIDeprecationWarning)
                            warnings.simplefilter("ignore", PyGIWarning)
                        except Exception:
                            pass

                        __import__(m)
                        modcode += _gi_build_stub(sys.modules[m])
                except ImportError:
                    # Optional helper modules may legitimately be missing.
                    if m not in optional_modnames:
                        raise
        except ImportError:
            # Cache the failure so every later lookup fails fast.
            astng = _inspected_modules[modname] = None
        else:
            astng = AstroidBuilder(MANAGER).string_build(modcode, modname)
            _inspected_modules[modname] = astng
    else:
        astng = _inspected_modules[modname]
    if astng is None:
        raise AstroidBuildingError(modname=modname)
    return astng
def _looks_like_require_version(node):
    """Return True for calls shaped like ``gi.require_version(name, version)``.

    Only function calls with exactly two constant arguments are accepted.
    """
    if len(node.args) != 2:
        return False
    if not all(isinstance(arg, nodes.Const) for arg in node.args):
        return False

    func = node.func
    if isinstance(func, nodes.Attribute):
        # Attribute form must be precisely ``gi.require_version``.
        return (func.attrname == 'require_version'
                and isinstance(func.expr, nodes.Name)
                and func.expr.name == 'gi')
    if isinstance(func, nodes.Name):
        return func.name == 'require_version'
    return False
def _register_require_version(node): | |||||
# Load the gi.require_version locally | |||||
try: | |||||
import gi | |||||
gi.require_version(node.args[0].value, node.args[1].value) | |||||
except Exception: | |||||
pass | |||||
return node | |||||
# Hook failed gi.repository imports (built via runtime introspection) and
# apply gi.require_version() calls eagerly at inference time.
MANAGER.register_failed_import_hook(_import_gi_module)
MANAGER.register_transform(nodes.Call, _register_require_version, _looks_like_require_version)
# Copyright (c) 2016 Claudiu Popa <pcmanticore@gmail.com> | |||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html | |||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER | |||||
import six | |||||
import astroid | |||||
def _hashlib_transform():
    """Extend ``hashlib`` with stub classes for the guaranteed algorithms."""
    template = '''
class %(name)s(object):
    def __init__(self, value=''): pass
    def digest(self):
        return %(digest)s
    def copy(self):
        return self
    def update(self, value): pass
    def hexdigest(self):
        return ''
    @property
    def name(self):
        return %(name)r
    @property
    def block_size(self):
        return 1
    @property
    def digest_size(self):
        return 1
'''
    # digest() returns bytes on Python 3, str on Python 2.
    digest_literal = 'b""' if six.PY3 else '""'
    stub_classes = []
    for algorithm in ('md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512'):
        stub_classes.append(template % {'name': algorithm,
                                        'digest': digest_literal})
    return astroid.parse("".join(stub_classes))
astroid.register_module_extender(astroid.MANAGER, 'hashlib', _hashlib_transform) | |||||
# Copyright (c) 2016 Claudiu Popa <pcmanticore@gmail.com> | |||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html | |||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER | |||||
'''Astroid brain hints for some of the _io C objects.''' | |||||
import astroid | |||||
# _io class names handled below; the BUFFERED classes get a ``raw`` FileIO
# attribute, TextIOWrapper gets a ``buffer`` attribute.
BUFFERED = {'BufferedWriter', 'BufferedReader'}
TextIOWrapper = 'TextIOWrapper'
FileIO = 'FileIO'
BufferedWriter = 'BufferedWriter'
def _generic_io_transform(node, name, cls):
    '''Transform the given name, by adding the given *class* as a member of the node.'''
    io_module = astroid.MANAGER.ast_from_module_name('_io')
    instance = io_module[cls].instantiate_class()
    node.locals[name] = [instance]
def _transform_text_io_wrapper(node):
    """Give TextIOWrapper a ``buffer`` attribute inferred as BufferedWriter.

    This is not always correct, since it can vary with the kind of stream
    (stdout, stderr or stdin), but the stream name is not accessible here,
    which is why BufferedWriter is used as a default.
    """
    return _generic_io_transform(node, cls=BufferedWriter, name='buffer')
def _transform_buffered(node):
    """Give the buffered reader/writer classes a ``raw`` FileIO attribute."""
    return _generic_io_transform(node, cls=FileIO, name='raw')
# Attach the synthetic attributes to the matching _io classes at load time.
astroid.MANAGER.register_transform(astroid.ClassDef,
                                   _transform_buffered,
                                   lambda node: node.name in BUFFERED)
astroid.MANAGER.register_transform(astroid.ClassDef,
                                   _transform_text_io_wrapper,
                                   lambda node: node.name == TextIOWrapper)
# Copyright (c) 2015-2016 Claudiu Popa <pcmanticore@gmail.com> | |||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html | |||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER | |||||
from astroid import MANAGER, register_module_extender | |||||
from astroid.builder import AstroidBuilder | |||||
def mechanize_transform():
    """Build a stub mechanize module whose Browser.open* methods return None."""
    browser_stub = '''
class Browser(object):
    def open(self, url, data=None, timeout=None):
        return None
    def open_novisit(self, url, data=None, timeout=None):
        return None
    def open_local_file(self, filename):
        return None
'''
    return AstroidBuilder(MANAGER).string_build(browser_stub)
register_module_extender(MANAGER, 'mechanize', mechanize_transform) |
# Copyright (c) 2016 Claudiu Popa <pcmanticore@gmail.com> | |||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html | |||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER | |||||
import sys | |||||
import astroid | |||||
from astroid import exceptions | |||||
PY34 = sys.version_info >= (3, 4) | |||||
def _multiprocessing_transform():
    """Build a stub ``multiprocessing`` module exposing ``Manager`` plus, on
    Python 3.4+, the public attributes forwarded from the default context."""
    module = astroid.parse('''
    from multiprocessing.managers import SyncManager
    def Manager():
        return SyncManager()
    ''')
    if not PY34:
        return module

    # On Python 3.4, multiprocessing uses a getattr lookup inside contexts,
    # in order to get the attributes they need. Since it's extremely
    # dynamic, we use this approach to fake it.
    node = astroid.parse('''
    from multiprocessing.context import DefaultContext, BaseContext
    default = DefaultContext()
    base = BaseContext()
    ''')
    try:
        context = next(node['default'].infer())
        base = next(node['base'].infer())
    except exceptions.InferenceError:
        # Can't introspect the contexts; fall back to the bare stub.
        return module

    for node in (context, base):
        for key, value in node.locals.items():
            if key.startswith("_"):
                continue

            value = value[0]
            if isinstance(value, astroid.FunctionDef):
                # We need to rebound this, since otherwise
                # it will have an extra argument (self).
                value = astroid.BoundMethod(value, node)
            module[key] = value
    return module
def _multiprocessing_managers_transform():
    """Build a stub ``multiprocessing.managers`` module whose SyncManager
    returns plain thread/stdlib objects for each proxy factory."""
    return astroid.parse('''
    import array
    import threading
    import multiprocessing.pool as pool

    import six

    class Namespace(object):
        pass

    class Value(object):
        def __init__(self, typecode, value, lock=True):
            self._typecode = typecode
            self._value = value
        def get(self):
            return self._value
        def set(self, value):
            self._value = value
        def __repr__(self):
            return '%s(%r, %r)'%(type(self).__name__, self._typecode, self._value)
        value = property(get, set)

    def Array(typecode, sequence, lock=True):
        return array.array(typecode, sequence)

    class SyncManager(object):
        Queue = JoinableQueue = six.moves.queue.Queue
        Event = threading.Event
        RLock = threading.RLock
        BoundedSemaphore = threading.BoundedSemaphore
        Condition = threading.Condition
        Barrier = threading.Barrier
        Pool = pool.Pool
        list = list
        dict = dict
        Value = Value
        Array = Array
        Namespace = Namespace
        __enter__ = lambda self: self
        __exit__ = lambda *args: args
        def start(self, initializer=None, initargs=None):
            pass
        def shutdown(self):
            pass
    ''')
# Install both stubs so inference understands multiprocessing's dynamic API.
astroid.register_module_extender(astroid.MANAGER, 'multiprocessing.managers',
                                 _multiprocessing_managers_transform)
astroid.register_module_extender(astroid.MANAGER, 'multiprocessing',
                                 _multiprocessing_transform)
# Copyright (c) 2012-2015 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr> | |||||
# Copyright (c) 2014-2016 Claudiu Popa <pcmanticore@gmail.com> | |||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html | |||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER | |||||
"""Astroid hooks for the Python standard library.""" | |||||
import functools | |||||
import sys | |||||
import keyword | |||||
from textwrap import dedent | |||||
from astroid import ( | |||||
MANAGER, UseInferenceDefault, inference_tip, | |||||
InferenceError) | |||||
from astroid import arguments | |||||
from astroid import exceptions | |||||
from astroid import nodes | |||||
from astroid.builder import AstroidBuilder, extract_node | |||||
from astroid import util | |||||
def _infer_first(node, context):
    """Return the first inferred value of *node*.

    Raises UseInferenceDefault for uninferable nodes and InferenceError when
    inference yields nothing at all.
    """
    if node is util.Uninferable:
        raise UseInferenceDefault
    try:
        value = next(node.infer(context=context))
    except StopIteration:
        raise InferenceError()
    if value is util.Uninferable:
        raise UseInferenceDefault()
    return value
def _find_func_form_arguments(node, context):
    """Extract the (typename, field_names) pair from a namedtuple/enum call.

    Positional arguments win over keywords, mirroring a real call. Raises
    UseInferenceDefault when either argument is missing.
    """
    args = node.args
    keywords = node.keywords
    if keywords:
        found_keywords = {kw.arg: kw.value for kw in keywords}
    else:
        found_keywords = {}

    def _arg_or_keyword(position, key_name=None):
        if len(args) > position:
            return _infer_first(args[position], context)
        if key_name and key_name in found_keywords:
            return _infer_first(found_keywords[key_name], context)
        return None

    name = _arg_or_keyword(0, key_name='typename')
    names = _arg_or_keyword(1, key_name='field_names')
    if name and names:
        return name.value, names

    raise UseInferenceDefault()
def infer_func_form(node, base_type, context=None, enum=False):
    """Specific inference function for namedtuple or Python 3 enum. """
    # node is a Call node, class name as first argument and generated class
    # attributes as second argument
    # namedtuple or enums list of attributes can be a list of strings or a
    # whitespace-separate string
    try:
        name, names = _find_func_form_arguments(node, context)
        try:
            # Simple case: a single string of names (comma or space separated).
            attributes = names.value.replace(',', ' ').split()
        except AttributeError:
            # ``names`` is not a Const string; fall back to container forms.
            if not enum:
                attributes = [_infer_first(const, context).value
                              for const in names.elts]
            else:
                # Enums supports either iterator of (name, value) pairs
                # or mappings.
                # TODO: support only list, tuples and mappings.
                if hasattr(names, 'items') and isinstance(names.items, list):
                    attributes = [_infer_first(const[0], context).value
                                  for const in names.items
                                  if isinstance(const[0], nodes.Const)]
                elif hasattr(names, 'elts'):
                    # Enums can support either ["a", "b", "c"]
                    # or [("a", 1), ("b", 2), ...], but they can't
                    # be mixed.
                    if all(isinstance(const, nodes.Tuple)
                           for const in names.elts):
                        attributes = [_infer_first(const.elts[0], context).value
                                      for const in names.elts
                                      if isinstance(const, nodes.Tuple)]
                    else:
                        attributes = [_infer_first(const, context).value
                                      for const in names.elts]
                else:
                    raise AttributeError
            if not attributes:
                raise AttributeError
    except (AttributeError, exceptions.InferenceError):
        raise UseInferenceDefault()

    # If we can't infer the name of the class, don't crash, up to this point
    # we know it is a namedtuple anyway.
    name = name or 'Uninferable'
    # we want to return a Class node instance with proper attributes set
    class_node = nodes.ClassDef(name, 'docstring')
    class_node.parent = node.parent
    # set base class=tuple
    class_node.bases.append(base_type)
    # XXX add __init__(*attributes) method
    for attr in attributes:
        fake_node = nodes.EmptyNode()
        fake_node.parent = class_node
        fake_node.attrname = attr
        class_node.instance_attrs[attr] = [fake_node]
    return class_node, name, attributes
def _looks_like(node, name):
    """Return True when *node* is a call whose callee is named *name*."""
    func = node.func
    if isinstance(func, nodes.Attribute):
        return func.attrname == name
    return isinstance(func, nodes.Name) and func.name == name
# Predicates matching ``namedtuple(...)`` / ``Enum(...)`` call sites.
_looks_like_namedtuple = functools.partial(_looks_like, name='namedtuple')
_looks_like_enum = functools.partial(_looks_like, name='Enum')
def infer_named_tuple(node, context=None):
    """Specific inference function for namedtuple Call node"""
    class_node, name, attributes = infer_func_form(node, nodes.Tuple._proxied,
                                                   context=context)
    # Determine whether rename=True was passed, to mimic collections'
    # replacement of invalid/duplicate field names.
    call_site = arguments.CallSite.from_call(node)
    func = next(extract_node('import collections; collections.namedtuple').infer())
    try:
        rename = next(call_site.infer_argument(func, 'rename', context)).bool_value()
    except InferenceError:
        rename = False

    if rename:
        attributes = _get_renamed_namedtuple_atributes(attributes)

    replace_args = ', '.join(
        '{arg}=None'.format(arg=arg)
        for arg in attributes
    )

    field_def = ("    {name} = property(lambda self: self[{index:d}], "
                 "doc='Alias for field number {index:d}')")
    field_defs = '\n'.join(field_def.format(name=name, index=index)
                           for index, name in enumerate(attributes))

    # Build a fake class exposing the namedtuple API, then graft its
    # members onto the class produced by infer_func_form.
    fake = AstroidBuilder(MANAGER).string_build('''
class %(name)s(tuple):
    __slots__ = ()
    _fields = %(fields)r
    def _asdict(self):
        return self.__dict__
    @classmethod
    def _make(cls, iterable, new=tuple.__new__, len=len):
        return new(cls, iterable)
    def _replace(self, %(replace_args)s):
        return self
    def __getnewargs__(self):
        return tuple(self)
%(field_defs)s
    ''' % {'name': name,
           'fields': attributes,
           'field_defs': field_defs,
           'replace_args': replace_args})
    class_node.locals['_asdict'] = fake.body[0].locals['_asdict']
    class_node.locals['_make'] = fake.body[0].locals['_make']
    class_node.locals['_replace'] = fake.body[0].locals['_replace']
    class_node.locals['_fields'] = fake.body[0].locals['_fields']
    for attr in attributes:
        class_node.locals[attr] = fake.body[0].locals[attr]
    # we use UseInferenceDefault, we can't be a generator so return an iterator
    return iter([class_node])
def _get_renamed_namedtuple_atributes(field_names): | |||||
names = list(field_names) | |||||
seen = set() | |||||
for i, name in enumerate(field_names): | |||||
if (not all(c.isalnum() or c == '_' for c in name) or keyword.iskeyword(name) | |||||
or not name or name[0].isdigit() or name.startswith('_') or name in seen): | |||||
names[i] = '_%d' % i | |||||
seen.add(name) | |||||
return tuple(names) | |||||
def infer_enum(node, context=None):
    """ Specific inference function for enum Call node. """
    # A fake metaclass giving Enum-like call/iteration/subscript behaviour.
    enum_meta = extract_node('''
    class EnumMeta(object):
        'docstring'
        def __call__(self, node):
            class EnumAttribute(object):
                name = ''
                value = 0
            return EnumAttribute()
        def __iter__(self):
            class EnumAttribute(object):
                name = ''
                value = 0
            return [EnumAttribute()]
        def __next__(self):
            return next(iter(self))
        def __getitem__(self, attr):
            class Value(object):
                @property
                def name(self):
                    return ''
                @property
                def value(self):
                    return attr

            return Value()
        __members__ = ['']
    ''')
    class_node = infer_func_form(node, enum_meta,
                                 context=context, enum=True)[0]
    return iter([class_node.instantiate_class()])
def infer_enum_class(node):
    """ Specific inference for enums. """
    names = set(('Enum', 'IntEnum', 'enum.Enum', 'enum.IntEnum'))
    for basename in node.basenames:
        # TODO: doesn't handle subclasses yet. This implementation
        # is a hack to support enums.
        if basename not in names:
            continue
        if node.root().name == 'enum':
            # Skip if the class is directly from enum module.
            break
        for local, values in node.locals.items():
            if any(not isinstance(value, nodes.AssignName)
                   for value in values):
                continue

            stmt = values[0].statement()
            if isinstance(stmt, nodes.Assign):
                if isinstance(stmt.targets[0], nodes.Tuple):
                    targets = stmt.targets[0].itered()
                else:
                    targets = stmt.targets
            elif isinstance(stmt, nodes.AnnAssign):
                targets = [stmt.target]
            # NOTE(review): if ``stmt`` were neither Assign nor AnnAssign,
            # ``targets`` would be unbound below — presumably AssignName
            # values only arise from those two statement kinds; confirm.

            new_targets = []
            for target in targets:
                # Replace all the assignments with our mocked class.
                classdef = dedent('''
                class %(name)s(%(types)s):
                    @property
                    def value(self):
                        # Not the best return.
                        return None
                    @property
                    def name(self):
                        return %(name)r
                ''' % {'name': target.name, 'types': ', '.join(node.basenames)})
                fake = AstroidBuilder(MANAGER).string_build(classdef)[target.name]
                fake.parent = target.parent
                for method in node.mymethods():
                    fake.locals[method.name] = [method]
                new_targets.append(fake.instantiate_class())
            node.locals[local] = new_targets
        break
    return node
# Hook namedtuple()/Enum() calls and Enum subclass definitions into inference.
MANAGER.register_transform(nodes.Call, inference_tip(infer_named_tuple),
                           _looks_like_namedtuple)
MANAGER.register_transform(nodes.Call, inference_tip(infer_enum),
                           _looks_like_enum)
MANAGER.register_transform(nodes.ClassDef, infer_enum_class)
# Copyright (c) 2015-2016 Claudiu Popa <pcmanticore@gmail.com> | |||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html | |||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER | |||||
"""Hooks for nose library.""" | |||||
import re | |||||
import textwrap | |||||
import astroid | |||||
import astroid.builder | |||||
_BUILDER = astroid.builder.AstroidBuilder(astroid.MANAGER) | |||||
def _pep8(name, caps=re.compile('([A-Z])')): | |||||
return caps.sub(lambda m: '_' + m.groups()[0].lower(), name) | |||||
def _nose_tools_functions():
    """Get an iterator of names and bound methods."""
    module = _BUILDER.string_build(textwrap.dedent('''
    import unittest

    class Test(unittest.TestCase):
        pass
    a = Test()
    '''))
    try:
        case = next(module['a'].infer())
    except astroid.InferenceError:
        return
    for method in case.methods():
        method_name = method.name
        if method_name.startswith('assert') and '_' not in method_name:
            yield _pep8(method_name), astroid.BoundMethod(method, case)
        if method_name == 'assertEqual':
            # nose also exports assert_equals.
            yield 'assert_equals', astroid.BoundMethod(method, case)
def _nose_tools_transform(node):
    """Inject the pep8-style assert aliases into the nose.tools module node."""
    for alias, bound_method in _nose_tools_functions():
        node.locals[alias] = [bound_method]
def _nose_tools_trivial_transform():
    """Custom transform for the nose.tools module."""
    stub = _BUILDER.string_build('''__all__ = []''')
    all_entries = ['ok_', 'eq_']

    for pep8_name, method in _nose_tools_functions():
        all_entries.append(pep8_name)
        stub[pep8_name] = method

    # nose.tools builds __all__ imperatively with .append, so rewrite the
    # stub's assignment to hold the collected names instead.
    all_assign = stub['__all__'].parent
    names_node = astroid.List(all_entries)
    names_node.parent = all_assign
    all_assign.value = names_node
    return stub
# Extend nose.tools.trivial and patch the nose.tools module itself.
astroid.register_module_extender(astroid.MANAGER, 'nose.tools.trivial',
                                 _nose_tools_trivial_transform)
astroid.MANAGER.register_transform(astroid.Module, _nose_tools_transform,
                                   lambda n: n.name == 'nose.tools')
# Copyright (c) 2015-2016 Claudiu Popa <pcmanticore@gmail.com> | |||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html | |||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER | |||||
"""Astroid hooks for numpy.""" | |||||
import astroid | |||||
def numpy_random_mtrand_transform():
    """Stub the C-implemented numpy.random.mtrand functions for inference."""
    return astroid.parse('''
    def beta(a, b, size=None): pass
    def binomial(n, p, size=None): pass
    def bytes(length): pass
    def chisquare(df, size=None): pass
    def choice(a, size=None, replace=True, p=None): pass
    def dirichlet(alpha, size=None): pass
    def exponential(scale=1.0, size=None): pass
    def f(dfnum, dfden, size=None): pass
    def gamma(shape, scale=1.0, size=None): pass
    def geometric(p, size=None): pass
    def get_state(): pass
    def gumbel(loc=0.0, scale=1.0, size=None): pass
    def hypergeometric(ngood, nbad, nsample, size=None): pass
    def laplace(loc=0.0, scale=1.0, size=None): pass
    def logistic(loc=0.0, scale=1.0, size=None): pass
    def lognormal(mean=0.0, sigma=1.0, size=None): pass
    def logseries(p, size=None): pass
    def multinomial(n, pvals, size=None): pass
    def multivariate_normal(mean, cov, size=None): pass
    def negative_binomial(n, p, size=None): pass
    def noncentral_chisquare(df, nonc, size=None): pass
    def noncentral_f(dfnum, dfden, nonc, size=None): pass
    def normal(loc=0.0, scale=1.0, size=None): pass
    def pareto(a, size=None): pass
    def permutation(x): pass
    def poisson(lam=1.0, size=None): pass
    def power(a, size=None): pass
    def rand(*args): pass
    def randint(low, high=None, size=None, dtype='l'): pass
    def randn(*args): pass
    def random_integers(low, high=None, size=None): pass
    def random_sample(size=None): pass
    def rayleigh(scale=1.0, size=None): pass
    def seed(seed=None): pass
    def set_state(state): pass
    def shuffle(x): pass
    def standard_cauchy(size=None): pass
    def standard_exponential(size=None): pass
    def standard_gamma(shape, size=None): pass
    def standard_normal(size=None): pass
    def standard_t(df, size=None): pass
    def triangular(left, mode, right, size=None): pass
    def uniform(low=0.0, high=1.0, size=None): pass
    def vonmises(mu, kappa, size=None): pass
    def wald(mean, scale, size=None): pass
    def weibull(a, size=None): pass
    def zipf(a, size=None): pass
    ''')
def numpy_core_umath_transform():
    """Stub numpy.core.umath's ufuncs, all sharing the ufunc keyword set."""
    # Keyword arguments accepted by every ufunc, spliced into each stub below.
    ufunc_optional_keyword_arguments = ("""out=None, where=True, casting='same_kind', order='K', """
                                        """dtype=None, subok=True""")
    return astroid.parse('''
    # Constants
    e = 2.718281828459045
    euler_gamma = 0.5772156649015329

    # No arg functions
    def geterrobj(): pass

    # One arg functions
    def seterrobj(errobj): pass

    # One arg functions with optional kwargs
    def arccos(x, {opt_args:s}): pass
    def arccosh(x, {opt_args:s}): pass
    def arcsin(x, {opt_args:s}): pass
    def arcsinh(x, {opt_args:s}): pass
    def arctan(x, {opt_args:s}): pass
    def arctanh(x, {opt_args:s}): pass
    def cbrt(x, {opt_args:s}): pass
    def conj(x, {opt_args:s}): pass
    def conjugate(x, {opt_args:s}): pass
    def cosh(x, {opt_args:s}): pass
    def deg2rad(x, {opt_args:s}): pass
    def degrees(x, {opt_args:s}): pass
    def exp2(x, {opt_args:s}): pass
    def expm1(x, {opt_args:s}): pass
    def fabs(x, {opt_args:s}): pass
    def frexp(x, {opt_args:s}): pass
    def isfinite(x, {opt_args:s}): pass
    def isinf(x, {opt_args:s}): pass
    def log(x, {opt_args:s}): pass
    def log1p(x, {opt_args:s}): pass
    def log2(x, {opt_args:s}): pass
    def logical_not(x, {opt_args:s}): pass
    def modf(x, {opt_args:s}): pass
    def negative(x, {opt_args:s}): pass
    def rad2deg(x, {opt_args:s}): pass
    def radians(x, {opt_args:s}): pass
    def reciprocal(x, {opt_args:s}): pass
    def rint(x, {opt_args:s}): pass
    def sign(x, {opt_args:s}): pass
    def signbit(x, {opt_args:s}): pass
    def sinh(x, {opt_args:s}): pass
    def spacing(x, {opt_args:s}): pass
    def square(x, {opt_args:s}): pass
    def tan(x, {opt_args:s}): pass
    def tanh(x, {opt_args:s}): pass
    def trunc(x, {opt_args:s}): pass

    # Two args functions with optional kwargs
    def bitwise_and(x1, x2, {opt_args:s}): pass
    def bitwise_or(x1, x2, {opt_args:s}): pass
    def bitwise_xor(x1, x2, {opt_args:s}): pass
    def copysign(x1, x2, {opt_args:s}): pass
    def divide(x1, x2, {opt_args:s}): pass
    def equal(x1, x2, {opt_args:s}): pass
    def float_power(x1, x2, {opt_args:s}): pass
    def floor_divide(x1, x2, {opt_args:s}): pass
    def fmax(x1, x2, {opt_args:s}): pass
    def fmin(x1, x2, {opt_args:s}): pass
    def fmod(x1, x2, {opt_args:s}): pass
    def greater(x1, x2, {opt_args:s}): pass
    def hypot(x1, x2, {opt_args:s}): pass
    def ldexp(x1, x2, {opt_args:s}): pass
    def left_shift(x1, x2, {opt_args:s}): pass
    def less(x1, x2, {opt_args:s}): pass
    def logaddexp(x1, x2, {opt_args:s}): pass
    def logaddexp2(x1, x2, {opt_args:s}): pass
    def logical_and(x1, x2, {opt_args:s}): pass
    def logical_or(x1, x2, {opt_args:s}): pass
    def logical_xor(x1, x2, {opt_args:s}): pass
    def maximum(x1, x2, {opt_args:s}): pass
    def minimum(x1, x2, {opt_args:s}): pass
    def nextafter(x1, x2, {opt_args:s}): pass
    def not_equal(x1, x2, {opt_args:s}): pass
    def power(x1, x2, {opt_args:s}): pass
    def remainder(x1, x2, {opt_args:s}): pass
    def right_shift(x1, x2, {opt_args:s}): pass
    def subtract(x1, x2, {opt_args:s}): pass
    def true_divide(x1, x2, {opt_args:s}): pass
    '''.format(opt_args=ufunc_optional_keyword_arguments))
def numpy_core_numerictypes_transform():
    """Stub the scalar type names exported by numpy.core.numerictypes."""
    return astroid.parse('''
    # different types defined in numerictypes.py
    uint16 = type('uint16')
    uint32 = type('uint32')
    uint64 = type('uint64')
    int128 = type('int128')
    uint128 = type('uint128')
    float16 = type('float16')
    float32 = type('float32')
    float64 = type('float64')
    float80 = type('float80')
    float96 = type('float96')
    float128 = type('float128')
    float256 = type('float256')
    complex32 = type('complex32')
    complex64 = type('complex64')
    complex128 = type('complex128')
    complex160 = type('complex160')
    complex192 = type('complex192')
    complex256 = type('complex256')
    complex512 = type('complex512')
    timedelta64 = type('timedelta64')
    datetime64 = type('datetime64')
    unicode_ = type('unicode_')
    string_ = type('string_')
    object_ = type('object_')
    ''')
def numpy_funcs():
    """Return a stub exposing top-level numpy functions.

    ``numpy.sum`` is modelled through the ``builtins.sum`` it conceptually
    wraps, which is enough for return-type inference.
    """
    return astroid.parse('''
import builtins
def sum(a, axis=None, dtype=None, out=None, keepdims=None):
    return builtins.sum(a)
''')
# Register the stub builders so that importing these numpy modules during
# static analysis yields the synthetic definitions above instead of the
# opaque C-extension internals.
astroid.register_module_extender(astroid.MANAGER, 'numpy.core.umath', numpy_core_umath_transform)
astroid.register_module_extender(astroid.MANAGER, 'numpy.random.mtrand',
                                 numpy_random_mtrand_transform)
astroid.register_module_extender(astroid.MANAGER, 'numpy.core.numerictypes',
                                 numpy_core_numerictypes_transform)
astroid.register_module_extender(astroid.MANAGER, 'numpy', numpy_funcs)
# Copyright (c) 2016 Claudiu Popa <pcmanticore@gmail.com> | |||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html | |||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER | |||||
import astroid | |||||
from astroid import parse | |||||
from astroid import inference_tip | |||||
from astroid import register_module_extender | |||||
from astroid import MANAGER | |||||
def pkg_resources_transform():
    """Return a stub module mirroring the public ``pkg_resources`` API.

    The real module assembles much of its API dynamically, so astroid
    cannot see it; this hand-written skeleton lists the entry points.
    NOTE(review): several stub bodies are not actually executable (they
    reference unbound names such as ``self``/``os``) — they only need to
    parse, since astroid never runs them.
    """
    return parse('''
def require(*requirements):
    return pkg_resources.working_set.require(*requirements)
def run_script(requires, script_name):
    return pkg_resources.working_set.run_script(requires, script_name)
def iter_entry_points(group, name=None):
    return pkg_resources.working_set.iter_entry_points(group, name)
def resource_exists(package_or_requirement, resource_name):
    return get_provider(package_or_requirement).has_resource(resource_name)
def resource_isdir(package_or_requirement, resource_name):
    return get_provider(package_or_requirement).resource_isdir(
        resource_name)
def resource_filename(package_or_requirement, resource_name):
    return get_provider(package_or_requirement).get_resource_filename(
        self, resource_name)
def resource_stream(package_or_requirement, resource_name):
    return get_provider(package_or_requirement).get_resource_stream(
        self, resource_name)
def resource_string(package_or_requirement, resource_name):
    return get_provider(package_or_requirement).get_resource_string(
        self, resource_name)
def resource_listdir(package_or_requirement, resource_name):
    return get_provider(package_or_requirement).resource_listdir(
        resource_name)
def extraction_error():
    pass
def get_cache_path(archive_name, names=()):
    extract_path = self.extraction_path or get_default_cache()
    target_path = os.path.join(extract_path, archive_name+'-tmp', *names)
    return target_path
def postprocess(tempname, filename):
    pass
def set_extraction_path(path):
    pass
def cleanup_resources(force=False):
    pass
def get_distribution(dist):
    return Distribution(dist)
''')

# Attach the stub to the real module name.
register_module_extender(MANAGER, 'pkg_resources', pkg_resources_transform)
# Copyright (c) 2014-2016 Claudiu Popa <pcmanticore@gmail.com> | |||||
# Copyright (c) 2016 Cara Vinson <ceridwenv@gmail.com> | |||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html | |||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER | |||||
"""Astroid hooks for pytest.""" | |||||
from __future__ import absolute_import | |||||
from astroid import MANAGER, register_module_extender | |||||
from astroid.builder import AstroidBuilder | |||||
def pytest_transform():
    """Return a stub for the names ``pytest`` re-exports from ``_pytest``.

    The cascaded try/except blocks mirror API moves across pytest
    releases, so the same stub works for several pytest versions.
    """
    return AstroidBuilder(MANAGER).string_build('''
try:
    import _pytest.mark
    import _pytest.recwarn
    import _pytest.runner
    import _pytest.python
    import _pytest.skipping
    import _pytest.assertion
except ImportError:
    pass
else:
    deprecated_call = _pytest.recwarn.deprecated_call
    warns = _pytest.recwarn.warns
    exit = _pytest.runner.exit
    fail = _pytest.runner.fail
    skip = _pytest.runner.skip
    importorskip = _pytest.runner.importorskip
    xfail = _pytest.skipping.xfail
    mark = _pytest.mark.MarkGenerator()
    raises = _pytest.python.raises
    # New in pytest 3.0
    try:
        approx = _pytest.python.approx
        register_assert_rewrite = _pytest.assertion.register_assert_rewrite
    except AttributeError:
        pass
    # Moved in pytest 3.0
    try:
        import _pytest.freeze_support
        freeze_includes = _pytest.freeze_support.freeze_includes
    except ImportError:
        try:
            import _pytest.genscript
            freeze_includes = _pytest.genscript.freeze_includes
        except ImportError:
            pass
    try:
        import _pytest.debugging
        set_trace = _pytest.debugging.pytestPDB().set_trace
    except ImportError:
        try:
            import _pytest.pdb
            set_trace = _pytest.pdb.pytestPDB().set_trace
        except ImportError:
            pass
    try:
        import _pytest.fixtures
        fixture = _pytest.fixtures.fixture
        yield_fixture = _pytest.fixtures.yield_fixture
    except ImportError:
        try:
            import _pytest.python
            fixture = _pytest.python.fixture
            yield_fixture = _pytest.python.yield_fixture
        except ImportError:
            pass
''')

# ``py.test`` is the legacy import name of the same module.
register_module_extender(MANAGER, 'pytest', pytest_transform)
register_module_extender(MANAGER, 'py.test', pytest_transform)
# Copyright (c) 2015-2016 Claudiu Popa <pcmanticore@gmail.com> | |||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html | |||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER | |||||
"""Astroid hooks for the PyQT library.""" | |||||
from astroid import MANAGER, register_module_extender | |||||
from astroid.builder import AstroidBuilder | |||||
from astroid import nodes | |||||
from astroid import parse | |||||
def _looks_like_signal(node, signal_name='pyqtSignal'): | |||||
if '__class__' in node.instance_attrs: | |||||
try: | |||||
cls = node.instance_attrs['__class__'][0] | |||||
return cls.name == signal_name | |||||
except AttributeError: | |||||
# return False if the cls does not have a name attribute | |||||
pass | |||||
return False | |||||
def transform_pyqt_signal(node):
    """Attach the bound-signal API (connect/disconnect/emit) to instances
    of a detected pyqtSignal class.
    """
    stub = parse('''
class pyqtSignal(object):
    def connect(self, slot, type=None, no_receiver_check=False):
        pass
    def disconnect(self, slot):
        pass
    def emit(self, *args):
        pass
''')
    stub_cls = stub['pyqtSignal']
    # Expose each public method on instances of the detected class.
    for method_name in ('emit', 'disconnect', 'connect'):
        node.instance_attrs[method_name] = stub_cls[method_name]
def pyqt4_qtcore_transform():
    """Stub the C-level pieces of ``PyQt4.QtCore`` used by old-style signals."""
    return AstroidBuilder(MANAGER).string_build('''
def SIGNAL(signal_name): pass
class QObject(object):
    def emit(self, signal): pass
''')

register_module_extender(MANAGER, 'PyQt4.QtCore', pyqt4_qtcore_transform)
# Patch every function definition that _looks_like_signal identifies as a
# pyqtSignal with the bound-signal methods.
MANAGER.register_transform(nodes.FunctionDef, transform_pyqt_signal,
                           _looks_like_signal)
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html | |||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER | |||||
import random | |||||
import astroid | |||||
from astroid import helpers | |||||
from astroid import MANAGER | |||||
# Literal sequence node types whose ``.elts`` can be fed to random.sample()
# when inferring a ``random.sample`` call.
ACCEPTED_ITERABLES_FOR_SAMPLE = (
    astroid.List,
    astroid.Set,
    astroid.Tuple,
)
def _clone_node_with_lineno(node, parent, lineno): | |||||
cls = node.__class__ | |||||
other_fields = node._other_fields | |||||
_astroid_fields = node._astroid_fields | |||||
init_params = { | |||||
'lineno': lineno, | |||||
'col_offset': node.col_offset, | |||||
'parent': parent | |||||
} | |||||
postinit_params = { | |||||
param: getattr(node, param) | |||||
for param in _astroid_fields | |||||
} | |||||
if other_fields: | |||||
init_params.update({ | |||||
param: getattr(node, param) | |||||
for param in other_fields | |||||
}) | |||||
new_node = cls(**init_params) | |||||
if hasattr(node, 'postinit') and _astroid_fields: | |||||
new_node.postinit(**postinit_params) | |||||
return new_node | |||||
def infer_random_sample(node, context=None):
    """Infer the result of a ``random.sample(seq, k)`` call.

    Only handled when *seq* infers to a literal list/set/tuple and *k* is
    a literal int that fits the population; any other call falls back to
    default inference.

    :raises astroid.UseInferenceDefault: when the call cannot be modelled.
    """
    if len(node.args) != 2:
        raise astroid.UseInferenceDefault

    sample_size = node.args[1]
    if not isinstance(sample_size, astroid.Const):
        raise astroid.UseInferenceDefault
    if not isinstance(sample_size.value, int):
        raise astroid.UseInferenceDefault

    sequence = helpers.safe_infer(node.args[0], context=context)
    if sequence in (None, astroid.Uninferable):
        raise astroid.UseInferenceDefault
    # TODO: might need to support more cases
    if not isinstance(sequence, ACCEPTED_ITERABLES_FOR_SAMPLE):
        raise astroid.UseInferenceDefault
    if sample_size.value > len(sequence.elts):
        # The real call would raise ValueError (sample larger than population).
        raise astroid.UseInferenceDefault

    try:
        chosen = random.sample(sequence.elts, sample_size.value)
    except ValueError:
        # e.g. a negative sample size.
        raise astroid.UseInferenceDefault

    result = astroid.List(
        lineno=node.lineno,
        col_offset=node.col_offset,
        parent=node.scope(),
    )
    result.postinit([
        _clone_node_with_lineno(elt, parent=result, lineno=result.lineno)
        for elt in chosen
    ])
    return iter((result, ))
def _looks_like_random_sample(node):
    """Call-node predicate: is the callee named ``sample``?"""
    callee = node.func
    if isinstance(callee, astroid.Attribute):
        return callee.attrname == 'sample'
    if isinstance(callee, astroid.Name):
        return callee.name == 'sample'
    return False
# Replace default inference of matching call nodes with the tip above.
MANAGER.register_transform(
    astroid.Call,
    astroid.inference_tip(infer_random_sample),
    _looks_like_random_sample,
)
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html | |||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER | |||||
import sys | |||||
import astroid | |||||
# Python 3.6 moved the re flags into the RegexFlag enum and re-exports them
# by updating globals(), which static analysis cannot follow.
PY36 = sys.version_info >= (3, 6)

if PY36:
    # Since Python 3.6 there is the RegexFlag enum
    # where every entry will be exposed via updating globals()
    def _re_transform():
        """Expose the re flag constants as plain module attributes."""
        return astroid.parse('''
import sre_compile
ASCII = sre_compile.SRE_FLAG_ASCII
IGNORECASE = sre_compile.SRE_FLAG_IGNORECASE
LOCALE = sre_compile.SRE_FLAG_LOCALE
UNICODE = sre_compile.SRE_FLAG_UNICODE
MULTILINE = sre_compile.SRE_FLAG_MULTILINE
DOTALL = sre_compile.SRE_FLAG_DOTALL
VERBOSE = sre_compile.SRE_FLAG_VERBOSE
A = ASCII
I = IGNORECASE
L = LOCALE
U = UNICODE
M = MULTILINE
S = DOTALL
X = VERBOSE
TEMPLATE = sre_compile.SRE_FLAG_TEMPLATE
T = TEMPLATE
DEBUG = sre_compile.SRE_FLAG_DEBUG
''')

    astroid.register_module_extender(astroid.MANAGER, 're', _re_transform)
# Copyright (c) 2014-2016 Claudiu Popa <pcmanticore@gmail.com> | |||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html | |||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER | |||||
"""Astroid hooks for six module.""" | |||||
import sys | |||||
from textwrap import dedent | |||||
from astroid import MANAGER, register_module_extender | |||||
from astroid.builder import AstroidBuilder | |||||
from astroid.exceptions import AstroidBuildingError, InferenceError, AttributeInferenceError | |||||
from astroid import nodes | |||||
# Fully qualified name of the decorator handled by transform_six_add_metaclass.
SIX_ADD_METACLASS = 'six.add_metaclass'
def _indent(text, prefix, predicate=None): | |||||
"""Adds 'prefix' to the beginning of selected lines in 'text'. | |||||
If 'predicate' is provided, 'prefix' will only be added to the lines | |||||
where 'predicate(line)' is True. If 'predicate' is not provided, | |||||
it will default to adding 'prefix' to all non-empty lines that do not | |||||
consist solely of whitespace characters. | |||||
""" | |||||
if predicate is None: | |||||
predicate = lambda line: line.strip() | |||||
def prefixed_lines(): | |||||
for line in text.splitlines(True): | |||||
yield prefix + line if predicate(line) else line | |||||
return ''.join(prefixed_lines()) | |||||
# ``six.moves`` is assembled dynamically at import time, so a static source
# listing is kept here per major Python version.  These strings are only
# ever parsed by astroid, never executed, which is why unbound names such
# as ``http.server`` in the Python 3 block are harmless here.
if sys.version_info[0] == 2:
    _IMPORTS_2 = """
    import BaseHTTPServer
    import CGIHTTPServer
    import SimpleHTTPServer
    from StringIO import StringIO
    from cStringIO import StringIO as cStringIO
    from UserDict import UserDict
    from UserList import UserList
    from UserString import UserString
    import __builtin__ as builtins
    import thread as _thread
    import dummy_thread as _dummy_thread
    import ConfigParser as configparser
    import copy_reg as copyreg
    from itertools import (imap as map,
                           ifilter as filter,
                           ifilterfalse as filterfalse,
                           izip_longest as zip_longest,
                           izip as zip)
    import htmlentitydefs as html_entities
    import HTMLParser as html_parser
    import httplib as http_client
    import cookielib as http_cookiejar
    import Cookie as http_cookies
    import Queue as queue
    import repr as reprlib
    from pipes import quote as shlex_quote
    import SocketServer as socketserver
    import SimpleXMLRPCServer as xmlrpc_server
    import xmlrpclib as xmlrpc_client
    import _winreg as winreg
    import robotparser as urllib_robotparser
    import Tkinter as tkinter
    import tkFileDialog as tkinter_tkfiledialog
    input = raw_input
    intern = intern
    range = xrange
    xrange = xrange
    reduce = reduce
    reload_module = reload
    class UrllibParse(object):
        def __init__(self):
            import urlparse as _urlparse
            import urllib as _urllib
            self.ParseResult = _urlparse.ParseResult
            self.SplitResult = _urlparse.SplitResult
            self.parse_qs = _urlparse.parse_qs
            self.parse_qsl = _urlparse.parse_qsl
            self.urldefrag = _urlparse.urldefrag
            self.urljoin = _urlparse.urljoin
            self.urlparse = _urlparse.urlparse
            self.urlsplit = _urlparse.urlsplit
            self.urlunparse = _urlparse.urlunparse
            self.urlunsplit = _urlparse.urlunsplit
            self.quote = _urllib.quote
            self.quote_plus = _urllib.quote_plus
            self.unquote = _urllib.unquote
            self.unquote_plus = _urllib.unquote_plus
            self.urlencode = _urllib.urlencode
            self.splitquery = _urllib.splitquery
            self.splittag = _urllib.splittag
            self.splituser = _urllib.splituser
            self.uses_fragment = _urlparse.uses_fragment
            self.uses_netloc = _urlparse.uses_netloc
            self.uses_params = _urlparse.uses_params
            self.uses_query = _urlparse.uses_query
            self.uses_relative = _urlparse.uses_relative
    class UrllibError(object):
        import urllib2 as _urllib2
        import urllib as _urllib
        URLError = _urllib2.URLError
        HTTPError = _urllib2.HTTPError
        ContentTooShortError = _urllib.ContentTooShortError
    class DummyModule(object):
        pass
    class UrllibRequest(object):
        def __init__(self):
            import urlparse as _urlparse
            import urllib2 as _urllib2
            import urllib as _urllib
            self.urlopen = _urllib2.urlopen
            self.install_opener = _urllib2.install_opener
            self.build_opener = _urllib2.build_opener
            self.pathname2url = _urllib.pathname2url
            self.url2pathname = _urllib.url2pathname
            self.getproxies = _urllib.getproxies
            self.Request = _urllib2.Request
            self.OpenerDirector = _urllib2.OpenerDirector
            self.HTTPDefaultErrorHandler = _urllib2.HTTPDefaultErrorHandler
            self.HTTPRedirectHandler = _urllib2.HTTPRedirectHandler
            self.HTTPCookieProcessor = _urllib2.HTTPCookieProcessor
            self.ProxyHandler = _urllib2.ProxyHandler
            self.BaseHandler = _urllib2.BaseHandler
            self.HTTPPasswordMgr = _urllib2.HTTPPasswordMgr
            self.HTTPPasswordMgrWithDefaultRealm = _urllib2.HTTPPasswordMgrWithDefaultRealm
            self.AbstractBasicAuthHandler = _urllib2.AbstractBasicAuthHandler
            self.HTTPBasicAuthHandler = _urllib2.HTTPBasicAuthHandler
            self.ProxyBasicAuthHandler = _urllib2.ProxyBasicAuthHandler
            self.AbstractDigestAuthHandler = _urllib2.AbstractDigestAuthHandler
            self.HTTPDigestAuthHandler = _urllib2.HTTPDigestAuthHandler
            self.ProxyDigestAuthHandler = _urllib2.ProxyDigestAuthHandler
            self.HTTPHandler = _urllib2.HTTPHandler
            self.HTTPSHandler = _urllib2.HTTPSHandler
            self.FileHandler = _urllib2.FileHandler
            self.FTPHandler = _urllib2.FTPHandler
            self.CacheFTPHandler = _urllib2.CacheFTPHandler
            self.UnknownHandler = _urllib2.UnknownHandler
            self.HTTPErrorProcessor = _urllib2.HTTPErrorProcessor
            self.urlretrieve = _urllib.urlretrieve
            self.urlcleanup = _urllib.urlcleanup
            self.proxy_bypass = _urllib.proxy_bypass
    urllib_parse = UrllibParse()
    urllib_error = UrllibError()
    urllib = DummyModule()
    urllib.request = UrllibRequest()
    urllib.parse = UrllibParse()
    urllib.error = UrllibError()
    """
else:
    _IMPORTS_3 = """
    import _io
    cStringIO = _io.StringIO
    filter = filter
    from itertools import filterfalse
    input = input
    from sys import intern
    map = map
    range = range
    from imp import reload as reload_module
    from functools import reduce
    from shlex import quote as shlex_quote
    from io import StringIO
    from collections import UserDict, UserList, UserString
    xrange = range
    zip = zip
    from itertools import zip_longest
    import builtins
    import configparser
    import copyreg
    import _dummy_thread
    import http.cookiejar as http_cookiejar
    import http.cookies as http_cookies
    import html.entities as html_entities
    import html.parser as html_parser
    import http.client as http_client
    import http.server as http_server
    BaseHTTPServer = CGIHTTPServer = SimpleHTTPServer = http.server
    import pickle as cPickle
    import queue
    import reprlib
    import socketserver
    import _thread
    import winreg
    import xmlrpc.server as xmlrpc_server
    import xmlrpc.client as xmlrpc_client
    import urllib.robotparser as urllib_robotparser
    import email.mime.multipart as email_mime_multipart
    import email.mime.nonmultipart as email_mime_nonmultipart
    import email.mime.text as email_mime_text
    import email.mime.base as email_mime_base
    import urllib.parse as urllib_parse
    import urllib.error as urllib_error
    import tkinter
    import tkinter.dialog as tkinter_dialog
    import tkinter.filedialog as tkinter_filedialog
    import tkinter.scrolledtext as tkinter_scrolledtext
    import tkinter.simpledialog as tkinder_simpledialog
    import tkinter.tix as tkinter_tix
    import tkinter.ttk as tkinter_ttk
    import tkinter.constants as tkinter_constants
    import tkinter.dnd as tkinter_dnd
    import tkinter.colorchooser as tkinter_colorchooser
    import tkinter.commondialog as tkinter_commondialog
    import tkinter.filedialog as tkinter_tkfiledialog
    import tkinter.font as tkinter_font
    import tkinter.messagebox as tkinter_messagebox
    import urllib
    import urllib.request as urllib_request
    import urllib.robotparser as urllib_robotparser
    import urllib.parse as urllib_parse
    import urllib.error as urllib_error
    """

# Pick the listing that matches the running interpreter and strip the
# common indentation so the text parses as a top-level module body.
if sys.version_info[0] == 2:
    _IMPORTS = dedent(_IMPORTS_2)
else:
    _IMPORTS = dedent(_IMPORTS_3)
def six_moves_transform():
    """Synthesise ``six.moves`` as a module exposing a ``Moves`` instance.

    The real module is assembled at runtime, so a static stub is required
    for inference.
    """
    body = _indent(_IMPORTS, "    ")
    code = dedent('''
    class Moves(object):
    {}
    moves = Moves()
    ''').format(body)
    stub = AstroidBuilder(MANAGER).string_build(code)
    stub.name = 'six.moves'
    return stub
def _six_fail_hook(modname):
    """Fix six.moves imports due to the dynamic nature of this class.

    Construct a pseudo-module which contains all the necessary imports
    for six.

    :param modname: Name of failed module
    :type modname: str
    :return: An astroid module
    :rtype: nodes.Module
    """
    if not modname.startswith("six.moves"):
        raise AstroidBuildingError(modname=modname)
    is_submodule = modname != "six.moves"
    stub = AstroidBuilder(MANAGER).string_build(_IMPORTS)
    stub.name = 'six.moves'
    if is_submodule:
        # Map e.g. ``six.moves.urllib.parse`` onto the stub attribute
        # ``urllib_parse`` so submodule imports resolve inside Moves.
        attribute = modname[len(stub.name):].lstrip(".").replace(".", "_")
        try:
            import_attr = stub.getattr(attribute)[0]
        except AttributeInferenceError:
            raise AstroidBuildingError(modname=modname)
        if isinstance(import_attr, nodes.Import):
            return MANAGER.ast_from_module_name(import_attr.names[0][0])
    # Let dummy submodule imports pass through;
    # this will cause an Uninferable result, which is okay.
    return stub
def transform_six_add_metaclass(node):
    """Check whether the class node is decorated with *six.add_metaclass*.

    If so, inject the decorator's argument as the metaclass of the
    underlying class.
    """
    if not node.decorators:
        return None
    for decorator in node.decorators.nodes:
        if not isinstance(decorator, nodes.Call):
            continue
        try:
            inferred = next(decorator.func.infer())
        except InferenceError:
            continue
        if inferred.qname() == SIX_ADD_METACLASS and decorator.args:
            node._metaclass = decorator.args[0]
            return node
# six itself, plus the copy vendored inside requests, both get the
# six.moves stub; failed six.moves.* imports route through the fail hook.
register_module_extender(MANAGER, 'six', six_moves_transform)
register_module_extender(MANAGER, 'requests.packages.urllib3.packages.six',
                         six_moves_transform)
MANAGER.register_failed_import_hook(_six_fail_hook)
MANAGER.register_transform(nodes.ClassDef, transform_six_add_metaclass)
# Copyright (c) 2016 Claudiu Popa <pcmanticore@gmail.com> | |||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html | |||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER | |||||
"""Astroid hooks for the ssl library.""" | |||||
from astroid import MANAGER, register_module_extender | |||||
from astroid.builder import AstroidBuilder | |||||
from astroid import nodes | |||||
from astroid import parse | |||||
def ssl_transform():
    """Re-export the ``_ssl`` C-extension names that ``ssl`` publishes so
    they can be resolved statically.
    """
    return parse('''
from _ssl import OPENSSL_VERSION_NUMBER, OPENSSL_VERSION_INFO, OPENSSL_VERSION
from _ssl import _SSLContext, MemoryBIO
from _ssl import (
    SSLError, SSLZeroReturnError, SSLWantReadError, SSLWantWriteError,
    SSLSyscallError, SSLEOFError,
    )
from _ssl import CERT_NONE, CERT_OPTIONAL, CERT_REQUIRED
from _ssl import txt2obj as _txt2obj, nid2obj as _nid2obj
from _ssl import RAND_status, RAND_add, RAND_bytes, RAND_pseudo_bytes
try:
    from _ssl import RAND_egd
except ImportError:
    # LibreSSL does not provide RAND_egd
    pass
from _ssl import (OP_ALL, OP_CIPHER_SERVER_PREFERENCE,
                  OP_NO_COMPRESSION, OP_NO_SSLv2, OP_NO_SSLv3,
                  OP_NO_TLSv1, OP_NO_TLSv1_1, OP_NO_TLSv1_2,
                  OP_SINGLE_DH_USE, OP_SINGLE_ECDH_USE)
from _ssl import (ALERT_DESCRIPTION_ACCESS_DENIED, ALERT_DESCRIPTION_BAD_CERTIFICATE,
                  ALERT_DESCRIPTION_BAD_CERTIFICATE_HASH_VALUE,
                  ALERT_DESCRIPTION_BAD_CERTIFICATE_STATUS_RESPONSE,
                  ALERT_DESCRIPTION_BAD_RECORD_MAC,
                  ALERT_DESCRIPTION_CERTIFICATE_EXPIRED,
                  ALERT_DESCRIPTION_CERTIFICATE_REVOKED,
                  ALERT_DESCRIPTION_CERTIFICATE_UNKNOWN,
                  ALERT_DESCRIPTION_CERTIFICATE_UNOBTAINABLE,
                  ALERT_DESCRIPTION_CLOSE_NOTIFY, ALERT_DESCRIPTION_DECODE_ERROR,
                  ALERT_DESCRIPTION_DECOMPRESSION_FAILURE,
                  ALERT_DESCRIPTION_DECRYPT_ERROR,
                  ALERT_DESCRIPTION_HANDSHAKE_FAILURE,
                  ALERT_DESCRIPTION_ILLEGAL_PARAMETER,
                  ALERT_DESCRIPTION_INSUFFICIENT_SECURITY,
                  ALERT_DESCRIPTION_INTERNAL_ERROR,
                  ALERT_DESCRIPTION_NO_RENEGOTIATION,
                  ALERT_DESCRIPTION_PROTOCOL_VERSION,
                  ALERT_DESCRIPTION_RECORD_OVERFLOW,
                  ALERT_DESCRIPTION_UNEXPECTED_MESSAGE,
                  ALERT_DESCRIPTION_UNKNOWN_CA,
                  ALERT_DESCRIPTION_UNKNOWN_PSK_IDENTITY,
                  ALERT_DESCRIPTION_UNRECOGNIZED_NAME,
                  ALERT_DESCRIPTION_UNSUPPORTED_CERTIFICATE,
                  ALERT_DESCRIPTION_UNSUPPORTED_EXTENSION,
                  ALERT_DESCRIPTION_USER_CANCELLED)
from _ssl import (SSL_ERROR_EOF, SSL_ERROR_INVALID_ERROR_CODE, SSL_ERROR_SSL,
                  SSL_ERROR_SYSCALL, SSL_ERROR_WANT_CONNECT, SSL_ERROR_WANT_READ,
                  SSL_ERROR_WANT_WRITE, SSL_ERROR_WANT_X509_LOOKUP, SSL_ERROR_ZERO_RETURN)
from _ssl import VERIFY_CRL_CHECK_CHAIN, VERIFY_CRL_CHECK_LEAF, VERIFY_DEFAULT, VERIFY_X509_STRICT
from _ssl import HAS_SNI, HAS_ECDH, HAS_NPN, HAS_ALPN
from _ssl import _OPENSSL_API_VERSION
from _ssl import PROTOCOL_SSLv23, PROTOCOL_TLSv1, PROTOCOL_TLSv1_1, PROTOCOL_TLSv1_2
''')

# Extend the stdlib ssl module with the names above.
register_module_extender(MANAGER, 'ssl', ssl_transform)
# Copyright (c) 2016 Claudiu Popa <pcmanticore@gmail.com> | |||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html | |||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER | |||||
import sys | |||||
import textwrap | |||||
import six | |||||
import astroid | |||||
# Version gates used below to select the matching Popen signatures.
PY34 = sys.version_info >= (3, 4)
PY36 = sys.version_info >= (3, 6)
def _subprocess_transform():
    """Build a version-appropriate stub for ``subprocess.Popen``.

    The ``communicate``/``wait``/``__init__`` signatures differ between
    Python 2, early Python 3 and 3.6+, so the stub source is assembled
    piecewise before parsing.
    """
    if six.PY3:
        communicate = (bytes('string', 'ascii'), bytes('string', 'ascii'))
        communicate_signature = 'def communicate(self, input=None, timeout=None)'
        if PY36:
            # 3.6 added the keyword-only encoding/errors parameters.
            init = """
            def __init__(self, args, bufsize=0, executable=None,
                         stdin=None, stdout=None, stderr=None,
                         preexec_fn=None, close_fds=False, shell=False,
                         cwd=None, env=None, universal_newlines=False,
                         startupinfo=None, creationflags=0, restore_signals=True,
                         start_new_session=False, pass_fds=(), *,
                         encoding=None, errors=None):
                pass
            """
        else:
            init = """
            def __init__(self, args, bufsize=0, executable=None,
                         stdin=None, stdout=None, stderr=None,
                         preexec_fn=None, close_fds=False, shell=False,
                         cwd=None, env=None, universal_newlines=False,
                         startupinfo=None, creationflags=0, restore_signals=True,
                         start_new_session=False, pass_fds=()):
                pass
            """
    else:
        communicate = ('string', 'string')
        communicate_signature = 'def communicate(self, input=None)'
        init = """
        def __init__(self, args, bufsize=0, executable=None,
                     stdin=None, stdout=None, stderr=None,
                     preexec_fn=None, close_fds=False, shell=False,
                     cwd=None, env=None, universal_newlines=False,
                     startupinfo=None, creationflags=0):
            pass
        """
    if PY34:
        wait_signature = 'def wait(self, timeout=None)'
    else:
        wait_signature = 'def wait(self)'
    if six.PY3:
        # Popen became a context manager in Python 3.2.
        ctx_manager = '''
        def __enter__(self): return self
        def __exit__(self, *args): pass
    '''
    else:
        ctx_manager = ''
    code = textwrap.dedent('''
    class Popen(object):
        returncode = pid = 0
        stdin = stdout = stderr = file()
        %(communicate_signature)s:
            return %(communicate)r
        %(wait_signature)s:
            return self.returncode
        def poll(self):
            return self.returncode
        def send_signal(self, signal):
            pass
        def terminate(self):
            pass
        def kill(self):
            pass
        %(ctx_manager)s
    ''' % {'communicate': communicate,
           'communicate_signature': communicate_signature,
           'wait_signature': wait_signature,
           'ctx_manager': ctx_manager})
    # Re-indent the chosen __init__ by four spaces so it lands inside the
    # Popen class body appended to ``code``.
    init_lines = textwrap.dedent(init).splitlines()
    indented_init = '\n'.join([' ' * 4 + line for line in init_lines])
    code += indented_init
    return astroid.parse(code)

# Expose the stub through the real module name.
astroid.register_module_extender(astroid.MANAGER, 'subprocess', _subprocess_transform)
# Copyright (c) 2016 Claudiu Popa <pcmanticore@gmail.com> | |||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html | |||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER | |||||
import astroid | |||||
def _thread_transform():
    """Stub the C-implemented lock type used by the ``threading`` module,
    including its context-manager protocol.
    """
    return astroid.parse('''
class lock(object):
    def acquire(self, blocking=True):
        pass
    def release(self):
        pass
    def __enter__(self):
        return True
    def __exit__(self, *args):
        pass
def Lock():
    return lock()
''')

astroid.register_module_extender(astroid.MANAGER, 'threading', _thread_transform)
# Copyright (c) 2016 David Euresti <david@dropbox.com> | |||||
"""Astroid hooks for typing.py support.""" | |||||
import textwrap | |||||
from astroid import ( | |||||
MANAGER, UseInferenceDefault, extract_node, inference_tip, | |||||
nodes, InferenceError) | |||||
from astroid.nodes import List, Tuple | |||||
# Base-class spellings that mark a class as a typing.NamedTuple subclass.
TYPING_NAMEDTUPLE_BASENAMES = {
    'NamedTuple',
    'typing.NamedTuple'
}
def infer_typing_namedtuple(node, context=None):
    """Infer a typing.NamedTuple(...) call.

    Only the functional two-argument form
    ``NamedTuple('Name', [('field', type), ...])`` is handled; every other
    shape defers to default inference via ``UseInferenceDefault``.
    """
    # This is essentially a namedtuple with different arguments
    # so we extract the args and infer a named tuple.
    try:
        func = next(node.func.infer())
    except InferenceError:
        raise UseInferenceDefault
    if func.qname() != 'typing.NamedTuple':
        raise UseInferenceDefault
    if len(node.args) != 2:
        raise UseInferenceDefault
    if not isinstance(node.args[1], (List, Tuple)):
        raise UseInferenceDefault
    names = []
    for elt in node.args[1].elts:
        # Each field must itself be a literal ('name', type) pair.
        if not isinstance(elt, (List, Tuple)):
            raise UseInferenceDefault
        if len(elt.elts) != 2:
            raise UseInferenceDefault
        names.append(elt.elts[0].as_string())
    typename = node.args[0].as_string()
    # Delegate to the collections.namedtuple brain by synthesising the
    # equivalent namedtuple(...) call and inferring that instead.
    node = extract_node('namedtuple(%(typename)s, (%(fields)s,)) ' %
                        {'typename': typename, 'fields': ",".join(names)})
    return node.infer(context=context)
def infer_typing_namedtuple_class(node, context=None):
    """Infer a class-style subclass of typing.NamedTuple by rewriting it
    as the equivalent ``collections.namedtuple`` call.

    Field names are collected from the annotated assignments in the class
    body.
    """
    field_names = []
    for child in node.body:
        if isinstance(child, nodes.AnnAssign):
            field_names.append(child.target.name)
    source = textwrap.dedent('''
    from collections import namedtuple
    namedtuple({typename!r}, {fields!r})
    ''').format(
        typename=node.name,
        fields=",".join(field_names)
    )
    call_node = extract_node(source)
    return call_node.infer(context=context)
def has_namedtuple_base(node):
    """Predicate for class inference tip

    :type node: ClassDef
    :rtype: bool
    """
    # bool() so the predicate actually returns a bool as documented,
    # rather than the (merely truthy/falsy) intersection set itself.
    return bool(set(node.basenames) & TYPING_NAMEDTUPLE_BASENAMES)
def looks_like_typing_namedtuple(node):
    """Fast syntactic check: is this call spelled ``NamedTuple(...)``
    or ``<mod>.NamedTuple(...)``?
    """
    func = node.func
    if isinstance(func, nodes.Attribute):
        called_name = func.attrname
    elif isinstance(func, nodes.Name):
        called_name = func.name
    else:
        return False
    return called_name == 'NamedTuple'
# Infer typing.NamedTuple(...) calls through the namedtuple machinery.
MANAGER.register_transform(
    nodes.Call,
    inference_tip(infer_typing_namedtuple),
    looks_like_typing_namedtuple
)
# Infer classes deriving from typing.NamedTuple the same way.
MANAGER.register_transform(
    nodes.ClassDef,
    inference_tip(infer_typing_namedtuple_class),
    has_namedtuple_base
)
# Copyright (c) 2017 Claudiu Popa <pcmanticore@gmail.com> | |||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html | |||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER | |||||
"""Astroid hooks for the UUID module.""" | |||||
from astroid import MANAGER | |||||
from astroid import nodes | |||||
def _patch_uuid_class(node):
    """Add a stub ``int`` member to the ``uuid.UUID`` class node so that
    attribute inference can find it.
    """
    # The .int member is patched using __dict__ at runtime, which static
    # analysis cannot see; a Const(0) placeholder stands in for it.
    node.locals['int'] = [nodes.Const(0, parent=node)]
# Apply the patch only to the uuid.UUID class definition itself.
MANAGER.register_transform(
    nodes.ClassDef,
    _patch_uuid_class,
    lambda node: node.qname() == 'uuid.UUID'
)
# Copyright (c) 2006-2011, 2013-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr> | |||||
# Copyright (c) 2014-2015 Google, Inc. | |||||
# Copyright (c) 2014-2016 Claudiu Popa <pcmanticore@gmail.com> | |||||
# Copyright (c) 2015-2016 Cara Vinson <ceridwenv@gmail.com> | |||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html | |||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER | |||||
"""The AstroidBuilder makes astroid from living object and / or from _ast | |||||
The builder is not thread safe and can't be used to parse different sources | |||||
at the same time. | |||||
""" | |||||
import re | |||||
import os | |||||
import sys | |||||
import textwrap | |||||
import _ast | |||||
from astroid import bases | |||||
from astroid import exceptions | |||||
from astroid import manager | |||||
from astroid import modutils | |||||
from astroid import raw_building | |||||
from astroid import rebuilder | |||||
from astroid import nodes | |||||
from astroid import util | |||||
# Name of the transient function used to wrap expressions
# that should be extracted when calling extract_node.
_TRANSIENT_FUNCTION = '__'
# Trailing comment marker used to select a statement to be
# extracted when calling extract_node.
_STATEMENT_SELECTOR = '#@'
def _parse(string):
    """Compile *string* into an abstract syntax tree without executing it."""
    flags = _ast.PyCF_ONLY_AST
    return compile(string, "<string>", 'exec', flags)
if sys.version_info >= (3, 0):
    from tokenize import detect_encoding

    def open_source_file(filename):
        """Open *filename* and return a (stream, encoding, data) triple.

        The encoding is detected from the source (PEP 263 declaration or
        BOM); tokenize falls back to utf-8 when nothing is declared.
        """
        with open(filename, 'rb') as byte_stream:
            encoding = detect_encoding(byte_stream.readline)[0]
        stream = open(filename, 'r', newline=None, encoding=encoding)
        data = stream.read()
        return stream, encoding, data
else:
    _ENCODING_RGX = re.compile(r"\s*#+.*coding[:=]\s*([-\w.]+)")

    def _guess_encoding(string):
        """get encoding from a python file as string or return None if not found"""
        # A UTF-8 byte-order mark wins over any declaration.
        if string.startswith('\xef\xbb\xbf'):
            return 'UTF-8'
        # PEP 263: the declaration must sit on one of the first two lines.
        for line in string.split('\n', 2)[:2]:
            match = _ENCODING_RGX.match(line)
            if match is not None:
                return match.group(1)
        return None

    def open_source_file(filename):
        """get data for parsing a file"""
        stream = open(filename, 'U')
        data = stream.read()
        return stream, _guess_encoding(data), data
# Shared manager instance, used when the builder is not given its own.
MANAGER = manager.AstroidManager()
def _can_assign_attr(node, attrname):
    """Return False when *node* declares __slots__ that exclude *attrname*."""
    try:
        slots = node.slots()
    except NotImplementedError:
        # Slots cannot be determined; assume assignment is allowed.
        return True
    if not slots:
        return True
    return attrname in {slot.value for slot in slots}
class AstroidBuilder(raw_building.InspectBuilder):
    """Class for building an astroid tree from source code or from a live module.

    The param *manager* specifies the manager class which should be used.
    If no manager is given, then the default one will be used. The
    param *apply_transforms* determines if the transforms should be
    applied after the tree was built from source or from a live object,
    by default being True.
    """
    # pylint: disable=redefined-outer-name
    def __init__(self, manager=None, apply_transforms=True):
        super(AstroidBuilder, self).__init__()
        self._manager = manager or MANAGER
        self._apply_transforms = apply_transforms

    def module_build(self, module, modname=None):
        """Build an astroid from a living module instance."""
        node = None
        path = getattr(module, '__file__', None)
        if path is not None:
            # Prefer rebuilding from the .py source next to the module's
            # file: it gives a complete representation.
            path_, ext = os.path.splitext(modutils._path_from_filename(path))
            if ext in ('.py', '.pyc', '.pyo') and os.path.exists(path_ + '.py'):
                node = self.file_build(path_ + '.py', modname)
        if node is None:
            # this is a built-in module
            # get a partial representation by introspection
            node = self.inspect_build(module, modname=modname, path=path)
            if self._apply_transforms:
                # We have to handle transformation by ourselves since the
                # rebuilder isn't called for builtin nodes
                node = self._manager.visit_transforms(node)
        return node

    def file_build(self, path, modname=None):
        """Build astroid from a source code file (i.e. from an ast)

        *path* is expected to be a python source file
        """
        try:
            stream, encoding, data = open_source_file(path)
        except IOError as exc:
            util.reraise(exceptions.AstroidBuildingError(
                'Unable to load file {path}:\n{error}',
                modname=modname, path=path, error=exc))
        except (SyntaxError, LookupError) as exc:
            util.reraise(exceptions.AstroidSyntaxError(
                'Python 3 encoding specification error or unknown encoding:\n'
                '{error}', modname=modname, path=path, error=exc))
        except UnicodeError:  # wrong encoding
            # detect_encoding returns utf-8 if no encoding specified
            # BUGFIX: the message previously hard-coded "(unknown)" while
            # passing filename= as a field; use the {filename} placeholder
            # so AstroidError.__str__ can interpolate the actual path.
            util.reraise(exceptions.AstroidBuildingError(
                'Wrong or no encoding specified for {filename}.',
                filename=path))
        with stream:
            # get module name if necessary
            if modname is None:
                try:
                    modname = '.'.join(modutils.modpath_from_file(path))
                except ImportError:
                    modname = os.path.splitext(os.path.basename(path))[0]
            # build astroid representation
            module = self._data_build(data, modname, path)
            return self._post_build(module, encoding)

    def string_build(self, data, modname='', path=None):
        """Build astroid from source code string."""
        module = self._data_build(data, modname, path)
        module.file_bytes = data.encode('utf-8')
        return self._post_build(module, 'utf-8')

    def _post_build(self, module, encoding):
        """Handles encoding and delayed nodes after a module has been built"""
        module.file_encoding = encoding
        self._manager.cache_module(module)
        # post tree building steps after we stored the module in the cache:
        for from_node in module._import_from_nodes:
            if from_node.modname == '__future__':
                for symbol, _ in from_node.names:
                    module.future_imports.add(symbol)
            self.add_from_names_to_locals(from_node)
        # handle delayed assattr nodes
        for delayed in module._delayed_assattr:
            self.delayed_assattr(delayed)
        # Visit the transforms
        if self._apply_transforms:
            module = self._manager.visit_transforms(module)
        return module

    def _data_build(self, data, modname, path):
        """Build tree node from data and add some informations"""
        try:
            node = _parse(data + '\n')
        except (TypeError, ValueError, SyntaxError) as exc:
            util.reraise(exceptions.AstroidSyntaxError(
                'Parsing Python code failed:\n{error}',
                source=data, modname=modname, path=path, error=exc))
        if path is not None:
            node_file = os.path.abspath(path)
        else:
            node_file = '<?>'
        if modname.endswith('.__init__'):
            # strip '.__init__' so packages are registered under their
            # plain dotted name
            modname = modname[:-9]
            package = True
        else:
            package = path is not None and os.path.splitext(os.path.basename(path))[0] == '__init__'
        builder = rebuilder.TreeRebuilder(self._manager)
        module = builder.visit_module(node, modname, node_file, package)
        module._import_from_nodes = builder._import_from_nodes
        module._delayed_assattr = builder._delayed_assattr
        return module

    def add_from_names_to_locals(self, node):
        """Store imported names to the locals

        Resort the locals if coming from a delayed node
        """
        _key_func = lambda node: node.fromlineno
        def sort_locals(my_list):
            my_list.sort(key=_key_func)

        for (name, asname) in node.names:
            if name == '*':
                # wildcard import: bind every public name of the module
                try:
                    imported = node.do_import_module()
                except exceptions.AstroidBuildingError:
                    continue
                for name in imported.public_names():
                    node.parent.set_local(name, node)
                    sort_locals(node.parent.scope().locals[name])
            else:
                node.parent.set_local(asname or name, node)
                sort_locals(node.parent.scope().locals[asname or name])

    def delayed_assattr(self, node):
        """Visit a AssAttr node

        This adds name to locals and handle members definition.
        """
        try:
            frame = node.frame()
            for inferred in node.expr.infer():
                if inferred is util.Uninferable:
                    continue
                try:
                    if inferred.__class__ is bases.Instance:
                        inferred = inferred._proxied
                        iattrs = inferred.instance_attrs
                        if not _can_assign_attr(inferred, node.attrname):
                            continue
                    elif isinstance(inferred, bases.Instance):
                        # Const, Tuple, ... we may be wrong, may be not, but
                        # anyway we don't want to pollute builtin's namespace
                        continue
                    elif inferred.is_function:
                        iattrs = inferred.instance_attrs
                    else:
                        iattrs = inferred.locals
                except AttributeError:
                    # XXX log error
                    continue
                values = iattrs.setdefault(node.attrname, [])
                if node in values:
                    continue
                # get assign in __init__ first XXX useful ?
                if (frame.name == '__init__' and values and
                        values[0].frame().name != '__init__'):
                    values.insert(0, node)
                else:
                    values.append(node)
        except exceptions.InferenceError:
            pass
def build_namespace_package_module(name, path):
    """Create a stub Module node standing in for a namespace package."""
    module = nodes.Module(name, doc='', path=path, package=True)
    return module
def parse(code, module_name='', path=None, apply_transforms=True):
    """Parses a source string in order to obtain an astroid AST from it

    :param str code: The code for the module.
    :param str module_name: The name for the module, if any
    :param str path: The path for the module
    :param bool apply_transforms:
        Apply the transforms for the give code. Use it if you
        don't want the default transforms to be applied.
    """
    builder = AstroidBuilder(manager=MANAGER,
                             apply_transforms=apply_transforms)
    # Dedent first so indented triple-quoted snippets parse cleanly.
    return builder.string_build(textwrap.dedent(code),
                                modname=module_name, path=path)
def _extract_expressions(node):
    """Find expressions in a call to _TRANSIENT_FUNCTION and extract them.

    The function walks the AST recursively to search for expressions that
    are wrapped into a call to _TRANSIENT_FUNCTION. If it finds such an
    expression, it completely removes the function call node from the tree,
    replacing it by the wrapped expression inside the parent.

    :param node: An astroid node.
    :type node: astroid.bases.NodeNG
    :yields: The sequence of wrapped expressions on the modified tree
    expression can be found.
    """
    is_transient_call = (
        isinstance(node, nodes.Call)
        and isinstance(node.func, nodes.Name)
        and node.func.name == _TRANSIENT_FUNCTION)
    if not is_transient_call:
        # Not a marker call: keep walking the children.
        for child in node.get_children():
            for result in _extract_expressions(child):
                yield result
        return
    real_expr = node.args[0]
    real_expr.parent = node.parent
    # Search for node in all _astng_fields (the fields checked when
    # get_children is called) of its parent. Some of those fields may
    # be lists or tuples, in which case the elements need to be checked.
    # When we find it, replace it by real_expr, so that the AST looks
    # like no call to _TRANSIENT_FUNCTION ever took place.
    for field_name in node.parent._astroid_fields:
        child = getattr(node.parent, field_name)
        if isinstance(child, (list, tuple)):
            for index, compound_child in enumerate(child):
                if compound_child is node:
                    child[index] = real_expr
        elif child is node:
            setattr(node.parent, field_name, real_expr)
    yield real_expr
def _find_statement_by_line(node, line):
    """Extracts the statement on a specific line from an AST.

    If the line number of node matches line, it will be returned;
    otherwise its children are iterated and the function is called
    recursively.

    :param node: An astroid node.
    :type node: astroid.bases.NodeNG
    :param line: The line number of the statement to extract.
    :type line: int
    :returns: The statement on the line, or None if no statement for the line
      can be found.
    :rtype: astroid.bases.NodeNG or None
    """
    is_definition = isinstance(node, (nodes.ClassDef, nodes.FunctionDef))
    # Decorated classes/functions do not carry explicit information on
    # which line the actual class/def statement sits; .fromlineno is the
    # closest approximation.
    node_line = node.fromlineno if is_definition else node.lineno
    if node_line == line:
        return node
    for child in node.get_children():
        found = _find_statement_by_line(child, line)
        if found:
            return found
    return None
def extract_node(code, module_name=''):
    """Parses some Python code as a module and extracts a designated AST node.

    Statements:
     To extract one or more statement nodes, append #@ to the end of the line

     Examples:
       >>> def x():
       >>>   def y():
       >>>     return 1 #@

       The return statement will be extracted.

       >>> class X(object):
       >>>   def meth(self): #@
       >>>     pass

       The function object 'meth' will be extracted.

    Expressions:
     To extract arbitrary expressions, surround them with the fake
     function call __(...). After parsing, the surrounded expression
     will be returned and the whole AST (accessible via the returned
     node's parent attribute) will look like the function call was
     never there in the first place.

     Examples:
       >>> a = __(1)

       The const node will be extracted.

       >>> def x(d=__(foo.bar)): pass

       The node containing the default argument will be extracted.

       >>> def foo(a, b):
       >>>   return 0 < __(len(a)) < b

       The node containing the function call 'len' will be extracted.

    If no statements or expressions are selected, the last toplevel
    statement will be returned.

    If the selected statement is a discard statement, (i.e. an expression
    turned into a statement), the wrapped expression is returned instead.

    For convenience, singleton lists are unpacked.

    :param str code: A piece of Python code that is parsed as
    a module. Will be passed through textwrap.dedent first.
    :param str module_name: The name of the module.
    :returns: The designated node from the parse tree, or a list of nodes.
    :rtype: astroid.bases.NodeNG, or a list of nodes.
    """
    def _strip_discard(node):
        # Unwrap discard statements so the expression itself is returned.
        return node.value if isinstance(node, nodes.Expr) else node

    marked_lines = [
        lineno
        for lineno, line in enumerate(code.splitlines(), start=1)
        if line.strip().endswith(_STATEMENT_SELECTOR)
    ]
    tree = parse(code, module_name=module_name)
    found = [_find_statement_by_line(tree, lineno) for lineno in marked_lines]
    # _extract_expressions modifies the tree in place.
    found.extend(_extract_expressions(tree))
    if not found:
        # Nothing was selected: default to the last toplevel statement.
        found = [tree.body[-1]]
    found = [_strip_discard(node) for node in found]
    if len(found) == 1:
        return found[0]
    return found
# Copyright (c) 2015-2016 Cara Vinson <ceridwenv@gmail.com> | |||||
# Copyright (c) 2015-2016 Claudiu Popa <pcmanticore@gmail.com> | |||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html | |||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER | |||||
"""Various context related utilities, including inference and call contexts.""" | |||||
import contextlib | |||||
import copy | |||||
import pprint | |||||
class InferenceContext(object): | |||||
"""Provide context for inference | |||||
Store already inferred nodes to save time | |||||
Account for already visited nodes to infinite stop infinite recursion | |||||
""" | |||||
__slots__ = ('path', 'lookupname', 'callcontext', 'boundnode', 'inferred') | |||||
def __init__(self, path=None, inferred=None): | |||||
self.path = path or set() | |||||
"""Path of visited nodes and their lookupname | |||||
:type: set(tuple(NodeNG, optional(str)))""" | |||||
self.lookupname = None | |||||
self.callcontext = None | |||||
self.boundnode = None | |||||
self.inferred = inferred or {} | |||||
""" | |||||
:type: dict(seq, seq) | |||||
Inferred node contexts to their mapped results | |||||
Currently the key is (node, lookupname, callcontext, boundnode) | |||||
and the value is tuple of the inferred results | |||||
""" | |||||
def push(self, node): | |||||
"""Push node into inference path | |||||
:return: True if node is already in context path else False | |||||
:rtype: bool | |||||
Allows one to see if the given node has already | |||||
been looked at for this inference context""" | |||||
name = self.lookupname | |||||
if (node, name) in self.path: | |||||
return True | |||||
self.path.add((node, name)) | |||||
return False | |||||
def clone(self): | |||||
"""Clone inference path | |||||
For example, each side of a binary operation (BinOp) | |||||
starts with the same context but diverge as each side is inferred | |||||
so the InferenceContext will need be cloned""" | |||||
# XXX copy lookupname/callcontext ? | |||||
clone = InferenceContext(copy.copy(self.path), inferred=self.inferred) | |||||
clone.callcontext = self.callcontext | |||||
clone.boundnode = self.boundnode | |||||
return clone | |||||
def cache_generator(self, key, generator): | |||||
"""Cache result of generator into dictionary | |||||
Used to cache inference results""" | |||||
results = [] | |||||
for result in generator: | |||||
results.append(result) | |||||
yield result | |||||
self.inferred[key] = tuple(results) | |||||
@contextlib.contextmanager | |||||
def restore_path(self): | |||||
path = set(self.path) | |||||
yield | |||||
self.path = path | |||||
def __str__(self): | |||||
state = ('%s=%s' % (field, pprint.pformat(getattr(self, field), | |||||
width=80 - len(field))) | |||||
for field in self.__slots__) | |||||
return '%s(%s)' % (type(self).__name__, ',\n '.join(state)) | |||||
class CallContext(object):
    """Holds the positional and keyword arguments of a call site."""
    __slots__ = ('args', 'keywords')

    def __init__(self, args, keywords=None):
        # Positional argument nodes, in call order.
        self.args = args
        # Normalise keyword nodes into (name, value) pairs.
        self.keywords = (
            [(keyword.arg, keyword.value) for keyword in keywords]
            if keywords else [])
def copy_context(context):
    """Return a clone of *context*, or a fresh InferenceContext when None."""
    if context is None:
        return InferenceContext()
    return context.clone()
# Copyright (c) 2015-2016 Cara Vinson <ceridwenv@gmail.com> | |||||
# Copyright (c) 2015 Florian Bruhin <me@the-compiler.org> | |||||
# Copyright (c) 2015-2016 Claudiu Popa <pcmanticore@gmail.com> | |||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html | |||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER | |||||
""" A few useful function/method decorators.""" | |||||
import functools | |||||
import wrapt | |||||
from astroid import context as contextmod | |||||
from astroid import exceptions | |||||
from astroid import util | |||||
@wrapt.decorator
def cached(func, instance, args, kwargs):
    """Simple decorator to cache result of method calls without args."""
    # The cache lives on the instance so it dies with it.
    cache = getattr(instance, '__cache', None)
    if cache is None:
        cache = {}
        instance.__cache = cache
    if func not in cache:
        cache[func] = func(*args, **kwargs)
    return cache[func]
class cachedproperty(object):
    """ Provides a cached property equivalent to the stacking of
    @cached and @property, but more efficient.

    After first usage, the <property_name> becomes part of the object's
    __dict__. Doing:

      del obj.<property_name> empties the cache.

    Idea taken from the pyramid_ framework and the mercurial_ project.

    .. _pyramid: http://pypi.python.org/pypi/pyramid
    .. _mercurial: http://pypi.python.org/pypi/Mercurial
    """
    __slots__ = ('wrapped',)

    def __init__(self, wrapped):
        try:
            wrapped.__name__
        except AttributeError:
            util.reraise(TypeError('%s must have a __name__ attribute'
                                   % wrapped))
        self.wrapped = wrapped

    @property
    def __doc__(self):
        doc = getattr(self.wrapped, '__doc__', None)
        suffix = '\n%s' % doc if doc else ''
        return '<wrapped by the cachedproperty decorator>%s' % suffix

    def __get__(self, inst, objtype=None):
        if inst is None:
            return self
        value = self.wrapped(inst)
        # Shadow the descriptor on the instance: later reads hit the
        # instance __dict__ directly and never call wrapped again.
        inst.__dict__[self.wrapped.__name__] = value
        return value
def path_wrapper(func):
    """return the given infer function wrapped to handle the path

    Used to stop inference if the node has already been looked
    at for a given `InferenceContext` to prevent infinite recursion
    """
    # TODO: switch this to wrapt after the monkey-patching is fixed (ceridwen)
    @functools.wraps(func)
    def wrapped(node, context=None, _func=func, **kwargs):
        """wrapper function handling context"""
        if context is None:
            context = contextmod.InferenceContext()
        if context.push(node):
            # node already visited for this context: stop here to
            # break the inference recursion cycle
            return
        yielded = set()
        generator = _func(node, context, **kwargs)
        try:
            while True:
                res = next(generator)
                # unproxy only true instance, not const, tuple, dict...
                if res.__class__.__name__ == 'Instance':
                    ares = res._proxied
                else:
                    ares = res
                # deduplicate results by their (possibly unproxied) value
                if ares not in yielded:
                    yield res
                    yielded.add(ares)
        except StopIteration as error:
            # Explicit StopIteration to return error information, see
            # comment in raise_if_nothing_inferred.
            if error.args:
                raise StopIteration(error.args[0])
            else:
                raise StopIteration

    return wrapped
@wrapt.decorator
def yes_if_nothing_inferred(func, instance, args, kwargs):
    """Yield everything *func* yields; yield Uninferable if it yields nothing."""
    produced_any = False
    for result in func(*args, **kwargs):
        produced_any = True
        yield result
    if not produced_any:
        # nothing could be inferred: signal it explicitly instead of
        # ending as an empty generator
        yield util.Uninferable
@wrapt.decorator
def raise_if_nothing_inferred(func, instance, args, kwargs):
    '''All generators wrapped with raise_if_nothing_inferred *must*
    explicitly raise StopIteration with information to create an
    appropriate structured InferenceError.
    '''
    # TODO: Explicitly raising StopIteration in a generator will cause
    # a RuntimeError in Python >=3.7, as per
    # http://legacy.python.org/dev/peps/pep-0479/ . Before 3.7 is
    # released, this code will need to use one of four possible
    # solutions: a decorator that restores the current behavior as
    # described in
    # http://legacy.python.org/dev/peps/pep-0479/#sub-proposal-decorator-to-explicitly-request-current-behaviour
    # , dynamic imports or exec to generate different code for
    # different versions, drop support for all Python versions <3.3,
    # or refactoring to change how these decorators work. In any
    # event, after dropping support for Python <3.3 this code should
    # be refactored to use `yield from`.
    inferred = False
    try:
        generator = func(*args, **kwargs)
        while True:
            yield next(generator)
            # at least one result came out, so an exhausted generator
            # from here on is not an error
            inferred = True
    except StopIteration as error:
        if not inferred:
            if error.args:
                # the wrapped generator attached structured error fields
                # to its StopIteration; turn them into an InferenceError
                # pylint: disable=not-a-mapping
                raise exceptions.InferenceError(**error.args[0])
            else:
                raise exceptions.InferenceError(
                    'StopIteration raised without any error information.')
# Copyright (c) 2007, 2009-2010, 2013 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr> | |||||
# Copyright (c) 2014 Google, Inc. | |||||
# Copyright (c) 2015-2016 Cara Vinson <ceridwenv@gmail.com> | |||||
# Copyright (c) 2015-2016 Claudiu Popa <pcmanticore@gmail.com> | |||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html | |||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER | |||||
"""this module contains exceptions used in the astroid library | |||||
""" | |||||
from astroid import util | |||||
class AstroidError(Exception):
    """base exception class for all astroid related exceptions

    AstroidError and its subclasses are structured, intended to hold
    objects representing state when the exception is thrown.  Field
    values are passed to the constructor as keyword-only arguments.
    Each subclass has its own set of standard fields, but use your
    best judgment to decide whether a specific exception instance
    needs more or fewer fields for debugging.  Field values may be
    used to lazily generate the error message: self.message.format()
    will be called with the field names and values supplied as keyword
    arguments.
    """

    def __init__(self, message='', **kws):
        super(AstroidError, self).__init__(message)
        self.message = message
        # Stash the structured fields on the instance; __str__ feeds
        # them back into the message template.
        vars(self).update(kws)

    def __str__(self):
        return self.message.format(**vars(self))
class AstroidBuildingError(AstroidError):
    """exception class when we are unable to build an astroid representation

    Standard attributes:
        modname: Name of the module that AST construction failed for.
        error: Exception raised during construction.
    """

    def __init__(self, message='Failed to import module {modname}.', **kws):
        super(AstroidBuildingError, self).__init__(message, **kws)


class AstroidImportError(AstroidBuildingError):
    """Exception class used when a module can't be imported by astroid."""


class TooManyLevelsError(AstroidImportError):
    """Exception class which is raised when a relative import was beyond the top-level.

    Standard attributes:
        level: The level which was attempted.
        name: the name of the module on which the relative import was attempted.
    """
    # Class-level defaults so message formatting works even when the
    # corresponding keyword was not supplied at raise time.
    level = None
    name = None

    def __init__(self, message='Relative import with too many levels '
                               '({level}) for module {name!r}', **kws):
        super(TooManyLevelsError, self).__init__(message, **kws)


class AstroidSyntaxError(AstroidBuildingError):
    """Exception class used when a module can't be parsed."""


class NoDefault(AstroidError):
    """raised by function's `default_value` method when an argument has
    no default value

    Standard attributes:
        func: Function node.
        name: Name of argument without a default.
    """
    func = None
    name = None

    def __init__(self, message='{func!r} has no default for {name!r}.', **kws):
        super(NoDefault, self).__init__(message, **kws)
class ResolveError(AstroidError):
    """Base class of astroid resolution/inference error.

    ResolveError is not intended to be raised.

    Standard attributes:
        context: InferenceContext object.
    """
    context = None


class MroError(ResolveError):
    """Error raised when there is a problem with method resolution of a class.

    Standard attributes:
        mros: A sequence of sequences containing ClassDef nodes.
        cls: ClassDef node whose MRO resolution failed.
        context: InferenceContext object.
    """
    mros = ()
    cls = None

    def __str__(self):
        # Render each candidate MRO as "(A, B, C)" for the message.
        mro_names = ", ".join("({})".format(", ".join(b.name for b in m))
                              for m in self.mros)
        return self.message.format(mros=mro_names, cls=self.cls)


class DuplicateBasesError(MroError):
    """Error raised when there are duplicate bases in the same class bases."""


class InconsistentMroError(MroError):
    """Error raised when a class's MRO is inconsistent."""


class SuperError(ResolveError):
    """Error raised when there is a problem with a super call.

    Standard attributes:
        super_: The Super instance that raised the exception.
        context: InferenceContext object.
    """
    super_ = None

    def __str__(self):
        # Format the message with the offending Super instance's state.
        return self.message.format(**vars(self.super_))
class InferenceError(ResolveError):
    """raised when we are unable to infer a node

    Standard attributes:
        node: The node inference was called on.
        context: InferenceContext object.
    """
    node = None
    context = None

    def __init__(self, message='Inference failed for {node!r}.', **kws):
        super(InferenceError, self).__init__(message, **kws)


# Why does this inherit from InferenceError rather than ResolveError?
# Changing it causes some inference tests to fail.
class NameInferenceError(InferenceError):
    """Raised when a name lookup fails, corresponds to NameError.

    Standard attributes:
        name: The name for which lookup failed, as a string.
        scope: The node representing the scope in which the lookup occurred.
        context: InferenceContext object.
    """
    name = None
    scope = None

    def __init__(self, message='{name!r} not found in {scope!r}.', **kws):
        super(NameInferenceError, self).__init__(message, **kws)
class AttributeInferenceError(ResolveError):
    """Raised when an attribute lookup fails, corresponds to AttributeError.

    Standard attributes:
        target: The node for which lookup failed.
        attribute: The attribute for which lookup failed, as a string.
        context: InferenceContext object.
    """
    # Class-level defaults; overridden per-instance via keyword arguments.
    target = None
    attribute = None

    def __init__(self, message='{attribute!r} not found on {target!r}.', **kws):
        super(AttributeInferenceError, self).__init__(message, **kws)
class UseInferenceDefault(Exception):
    """exception to be raised in custom inference function to indicate that it
    should go back to the default behaviour

    Note: deliberately derived from Exception, not AstroidError — it is a
    control-flow signal for inference tips, not an error condition.
    """
class _NonDeducibleTypeHierarchy(Exception):
    """Raised when is_subtype / is_supertype can't deduce the relation between two types."""
class AstroidIndexError(AstroidError):
    """Raised when an Indexable / Mapping does not have an index / key.

    The astroid analogue of a runtime IndexError / KeyError.
    """
class AstroidTypeError(AstroidError):
    """Raised when a TypeError would be expected in Python code."""
class InferenceOverwriteError(AstroidError):
    """Raised when an inference tip is overwritten

    Currently only used for debugging.
    """
# Backwards-compatibility aliases
# Old public names kept importable so third-party code written against the
# pre-rename exception / message classes keeps working; new code should use
# the right-hand-side names directly.
OperationError = util.BadOperationMessage
UnaryOperationError = util.BadUnaryOperationMessage
BinaryOperationError = util.BadBinaryOperationMessage

SuperArgumentTypeError = SuperError
UnresolvableName = NameInferenceError
NotFoundError = AttributeInferenceError
AstroidBuildingException = AstroidBuildingError
# Copyright (c) 2015-2016 Cara Vinson <ceridwenv@gmail.com> | |||||
# Copyright (c) 2015-2016 Claudiu Popa <pcmanticore@gmail.com> | |||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html | |||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER | |||||
""" | |||||
Various helper utilities. | |||||
""" | |||||
import six | |||||
from astroid import bases | |||||
from astroid import context as contextmod | |||||
from astroid import exceptions | |||||
from astroid import manager | |||||
from astroid import nodes | |||||
from astroid import raw_building | |||||
from astroid import scoped_nodes | |||||
from astroid import util | |||||
BUILTINS = six.moves.builtins.__name__ | |||||
def _build_proxy_class(cls_name, builtins):
    """Build a synthetic ClassDef named *cls_name*, parented to *builtins*."""
    synthetic = raw_building.build_class(cls_name)
    synthetic.parent = builtins
    return synthetic
def _function_type(function, builtins):
    """Return a proxy class describing the builtin type of *function*.

    Mirrors how CPython names these types: callables defined in the
    builtins module are 'builtin_function_or_method'; bound methods are
    'method' ('instancemethod' on Python 2); unbound methods are plain
    'function' objects on Python 3.
    """
    if isinstance(function, scoped_nodes.Lambda):
        defined_in_builtins = function.root().name == BUILTINS
        cls_name = ('builtin_function_or_method' if defined_in_builtins
                    else 'function')
    elif isinstance(function, bases.BoundMethod):
        cls_name = 'instancemethod' if six.PY2 else 'method'
    elif isinstance(function, bases.UnboundMethod):
        cls_name = 'instancemethod' if six.PY2 else 'function'
    return _build_proxy_class(cls_name, builtins)
def _object_type(node, context=None):
    # Generator behind object_type(): yields the type (as an astroid node)
    # of every possible inferred value of *node*.
    astroid_manager = manager.AstroidManager()
    builtins = astroid_manager.astroid_cache[BUILTINS]
    context = context or contextmod.InferenceContext()

    for inferred in node.infer(context=context):
        if isinstance(inferred, scoped_nodes.ClassDef):
            if inferred.newstyle:
                metaclass = inferred.metaclass()
                if metaclass:
                    yield metaclass
                    continue
            # Classes without an (inferable) explicit metaclass are simply
            # of type ``type``.
            yield builtins.getattr('type')[0]
        elif isinstance(inferred, (scoped_nodes.Lambda, bases.UnboundMethod)):
            yield _function_type(inferred, builtins)
        elif isinstance(inferred, scoped_nodes.Module):
            yield _build_proxy_class('module', builtins)
        else:
            # Instances: their type is the class they proxy.
            yield inferred._proxied
def object_type(node, context=None):
    """Obtain the type of the given node

    This is used to implement the ``type`` builtin, which means that it's
    used for inferring type calls, as well as used in a couple of other places
    in the inference.
    The node will be inferred first, so this function can support all
    sorts of objects, as long as they support inference.

    Returns util.Uninferable when inference fails or when more than one
    distinct type was inferred (ambiguity).
    """
    try:
        types = set(_object_type(node, context))
    except exceptions.InferenceError:
        return util.Uninferable
    if len(types) != 1:
        # No candidate at all, or conflicting candidates: give up.
        return util.Uninferable
    # Take the single element without materializing a throwaway list
    # (the original used ``list(types)[0]``).
    return next(iter(types))
def safe_infer(node, context=None):
    """Return the sole inferred value for the given node.

    Gives back None when inference raised an error or when the node is
    ambiguous (more than one candidate value was inferred).
    """
    try:
        candidates = node.infer(context=context)
        first = next(candidates)
    except exceptions.InferenceError:
        return None
    try:
        next(candidates)
    except StopIteration:
        return first  # exactly one inferred value
    except exceptions.InferenceError:
        return None  # there is some kind of ambiguity
    return None  # a second value exists: ambiguous
def has_known_bases(klass, context=None):
    """Return true if all base classes of a class could be inferred."""
    # Memoized on the class node itself.
    try:
        return klass._all_bases_known
    except AttributeError:
        pass
    for base in klass.bases:
        inferred_base = safe_infer(base, context=context)
        # TODO: check for A->B->A->B pattern in class structure too?
        base_is_known = (isinstance(inferred_base, scoped_nodes.ClassDef)
                         and inferred_base is not klass
                         and has_known_bases(inferred_base, context=context))
        if not base_is_known:
            klass._all_bases_known = False
            return False
    klass._all_bases_known = True
    return True
def _type_check(type1, type2):
    """Tell whether *type1* occurs in *type2*'s MRO (``object`` excluded).

    Raises _NonDeducibleTypeHierarchy when the hierarchy cannot be
    established (unknown bases or an invalid MRO).
    """
    if not (has_known_bases(type1) and has_known_bases(type2)):
        raise exceptions._NonDeducibleTypeHierarchy
    if not (type1.newstyle and type2.newstyle):
        # Old-style classes: no usable linearized MRO.
        return False
    try:
        return type1 in type2.mro()[:-1]
    except exceptions.MroError:
        # The MRO is invalid.
        raise exceptions._NonDeducibleTypeHierarchy
def is_subtype(type1, type2):
    """Check if *type1* is a subtype of *type2*."""
    return _type_check(type2, type1)
def is_supertype(type1, type2):
    """Check if *type1* is a supertype of *type2*."""
    # _type_check(a, b) is true when ``a`` appears in ``b``'s MRO, i.e. when
    # ``a`` is an ancestor (supertype) of ``b`` — matching how callers use
    # this (see _get_binop_flow / _get_aug_flow). The previous docstring
    # stated the relation backwards.
    return _type_check(type1, type2)
def class_instance_as_index(node):
    """Get the value as an index for the given instance.

    If an instance provides an __index__ method, then it can
    be used in some scenarios where an integer is expected,
    for instance when multiplying or subscripting a list.

    Returns the first constant-integer result inferred from calling
    ``__index__``, or None when nothing usable could be inferred.
    """
    context = contextmod.InferenceContext()
    context.callcontext = contextmod.CallContext(args=[node])

    try:
        for inferred in node.igetattr('__index__', context=context):
            if not isinstance(inferred, bases.BoundMethod):
                continue
            # Simulate the __index__() call and accept only constant ints.
            for result in inferred.infer_call_result(node, context=context):
                if (isinstance(result, nodes.Const)
                        and isinstance(result.value, int)):
                    return result
    except exceptions.InferenceError:
        pass
    return None
# Copyright (c) 2006-2011, 2013-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr> | |||||
# Copyright (c) 2013-2014 Google, Inc. | |||||
# Copyright (c) 2014-2016 Claudiu Popa <pcmanticore@gmail.com> | |||||
# Copyright (c) 2015-2016 Cara Vinson <ceridwenv@gmail.com> | |||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html | |||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER | |||||
"""this module contains a set of functions to handle inference on astroid trees | |||||
""" | |||||
import functools | |||||
import itertools | |||||
import operator | |||||
from astroid import bases | |||||
from astroid import context as contextmod | |||||
from astroid import exceptions | |||||
from astroid import decorators | |||||
from astroid import helpers | |||||
from astroid import manager | |||||
from astroid import nodes | |||||
from astroid.interpreter import dunder_lookup | |||||
from astroid import protocols | |||||
from astroid import util | |||||
MANAGER = manager.AstroidManager() | |||||
# .infer method ############################################################### | |||||
def infer_end(self, context=None):
    """inference's end for node such as Module, ClassDef, FunctionDef,
    Const...

    These nodes are their own inferred value, so inference simply yields
    the node itself.
    """
    yield self

# Nodes that evaluate to themselves.
nodes.Module._infer = infer_end
nodes.ClassDef._infer = infer_end
nodes.FunctionDef._infer = infer_end
nodes.Lambda._infer = infer_end
nodes.Const._infer = infer_end
nodes.Slice._infer = infer_end
def infer_seq(self, context=None):
    """Infer a sequence literal; expands any starred elements it holds."""
    has_starred = any(isinstance(e, nodes.Starred) for e in self.elts)
    if not has_starred:
        # Nothing to expand: the literal is its own inferred value.
        yield self
        return
    flattened = _infer_seq(self, context)
    rebuilt = type(self)(self.lineno, self.col_offset, self.parent)
    rebuilt.postinit(flattened)
    yield rebuilt
def _infer_seq(node, context=None):
    """Infer all values based on _BaseContainer.elts

    Starred elements are inferred and their own ``elts`` spliced in;
    raises InferenceError when a starred value cannot be resolved to a
    container.
    """
    values = []
    for elt in node.elts:
        if isinstance(elt, nodes.Starred):
            starred = helpers.safe_infer(elt.value, context)
            if starred in (None, util.Uninferable):
                raise exceptions.InferenceError(node=node,
                                                context=context)
            if not hasattr(starred, 'elts'):
                raise exceptions.InferenceError(node=node,
                                                context=context)
            # Bug fix: propagate the inference context into the recursive
            # expansion instead of silently dropping it.
            values.extend(_infer_seq(starred, context))
        else:
            values.append(elt)
    return values
# All sequence literals share the same inference logic.
nodes.List._infer = infer_seq
nodes.Tuple._infer = infer_seq
nodes.Set._infer = infer_seq
def infer_map(self, context=None):
    """Infer a dict literal; expands any ``**`` (DictUnpack) entries."""
    has_unpack = any(isinstance(k, nodes.DictUnpack) for k, _ in self.items)
    if not has_unpack:
        # Nothing to expand: the literal is its own inferred value.
        yield self
        return
    resolved = _infer_map(self, context)
    rebuilt = type(self)(self.lineno, self.col_offset, self.parent)
    rebuilt.postinit(list(resolved.items()))
    yield rebuilt
def _infer_map(node, context):
    """Infer all values based on Dict.items

    Returns a dict of inferred key node -> inferred value node; raises
    InferenceError when a key, value or ``**`` expansion cannot be
    resolved.
    """
    values = {}
    for name, value in node.items:
        if isinstance(name, nodes.DictUnpack):
            double_starred = helpers.safe_infer(value, context)
            if double_starred in (None, util.Uninferable):
                # Consistency fix: raise an instance carrying node/context
                # like every other raise in this function, instead of the
                # bare exception class.
                raise exceptions.InferenceError(node=node, context=context)
            if not isinstance(double_starred, nodes.Dict):
                raise exceptions.InferenceError(node=node,
                                                context=context)
            values.update(_infer_map(double_starred, context))
        else:
            key = helpers.safe_infer(name, context=context)
            value = helpers.safe_infer(value, context=context)
            if any(elem in (None, util.Uninferable) for elem in (key, value)):
                raise exceptions.InferenceError(node=node,
                                                context=context)
            values[key] = value
    return values

nodes.Dict._infer = infer_map
def _higher_function_scope(node):
    """ Search for the first function which encloses the given
    scope. This can be used for looking up in that function's
    scope, in case looking up in a lower scope for a particular
    name fails.

    :param node: A scope node.
    :returns:
        ``None``, if no parent function scope was found,
        otherwise an instance of :class:`astroid.scoped_nodes.Function`,
        which encloses the given node.
    """
    scope = node
    # Climb the parent chain until the direct parent is a function.
    while scope.parent:
        if isinstance(scope.parent, nodes.FunctionDef):
            return scope.parent
        scope = scope.parent
    return None
def infer_name(self, context=None):
    """infer a Name: use name lookup rules"""
    frame, stmts = self.lookup(self.name)
    if not stmts:
        # Lookup failed here; retry from the first enclosing function
        # scope (covers names closed over by nested functions).
        # TODO: should this be promoted to other nodes as well?
        enclosing_function = _higher_function_scope(self.scope())
        if enclosing_function:
            _, stmts = enclosing_function.lookup(self.name)

        if not stmts:
            raise exceptions.NameInferenceError(name=self.name,
                                                scope=self.scope(),
                                                context=context)
    context = context.clone()
    context.lookupname = self.name
    return bases._infer_stmts(stmts, context, frame)


nodes.Name._infer = decorators.path_wrapper(infer_name)
nodes.AssignName.infer_lhs = infer_name  # won't work with a path wrapper
@decorators.raise_if_nothing_inferred
@decorators.path_wrapper
def infer_call(self, context=None):
    """infer a Call node by trying to guess what the function returns"""
    call_context = context.clone()
    call_context.callcontext = contextmod.CallContext(args=self.args,
                                                      keywords=self.keywords)
    call_context.boundnode = None
    for callee in self.func.infer(context):
        if callee is util.Uninferable:
            yield callee
            continue
        try:
            if hasattr(callee, 'infer_call_result'):
                for inferred in callee.infer_call_result(self, call_context):
                    yield inferred
        except exceptions.InferenceError:
            ## XXX log error ?
            continue
    # Explicit StopIteration to return error information, see comment
    # in raise_if_nothing_inferred.
    raise StopIteration(dict(node=self, context=context))


nodes.Call._infer = infer_call
@decorators.path_wrapper
def infer_import(self, context=None, asname=True):
    """infer an Import node: return the imported module/object"""
    name = context.lookupname
    if name is None:
        raise exceptions.InferenceError(node=self, context=context)
    try:
        # With asname, resolve the alias back to the real module name.
        target = self.real_name(name) if asname else name
        yield self.do_import_module(target)
    except exceptions.AstroidBuildingError as exc:
        util.reraise(exceptions.InferenceError(node=self, error=exc,
                                               context=context))


nodes.Import._infer = infer_import
def infer_name_module(self, name):
    """Infer the module bound by this Import statement under *name*."""
    lookup_context = contextmod.InferenceContext()
    lookup_context.lookupname = name
    return self.infer(lookup_context, asname=False)


nodes.Import.infer_name_module = infer_name_module
@decorators.path_wrapper
def infer_import_from(self, context=None, asname=True):
    """infer a ImportFrom node: return the imported module/object"""
    name = context.lookupname
    if name is None:
        raise exceptions.InferenceError(node=self, context=context)
    if asname:
        # Resolve an alias back to the real imported name.
        name = self.real_name(name)

    try:
        module = self.do_import_module()
    except exceptions.AstroidBuildingError as exc:
        util.reraise(exceptions.InferenceError(node=self, error=exc,
                                               context=context))

    try:
        context = contextmod.copy_context(context)
        context.lookupname = name
        # NOTE(review): ignore_locals is set when importing from the module
        # currently being analyzed — presumably to avoid self-referential
        # lookups; confirm against Module.getattr.
        stmts = module.getattr(name, ignore_locals=module is self.root())
        return bases._infer_stmts(stmts, context)
    except exceptions.AttributeInferenceError as error:
        util.reraise(exceptions.InferenceError(
            error.message, target=self, attribute=name, context=context))

nodes.ImportFrom._infer = infer_import_from
@decorators.raise_if_nothing_inferred
def infer_attribute(self, context=None):
    """infer an Attribute node by using getattr on the associated object"""
    for owner in self.expr.infer(context):
        if owner is util.Uninferable:
            yield owner
            continue

        if context and context.boundnode:
            # This handles the situation where the attribute is accessed through a subclass
            # of a base class and the attribute is defined at the base class's level,
            # by taking in consideration a redefinition in the subclass.
            if (isinstance(owner, bases.Instance)
                    and isinstance(context.boundnode, bases.Instance)):
                try:
                    if helpers.is_subtype(helpers.object_type(context.boundnode),
                                          helpers.object_type(owner)):
                        owner = context.boundnode
                except exceptions._NonDeducibleTypeHierarchy:
                    # Can't determine anything useful.
                    pass

        try:
            # Temporarily bind the owner so nested lookups can resolve it,
            # and always clear it again so sibling inferences are not
            # polluted by a stale boundnode.
            context.boundnode = owner
            for obj in owner.igetattr(self.attrname, context):
                yield obj
            context.boundnode = None
        except (exceptions.AttributeInferenceError, exceptions.InferenceError):
            context.boundnode = None
        except AttributeError:
            # XXX method / function
            context.boundnode = None
    # Explicit StopIteration to return error information, see comment
    # in raise_if_nothing_inferred.
    raise StopIteration(dict(node=self, context=context))

nodes.Attribute._infer = decorators.path_wrapper(infer_attribute)
nodes.AssignAttr.infer_lhs = infer_attribute # # won't work with a path wrapper
@decorators.path_wrapper
def infer_global(self, context=None):
    """Infer a Global node from the module-level binding of the name."""
    if context.lookupname is None:
        raise exceptions.InferenceError(node=self, context=context)
    try:
        assignments = self.root().getattr(context.lookupname)
        return bases._infer_stmts(assignments, context)
    except exceptions.AttributeInferenceError as error:
        util.reraise(exceptions.InferenceError(
            error.message, target=self, attribute=context.lookupname,
            context=context))


nodes.Global._infer = infer_global
# Sentinel distinguishing "no index could be deduced" from a legitimately
# inferred index value.
_SUBSCRIPT_SENTINEL = object()

@decorators.raise_if_nothing_inferred
def infer_subscript(self, context=None):
    """Inference for subscripts

    We're understanding if the index is a Const
    or a slice, passing the result of inference
    to the value's `getitem` method, which should
    handle each supported index type accordingly.
    """
    value = next(self.value.infer(context))
    if value is util.Uninferable:
        yield util.Uninferable
        return

    index = next(self.slice.infer(context))
    if index is util.Uninferable:
        yield util.Uninferable
        return

    # Try to deduce the index value.
    index_value = _SUBSCRIPT_SENTINEL
    if value.__class__ == bases.Instance:
        index_value = index
    else:
        if index.__class__ == bases.Instance:
            # An instance may still act as an index via __index__.
            instance_as_index = helpers.class_instance_as_index(index)
            if instance_as_index:
                index_value = instance_as_index
        else:
            index_value = index
    if index_value is _SUBSCRIPT_SENTINEL:
        raise exceptions.InferenceError(node=self, context=context)

    try:
        assigned = value.getitem(index_value, context)
    except (exceptions.AstroidTypeError,
            exceptions.AstroidIndexError,
            exceptions.AttributeInferenceError,
            AttributeError) as exc:
        util.reraise(exceptions.InferenceError(node=self, error=exc,
                                               context=context))

    # Prevent inferring if the inferred subscript
    # is the same as the original subscripted object.
    if self is assigned or assigned is util.Uninferable:
        yield util.Uninferable
        return
    for inferred in assigned.infer(context):
        yield inferred
    # Explicit StopIteration to return error information, see comment
    # in raise_if_nothing_inferred.
    raise StopIteration(dict(node=self, context=context))

nodes.Subscript._infer = decorators.path_wrapper(infer_subscript)
nodes.Subscript.infer_lhs = infer_subscript
@decorators.raise_if_nothing_inferred
@decorators.path_wrapper
def _infer_boolop(self, context=None):
    """Infer a boolean operation (and / or / not).

    The function will calculate the boolean operation
    for all pairs generated through inference for each component
    node.
    """
    values = self.values
    # `or` yields the first truthy operand, `and` the first falsy one.
    if self.op == 'or':
        predicate = operator.truth
    else:
        predicate = operator.not_

    try:
        # One lazy inference generator per operand.
        values = [value.infer(context=context) for value in values]
    except exceptions.InferenceError:
        yield util.Uninferable
        return

    for pair in itertools.product(*values):
        if any(item is util.Uninferable for item in pair):
            # Can't infer the final result, just yield Uninferable.
            yield util.Uninferable
            continue

        bool_values = [item.bool_value() for item in pair]
        if any(item is util.Uninferable for item in bool_values):
            # Can't infer the final result, just yield Uninferable.
            yield util.Uninferable
            continue

        # Since the boolean operations are short circuited operations,
        # this code yields the first value for which the predicate is True
        # and if no value respected the predicate, then the last value will
        # be returned (or Uninferable if there was no last value).
        # This is conforming to the semantics of `and` and `or`:
        #   1 and 0 -> 0
        #   0 and 1 -> 0
        #   1 or 0 -> 1
        #   0 or 1 -> 1
        value = util.Uninferable
        for value, bool_value in zip(pair, bool_values):
            if predicate(bool_value):
                yield value
                break
        else:
            yield value
    # Explicit StopIteration to return error information, see comment
    # in raise_if_nothing_inferred.
    raise StopIteration(dict(node=self, context=context))

nodes.BoolOp._infer = _infer_boolop
# UnaryOp, BinOp and AugAssign inferences | |||||
def _filter_operation_errors(self, infer_callable, context, error): | |||||
for result in infer_callable(self, context): | |||||
if isinstance(result, error): | |||||
# For the sake of .infer(), we don't care about operation | |||||
# errors, which is the job of pylint. So return something | |||||
# which shows that we can't infer the result. | |||||
yield util.Uninferable | |||||
else: | |||||
yield result | |||||
def _infer_unaryop(self, context=None):
    """Infer what an UnaryOp should return when evaluated.

    Yields inferred nodes, Uninferable, or BadUnaryOperationMessage
    objects; the latter are filtered out by infer_unaryop below.
    """
    for operand in self.operand.infer(context):
        try:
            yield operand.infer_unary_op(self.op)
        except TypeError as exc:
            # The operand doesn't support this operation.
            yield util.BadUnaryOperationMessage(operand, self.op, exc)
        except AttributeError as exc:
            # No direct infer_unary_op support: fall back to looking up
            # the operator's dunder method on the operand.
            meth = protocols.UNARY_OP_METHOD[self.op]
            if meth is None:
                # `not node`. Determine node's boolean
                # value and negate its result, unless it is
                # Uninferable, which will be returned as is.
                bool_value = operand.bool_value()
                if bool_value is not util.Uninferable:
                    yield nodes.const_factory(not bool_value)
                else:
                    yield util.Uninferable
            else:
                if not isinstance(operand, (bases.Instance, nodes.ClassDef)):
                    # The operation was used on something which
                    # doesn't support it.
                    yield util.BadUnaryOperationMessage(operand, self.op, exc)
                    continue

                try:
                    try:
                        methods = dunder_lookup.lookup(operand, meth)
                    except exceptions.AttributeInferenceError:
                        yield util.BadUnaryOperationMessage(operand, self.op, exc)
                        continue

                    meth = methods[0]
                    inferred = next(meth.infer(context=context))
                    if inferred is util.Uninferable or not inferred.callable():
                        continue

                    # Simulate calling the dunder with the operand itself.
                    context = contextmod.copy_context(context)
                    context.callcontext = contextmod.CallContext(args=[operand])
                    call_results = inferred.infer_call_result(self, context=context)
                    result = next(call_results, None)
                    if result is None:
                        # Failed to infer, return the same type.
                        yield operand
                    else:
                        yield result
                except exceptions.AttributeInferenceError as exc:
                    # The unary operation special method was not found.
                    yield util.BadUnaryOperationMessage(operand, self.op, exc)
                except exceptions.InferenceError:
                    yield util.Uninferable
@decorators.raise_if_nothing_inferred
@decorators.path_wrapper
def infer_unaryop(self, context=None):
    """Infer what an UnaryOp should return when evaluated."""
    results = _filter_operation_errors(self, _infer_unaryop, context,
                                       util.BadUnaryOperationMessage)
    for inferred in results:
        yield inferred
    # Explicit StopIteration to return error information, see comment
    # in raise_if_nothing_inferred.
    raise StopIteration(dict(node=self, context=context))


nodes.UnaryOp._infer_unaryop = _infer_unaryop
nodes.UnaryOp._infer = infer_unaryop
def _is_not_implemented(const):
    """Check if the given const node is NotImplemented."""
    if not isinstance(const, nodes.Const):
        return False
    return const.value is NotImplemented
def _invoke_binop_inference(instance, opnode, op, other, context, method_name):
    """Invoke binary operation inference on the given instance."""
    # Use the first (most derived) dunder found for the operator.
    method = dunder_lookup.lookup(instance, method_name)[0]
    inferred = next(method.infer(context=context))
    if inferred is util.Uninferable:
        raise exceptions.InferenceError
    return instance.infer_binary_op(opnode, op, other, context, inferred)
def _aug_op(instance, opnode, op, other, context, reverse=False):
    """Get an inference callable for an augmented binary operation."""
    return functools.partial(
        _invoke_binop_inference,
        instance=instance,
        op=op, opnode=opnode, other=other,
        context=context,
        method_name=protocols.AUGMENTED_OP_METHOD[op])
def _bin_op(instance, opnode, op, other, context, reverse=False):
    """Get an inference callable for a normal binary operation.

    If *reverse* is True, then the reflected method will be used instead.
    """
    op_table = (protocols.REFLECTED_BIN_OP_METHOD if reverse
                else protocols.BIN_OP_METHOD)
    return functools.partial(
        _invoke_binop_inference,
        instance=instance,
        op=op, opnode=opnode, other=other,
        context=context,
        method_name=op_table[op])
def _get_binop_contexts(context, left, right):
    """Get contexts for binary operations.

    This will return two inference contexts, the first one
    for x.__op__(y), the other one for y.__rop__(x), where
    only the arguments are inversed.
    """
    # The order is important, since the first one should be
    # left.__op__(right).
    for call_arg in (right, left):
        sub_context = context.clone()
        sub_context.callcontext = contextmod.CallContext(args=[call_arg])
        sub_context.boundnode = None
        yield sub_context
def _same_type(type1, type2): | |||||
"""Check if type1 is the same as type2.""" | |||||
return type1.qname() == type2.qname() | |||||
def _get_binop_flow(left, left_type, binary_opnode, right, right_type,
                    context, reverse_context):
    """Get the flow for binary operations.

    The rules are a bit messy:

        * if left and right have the same type, then only one
          method will be called, left.__op__(right)
        * if left and right are unrelated typewise, then first
          left.__op__(right) is tried and if this does not exist
          or returns NotImplemented, then right.__rop__(left) is tried.
        * if left is a subtype of right, then only left.__op__(right)
          is tried.
        * if left is a supertype of right, then right.__rop__(left)
          is first tried and then left.__op__(right)

    Returns a list of zero-argument callables (built by _bin_op), to be
    attempted in order by _infer_binary_operation.
    """
    op = binary_opnode.op
    if _same_type(left_type, right_type):
        methods = [_bin_op(left, binary_opnode, op, right, context)]
    elif helpers.is_subtype(left_type, right_type):
        methods = [_bin_op(left, binary_opnode, op, right, context)]
    elif helpers.is_supertype(left_type, right_type):
        methods = [_bin_op(right, binary_opnode, op, left, reverse_context, reverse=True),
                   _bin_op(left, binary_opnode, op, right, context)]
    else:
        methods = [_bin_op(left, binary_opnode, op, right, context),
                   _bin_op(right, binary_opnode, op, left, reverse_context, reverse=True)]
    return methods
def _get_aug_flow(left, left_type, aug_opnode, right, right_type,
                  context, reverse_context):
    """Get the flow for augmented binary operations.

    The rules are a bit messy:

        * if left and right have the same type, then left.__augop__(right)
          is first tried and then left.__op__(right).
        * if left and right are unrelated typewise, then
          left.__augop__(right) is tried, then left.__op__(right)
          is tried and then right.__rop__(left) is tried.
        * if left is a subtype of right, then left.__augop__(right)
          is tried and then left.__op__(right).
        * if left is a supertype of right, then left.__augop__(right)
          is tried, then right.__rop__(left) and then
          left.__op__(right)

    Returns a list of zero-argument callables, to be attempted in order
    by _infer_binary_operation.
    """
    # '+=' -> binary '+' fallback, etc.
    bin_op = aug_opnode.op.strip("=")
    aug_op = aug_opnode.op
    if _same_type(left_type, right_type):
        methods = [_aug_op(left, aug_opnode, aug_op, right, context),
                   _bin_op(left, aug_opnode, bin_op, right, context)]
    elif helpers.is_subtype(left_type, right_type):
        methods = [_aug_op(left, aug_opnode, aug_op, right, context),
                   _bin_op(left, aug_opnode, bin_op, right, context)]
    elif helpers.is_supertype(left_type, right_type):
        methods = [_aug_op(left, aug_opnode, aug_op, right, context),
                   _bin_op(right, aug_opnode, bin_op, left, reverse_context, reverse=True),
                   _bin_op(left, aug_opnode, bin_op, right, context)]
    else:
        methods = [_aug_op(left, aug_opnode, aug_op, right, context),
                   _bin_op(left, aug_opnode, bin_op, right, context),
                   _bin_op(right, aug_opnode, bin_op, left, reverse_context, reverse=True)]
    return methods
def _infer_binary_operation(left, right, binary_opnode, context, flow_factory):
    """Infer a binary operation between a left operand and a right operand

    This is used by both normal binary operations and augmented binary
    operations, the only difference is the flow factory used.

    Tries each candidate method (in the order decided by *flow_factory*)
    and yields the results of the first one that produces something other
    than NotImplemented; falls back to a BadBinaryOperationMessage.
    """
    context, reverse_context = _get_binop_contexts(context, left, right)
    left_type = helpers.object_type(left)
    right_type = helpers.object_type(right)
    methods = flow_factory(left, left_type, binary_opnode, right, right_type,
                           context, reverse_context)
    for method in methods:
        try:
            results = list(method())
        except AttributeError:
            continue
        except exceptions.AttributeInferenceError:
            continue
        except exceptions.InferenceError:
            yield util.Uninferable
            return
        else:
            if any(result is util.Uninferable for result in results):
                yield util.Uninferable
                return

            # TODO(cpopa): since the inference engine might return
            # more values than are actually possible, we decide
            # to return util.Uninferable if we have union types.
            if all(map(_is_not_implemented, results)):
                # This method declined; try the next candidate.
                continue
            not_implemented = sum(1 for result in results
                                  if _is_not_implemented(result))
            if not_implemented and not_implemented != len(results):
                # Can't decide yet what this is, not yet though.
                yield util.Uninferable
                return

            for result in results:
                yield result
            return
    # TODO(cpopa): yield a BadBinaryOperationMessage here,
    # since the operation is not supported
    yield util.BadBinaryOperationMessage(left_type, binary_opnode.op, right_type)
def _infer_binop(self, context):
    """Binary operation inference logic.

    Infers every (lhs, rhs) operand pair and delegates the actual operator
    resolution to _infer_binary_operation with the binary-op flow factory.
    """
    if context is None:
        context = contextmod.InferenceContext()
    left = self.left
    right = self.right

    # we use two separate contexts for evaluating lhs and rhs because
    # 1. evaluating lhs may leave some undesired entries in context.path
    #    which may not let us infer right value of rhs
    lhs_context = context.clone()
    rhs_context = context.clone()

    for lhs in left.infer(context=lhs_context):
        if lhs is util.Uninferable:
            # Don't know how to process this.
            yield util.Uninferable
            return

        for rhs in right.infer(context=rhs_context):
            if rhs is util.Uninferable:
                # Don't know how to process this.
                yield util.Uninferable
                return

            try:
                for result in _infer_binary_operation(lhs, rhs, self,
                                                      context, _get_binop_flow):
                    yield result
            except exceptions._NonDeducibleTypeHierarchy:
                # Operand type relationship could not be established.
                yield util.Uninferable
@decorators.yes_if_nothing_inferred
@decorators.path_wrapper
def infer_binop(self, context=None):
    """Public BinOp inference entry point.

    Filters util.BadBinaryOperationMessage markers out of the raw
    _infer_binop results before handing values to callers.
    """
    return _filter_operation_errors(self, _infer_binop, context,
                                    util.BadBinaryOperationMessage)

nodes.BinOp._infer_binop = _infer_binop
nodes.BinOp._infer = infer_binop
def _infer_augassign(self, context=None):
    """Inference logic for augmented binary operations (e.g. ``x += y``)."""
    if context is None:
        context = contextmod.InferenceContext()

    for lhs in self.target.infer_lhs(context=context):
        if lhs is util.Uninferable:
            # Don't know how to process this.
            yield util.Uninferable
            return

        # The rhs gets a cloned context so entries added while inferring
        # the target do not block inference of the value.
        rhs_context = context.clone()
        for rhs in self.value.infer(context=rhs_context):
            if rhs is util.Uninferable:
                # Don't know how to process this.
                yield util.Uninferable
                return

            try:
                # Same machinery as BinOp, but with the augmented flow
                # factory (__iadd__ before __add__, etc.).
                for result in _infer_binary_operation(lhs, rhs, self, context, _get_aug_flow):
                    yield result
            except exceptions._NonDeducibleTypeHierarchy:
                yield util.Uninferable
@decorators.path_wrapper
def infer_augassign(self, context=None):
    """Public AugAssign inference entry point.

    Filters util.BadBinaryOperationMessage markers out of the raw
    _infer_augassign results.
    """
    return _filter_operation_errors(self, _infer_augassign, context,
                                    util.BadBinaryOperationMessage)

nodes.AugAssign._infer_augassign = _infer_augassign
nodes.AugAssign._infer = infer_augassign

# End of binary operation inference.
def infer_arguments(self, context=None):
    """Infer an Arguments node, using the name under which it is accessed.

    The argument name being inferred must be provided through
    ``context.lookupname``. Without a context (or without a lookup name)
    there is nothing to infer, so an InferenceError is raised — previously
    a ``None`` context crashed with an AttributeError on
    ``context.lookupname`` instead.
    """
    if context is None or context.lookupname is None:
        raise exceptions.InferenceError(node=self, context=context)
    return protocols._arguments_infer_argname(self, context.lookupname, context)

nodes.Arguments._infer = infer_arguments
@decorators.path_wrapper
def infer_assign(self, context=None):
    """Infer a AssignName/AssignAttr: need to inspect the RHS part of the
    assign node.
    """
    stmt = self.statement()
    # Augmented assignments are inferred through the AugAssign node itself,
    # since the target's new value depends on the binary operation.
    if isinstance(stmt, nodes.AugAssign):
        return stmt.infer(context)

    stmts = list(self.assigned_stmts(context=context))
    return bases._infer_stmts(stmts, context)

nodes.AssignName._infer = infer_assign
nodes.AssignAttr._infer = infer_assign

# no infer method on DelName and DelAttr (expected InferenceError)
@decorators.path_wrapper
def infer_empty_node(self, context=None):
    """Infer an EmptyNode: Uninferable unless it wraps a live Python object."""
    if not self.has_underlying_object():
        yield util.Uninferable
    else:
        try:
            # Build (and infer from) an AST for the wrapped runtime object.
            for inferred in MANAGER.infer_ast_from_something(self.object,
                                                            context=context):
                yield inferred
        except exceptions.AstroidError:
            yield util.Uninferable

nodes.EmptyNode._infer = infer_empty_node
def infer_index(self, context=None):
    """Infer an Index node by delegating to the inference of its value."""
    wrapped = self.value
    return wrapped.infer(context)

nodes.Index._infer = infer_index
# TODO: move directly into bases.Instance when the dependency hell
# will be solved.
def instance_getitem(self, index, context=None):
    """Infer ``instance[index]`` by calling the instance's ``__getitem__``.

    :param index: the subscript node to pass as the call argument.
    :raises exceptions.InferenceError: when ``__getitem__`` is not a bound
        method or its call result cannot be inferred.
    """
    # Rewrap index to Const for this case
    if context:
        new_context = context.clone()
    else:
        context = new_context = contextmod.InferenceContext()

    # Create a new callcontext for providing index as an argument.
    new_context.callcontext = contextmod.CallContext(args=[index])
    new_context.boundnode = self

    method = next(self.igetattr('__getitem__', context=context))
    if not isinstance(method, bases.BoundMethod):
        raise exceptions.InferenceError(
            'Could not find __getitem__ for {node!r}.',
            node=self, context=context)

    try:
        return next(method.infer_call_result(self, new_context))
    except StopIteration:
        # Re-raise with the original traceback (py2/py3 compatible).
        util.reraise(exceptions.InferenceError(
            message='Inference for {node!r}[{index!s}] failed.',
            node=self, index=index, context=context))

bases.Instance.getitem = instance_getitem
# Copyright (c) 2016 Claudiu Popa <pcmanticore@gmail.com> | |||||
import abc | |||||
import collections | |||||
import enum | |||||
import imp | |||||
import os | |||||
import sys | |||||
import zipimport | |||||
try: | |||||
import importlib.machinery | |||||
_HAS_MACHINERY = True | |||||
except ImportError: | |||||
_HAS_MACHINERY = False | |||||
try: | |||||
from functools import lru_cache | |||||
except ImportError: | |||||
from backports.functools_lru_cache import lru_cache | |||||
from . import util | |||||
# Every kind of module this package can discover.  Mirrors (and extends)
# the constants historically exposed by the ``imp`` module.
ModuleType = enum.Enum('ModuleType', 'C_BUILTIN C_EXTENSION PKG_DIRECTORY '
                                     'PY_CODERESOURCE PY_COMPILED PY_FROZEN PY_RESOURCE '
                                     'PY_SOURCE PY_ZIPMODULE PY_NAMESPACE')

# Translation table from ``imp`` constants to ModuleType members.
_ImpTypes = {imp.C_BUILTIN: ModuleType.C_BUILTIN,
             imp.C_EXTENSION: ModuleType.C_EXTENSION,
             imp.PKG_DIRECTORY: ModuleType.PKG_DIRECTORY,
             imp.PY_COMPILED: ModuleType.PY_COMPILED,
             imp.PY_FROZEN: ModuleType.PY_FROZEN,
             imp.PY_SOURCE: ModuleType.PY_SOURCE,
            }
# These two constants only exist on some platforms/versions of ``imp``.
if hasattr(imp, 'PY_RESOURCE'):
    _ImpTypes[imp.PY_RESOURCE] = ModuleType.PY_RESOURCE
if hasattr(imp, 'PY_CODERESOURCE'):
    _ImpTypes[imp.PY_CODERESOURCE] = ModuleType.PY_CODERESOURCE
def _imp_type_to_module_type(imp_type):
    """Translate an ``imp`` module-type constant into a ModuleType member."""
    module_type = _ImpTypes[imp_type]
    return module_type
# Backing namedtuple for ModuleSpec below; the ``type`` field holds a
# ModuleType member.
_ModuleSpec = collections.namedtuple('_ModuleSpec', 'name type location '
                                     'origin submodule_search_locations')
class ModuleSpec(_ModuleSpec):
    """Defines a class similar to PEP 420's ModuleSpec.

    A module spec defines a name of a module, its type, location
    and where submodules can be found, if the module is a package.
    """

    def __new__(cls, name, module_type, location=None, origin=None,
                submodule_search_locations=None):
        # The caller-facing keyword is ``module_type``; it is stored in
        # the tuple field named ``type``.
        return super(ModuleSpec, cls).__new__(
            cls, name=name, type=module_type,
            location=location, origin=origin,
            submodule_search_locations=submodule_search_locations)
class Finder(object):
    """A finder is a class which knows how to find a particular module."""

    def __init__(self, path=None):
        # NOTE(review): falls back to sys.path for an *empty* path list as
        # well as for None, since this is a plain truthiness check.
        self._path = path or sys.path

    # NOTE(review): the class does not use abc.ABCMeta, so this decorator
    # is documentation only — a subclass missing find_module can still be
    # instantiated.
    @abc.abstractmethod
    def find_module(self, modname, module_parts, processed, submodule_path):
        """Find the given module

        Each finder is responsible for each protocol of finding, as long as
        they all return a ModuleSpec.

        :param str modname: The module which needs to be searched.
        :param list module_parts: It should be a list of strings,
                                  where each part contributes to the module's
                                  namespace.
        :param list processed: What parts from the module parts were processed
                               so far.
        :param list submodule_path: A list of paths where the module
                                    can be looked into.
        :returns: A ModuleSpec, describing how and where the module was found,
                  None, otherwise.
        """

    def contribute_to_path(self, spec, processed):
        """Get a list of extra paths where this finder can search."""
class ImpFinder(Finder):
    """A finder based on the imp module."""

    def find_module(self, modname, module_parts, processed, submodule_path):
        """Locate *modname* with imp.find_module; None when not found."""
        if submodule_path is not None:
            # imp.find_module requires a real list, not any iterable.
            submodule_path = list(submodule_path)
        try:
            stream, mp_filename, mp_desc = imp.find_module(modname, submodule_path)
        except ImportError:
            return None

        # Close resources.
        if stream:
            stream.close()

        return ModuleSpec(name=modname, location=mp_filename,
                          module_type=_imp_type_to_module_type(mp_desc[2]))

    def contribute_to_path(self, spec, processed):
        """Return the submodule search path for the found package."""
        if spec.location is None:
            # Builtin.
            return None

        if _is_setuptools_namespace(spec.location):
            # extend_path is called, search sys.path for module/packages
            # of this name see pkgutil.extend_path documentation
            path = [os.path.join(p, *processed) for p in sys.path
                    if os.path.isdir(os.path.join(p, *processed))]
        else:
            path = [spec.location]
        return path
class ExplicitNamespacePackageFinder(ImpFinder):
    """A finder for the explicit namespace packages, generated through pkg_resources."""

    def find_module(self, modname, module_parts, processed, submodule_path):
        """Return a PY_NAMESPACE spec when *modname* is a registered namespace."""
        if processed:
            # Namespace registration uses fully-qualified dotted names.
            modname = '.'.join(processed + [modname])
        if util.is_namespace(modname) and modname in sys.modules:
            submodule_path = sys.modules[modname].__path__
            return ModuleSpec(name=modname, location='',
                              origin='namespace',
                              module_type=ModuleType.PY_NAMESPACE,
                              submodule_search_locations=submodule_path)
        return None

    def contribute_to_path(self, spec, processed):
        return spec.submodule_search_locations
class ZipFinder(Finder):
    """Finder that knows how to find a module inside zip files."""

    def __init__(self, path):
        super(ZipFinder, self).__init__(path)
        # Prime sys.path_importer_cache with a zipimporter per path entry,
        # so the search below can reuse them.
        self._zipimporters = _precache_zipimporters(path)

    def find_module(self, modname, module_parts, processed, submodule_path):
        """Return a PY_ZIPMODULE spec when the module lives in a zip/egg."""
        try:
            file_type, filename, path = _search_zip(module_parts, self._zipimporters)
        except ImportError:
            return None

        return ModuleSpec(name=modname, location=filename,
                          origin='egg', module_type=file_type,
                          submodule_search_locations=path)
class PathSpecFinder(Finder):
    """Finder based on importlib.machinery.PathFinder."""

    def find_module(self, modname, module_parts, processed, submodule_path):
        """Convert an importlib spec for *modname* into our ModuleSpec."""
        spec = importlib.machinery.PathFinder.find_spec(modname, path=submodule_path)
        if spec:
            # Namespace packages report origin 'namespace' and have no
            # single file location.
            location = spec.origin if spec.origin != 'namespace' else None
            module_type = ModuleType.PY_NAMESPACE if spec.origin == 'namespace' else None
            spec = ModuleSpec(name=spec.name, location=location,
                              origin=spec.origin, module_type=module_type,
                              submodule_search_locations=list(spec.submodule_search_locations
                                                              or []))
        return spec

    def contribute_to_path(self, spec, processed):
        if spec.type == ModuleType.PY_NAMESPACE:
            return spec.submodule_search_locations
        return None
# Finder classes, tried in this order by _find_spec_with_path.
_SPEC_FINDERS = (
    ImpFinder,
    ZipFinder,
)
# The importlib.machinery-based finder needs Python 3.4+.
if _HAS_MACHINERY and sys.version_info[:2] >= (3, 4):
    _SPEC_FINDERS += (PathSpecFinder, )
# The namespace-package finder always goes last, as a fallback.
_SPEC_FINDERS += (ExplicitNamespacePackageFinder, )
def _is_setuptools_namespace(location): | |||||
try: | |||||
with open(os.path.join(location, '__init__.py'), 'rb') as stream: | |||||
data = stream.read(4096) | |||||
except IOError: | |||||
pass | |||||
else: | |||||
extend_path = b'pkgutil' in data and b'extend_path' in data | |||||
declare_namespace = ( | |||||
b"pkg_resources" in data | |||||
and b"declare_namespace(__name__)" in data) | |||||
return extend_path or declare_namespace | |||||
@lru_cache() | |||||
def _cached_set_diff(left, right): | |||||
result = set(left) | |||||
result.difference_update(right) | |||||
return result | |||||
def _precache_zipimporters(path=None):
    """Ensure ``sys.path_importer_cache`` has a zipimporter per path entry.

    Mutates (and returns) the global ``sys.path_importer_cache``; entries
    that are not zip archives raise ZipImportError and are skipped.
    """
    pic = sys.path_importer_cache

    # When measured, despite having the same complexity (O(n)),
    # converting to tuples and then caching the conversion to sets
    # and the set difference is faster than converting to sets
    # and then only caching the set difference.

    req_paths = tuple(path or sys.path)
    cached_paths = tuple(pic)
    new_paths = _cached_set_diff(req_paths, cached_paths)
    for entry_path in new_paths:
        try:
            pic[entry_path] = zipimport.zipimporter(entry_path)
        except zipimport.ZipImportError:
            # Not a zip archive; leave it out of the cache.
            continue
    return pic
def _search_zip(modpath, pic): | |||||
for filepath, importer in list(pic.items()): | |||||
if importer is not None: | |||||
found = importer.find_module(modpath[0]) | |||||
if found: | |||||
if not importer.find_module(os.path.sep.join(modpath)): | |||||
raise ImportError('No module named %s in %s/%s' % ( | |||||
'.'.join(modpath[1:]), filepath, modpath)) | |||||
#import code; code.interact(local=locals()) | |||||
return (ModuleType.PY_ZIPMODULE, | |||||
os.path.abspath(filepath) + os.path.sep + os.path.sep.join(modpath), | |||||
filepath) | |||||
raise ImportError('No module named %s' % '.'.join(modpath)) | |||||
def _find_spec_with_path(search_path, modname, module_parts, processed, submodule_path):
    """Query each spec finder in turn and return the first (finder, spec) hit.

    :raises ImportError: when no registered finder locates the module.
    """
    # All finders are instantiated up-front on purpose: constructing
    # ZipFinder pre-caches zipimporters as a side effect.
    finders = [factory(search_path) for factory in _SPEC_FINDERS]
    for finder in finders:
        spec = finder.find_module(modname, module_parts, processed, submodule_path)
        if spec is not None:
            return finder, spec
    raise ImportError('No module named %s' % '.'.join(module_parts))
def find_spec(modpath, path=None):
    """Find a spec for the given module.

    :type modpath: list or tuple
    :param modpath:
      split module's name (i.e name of a module or package split
      on '.'), with leading empty strings for explicit relative import

    :type path: list or None
    :param path:
      optional list of path where the module or package should be
      searched (use sys.path if nothing or None is given)

    :rtype: ModuleSpec
    :return: A module spec, which describes how the module was
             found and where.
    """
    _path = path or sys.path

    # Need a copy for not mutating the argument.
    modpath = modpath[:]

    submodule_path = None
    module_parts = modpath[:]
    processed = []

    # Resolve one dotted component at a time, threading each package's
    # submodule search path into the lookup of the next component.
    while modpath:
        modname = modpath.pop(0)
        finder, spec = _find_spec_with_path(_path, modname,
                                            module_parts, processed,
                                            submodule_path or path)
        processed.append(modname)
        if modpath:
            submodule_path = finder.contribute_to_path(spec, processed)

        if spec.type == ModuleType.PKG_DIRECTORY:
            spec = spec._replace(submodule_search_locations=submodule_path)

    # The spec of the last processed component describes the module.
    return spec
# Copyright (c) 2016 Claudiu Popa <pcmanticore@gmail.com> | |||||
try: | |||||
import pkg_resources | |||||
except ImportError: | |||||
pkg_resources = None | |||||
def is_namespace(modname):
    """Return True if *modname* was registered as a pkg_resources namespace."""
    if pkg_resources is None:
        # pkg_resources could not be imported at module load time.
        return False
    # pylint: disable=no-member; astroid issue #290, modifying globals at runtime.
    return modname in pkg_resources._namespace_packages
# Copyright (c) 2016 Claudiu Popa <pcmanticore@gmail.com> | |||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html | |||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER | |||||
"""Contains logic for retrieving special methods. | |||||
This implementation does not rely on the dot attribute access | |||||
logic, found in ``.getattr()``. The difference between these two | |||||
is that the dunder methods are looked with the type slots | |||||
(you can find more about these here | |||||
http://lucumr.pocoo.org/2014/8/16/the-python-i-would-like-to-see/) | |||||
As such, the lookup for the special methods is actually simpler than | |||||
the dot attribute access. | |||||
""" | |||||
import itertools | |||||
import astroid | |||||
from astroid import exceptions | |||||
def _lookup_in_mro(node, name): | |||||
attrs = node.locals.get(name, []) | |||||
nodes = itertools.chain.from_iterable( | |||||
ancestor.locals.get(name, []) | |||||
for ancestor in node.ancestors(recurs=True) | |||||
) | |||||
values = list(itertools.chain(attrs, nodes)) | |||||
if not values: | |||||
raise exceptions.AttributeInferenceError( | |||||
attribute=name, | |||||
target=node | |||||
) | |||||
return values | |||||
def lookup(node, name):
    """Lookup the given special method name in the given *node*.

    If the special method was found, then a list of attributes
    will be returned. Otherwise, `astroid.AttributeInferenceError`
    is going to be raised.
    """
    literal_nodes = (astroid.List,
                     astroid.Tuple,
                     astroid.Const,
                     astroid.Dict,
                     astroid.Set)
    if isinstance(node, literal_nodes):
        # Container/constant literals resolve through their own locals.
        return _builtin_lookup(node, name)
    if isinstance(node, astroid.Instance):
        return _lookup_in_mro(node, name)
    if isinstance(node, astroid.ClassDef):
        # Special methods of a class are looked up on its metaclass.
        return _class_lookup(node, name)

    raise exceptions.AttributeInferenceError(
        attribute=name,
        target=node
    )
def _class_lookup(node, name):
    """Look up *name* through the metaclass's MRO; raise without a metaclass."""
    meta = node.metaclass()
    if meta is None:
        raise exceptions.AttributeInferenceError(
            attribute=name,
            target=node
        )
    return _lookup_in_mro(meta, name)
def _builtin_lookup(node, name): | |||||
values = node.locals.get(name, []) | |||||
if not values: | |||||
raise exceptions.AttributeInferenceError( | |||||
attribute=name, | |||||
target=node | |||||
) | |||||
return values |
# Copyright (c) 2016 Claudiu Popa <pcmanticore@gmail.com> | |||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html | |||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER | |||||
""" | |||||
Data object model, as per https://docs.python.org/3/reference/datamodel.html. | |||||
This module describes, at least partially, a data object model for some | |||||
of astroid's nodes. The model contains special attributes that nodes such | |||||
as functions, classes, modules etc have, such as __doc__, __class__, | |||||
__module__ etc, being used when doing attribute lookups over nodes. | |||||
For instance, inferring `obj.__class__` will first trigger an inference | |||||
of the `obj` variable. If it was succesfully inferred, then an attribute | |||||
`__class__ will be looked for in the inferred object. This is the part | |||||
where the data model occurs. The model is attached to those nodes | |||||
and the lookup mechanism will try to see if attributes such as | |||||
`__class__` are defined by the model or not. If they are defined, | |||||
the model will be requested to return the corresponding value of that | |||||
attribute. Thus the model can be viewed as a special part of the lookup | |||||
mechanism. | |||||
""" | |||||
try: | |||||
from functools import lru_cache | |||||
except ImportError: | |||||
from backports.functools_lru_cache import lru_cache | |||||
import itertools | |||||
import pprint | |||||
import os | |||||
import types | |||||
import six | |||||
import astroid | |||||
from astroid import context as contextmod | |||||
from astroid import exceptions | |||||
from astroid import node_classes | |||||
def _dunder_dict(instance, attributes):
    """Build a Dict node mirroring the *attributes* mapping.

    *attributes* maps names to lists of nodes; only the last node of each
    list is kept, since that is the binding a lookup would observe.
    """
    obj = node_classes.Dict(parent=instance)

    # Keys must be Const nodes, not plain strings.
    keys = [node_classes.Const(value=key, parent=obj)
            for key in attributes]
    values = [stmts[-1] for stmts in attributes.values()]

    obj.postinit(list(zip(keys, values)))
    return obj
class ObjectModel(object):
    """Base class for the special-attribute models attached to nodes.

    Subclasses expose one ``py<name>`` property per modelled special
    attribute (e.g. ``py__doc__`` models ``__doc__``); ``attributes`` and
    ``lookup`` translate between real attribute names and those properties.
    """

    def __init__(self):
        # The node through which the model was accessed; set by __call__.
        self._instance = None

    def __repr__(self):
        result = []
        cname = type(self).__name__
        string = '%(cname)s(%(fields)s)'
        alignment = len(cname) + 1
        for field in sorted(self.attributes()):
            # The previous implementation also pretty-printed each field
            # into a throwaway buffer that was never used; that dead code
            # is removed — the output is unchanged.
            result.append(field)
        return string % {'cname': cname,
                         'fields': (',\n' + ' ' * alignment).join(result)}

    def __call__(self, instance):
        self._instance = instance
        return self

    def __get__(self, instance, cls=None):
        # ObjectModel needs to be a descriptor so that just doing
        # `special_attributes = SomeObjectModel` should be enough in the body of a node.
        # But at the same time, node.special_attributes should return an object
        # which can be used for manipulating the special attributes. That's the reason
        # we pass the instance through which it got accessed to ObjectModel.__call__,
        # returning itself afterwards, so we can still have access to the
        # underlying data model and to the instance for which it got accessed.
        return self(instance)

    def __contains__(self, name):
        return name in self.attributes()

    @lru_cache(maxsize=None)
    def attributes(self):
        """Get the attributes which are exported by this object model."""
        # NOTE(review): lru_cache on an instance method keys on ``self``
        # and keeps every model instance alive for the cache's lifetime.
        return [obj[2:] for obj in dir(self) if obj.startswith('py')]

    def lookup(self, name):
        """Look up the given *name* in the current model.

        It should return an AST or an interpreter object,
        but if the name is not found, then an AttributeInferenceError will be raised.
        """
        if name in self.attributes():
            return getattr(self, "py" + name)
        raise exceptions.AttributeInferenceError(target=self._instance, attribute=name)
class ModuleModel(ObjectModel):
    """Special-attribute model for Module nodes."""

    def _builtins(self):
        # Reuse the cached builtins module and expose its __dict__ model.
        builtins = astroid.MANAGER.astroid_cache[six.moves.builtins.__name__]
        return builtins.special_attributes.lookup('__dict__')

    if six.PY3:
        @property
        def pybuiltins(self):
            # Python 3 names the attribute ``builtins``...
            return self._builtins()

    else:
        @property
        def py__builtin__(self):
            # ...while Python 2 names it ``__builtin__``.
            return self._builtins()

    # __path__ is a standard attribute on *packages* not
    # non-package modules. The only mention of it in the
    # official 2.7 documentation I can find is in the
    # tutorial.
    @property
    def py__path__(self):
        if not self._instance.package:
            raise exceptions.AttributeInferenceError(target=self._instance,
                                                     attribute='__path__')
        if isinstance(self._instance.path, list):
            path_objs = [
                node_classes.Const(value=path, parent=self._instance)
                for path in self._instance.path
            ]
        else:
            # A single file path: __path__ holds its containing directory.
            path = os.path.dirname(self._instance.path)
            path_objs = [node_classes.Const(value=path, parent=self._instance)]
        container = node_classes.List(parent=self._instance)
        container.postinit(path_objs)
        return container

    @property
    def py__name__(self):
        return node_classes.Const(value=self._instance.name,
                                  parent=self._instance)

    @property
    def py__doc__(self):
        return node_classes.Const(value=self._instance.doc,
                                  parent=self._instance)

    @property
    def py__file__(self):
        return node_classes.Const(value=self._instance.file,
                                  parent=self._instance)

    @property
    def py__dict__(self):
        return _dunder_dict(self._instance, self._instance.globals)

    # __package__ isn't mentioned anywhere outside a PEP:
    # https://www.python.org/dev/peps/pep-0366/
    @property
    def py__package__(self):
        if not self._instance.package:
            value = ''
        else:
            value = self._instance.name
        return node_classes.Const(value=value, parent=self._instance)

    # These are related to the Python 3 implementation of the
    # import system,
    # https://docs.python.org/3/reference/import.html#import-related-module-attributes

    @property
    def py__spec__(self):
        # No handling for now.
        return node_classes.Unknown()

    @property
    def py__loader__(self):
        # No handling for now.
        return node_classes.Unknown()

    @property
    def py__cached__(self):
        # No handling for now.
        return node_classes.Unknown()
class FunctionModel(ObjectModel):
    """Special-attribute model for functions (FunctionDef nodes)."""

    @property
    def py__name__(self):
        return node_classes.Const(value=self._instance.name,
                                  parent=self._instance)

    @property
    def py__doc__(self):
        return node_classes.Const(value=self._instance.doc,
                                  parent=self._instance)

    @property
    def py__qualname__(self):
        return node_classes.Const(value=self._instance.qname(),
                                  parent=self._instance)

    @property
    def py__defaults__(self):
        func = self._instance
        if not func.args.defaults:
            # Mirror the runtime: without defaults, __defaults__ is None,
            # not an empty tuple.
            return node_classes.Const(value=None, parent=func)

        defaults_obj = node_classes.Tuple(parent=func)
        defaults_obj.postinit(func.args.defaults)
        return defaults_obj

    @property
    def py__annotations__(self):
        obj = node_classes.Dict(parent=self._instance)

        if not self._instance.returns:
            returns = None
        else:
            returns = self._instance.returns

        args = self._instance.args
        # Pair positional and keyword-only arguments with their annotations.
        pair_annotations = itertools.chain(
            six.moves.zip(args.args or [], args.annotations),
            six.moves.zip(args.kwonlyargs, args.kwonlyargs_annotations)
        )

        annotations = {
            arg.name: annotation
            for (arg, annotation) in pair_annotations
            if annotation
        }
        if args.varargannotation:
            annotations[args.vararg] = args.varargannotation
        if args.kwargannotation:
            annotations[args.kwarg] = args.kwargannotation
        if returns:
            annotations['return'] = returns

        items = [(node_classes.Const(key, parent=obj), value)
                 for (key, value) in annotations.items()]

        obj.postinit(items)
        return obj

    @property
    def py__dict__(self):
        return node_classes.Dict(parent=self._instance)

    # A function's __globals__ is modelled the same way as its __dict__.
    py__globals__ = py__dict__

    @property
    def py__kwdefaults__(self):

        def _default_args(args, parent):
            # Yield (Const(name), default) for keyword-only arguments
            # that actually carry a default value.
            for arg in args.kwonlyargs:
                try:
                    default = args.default_value(arg.name)
                except exceptions.NoDefault:
                    continue

                name = node_classes.Const(arg.name, parent=parent)
                yield name, default

        args = self._instance.args
        obj = node_classes.Dict(parent=self._instance)
        defaults = dict(_default_args(args, obj))

        obj.postinit(list(defaults.items()))
        return obj

    @property
    def py__module__(self):
        return node_classes.Const(self._instance.root().qname())

    @property
    def py__get__(self):
        from astroid import bases

        func = self._instance

        class DescriptorBoundMethod(bases.BoundMethod):
            """Bound method which knows how to understand calling descriptor binding."""
            def infer_call_result(self, caller, context=None):
                # __get__ takes exactly (instance, owner).
                if len(caller.args) != 2:
                    raise exceptions.InferenceError(
                        "Invalid arguments for descriptor binding",
                        target=self, context=context)

                context = contextmod.copy_context(context)
                cls = next(caller.args[0].infer(context=context))

                # Rebuild the original value, but with the parent set as the
                # class where it will be bound.
                new_func = func.__class__(name=func.name, doc=func.doc,
                                          lineno=func.lineno, col_offset=func.col_offset,
                                          parent=cls)
                # pylint: disable=no-member
                new_func.postinit(func.args, func.body,
                                  func.decorators, func.returns)

                # Build a proper bound method that points to our newly built function.
                proxy = bases.UnboundMethod(new_func)
                yield bases.BoundMethod(proxy=proxy, bound=cls)

        return DescriptorBoundMethod(proxy=self._instance, bound=self._instance)

    # These are here just for completion.
    @property
    def py__ne__(self):
        return node_classes.Unknown()

    py__subclasshook__ = py__ne__
    py__str__ = py__ne__
    py__sizeof__ = py__ne__
    py__setattr__ = py__ne__
    py__repr__ = py__ne__
    py__reduce__ = py__ne__
    py__reduce_ex__ = py__ne__
    py__new__ = py__ne__
    py__lt__ = py__ne__
    py__eq__ = py__ne__
    py__gt__ = py__ne__
    py__format__ = py__ne__
    py__delattr__ = py__ne__
    py__getattribute__ = py__ne__
    py__hash__ = py__ne__
    py__init__ = py__ne__
    py__dir__ = py__ne__
    py__call__ = py__ne__
    py__class__ = py__ne__
    py__closure__ = py__ne__
    py__code__ = py__ne__

    if six.PY2:
        # Python 2 exposes the same data under the func_* aliases.
        pyfunc_name = py__name__
        pyfunc_doc = py__doc__
        pyfunc_globals = py__globals__
        pyfunc_dict = py__dict__
        pyfunc_defaults = py__defaults__
        pyfunc_code = py__code__
        pyfunc_closure = py__closure__
class ClassModel(ObjectModel):
    """Special-attribute model for classes (ClassDef nodes)."""

    @property
    def py__module__(self):
        return node_classes.Const(self._instance.root().qname())

    @property
    def py__name__(self):
        return node_classes.Const(self._instance.name)

    @property
    def py__qualname__(self):
        return node_classes.Const(self._instance.qname())

    @property
    def py__doc__(self):
        return node_classes.Const(self._instance.doc)

    @property
    def py__mro__(self):
        if not self._instance.newstyle:
            # Old-style classes have no MRO.
            raise exceptions.AttributeInferenceError(target=self._instance,
                                                     attribute='__mro__')

        mro = self._instance.mro()
        obj = node_classes.Tuple(parent=self._instance)
        obj.postinit(mro)
        return obj

    @property
    def pymro(self):
        if not self._instance.newstyle:
            raise exceptions.AttributeInferenceError(target=self._instance,
                                                     attribute='mro')

        from astroid import bases

        other_self = self

        # Cls.mro is a method and we need to return one in order to have a proper inference.
        # The method we're returning is capable of inferring the underlying MRO though.
        class MroBoundMethod(bases.BoundMethod):
            def infer_call_result(self, caller, context=None):
                yield other_self.py__mro__

        implicit_metaclass = self._instance.implicit_metaclass()
        mro_method = implicit_metaclass.locals['mro'][0]
        return MroBoundMethod(proxy=mro_method, bound=implicit_metaclass)

    @property
    def py__bases__(self):
        obj = node_classes.Tuple()
        context = contextmod.InferenceContext()
        elts = list(self._instance._inferred_bases(context))
        obj.postinit(elts=elts)
        return obj

    @property
    def py__class__(self):
        # Imported lazily to avoid a circular import at module load time.
        from astroid import helpers
        return helpers.object_type(self._instance)

    @property
    def py__subclasses__(self):
        """Get the subclasses of the underlying class

        This looks only in the current module for retrieving the subclasses,
        thus it might miss a couple of them.
        """
        from astroid import bases
        from astroid import scoped_nodes

        if not self._instance.newstyle:
            raise exceptions.AttributeInferenceError(target=self._instance,
                                                     attribute='__subclasses__')

        qname = self._instance.qname()
        root = self._instance.root()
        classes = [cls for cls in root.nodes_of_class(scoped_nodes.ClassDef)
                   if cls != self._instance and cls.is_subtype_of(qname)]

        obj = node_classes.List(parent=self._instance)
        obj.postinit(classes)

        # __subclasses__ is a method, so a BoundMethod that yields the
        # precomputed list is returned instead of the list itself.
        class SubclassesBoundMethod(bases.BoundMethod):
            def infer_call_result(self, caller, context=None):
                yield obj

        implicit_metaclass = self._instance.implicit_metaclass()
        subclasses_method = implicit_metaclass.locals['__subclasses__'][0]
        return SubclassesBoundMethod(proxy=subclasses_method,
                                     bound=implicit_metaclass)

    @property
    def py__dict__(self):
        return node_classes.Dict(parent=self._instance)
class SuperModel(ObjectModel):
    """Special attributes model for ``super(...)`` objects."""

    @property
    def py__thisclass__(self):
        """The class where the MRO lookup starts."""
        return self._instance.mro_pointer

    @property
    def py__self_class__(self):
        """The class of the bound object."""
        return self._instance._self_class

    @property
    def py__self__(self):
        """The object (or class) the super proxy is bound to."""
        return self._instance.type

    @property
    def py__class__(self):
        """The proxied type of the super object."""
        return self._instance._proxied
class UnboundMethodModel(ObjectModel):
    """Special attributes model for unbound methods."""

    @property
    def py__class__(self):
        """The type of the unbound method object."""
        from astroid import helpers
        return helpers.object_type(self._instance)

    @property
    def py__func__(self):
        """The plain function wrapped by the method."""
        return self._instance._proxied

    @property
    def py__self__(self):
        """Unbound methods carry no instance, hence a ``None`` Const."""
        return node_classes.Const(value=None, parent=self._instance)

    # The im_* spellings resolve to the same accessors.
    pyim_func = py__func__
    pyim_class = py__class__
    pyim_self = py__self__
class BoundMethodModel(FunctionModel):
    """Special attributes model for bound methods."""

    @property
    def py__func__(self):
        """The underlying function, unwrapped through two proxy levels."""
        return self._instance._proxied._proxied

    @property
    def py__self__(self):
        """The object the method is bound to."""
        return self._instance.bound
class GeneratorModel(FunctionModel):
    """Special attributes model for generator objects."""
    def __new__(cls, *args, **kwargs):
        # Append the values from the GeneratorType unto this object.
        ret = super(GeneratorModel, cls).__new__(cls, *args, **kwargs)
        generator = astroid.MANAGER.astroid_cache[six.moves.builtins.__name__]['generator']
        for name, values in generator.locals.items():
            method = values[0]
            # Bind the current method as a default argument so each
            # generated property captures its own method, not the last
            # one from the loop (late-binding closure pitfall).
            patched = lambda cls, meth=method: meth
            setattr(type(ret), 'py' + name, property(patched))
        return ret
    @property
    def py__name__(self):
        # The generator is named after its enclosing function.
        return node_classes.Const(value=self._instance.parent.name,
                                  parent=self._instance)
    @property
    def py__doc__(self):
        return node_classes.Const(value=self._instance.parent.doc,
                                  parent=self._instance)
class InstanceModel(ObjectModel):
    """Special attributes model for instances of a class."""

    @property
    def py__class__(self):
        """The class the instance was created from."""
        return self._instance._proxied

    @property
    def py__module__(self):
        """Const node naming the module the instance belongs to."""
        module_name = self._instance.root().qname()
        return node_classes.Const(module_name)

    @property
    def py__doc__(self):
        """Const node wrapping the ``doc`` of the underlying instance."""
        return node_classes.Const(self._instance.doc)

    @property
    def py__dict__(self):
        """Dict node built from the instance attributes."""
        return _dunder_dict(self._instance, self._instance.instance_attrs)
class ExceptionInstanceModel(InstanceModel):
    """Special attributes model for exception instances."""
    @property
    def pyargs(self):
        # Model ``exc.args`` as a one-element tuple holding an empty
        # string Const (the actual message is not inferred).
        message = node_classes.Const('')
        args = node_classes.Tuple(parent=self._instance)
        args.postinit((message, ))
        return args
    if six.PY3:
        # It's available only on Python 3.
        @property
        def py__traceback__(self):
            builtins = astroid.MANAGER.astroid_cache[six.moves.builtins.__name__]
            traceback_type = builtins[types.TracebackType.__name__]
            return traceback_type.instantiate_class()
    if six.PY2:
        # It's available only on Python 2.
        @property
        def pymessage(self):
            return node_classes.Const('')
class DictModel(ObjectModel):
    """Special attributes model for dictionaries."""

    @property
    def py__class__(self):
        """The class the dict instance was created from."""
        return self._instance._proxied

    def _generic_dict_attribute(self, obj, name):
        """Generate a bound method that can infer the given *obj*."""

        class DictMethodBoundMethod(astroid.BoundMethod):
            def infer_call_result(self, caller, context=None):
                yield obj

        method = next(self._instance._proxied.igetattr(name))
        return DictMethodBoundMethod(proxy=method, bound=self._instance)

    @property
    def pyitems(self):
        """Bound ``items`` method inferring the dict's (key, value) pairs."""
        result = node_classes.List(parent=self._instance)
        pairs = []
        for key, value in self._instance.items:
            pair = node_classes.Tuple(parent=result)
            pair.postinit((key, value))
            pairs.append(pair)
        result.postinit(elts=pairs)
        if six.PY3:
            from astroid import objects
            result = objects.DictItems(result)
        return self._generic_dict_attribute(result, 'items')

    @property
    def pykeys(self):
        """Bound ``keys`` method inferring the dict's keys."""
        result = node_classes.List(parent=self._instance)
        result.postinit(elts=[key for (key, _) in self._instance.items])
        if six.PY3:
            from astroid import objects
            result = objects.DictKeys(result)
        return self._generic_dict_attribute(result, 'keys')

    @property
    def pyvalues(self):
        """Bound ``values`` method inferring the dict's values."""
        result = node_classes.List(parent=self._instance)
        result.postinit([value for (_, value) in self._instance.items])
        if six.PY3:
            from astroid import objects
            result = objects.DictValues(result)
        return self._generic_dict_attribute(result, 'values')
# Copyright (c) 2006-2011, 2013-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr> | |||||
# Copyright (c) 2014-2016 Claudiu Popa <pcmanticore@gmail.com> | |||||
# Copyright (c) 2014 Google, Inc. | |||||
# Copyright (c) 2015-2016 Cara Vinson <ceridwenv@gmail.com> | |||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html | |||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER | |||||
"""astroid manager: avoid multiple astroid build of a same module when | |||||
possible by providing a class responsible to get astroid representation | |||||
from various source and using a cache of built modules) | |||||
""" | |||||
import os | |||||
import sys | |||||
import zipimport | |||||
import six | |||||
from astroid import exceptions | |||||
from astroid.interpreter._import import spec | |||||
from astroid import modutils | |||||
from astroid import transforms | |||||
from astroid import util | |||||
def safe_repr(obj):
    """Return ``repr(obj)``, or ``'???'`` if computing the repr raises."""
    try:
        result = repr(obj)
    except Exception:  # pylint: disable=broad-except
        result = '???'
    return result
class AstroidManager(object):
    """the astroid manager, responsible to build astroid from files
    or modules.
    Use the Borg pattern.
    """
    name = 'astroid loader'
    brain = {}
    def __init__(self):
        # Borg pattern: every instance shares the class-level ``brain``
        # dict as its __dict__, so all managers share the same state.
        self.__dict__ = AstroidManager.brain
        if not self.__dict__:
            # NOTE: cache entries are added by the [re]builder
            self.astroid_cache = {}
            self._mod_file_cache = {}
            self._failed_import_hooks = []
            self.always_load_extensions = False
            self.optimize_ast = False
            self.extension_package_whitelist = set()
            self._transform = transforms.TransformVisitor()
            # Export these APIs for convenience
            self.register_transform = self._transform.register_transform
            self.unregister_transform = self._transform.unregister_transform
    def visit_transforms(self, node):
        """Visit the transforms and apply them to the given *node*."""
        return self._transform.visit(node)
    def ast_from_file(self, filepath, modname=None, fallback=True, source=False):
        """given a module name, return the astroid object"""
        try:
            # Prefer the source file over a compiled one, if available.
            filepath = modutils.get_source_file(filepath, include_no_ext=True)
            source = True
        except modutils.NoSourceFile:
            pass
        if modname is None:
            try:
                modname = '.'.join(modutils.modpath_from_file(filepath))
            except ImportError:
                modname = filepath
        if modname in self.astroid_cache and self.astroid_cache[modname].file == filepath:
            return self.astroid_cache[modname]
        if source:
            # Local import to avoid a circular dependency with the builder.
            from astroid.builder import AstroidBuilder
            return AstroidBuilder(self).file_build(filepath, modname)
        elif fallback and modname:
            return self.ast_from_module_name(modname)
        raise exceptions.AstroidBuildingError(
            'Unable to build an AST for {path}.', path=filepath)
    def _build_stub_module(self, modname):
        # Build an empty module AST as a placeholder for *modname*.
        from astroid.builder import AstroidBuilder
        return AstroidBuilder(self).string_build('', modname)
    def _build_namespace_module(self, modname, path):
        # Build a module AST for a namespace package.
        from astroid.builder import build_namespace_package_module
        return build_namespace_package_module(modname, path)
    def _can_load_extension(self, modname):
        # C extensions are only actually imported (executed!) when allowed
        # globally, part of the stdlib, or whitelisted by package prefix.
        if self.always_load_extensions:
            return True
        if modutils.is_standard_module(modname):
            return True
        parts = modname.split('.')
        return any(
            '.'.join(parts[:x]) in self.extension_package_whitelist
            for x in range(1, len(parts) + 1))
    def ast_from_module_name(self, modname, context_file=None):
        """given a module name, return the astroid object"""
        if modname in self.astroid_cache:
            return self.astroid_cache[modname]
        if modname == '__main__':
            # __main__ has no importable source; fake an empty module.
            return self._build_stub_module(modname)
        old_cwd = os.getcwd()
        if context_file:
            # chdir so relative lookups resolve from the context file.
            os.chdir(os.path.dirname(context_file))
        try:
            found_spec = self.file_from_module_name(modname, context_file)
            # pylint: disable=no-member
            if found_spec.type == spec.ModuleType.PY_ZIPMODULE:
                # pylint: disable=no-member
                module = self.zip_import_data(found_spec.location)
                if module is not None:
                    return module
            elif found_spec.type in (spec.ModuleType.C_BUILTIN,
                                     spec.ModuleType.C_EXTENSION):
                # pylint: disable=no-member
                if (found_spec.type == spec.ModuleType.C_EXTENSION
                        and not self._can_load_extension(modname)):
                    return self._build_stub_module(modname)
                try:
                    module = modutils.load_module_from_name(modname)
                except Exception as ex: # pylint: disable=broad-except
                    util.reraise(exceptions.AstroidImportError(
                        'Loading {modname} failed with:\n{error}',
                        modname=modname, path=found_spec.location, error=ex))
                return self.ast_from_module(module, modname)
            elif found_spec.type == spec.ModuleType.PY_COMPILED:
                raise exceptions.AstroidImportError(
                    "Unable to load compiled module {modname}.",
                    # pylint: disable=no-member
                    modname=modname, path=found_spec.location)
            elif found_spec.type == spec.ModuleType.PY_NAMESPACE:
                return self._build_namespace_module(modname,
                                                    # pylint: disable=no-member
                                                    found_spec.submodule_search_locations)
            # pylint: disable=no-member
            if found_spec.location is None:
                raise exceptions.AstroidImportError(
                    "Can't find a file for module {modname}.",
                    modname=modname)
            # pylint: disable=no-member
            return self.ast_from_file(found_spec.location, modname, fallback=False)
        except exceptions.AstroidBuildingError as e:
            # Give registered hooks a chance to resolve the failed import.
            for hook in self._failed_import_hooks:
                try:
                    return hook(modname)
                except exceptions.AstroidBuildingError:
                    pass
            raise e
        finally:
            os.chdir(old_cwd)
    def zip_import_data(self, filepath):
        """Build an AST for a module inside a .zip/.egg archive, or
        return None when *filepath* does not point inside one.
        """
        if zipimport is None:
            return None
        from astroid.builder import AstroidBuilder
        builder = AstroidBuilder(self)
        for ext in ('.zip', '.egg'):
            try:
                eggpath, resource = filepath.rsplit(ext + os.path.sep, 1)
            except ValueError:
                continue
            try:
                importer = zipimport.zipimporter(eggpath + ext)
                zmodname = resource.replace(os.path.sep, '.')
                if importer.is_package(resource):
                    zmodname = zmodname + '.__init__'
                module = builder.string_build(importer.get_source(resource),
                                              zmodname, filepath)
                return module
            except Exception: # pylint: disable=broad-except
                continue
        return None
    def file_from_module_name(self, modname, contextfile):
        """Return (and cache) the file/spec info for *modname*.

        Failed lookups are cached as AstroidImportError instances and
        re-raised on later calls with the captured traceback.
        """
        try:
            value = self._mod_file_cache[(modname, contextfile)]
            traceback = sys.exc_info()[2]
        except KeyError:
            try:
                value = modutils.file_info_from_modpath(
                    modname.split('.'), context_file=contextfile)
                traceback = sys.exc_info()[2]
            except ImportError as ex:
                value = exceptions.AstroidImportError(
                    'Failed to import module {modname} with error:\n{error}.',
                    modname=modname, error=ex)
                traceback = sys.exc_info()[2]
            self._mod_file_cache[(modname, contextfile)] = value
        if isinstance(value, exceptions.AstroidBuildingError):
            # Cached errors are raised again, not returned.
            six.reraise(exceptions.AstroidBuildingError,
                        value, traceback)
        return value
    def ast_from_module(self, module, modname=None):
        """given an imported module, return the astroid object"""
        modname = modname or module.__name__
        if modname in self.astroid_cache:
            return self.astroid_cache[modname]
        try:
            # some builtin modules don't have __file__ attribute
            filepath = module.__file__
            if modutils.is_python_source(filepath):
                return self.ast_from_file(filepath, modname)
        except AttributeError:
            pass
        from astroid.builder import AstroidBuilder
        return AstroidBuilder(self).module_build(module, modname)
    def ast_from_class(self, klass, modname=None):
        """get astroid for the given class"""
        if modname is None:
            try:
                modname = klass.__module__
            except AttributeError:
                util.reraise(exceptions.AstroidBuildingError(
                    'Unable to get module for class {class_name}.',
                    cls=klass, class_repr=safe_repr(klass), modname=modname))
        modastroid = self.ast_from_module_name(modname)
        return modastroid.getattr(klass.__name__)[0] # XXX
    def infer_ast_from_something(self, obj, context=None):
        """infer astroid for the given class"""
        if hasattr(obj, '__class__') and not isinstance(obj, type):
            klass = obj.__class__
        else:
            klass = obj
        try:
            modname = klass.__module__
        except AttributeError:
            util.reraise(exceptions.AstroidBuildingError(
                'Unable to get module for {class_repr}.',
                cls=klass, class_repr=safe_repr(klass)))
        except Exception as ex: # pylint: disable=broad-except
            util.reraise(exceptions.AstroidImportError(
                'Unexpected error while retrieving module for {class_repr}:\n'
                '{error}', cls=klass, class_repr=safe_repr(klass), error=ex))
        try:
            name = klass.__name__
        except AttributeError:
            util.reraise(exceptions.AstroidBuildingError(
                'Unable to get name for {class_repr}:\n',
                cls=klass, class_repr=safe_repr(klass)))
        except Exception as ex: # pylint: disable=broad-except
            util.reraise(exceptions.AstroidImportError(
                'Unexpected error while retrieving name for {class_repr}:\n'
                '{error}', cls=klass, class_repr=safe_repr(klass), error=ex))
        # take care, on living object __module__ is regularly wrong :(
        modastroid = self.ast_from_module_name(modname)
        if klass is obj:
            for inferred in modastroid.igetattr(name, context):
                yield inferred
        else:
            for inferred in modastroid.igetattr(name, context):
                yield inferred.instantiate_class()
    def register_failed_import_hook(self, hook):
        """Registers a hook to resolve imports that cannot be found otherwise.
        `hook` must be a function that accepts a single argument `modname` which
        contains the name of the module or package that could not be imported.
        If `hook` can resolve the import, must return a node of type `astroid.Module`,
        otherwise, it must raise `AstroidBuildingError`.
        """
        self._failed_import_hooks.append(hook)
    def cache_module(self, module):
        """Cache a module if no module with the same name is known yet."""
        self.astroid_cache.setdefault(module.name, module)
    def clear_cache(self, astroid_builtin=None):
        """Clear the module cache and re-bootstrap the builtins."""
        # XXX clear transforms
        self.astroid_cache.clear()
        # force bootstrap again, else we may ends up with cache inconsistency
        # between the manager and CONST_PROXY, making
        # unittest_lookup.LookupTC.test_builtin_lookup fail depending on the
        # test order
        import astroid.raw_building
        astroid.raw_building._astroid_bootstrapping(
            astroid_builtin=astroid_builtin)
# Copyright (c) 2010-2011, 2013-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr> | |||||
# Copyright (c) 2014 Google, Inc. | |||||
# Copyright (c) 2015-2016 Cara Vinson <ceridwenv@gmail.com> | |||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html | |||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER | |||||
"""This module contains some mixins for the different nodes. | |||||
""" | |||||
import warnings | |||||
from astroid import decorators | |||||
from astroid import exceptions | |||||
class BlockRangeMixIn(object):
    """Mixin providing block line-number range helpers."""

    @decorators.cachedproperty
    def blockstart_tolineno(self):
        """Line where the block header starts."""
        return self.lineno

    def _elsed_block_range(self, lineno, orelse, last=None):
        """handle block line numbers range for try/finally, for, if and while
        statements
        """
        if lineno == self.fromlineno:
            return lineno, lineno
        if not orelse:
            return lineno, last or self.tolineno
        if lineno >= orelse[0].fromlineno:
            return lineno, orelse[-1].tolineno
        return lineno, orelse[0].fromlineno - 1
class FilterStmtsMixin(object):
    """Mixin for statement filtering and assignment type"""

    def _get_filtered_stmts(self, _, node, _stmts, mystmt):
        """method used in _filter_stmts to get statements and trigger break"""
        if self.statement() is mystmt:
            # original node's statement is the assignment, only keep
            # current node (gen exp, list comp)
            return [node], True
        return _stmts, False

    def assign_type(self):
        """Return the node itself as its own assignment type."""
        return self

    def ass_type(self):
        """Deprecated alias of :meth:`assign_type`."""
        cls_name = type(self).__name__
        warnings.warn('%s.ass_type() is deprecated and slated for removal '
                      'in astroid 2.0, use %s.assign_type() instead.'
                      % (cls_name, cls_name),
                      PendingDeprecationWarning, stacklevel=2)
        return self.assign_type()
class AssignTypeMixin(object):
    """Mixin whose nodes act as their own assignment type."""

    def assign_type(self):
        """Return the node itself as its own assignment type."""
        return self

    def ass_type(self):
        """Deprecated alias of :meth:`assign_type`."""
        cls_name = type(self).__name__
        warnings.warn('%s.ass_type() is deprecated and slated for removal '
                      'in astroid 2.0, use %s.assign_type() instead.'
                      % (cls_name, cls_name),
                      PendingDeprecationWarning, stacklevel=2)
        return self.assign_type()

    def _get_filtered_stmts(self, lookup_node, node, _stmts, mystmt):
        """method used in filter_stmts"""
        if self is mystmt:
            return _stmts, True
        if self.statement() is mystmt:
            # original node's statement is the assignment, only keep
            # current node (gen exp, list comp)
            return [node], True
        return _stmts, False
class ParentAssignTypeMixin(AssignTypeMixin):
    """Mixin delegating assignment type to the parent node."""

    def assign_type(self):
        """Return the assignment type of the parent node."""
        return self.parent.assign_type()

    def ass_type(self):
        """Deprecated alias of :meth:`assign_type`."""
        cls_name = type(self).__name__
        warnings.warn('%s.ass_type() is deprecated and slated for removal '
                      'in astroid 2.0, use %s.assign_type() instead.'
                      % (cls_name, cls_name),
                      PendingDeprecationWarning, stacklevel=2)
        return self.assign_type()
class ImportFromMixin(FilterStmtsMixin):
    """MixIn for From and Import Nodes"""

    def _infer_name(self, frame, name):
        """An imported name infers to itself."""
        return name

    def do_import_module(self, modname=None):
        """return the ast for a module whose name is <modname> imported by <self>
        """
        # handle special case where we are on a package node importing a module
        # using the same name as the package, which may end in an infinite loop
        # on relative imports
        # XXX: no more needed ?
        current_module = self.root()
        level = getattr(self, 'level', None) # Import as no level
        if modname is None:
            modname = self.modname
        # XXX we should investigate deeper if we really want to check
        # importing itself: modname and mymodule.name be relative or absolute
        if current_module.relative_to_absolute_name(modname, level) == current_module.name:
            # FIXME: we used to raise InferenceError here, but why ?
            return current_module
        return current_module.import_module(modname, level=level,
                                            relative_only=level and level >= 1)

    def real_name(self, asname):
        """get name from 'as' name"""
        for name, alias in self.names:
            if name == '*':
                return asname
            if not alias:
                # a dotted import only binds the top-level package name
                name = name.split('.', 1)[0]
                alias = name
            if asname == alias:
                return name
        raise exceptions.AttributeInferenceError(
            'Could not find original name for {attribute} in {target!r}',
            target=self, attribute=asname)
# -*- coding: utf-8 -*- | |||||
# Copyright (c) 2014-2016 Claudiu Popa <pcmanticore@gmail.com> | |||||
# Copyright (c) 2014 Google, Inc. | |||||
# Copyright (c) 2015 Florian Bruhin <me@the-compiler.org> | |||||
# Copyright (c) 2015 Radosław Ganczarek <radoslaw@ganczarek.in> | |||||
# Copyright (c) 2016 Jakub Wilk <jwilk@jwilk.net> | |||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html | |||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER | |||||
"""Python modules manipulation utility functions. | |||||
:type PY_SOURCE_EXTS: tuple(str) | |||||
:var PY_SOURCE_EXTS: list of possible python source file extension | |||||
:type STD_LIB_DIRS: set of str | |||||
:var STD_LIB_DIRS: directories where standard modules are located | |||||
:type BUILTIN_MODULES: dict | |||||
:var BUILTIN_MODULES: dictionary with builtin module names has key | |||||
""" | |||||
import imp | |||||
import os | |||||
import platform | |||||
import sys | |||||
from distutils.sysconfig import get_python_lib # pylint: disable=import-error | |||||
# pylint: disable=import-error, no-name-in-module | |||||
from distutils.errors import DistutilsPlatformError | |||||
# distutils is replaced by virtualenv with a module that does | |||||
# weird path manipulations in order to get to the | |||||
# real distutils module. | |||||
import six | |||||
from .interpreter._import import spec | |||||
from .interpreter._import import util | |||||
# Recognized source/compiled extensions depend on the platform.
if sys.platform.startswith('win'):
    PY_SOURCE_EXTS = ('py', 'pyw')
    PY_COMPILED_EXTS = ('dll', 'pyd')
else:
    PY_SOURCE_EXTS = ('py',)
    PY_COMPILED_EXTS = ('so',)
try:
    # The explicit sys.prefix is to work around a patch in virtualenv that
    # replaces the 'real' sys.prefix (i.e. the location of the binary)
    # with the prefix from which the virtualenv was created. This throws
    # off the detection logic for standard library modules, thus the
    # workaround.
    STD_LIB_DIRS = set([
        get_python_lib(standard_lib=True, prefix=sys.prefix),
        # Take care of installations where exec_prefix != prefix.
        get_python_lib(standard_lib=True, prefix=sys.exec_prefix),
        get_python_lib(standard_lib=True)])
    # get_python_lib(standard_lib=1) is not available on pypy, set STD_LIB_DIR to
    # non-valid path, see https://bugs.pypy.org/issue1164
except DistutilsPlatformError:
    STD_LIB_DIRS = set()
if os.name == 'nt':
    # Windows keeps extension modules under a 'dlls' directory.
    STD_LIB_DIRS.add(os.path.join(sys.prefix, 'dlls'))
    try:
        # real_prefix is defined when running inside virtual environments,
        # created with the **virtualenv** library.
        STD_LIB_DIRS.add(os.path.join(sys.real_prefix, 'dlls'))
    except AttributeError:
        # sys.base_exec_prefix is always defined, but in a virtual environment
        # created with the stdlib **venv** module, it points to the original
        # installation, if the virtual env is activated.
        try:
            STD_LIB_DIRS.add(os.path.join(sys.base_exec_prefix, 'dlls'))
        except AttributeError:
            pass
if platform.python_implementation() == 'PyPy':
    # PyPy stores its stdlib in 'lib_pypy'.
    _root = os.path.join(sys.prefix, 'lib_pypy')
    STD_LIB_DIRS.add(_root)
    try:
        # real_prefix is defined when running inside virtualenv.
        STD_LIB_DIRS.add(os.path.join(sys.real_prefix, 'lib_pypy'))
    except AttributeError:
        pass
    del _root
if os.name == 'posix':
    # Need the real prefix is we're under a virtualenv, otherwise
    # the usual one will do.
    try:
        prefix = sys.real_prefix
    except AttributeError:
        prefix = sys.prefix
    def _posix_path(path):
        # Build e.g. <prefix>/lib/pythonX.Y for the running interpreter.
        base_python = 'python%d.%d' % sys.version_info[:2]
        return os.path.join(prefix, path, base_python)
    STD_LIB_DIRS.add(_posix_path('lib'))
    if sys.maxsize > 2**32:
        # This tries to fix a problem with /usr/lib64 builds,
        # where systems are running both 32-bit and 64-bit code
        # on the same machine, which reflects into the places where
        # standard library could be found. More details can be found
        # here http://bugs.python.org/issue1294959.
        # An easy reproducing case would be
        # https://github.com/PyCQA/pylint/issues/712#issuecomment-163178753
        STD_LIB_DIRS.add(_posix_path('lib64'))
# Site-packages directory and interpreter/platform flags.
EXT_LIB_DIR = get_python_lib()
IS_JYTHON = platform.python_implementation() == 'Jython'
BUILTIN_MODULES = dict.fromkeys(sys.builtin_module_names, True)
class NoSourceFile(Exception):
    """Raised when no Python source file can be found for a
    precompiled file.
    """
def _normalize_path(path): | |||||
return os.path.normcase(os.path.abspath(path)) | |||||
def _canonicalize_path(path): | |||||
return os.path.realpath(os.path.expanduser(path)) | |||||
def _path_from_filename(filename, is_jython=IS_JYTHON):
    """Map a compiled-file name back to its source-file name.

    On Jython, ``Mod$py.class`` maps to ``Mod.py``; on CPython 2 a
    ``.pyc`` maps to its ``.py``; otherwise the name is unchanged.
    """
    if is_jython:
        head, pyclass_suffix, _ = filename.partition("$py.class")
        return head + ".py" if pyclass_suffix else filename
    if sys.version_info > (3, 0):
        return filename
    if filename.endswith(".pyc"):
        return filename[:-1]
    return filename
def _handle_blacklist(blacklist, dirnames, filenames): | |||||
"""remove files/directories in the black list | |||||
dirnames/filenames are usually from os.walk | |||||
""" | |||||
for norecurs in blacklist: | |||||
if norecurs in dirnames: | |||||
dirnames.remove(norecurs) | |||||
elif norecurs in filenames: | |||||
filenames.remove(norecurs) | |||||
# Cache of already-normalized paths, keyed by the raw path string
# (see _cache_normalize_path).
_NORM_PATH_CACHE = {}
def _cache_normalize_path(path):
    """abspath with caching"""
    # _module_file calls abspath on every path in sys.path every time it's
    # called; on a larger codebase this easily adds up to half a second just
    # assembling path components. This cache alleviates that.
    if not path:
        # don't cache result for '' (it depends on the current directory)
        return _normalize_path(path)
    try:
        return _NORM_PATH_CACHE[path]
    except KeyError:
        result = _NORM_PATH_CACHE[path] = _normalize_path(path)
        return result
def load_module_from_name(dotted_name, path=None, use_sys=True):
    """Load a Python module from its name.
    :type dotted_name: str
    :param dotted_name: python name of a module or package
    :type path: list or None
    :param path:
      optional list of path where the module or package should be
      searched (use sys.path if nothing or None is given)
    :type use_sys: bool
    :param use_sys:
      boolean indicating whether the sys.modules dictionary should be
      used or not
    :raise ImportError: if the module or package is not found
    :rtype: module
    :return: the loaded module
    """
    parts = dotted_name.split('.')
    return load_module_from_modpath(parts, path, use_sys)
def load_module_from_modpath(parts, path=None, use_sys=True):
    """Load a python module from its split name.

    :type parts: list(str) or tuple(str)
    :param parts:
        python name of a module or package split on '.'
    :type path: list or None
    :param path:
        optional list of path where the module or package should be
        searched (use sys.path if nothing or None is given)
    :type use_sys: bool
    :param use_sys:
        boolean indicating whether the sys.modules dictionary should be used or not
    :raise ImportError: if the module or package is not found
    :rtype: module
    :return: the loaded module
    """
    # NOTE: default was ``use_sys=1``; ``True`` matches the documented bool
    # type and the sibling load_module_from_name default (same truthiness).
    if use_sys:
        # Fast path: the module may already be fully imported.
        try:
            return sys.modules['.'.join(parts)]
        except KeyError:
            pass
    modpath = []
    prevmodule = None
    for part in parts:
        modpath.append(part)
        curname = '.'.join(modpath)
        module = None
        if len(modpath) != len(parts):
            # even with use_sys=False, should try to get outer packages from sys.modules
            module = sys.modules.get(curname)
        elif use_sys:
            # because it may have been indirectly loaded through a parent
            module = sys.modules.get(curname)
        if module is None:
            mp_file, mp_filename, mp_desc = imp.find_module(part, path)
            module = imp.load_module(curname, mp_file, mp_filename, mp_desc)
            # mp_file still needs to be closed.
            if mp_file:
                mp_file.close()
        if prevmodule:
            # Bind the submodule on its parent package, as import would.
            setattr(prevmodule, part, module)
        _file = getattr(module, '__file__', '')
        prevmodule = module
        if not _file and util.is_namespace(curname):
            # Namespace packages have no __file__; keep descending.
            continue
        if not _file and len(modpath) != len(parts):
            raise ImportError('no module in %s' % '.'.join(parts[len(modpath):]))
        # Search the next dotted component inside this package's directory.
        path = [os.path.dirname(_file)]
    return module
def load_module_from_file(filepath, path=None, use_sys=True, extrapath=None):
    """Load a Python module from its path.
    :type filepath: str
    :param filepath: path to the python module or package
    :type path: list or None
    :param path:
      optional list of path where the module or package should be
      searched (use sys.path if nothing or None is given)
    :type use_sys: bool
    :param use_sys:
      boolean indicating whether the sys.modules dictionary should be
      used or not
    :raise ImportError: if the module or package is not found
    :rtype: module
    :return: the loaded module
    """
    modpath = modpath_from_file(filepath, extrapath)
    return load_module_from_modpath(modpath, path, use_sys)
def check_modpath_has_init(path, mod_path):
    """Check that an ``__init__`` file exists at every level of *mod_path*.

    A directory without ``__init__`` is still accepted when the partial
    dotted name denotes a namespace package known to the interpreter.
    """
    current = path
    seen_parts = []
    for part in mod_path:
        seen_parts.append(part)
        current = os.path.join(current, part)
        if _has_init(current):
            continue
        if not util.is_namespace('.'.join(seen_parts)):
            return False
    return True
def modpath_from_file_with_callback(filename, extrapath=None, is_package_cb=None):
    """Turn *filename* into a split module path, validating candidate roots.

    ``is_package_cb(root, parts)`` is called with a candidate root directory
    and the sub-package parts leading to the module; it must return True for
    the candidate to be accepted.  ``extrapath`` maps extra root directories
    to the dotted package name they contain.  Raises ImportError when no
    root in ``extrapath`` or ``sys.path`` matches *filename*.
    """
    filename = _path_from_filename(filename)
    filename = os.path.realpath(os.path.expanduser(filename))
    base = os.path.splitext(filename)[0]
    if extrapath is not None:
        for path_ in six.moves.map(_canonicalize_path, extrapath):
            path = os.path.abspath(path_)
            # normcase makes the prefix comparison tolerant of
            # case-preserving filesystems (e.g. Windows)
            if path and os.path.normcase(base[:len(path)]) == os.path.normcase(path):
                submodpath = [pkg for pkg in base[len(path):].split(os.sep)
                              if pkg]
                if is_package_cb(path, submodpath[:-1]):
                    return extrapath[path_].split('.') + submodpath
    for path in six.moves.map(_canonicalize_path, sys.path):
        path = _cache_normalize_path(path)
        if path and os.path.normcase(base).startswith(path):
            modpath = [pkg for pkg in base[len(path):].split(os.sep) if pkg]
            if is_package_cb(path, modpath[:-1]):
                return modpath
    raise ImportError('Unable to find module for %s in %s' % (
        filename, ', \n'.join(sys.path)))
def modpath_from_file(filename, extrapath=None):
    """Return the module name for *filename* as a list split on '.'.

    :type filename: str
    :param filename: file's path for which we want the module's name
    :type extrapath: dict
    :param extrapath:
      optional extra search path, with path as key and package name for the
      path as value. This is usually useful to handle package split in
      multiple directories using the __path__ trick.
    :raise ImportError:
      if the corresponding module's name has not been found
    :rtype: list(str)
    :return: the corresponding split module's name
    """
    # Validation of intermediate directories is delegated to the
    # standard "has __init__ all along the way" check.
    callback = check_modpath_has_init
    return modpath_from_file_with_callback(filename, extrapath, callback)
def file_from_modpath(modpath, path=None, context_file=None):
    """Shortcut over `file_info_from_modpath` returning only the location."""
    found = file_info_from_modpath(modpath, path, context_file)
    return found.location
def file_info_from_modpath(modpath, path=None, context_file=None):
    """Return the spec for a split module/package name, preferring source files.

    :type modpath: list or tuple
    :param modpath:
      split module's name (i.e name of a module or package split on '.')
      (explicit relative imports that start with dots have empty strings
      in this list!)
    :type path: list or None
    :param path:
      optional list of path where the module or package should be
      searched (use sys.path if nothing or None is given)
    :type context_file: str or None
    :param context_file:
      context file to consider, necessary if the identifier has been
      introduced using a relative import unresolvable in the actual
      context (i.e. modutils)
    :raise ImportError: if there is no such module in the directory
    :rtype: (str or None, import type)
    :return:
      the path to the module's file or None if it's an integrated
      builtin module such as 'sys'
    """
    context = os.path.dirname(context_file) if context_file is not None else context_file
    if modpath[0] == 'xml':
        # handle _xmlplus: try the substitute package first, fall back to xml
        try:
            return _spec_from_modpath(['_xmlplus'] + modpath[1:], path, context)
        except ImportError:
            return _spec_from_modpath(modpath, path, context)
    if modpath == ['os', 'path']:
        # FIXME: currently ignoring search_path...
        return spec.ModuleSpec(name='os.path', location=os.path.__file__,
                               module_type=imp.PY_SOURCE)
    return _spec_from_modpath(modpath, path, context)
def get_module_part(dotted_name, context_file=None):
    """given a dotted name return the module part of the name :

    >>> get_module_part('astroid.as_string.dump')
    'astroid.as_string'

    :type dotted_name: str
    :param dotted_name: full name of the identifier we are interested in

    :type context_file: str or None
    :param context_file:
      context file to consider, necessary if the identifier has been
      introduced using a relative import unresolvable in the actual
      context (i.e. modutils)

    :raise ImportError: if there is no such module in the directory

    :rtype: str or None
    :return:
      the module part of the name or None if we have not been able at
      all to import the given name

    XXX: deprecated, since it doesn't handle package precedence over module
    (see #10066)
    """
    # os.path trick
    if dotted_name.startswith('os.path'):
        return 'os.path'
    parts = dotted_name.split('.')
    if context_file is not None:
        # first check for builtin module which won't be considered latter
        # in that case (path != None)
        if parts[0] in BUILTIN_MODULES:
            if len(parts) > 2:
                raise ImportError(dotted_name)
            return parts[0]
    # don't use += or insert, we want a new list to be created !
    path = None
    starti = 0
    if parts[0] == '':
        assert context_file is not None, \
            'explicit relative import, but no context_file?'
        path = [] # prevent resolving the import non-relatively
        starti = 1
    while parts[starti] == '': # for all further dots: change context
        starti += 1
        context_file = os.path.dirname(context_file)
    # grow the dotted prefix until importing fails: the last importable
    # prefix is the module part, the remainder are attribute accesses
    for i in range(starti, len(parts)):
        try:
            file_from_modpath(parts[starti:i+1], path=path,
                              context_file=context_file)
        except ImportError:
            # tolerate failure only for the last one or two parts, which
            # may be object attributes rather than modules
            if i < max(1, len(parts) - 2):
                raise
            return '.'.join(parts[:i])
    return dotted_name
def get_module_files(src_directory, blacklist, list_all=False):
    """Return all python module files under a package directory, recursively.

    :type src_directory: str
    :param src_directory:
      path of the directory corresponding to the package
    :type blacklist: list or tuple
    :param blacklist:
      iterable of files or directories to ignore.
    :type list_all: bool
    :param list_all:
      get files from all paths, including ones without __init__.py
    :rtype: list
    :return:
      the list of all available python module's files in the package and
      its subpackages
    """
    collected = []
    for directory, dirnames, filenames in os.walk(src_directory):
        if directory in blacklist:
            continue
        _handle_blacklist(blacklist, dirnames, filenames)
        # unless list_all is set, prune directories that are not packages
        # (no __init__.py) and do not descend into them
        if not list_all and '__init__.py' not in filenames:
            dirnames[:] = ()
            continue
        collected.extend(os.path.join(directory, name)
                         for name in filenames if _is_python_file(name))
    return collected
def get_source_file(filename, include_no_ext=False):
    """Return the source file matching a python module's file name.

    The filename is returned identically if it is already an absolute path
    to a python source file.

    :type filename: str
    :param filename: python module's file name
    :raise NoSourceFile: if no source file exists on the file system
    :rtype: str
    :return: the absolute path of the source file if it exists
    """
    filename = os.path.abspath(_path_from_filename(filename))
    base, orig_ext = os.path.splitext(filename)
    # probe each known python source extension against the base name
    for ext in PY_SOURCE_EXTS:
        candidate = '%s.%s' % (base, ext)
        if os.path.exists(candidate):
            return candidate
    if include_no_ext and not orig_ext and os.path.exists(base):
        return base
    raise NoSourceFile(filename)
def is_python_source(filename):
    """Return True if *filename* carries a python source extension.

    :rtype: bool
    """
    extension = os.path.splitext(filename)[1]
    # drop the leading dot before looking the extension up
    return extension[1:] in PY_SOURCE_EXTS
def is_standard_module(modname, std_path=None):
    """Try to guess if a module is a standard python module.

    :type modname: str
    :param modname: name of the module we are interested in
    :type std_path: list(str) or tuple(str)
    :param std_path: list of path considered as standard
    :rtype: bool
    :return:
      true if the module:
      - is located on the path listed in one of the directory in `std_path`
      - is a built-in module
    """
    modname = modname.split('.')[0]
    try:
        filename = file_from_modpath([modname])
    except ImportError:
        # the import failed: assume it is not standard
        return False
    if filename is None:
        # modules not backed by a file (sys, __builtin__, ...) are standard,
        # except for namespace packages which are assumed absent from stdlib
        return not util.is_namespace(modname)
    filename = _normalize_path(filename)
    # anything installed under the external libraries dir is not standard
    if filename.startswith(_cache_normalize_path(EXT_LIB_DIR)):
        return False
    if std_path is None:
        std_path = STD_LIB_DIRS
    return any(filename.startswith(_cache_normalize_path(entry))
               for entry in std_path)
def is_relative(modname, from_file):
    """Return true if the given module name is relative to the given file name.

    :type modname: str
    :param modname: name of the module we are interested in
    :type from_file: str
    :param from_file:
      path of the module from which modname has been imported
    :rtype: bool
    :return:
      true if the module has been imported relatively to `from_file`
    """
    directory = from_file
    if not os.path.isdir(directory):
        directory = os.path.dirname(directory)
    # a directory already on sys.path yields absolute imports, not relative
    if directory in sys.path:
        return False
    try:
        stream, _, _ = imp.find_module(modname.split('.')[0], [directory])
    except ImportError:
        return False
    # Close the stream to avoid ResourceWarnings.
    if stream:
        stream.close()
    return True
# internal only functions ##################################################### | |||||
def _spec_from_modpath(modpath, path=None, context=None):
    """given a mod path (i.e. split module / package name), return the
    corresponding spec

    this function is used internally, see `file_from_modpath`'s
    documentation for more information
    """
    assert modpath
    location = None
    if context is not None:
        # prefer a context-relative lookup, falling back to the search path
        try:
            found_spec = spec.find_spec(modpath, [context])
            location = found_spec.location
        except ImportError:
            found_spec = spec.find_spec(modpath, path)
            location = found_spec.location
    else:
        found_spec = spec.find_spec(modpath, path)
    if found_spec.type == spec.ModuleType.PY_COMPILED:
        # give priority to the source file over the compiled one
        try:
            location = get_source_file(found_spec.location)
            return found_spec._replace(location=location, type=spec.ModuleType.PY_SOURCE)
        except NoSourceFile:
            # NOTE(review): when no context was given, `location` is still
            # None here, so the compiled file's own location is dropped -
            # looks suspicious; confirm this is intended before changing it.
            return found_spec._replace(location=location)
    elif found_spec.type == spec.ModuleType.C_BUILTIN:
        # integrated builtin module
        return found_spec._replace(location=None)
    elif found_spec.type == spec.ModuleType.PKG_DIRECTORY:
        # a package's location is its __init__ file (or None)
        location = _has_init(found_spec.location)
        return found_spec._replace(location=location, type=spec.ModuleType.PY_SOURCE)
    return found_spec
def _is_python_file(filename): | |||||
"""return true if the given filename should be considered as a python file | |||||
.pyc and .pyo are ignored | |||||
""" | |||||
for ext in ('.py', '.so', '.pyd', '.pyw'): | |||||
if filename.endswith(ext): | |||||
return True | |||||
return False | |||||
def _has_init(directory):
    """Return the path of the directory's ``__init__`` file if one exists,
    else return None.
    """
    prefix = os.path.join(directory, '__init__')
    # compiled-only packages (pyc/pyo) are acceptable too
    for ext in PY_SOURCE_EXTS + ('pyc', 'pyo'):
        candidate = prefix + '.' + ext
        if os.path.exists(candidate):
            return candidate
    return None
def is_namespace(specobj):
    """Return True if the given spec describes a namespace package."""
    return specobj.type == spec.ModuleType.PY_NAMESPACE
def is_directory(specobj):
    """Return True if the given spec describes a package directory."""
    return specobj.type == spec.ModuleType.PKG_DIRECTORY
# Copyright (c) 2006-2011, 2013 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr> | |||||
# Copyright (c) 2014-2016 Claudiu Popa <pcmanticore@gmail.com> | |||||
# Copyright (c) 2014 Google, Inc. | |||||
# Copyright (c) 2015-2016 Cara Vinson <ceridwenv@gmail.com> | |||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html | |||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER | |||||
"""Every available node class. | |||||
.. seealso:: | |||||
:doc:`ast documentation <green_tree_snakes:nodes>` | |||||
All nodes inherit from :class:`~astroid.node_classes.NodeNG`. | |||||
""" | |||||
# pylint: disable=unused-import,redefined-builtin | |||||
from astroid.node_classes import ( | |||||
Arguments, AssignAttr, Assert, Assign, AnnAssign, | |||||
AssignName, AugAssign, Repr, BinOp, BoolOp, Break, Call, Compare, | |||||
Comprehension, Const, Continue, Decorators, DelAttr, DelName, Delete, | |||||
Dict, Expr, Ellipsis, EmptyNode, ExceptHandler, Exec, ExtSlice, For, | |||||
ImportFrom, Attribute, Global, If, IfExp, Import, Index, Keyword, | |||||
List, Name, Nonlocal, Pass, Print, Raise, Return, Set, Slice, Starred, Subscript, | |||||
TryExcept, TryFinally, Tuple, UnaryOp, While, With, Yield, YieldFrom, | |||||
const_factory, | |||||
AsyncFor, Await, AsyncWith, | |||||
FormattedValue, JoinedStr, | |||||
# Backwards-compatibility aliases | |||||
Backquote, Discard, AssName, AssAttr, Getattr, CallFunc, From, | |||||
# Node not present in the builtin ast module. | |||||
DictUnpack, | |||||
Unknown, | |||||
) | |||||
from astroid.scoped_nodes import ( | |||||
Module, GeneratorExp, Lambda, DictComp, | |||||
ListComp, SetComp, FunctionDef, ClassDef, | |||||
AsyncFunctionDef, | |||||
# Backwards-compatibility aliases | |||||
Class, Function, GenExpr, | |||||
) | |||||
# Tuple of every concrete node class exposed by astroid; consumers use it
# to iterate over all known node types.  Contents must stay in sync with
# the imports above.
ALL_NODE_CLASSES = (
    AsyncFunctionDef, AsyncFor, AsyncWith, Await,
    Arguments, AssignAttr, Assert, Assign, AnnAssign, AssignName, AugAssign,
    Repr, BinOp, BoolOp, Break,
    Call, ClassDef, Compare, Comprehension, Const, Continue,
    Decorators, DelAttr, DelName, Delete,
    Dict, DictComp, DictUnpack, Expr,
    Ellipsis, EmptyNode, ExceptHandler, Exec, ExtSlice,
    For, ImportFrom, FunctionDef,
    Attribute, GeneratorExp, Global,
    If, IfExp, Import, Index,
    Keyword,
    Lambda, List, ListComp,
    Name, Nonlocal,
    Module,
    Pass, Print,
    Raise, Return,
    Set, SetComp, Slice, Starred, Subscript,
    TryExcept, TryFinally, Tuple,
    UnaryOp,
    While, With,
    Yield, YieldFrom,
    FormattedValue, JoinedStr,
    )
# Copyright (c) 2015-2016 Cara Vinson <ceridwenv@gmail.com> | |||||
# Copyright (c) 2015-2016 Claudiu Popa <pcmanticore@gmail.com> | |||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html | |||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER | |||||
""" | |||||
Inference objects are a way to represent composite AST nodes, | |||||
which are used only as inference results, so they can't be found in the | |||||
original AST tree. For instance, inferring the following frozenset use, | |||||
leads to an inferred FrozenSet: | |||||
Call(func=Name('frozenset'), args=Tuple(...)) | |||||
""" | |||||
import six | |||||
from astroid import bases | |||||
from astroid import decorators | |||||
from astroid import exceptions | |||||
from astroid import MANAGER | |||||
from astroid import node_classes | |||||
from astroid import scoped_nodes | |||||
from astroid import util | |||||
# Name of the builtins module for the running interpreter.
BUILTINS = six.moves.builtins.__name__
# Imported lazily to avoid a circular import with the interpreter package.
objectmodel = util.lazy_import('interpreter.objectmodel')
class FrozenSet(node_classes._BaseContainer):
    """Composite node standing for an inferred ``frozenset`` value."""

    def _infer(self, context=None):
        # A FrozenSet infers to itself.
        yield self

    def pytype(self):
        return '%s.frozenset' % BUILTINS

    @decorators.cachedproperty
    def _proxied(self): # pylint: disable=method-hidden
        builtins_ast = MANAGER.astroid_cache[BUILTINS]
        return builtins_ast.getattr('frozenset')[0]
class Super(node_classes.NodeNG):
    """Proxy class over a super call.

    This class offers almost the same behaviour as Python's super,
    which is MRO lookups for retrieving attributes from the parents.

    The *mro_pointer* is the place in the MRO from where we should
    start looking, not counting it. *mro_type* is the object which
    provides the MRO, it can be both a type or an instance.
    *self_class* is the class where the super call is, while
    *scope* is the function where the super call is.
    """
    # pylint: disable=unnecessary-lambda
    special_attributes = util.lazy_descriptor(lambda: objectmodel.SuperModel())
    # pylint: disable=super-init-not-called
    def __init__(self, mro_pointer, mro_type, self_class, scope):
        # mro_type doubles as the public `type` attribute: either a
        # ClassDef (class-based super) or an instance proxy.
        self.type = mro_type
        self.mro_pointer = mro_pointer
        self._class_based = False
        self._self_class = self_class
        self._scope = scope
    def _infer(self, context=None):
        # a Super object infers to itself
        yield self
    def super_mro(self):
        """Get the MRO which will be used to lookup attributes in this super."""
        if not isinstance(self.mro_pointer, scoped_nodes.ClassDef):
            raise exceptions.SuperError(
                "The first argument to super must be a subtype of "
                "type, not {mro_pointer}.", super_=self)
        if isinstance(self.type, scoped_nodes.ClassDef):
            # `super(type, type)`, most likely in a class method.
            self._class_based = True
            mro_type = self.type
        else:
            mro_type = getattr(self.type, '_proxied', None)
            if not isinstance(mro_type, (bases.Instance, scoped_nodes.ClassDef)):
                raise exceptions.SuperError(
                    "The second argument to super must be an "
                    "instance or subtype of type, not {type}.",
                    super_=self)
        if not mro_type.newstyle:
            raise exceptions.SuperError("Unable to call super on old-style classes.", super_=self)
        mro = mro_type.mro()
        if self.mro_pointer not in mro:
            raise exceptions.SuperError(
                "The second argument to super must be an "
                "instance or subtype of type, not {type}.",
                super_=self)
        # lookup happens only in the classes *after* mro_pointer
        index = mro.index(self.mro_pointer)
        return mro[index + 1:]
    @decorators.cachedproperty
    def _proxied(self):
        builtins = MANAGER.astroid_cache[BUILTINS]
        return builtins.getattr('super')[0]
    def pytype(self):
        return '%s.super' % BUILTINS
    def display_type(self):
        return 'Super of'
    @property
    def name(self):
        """Get the name of the MRO pointer."""
        return self.mro_pointer.name
    def igetattr(self, name, context=None):
        """Retrieve the inferred values of the given attribute name."""
        if name in self.special_attributes:
            yield self.special_attributes.lookup(name)
            return
        try:
            mro = self.super_mro()
            # Don't let invalid MROs or invalid super calls
            # leak out as is from this function.
        except exceptions.SuperError as exc:
            util.reraise(exceptions.AttributeInferenceError(
                ('Lookup for {name} on {target!r} because super call {super!r} '
                 'is invalid.'),
                target=self, attribute=name, context=context, super_=exc.super_))
        except exceptions.MroError as exc:
            util.reraise(exceptions.AttributeInferenceError(
                ('Lookup for {name} on {target!r} failed because {cls!r} has an '
                 'invalid MRO.'),
                target=self, attribute=name, context=context, mros=exc.mros,
                cls=exc.cls))
        found = False
        for cls in mro:
            if name not in cls.locals:
                continue
            found = True
            for inferred in bases._infer_stmts([cls[name]], context, frame=self):
                if not isinstance(inferred, scoped_nodes.FunctionDef):
                    yield inferred
                    continue
                # We can obtain different descriptors from a super depending
                # on what we are accessing and where the super call is.
                if inferred.type == 'classmethod':
                    yield bases.BoundMethod(inferred, cls)
                elif self._scope.type == 'classmethod' and inferred.type == 'method':
                    yield inferred
                elif self._class_based or inferred.type == 'staticmethod':
                    yield inferred
                elif bases._is_property(inferred):
                    # TODO: support other descriptors as well.
                    for value in inferred.infer_call_result(self, context):
                        yield value
                else:
                    yield bases.BoundMethod(inferred, cls)
        if not found:
            raise exceptions.AttributeInferenceError(target=self,
                                                     attribute=name,
                                                     context=context)
    def getattr(self, name, context=None):
        """Eager variant of :meth:`igetattr`, returning a list."""
        return list(self.igetattr(name, context=context))
class ExceptionInstance(bases.Instance):
    """Class for instances of exceptions

    It has special treatment for some of the exceptions's attributes,
    which are transformed at runtime into certain concrete objects, such as
    the case of .args.
    """
    # pylint: disable=unnecessary-lambda
    # the special attribute handling lives in the ExceptionInstanceModel
    special_attributes = util.lazy_descriptor(lambda: objectmodel.ExceptionInstanceModel())
class DictInstance(bases.Instance):
    """Special kind of instances for dictionaries

    This instance knows the underlying object model of the dictionaries, which means
    that methods such as .values or .items can be properly inferred.
    """
    # pylint: disable=unnecessary-lambda
    # DictModel provides the attribute/method model for dict instances
    special_attributes = util.lazy_descriptor(lambda: objectmodel.DictModel())
# Custom objects tailored for dictionaries, which are used to | |||||
# disambiguate between the types of Python 2 dict's method returns | |||||
# and Python 3 (where they return set like objects). | |||||
class DictItems(bases.Proxy):
    """Proxy over the object returned by ``dict.items()``."""
    __str__ = node_classes.NodeNG.__str__
    __repr__ = node_classes.NodeNG.__repr__
class DictKeys(bases.Proxy):
    """Proxy over the object returned by ``dict.keys()``."""
    __str__ = node_classes.NodeNG.__str__
    __repr__ = node_classes.NodeNG.__repr__
class DictValues(bases.Proxy):
    """Proxy over the object returned by ``dict.values()``."""
    __str__ = node_classes.NodeNG.__str__
    __repr__ = node_classes.NodeNG.__repr__
# TODO: Hack to solve the circular import problem between node_classes and objects
# This is not needed in 2.0, which has a cleaner design overall
# NOTE: mutating __bases__ at import time makes Dict inherit DictInstance's
# special_attributes model.
node_classes.Dict.__bases__ = (node_classes.NodeNG, DictInstance)
# Copyright (c) 2009-2011, 2013-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr> | |||||
# Copyright (c) 2014-2016 Claudiu Popa <pcmanticore@gmail.com> | |||||
# Copyright (c) 2014 Google, Inc. | |||||
# Copyright (c) 2015-2016 Cara Vinson <ceridwenv@gmail.com> | |||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html | |||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER | |||||
"""this module contains a set of functions to handle python protocols for nodes | |||||
where it makes sense. | |||||
""" | |||||
import collections | |||||
import operator as operator_mod | |||||
import sys | |||||
import six | |||||
from astroid import arguments | |||||
from astroid import bases | |||||
from astroid import context as contextmod | |||||
from astroid import exceptions | |||||
from astroid import decorators | |||||
from astroid import node_classes | |||||
from astroid import helpers | |||||
from astroid import nodes | |||||
from astroid import util | |||||
raw_building = util.lazy_import('raw_building') | |||||
objects = util.lazy_import('objects') | |||||
def _reflected_name(name): | |||||
return "__r" + name[2:] | |||||
def _augmented_name(name): | |||||
return "__i" + name[2:] | |||||
# Qualified name of the contextmanager decorator, matched during inference.
_CONTEXTLIB_MGR = 'contextlib.contextmanager'
# Maps a binary operator symbol to the dunder method implementing it.
BIN_OP_METHOD = {'+': '__add__',
                 '-': '__sub__',
                 '/': '__div__' if six.PY2 else '__truediv__',
                 '//': '__floordiv__',
                 '*': '__mul__',
                 '**': '__pow__',
                 '%': '__mod__',
                 '&': '__and__',
                 '|': '__or__',
                 '^': '__xor__',
                 '<<': '__lshift__',
                 '>>': '__rshift__',
                 '@': '__matmul__'
                }
# Same operators, mapped to their reflected (__r*__) methods.
REFLECTED_BIN_OP_METHOD = {
    key: _reflected_name(value)
    for (key, value) in BIN_OP_METHOD.items()
}
# Augmented-assignment operators ('+=', ...) mapped to __i*__ methods.
AUGMENTED_OP_METHOD = {
    key + "=": _augmented_name(value)
    for (key, value) in BIN_OP_METHOD.items()
}
# Unary operator symbol -> dunder method name.
UNARY_OP_METHOD = {'+': '__pos__',
                   '-': '__neg__',
                   '~': '__invert__',
                   'not': None, # XXX not '__nonzero__'
                  }
# Unary operator symbol -> concrete callable used to fold constants.
_UNARY_OPERATORS = {
    '+': operator_mod.pos,
    '-': operator_mod.neg,
    '~': operator_mod.invert,
    'not': operator_mod.not_,
}
def _infer_unary_op(obj, op):
    """Apply unary operator *op* to the concrete value *obj* and wrap the
    result in a Const-like node.
    """
    operator_func = _UNARY_OPERATORS[op]
    return nodes.const_factory(operator_func(obj))
# Attach concrete unary-op evaluation to the container/const node classes:
# each converts its elts/items/value to the matching python object first.
nodes.Tuple.infer_unary_op = lambda self, op: _infer_unary_op(tuple(self.elts), op)
nodes.List.infer_unary_op = lambda self, op: _infer_unary_op(self.elts, op)
nodes.Set.infer_unary_op = lambda self, op: _infer_unary_op(set(self.elts), op)
nodes.Const.infer_unary_op = lambda self, op: _infer_unary_op(self.value, op)
nodes.Dict.infer_unary_op = lambda self, op: _infer_unary_op(dict(self.items), op)
# Binary operations
# Operator symbol -> callable evaluating it on two concrete values.
BIN_OP_IMPL = {'+': lambda a, b: a + b,
               '-': lambda a, b: a - b,
               '/': lambda a, b: a / b,
               '//': lambda a, b: a // b,
               '*': lambda a, b: a * b,
               '**': lambda a, b: a ** b,
               '%': lambda a, b: a % b,
               '&': lambda a, b: a & b,
               '|': lambda a, b: a | b,
               '^': lambda a, b: a ^ b,
               '<<': lambda a, b: a << b,
               '>>': lambda a, b: a >> b,
              }
if sys.version_info >= (3, 5):
    # MatMult is available since Python 3.5+.
    BIN_OP_IMPL['@'] = operator_mod.matmul
# Augmented forms ('+=', ...) reuse the plain-operator implementations.
for _KEY, _IMPL in list(BIN_OP_IMPL.items()):
    BIN_OP_IMPL[_KEY + '='] = _IMPL
@decorators.yes_if_nothing_inferred
def const_infer_binary_op(self, opnode, operator, other, context, _):
    """Infer a binary operation whose left operand is a Const node.

    Yields a new Const with the folded value when both operands are
    constants, Uninferable for unfoldable cases, and a NotImplemented
    constant otherwise.
    """
    not_implemented = nodes.Const(NotImplemented)
    if isinstance(other, nodes.Const):
        try:
            impl = BIN_OP_IMPL[operator]
            try:
                yield nodes.const_factory(impl(self.value, other.value))
            except TypeError:
                # ArithmeticError is not enough: float >> float is a TypeError
                yield not_implemented
            except Exception: # pylint: disable=broad-except
                yield util.Uninferable
        except TypeError:
            yield not_implemented
    elif isinstance(self.value, six.string_types) and operator == '%':
        # TODO(cpopa): implement string interpolation later on.
        yield util.Uninferable
    else:
        yield not_implemented
nodes.Const.infer_binary_op = const_infer_binary_op
def _multiply_seq_by_int(self, opnode, other, context):
    """Build a new sequence node equal to *self* repeated ``other.value``
    times, with every element resolved to its inferred value.
    """
    node = self.__class__(parent=opnode)
    resolved = []
    for elt in self.elts:
        if elt is util.Uninferable:
            continue
        inferred = helpers.safe_infer(elt, context)
        resolved.append(util.Uninferable if inferred is None else inferred)
    node.elts = resolved * other.value
    return node
def _filter_uninferable_nodes(elts, context):
    """Yield inferred values for *elts*, substituting an Unknown node for
    anything that is Uninferable.
    """
    for elt in elts:
        if elt is util.Uninferable:
            yield nodes.Unknown()
            continue
        for value in elt.infer(context):
            if value is util.Uninferable:
                yield nodes.Unknown()
            else:
                yield value
@decorators.yes_if_nothing_inferred
def tl_infer_binary_op(self, opnode, operator, other, context, method):
    """Infer binary operations on tuple/list nodes.

    Handles concatenation (`+` with the same node type) and repetition
    (`*` with an int Const or an instance exposing __index__); anything
    else yields a NotImplemented constant.
    """
    not_implemented = nodes.Const(NotImplemented)
    if isinstance(other, self.__class__) and operator == '+':
        node = self.__class__(parent=opnode)
        elts = list(_filter_uninferable_nodes(self.elts, context))
        elts += list(_filter_uninferable_nodes(other.elts, context))
        node.elts = elts
        yield node
    elif isinstance(other, nodes.Const) and operator == '*':
        if not isinstance(other.value, int):
            yield not_implemented
            return
        yield _multiply_seq_by_int(self, opnode, other, context)
    elif isinstance(other, bases.Instance) and operator == '*':
        # Verify if the instance supports __index__.
        as_index = helpers.class_instance_as_index(other)
        if not as_index:
            yield util.Uninferable
        else:
            yield _multiply_seq_by_int(self, opnode, as_index, context)
    else:
        yield not_implemented
nodes.Tuple.infer_binary_op = tl_infer_binary_op
nodes.List.infer_binary_op = tl_infer_binary_op
@decorators.yes_if_nothing_inferred
def instance_class_infer_binary_op(self, opnode, operator, other, context, method):
    """Infer a binary op on an instance/class by calling the bound dunder
    *method* and returning its inferred call result.
    """
    return method.infer_call_result(self, context)
bases.Instance.infer_binary_op = instance_class_infer_binary_op
nodes.ClassDef.infer_binary_op = instance_class_infer_binary_op
# assignment ################################################################## | |||||
"""the assigned_stmts method is responsible to return the assigned statement | |||||
(e.g. not inferred) according to the assignment type. | |||||
The `asspath` argument is used to record the lhs path of the original node. | |||||
For instance if we want assigned statements for 'c' in 'a, (b,c)', asspath | |||||
will be [1, 1] once arrived to the Assign node. | |||||
The `context` argument is the current inference context which should be given | |||||
to any intermediary inference necessary. | |||||
""" | |||||
def _resolve_looppart(parts, asspath, context):
    """recursive function to resolve multiple assignments on loops

    *parts* are the inferred values of the iterable; *asspath* is the
    remaining index path (see the module-level comment on asspath) and is
    consumed one index per recursion level.
    """
    asspath = asspath[:]
    index = asspath.pop(0)
    for part in parts:
        if part is util.Uninferable:
            continue
        # XXX handle __iter__ and log potentially detected errors
        if not hasattr(part, 'itered'):
            continue
        try:
            itered = part.itered()
        except TypeError:
            continue # XXX log error
        for stmt in itered:
            index_node = nodes.Const(index)
            try:
                assigned = stmt.getitem(index_node, context)
            except (AttributeError,
                    exceptions.AstroidTypeError,
                    exceptions.AstroidIndexError):
                continue
            if not asspath:
                # we achieved to resolved the assignment path,
                # don't infer the last part
                yield assigned
            elif assigned is util.Uninferable:
                break
            else:
                # we are not yet on the last part of the path
                # search on each possibly inferred value
                try:
                    for inferred in _resolve_looppart(assigned.infer(context),
                                                     asspath, context):
                        yield inferred
                except exceptions.InferenceError:
                    break
@decorators.raise_if_nothing_inferred
def for_assigned_stmts(self, node=None, context=None, asspath=None):
    """Infer the values bound by a ``for`` loop (or comprehension) target.

    NOTE(review): ``raise StopIteration`` inside a generator becomes
    RuntimeError under PEP 479 (Python 3.7+); this pattern relies on
    raise_if_nothing_inferred consuming it — confirm supported versions.
    """
    if isinstance(self, nodes.AsyncFor) or getattr(self, 'is_async', False):
        # Skip inferring of async code for now
        raise StopIteration(dict(node=self, unknown=node,
                                 assign_path=asspath, context=context))
    if asspath is None:
        # Simple target: every element of the iterated sequence is a candidate.
        for lst in self.iter.infer(context):
            if isinstance(lst, (nodes.Tuple, nodes.List)):
                for item in lst.elts:
                    yield item
    else:
        # Unpacking target: resolve the lhs index path inside the iterated values.
        for inferred in _resolve_looppart(self.iter.infer(context),
                                          asspath, context):
            yield inferred
    # Explicit StopIteration to return error information, see comment
    # in raise_if_nothing_inferred.
    raise StopIteration(dict(node=self, unknown=node,
                             assign_path=asspath, context=context))
nodes.For.assigned_stmts = for_assigned_stmts
nodes.Comprehension.assigned_stmts = for_assigned_stmts
def sequence_assigned_stmts(self, node=None, context=None, asspath=None):
    """Record the position of *node* inside this sequence literal and
    delegate resolution of the assigned statements to the parent node.

    Raises InferenceError when *node* is not an element of this sequence.
    """
    path = [] if asspath is None else asspath
    try:
        position = self.elts.index(node)
    except ValueError:
        util.reraise(exceptions.InferenceError(
            'Tried to retrieve a node {node!r} which does not exist',
            node=self, assign_path=path, context=context))
    path.insert(0, position)
    return self.parent.assigned_stmts(node=self, context=context, asspath=path)
# Sequence literals delegate assignment resolution to their parent statement.
nodes.Tuple.assigned_stmts = sequence_assigned_stmts
nodes.List.assigned_stmts = sequence_assigned_stmts
def assend_assigned_stmts(self, node=None, context=None, asspath=None):
    """Delegate to the parent: an assignment name/attribute target is
    resolved by the statement that encloses it.
    """
    enclosing = self.parent
    return enclosing.assigned_stmts(node=self, context=context)
# Assignment targets (names and attributes) delegate to their parent statement.
nodes.AssignName.assigned_stmts = assend_assigned_stmts
nodes.AssignAttr.assigned_stmts = assend_assigned_stmts
def _arguments_infer_argname(self, name, context):
    """Infer the possible values of argument *name* of this Arguments node.

    Yields inferred nodes; yields Uninferable when nothing can be determined.
    """
    # arguments information may be missing, in which case we can't do anything
    # more
    if not (self.args or self.vararg or self.kwarg):
        yield util.Uninferable
        return
    # first argument of instance/class method
    if self.args and getattr(self.args[0], 'name', None) == name:
        functype = self.parent.type
        cls = self.parent.parent.scope()
        is_metaclass = isinstance(cls, nodes.ClassDef) and cls.type == 'metaclass'
        # If this is a metaclass, then the first argument will always
        # be the class, not an instance.
        if is_metaclass or functype == 'classmethod':
            yield cls
            return
        if functype == 'method':
            yield bases.Instance(self.parent.parent.frame())
            return
        if context and context.callcontext:
            # A concrete call site is available: infer from the actual args.
            call_site = arguments.CallSite(context.callcontext)
            for value in call_site.infer_argument(self.parent, name, context):
                yield value
            return
    # TODO: just provide the type here, no need to have an empty Dict.
    if name == self.vararg:
        # *args with no call context: model it as an empty tuple.
        vararg = nodes.const_factory(())
        vararg.parent = self
        yield vararg
        return
    if name == self.kwarg:
        # **kwargs with no call context: model it as an empty dict.
        kwarg = nodes.const_factory({})
        kwarg.parent = self
        yield kwarg
        return
    # if there is a default value, yield it. And then yield Uninferable to
    # reflect that we can't guess the given argument value
    try:
        context = contextmod.copy_context(context)
        for inferred in self.default_value(name).infer(context):
            yield inferred
        yield util.Uninferable
    except exceptions.NoDefault:
        yield util.Uninferable
def arguments_assigned_stmts(self, node=None, context=None, asspath=None):
    """Infer the values assigned to argument *node* of this Arguments node.

    With a call context, infer from the actual call site; otherwise fall
    back to per-argument heuristics (self/cls, defaults, *args/**kwargs).
    """
    if not context.callcontext:
        return _arguments_infer_argname(self, node.name, context)
    # reset call context/name so nested inference starts from a clean slate
    call = context.callcontext
    fresh_context = contextmod.copy_context(context)
    fresh_context.callcontext = None
    call_site = arguments.CallSite(call)
    return call_site.infer_argument(self.parent, node.name, fresh_context)
nodes.Arguments.assigned_stmts = arguments_assigned_stmts | |||||
@decorators.raise_if_nothing_inferred
def assign_assigned_stmts(self, node=None, context=None, asspath=None):
    """Yield the value(s) assigned by an Assign/AugAssign statement."""
    if not asspath:
        # Simple target: the assigned statement is the rhs value itself.
        yield self.value
        return
    # Unpacking target: resolve the lhs index path inside the rhs value.
    for inferred in _resolve_asspart(self.value.infer(context), asspath, context):
        yield inferred
    # Explicit StopIteration to return error information, see comment
    # in raise_if_nothing_inferred.
    raise StopIteration(dict(node=self, unknown=node,
                             assign_path=asspath, context=context))
def assign_annassigned_stmts(self, node=None, context=None, asspath=None):
    """Like assign_assigned_stmts, but maps a missing value (annotation-only
    AnnAssign, where self.value is None) to Uninferable."""
    for inferred in assign_assigned_stmts(self, node, context, asspath):
        if inferred is None:
            yield util.Uninferable
        else:
            yield inferred
nodes.Assign.assigned_stmts = assign_assigned_stmts
nodes.AnnAssign.assigned_stmts = assign_annassigned_stmts
nodes.AugAssign.assigned_stmts = assign_assigned_stmts
def _resolve_asspart(parts, asspath, context):
    """recursive function to resolve multiple assignments

    Like _resolve_looppart, but for direct assignments: pops one index off
    the lhs path and indexes each candidate rhs value in *parts* with it.
    """
    asspath = asspath[:]
    index = asspath.pop(0)
    for part in parts:
        if hasattr(part, 'getitem'):
            index_node = nodes.Const(index)
            try:
                assigned = part.getitem(index_node, context)
            # XXX raise a specific exception to avoid potential hiding of
            # unexpected exception ?
            except (exceptions.AstroidTypeError, exceptions.AstroidIndexError):
                return
            if not asspath:
                # the assignment path is fully resolved; don't infer the
                # last part
                yield assigned
            elif assigned is util.Uninferable:
                return
            else:
                # we are not yet on the last part of the path: search on each
                # possibly inferred value
                try:
                    for inferred in _resolve_asspart(assigned.infer(context),
                                                    asspath, context):
                        yield inferred
                except exceptions.InferenceError:
                    return
@decorators.raise_if_nothing_inferred
def excepthandler_assigned_stmts(self, node=None, context=None, asspath=None):
    """Infer the value(s) bound by ``except SomeType as name``."""
    for assigned in node_classes.unpack_infer(self.type):
        if isinstance(assigned, nodes.ClassDef):
            # The handler binds an *instance* of the exception class.
            assigned = objects.ExceptionInstance(assigned)
        yield assigned
    # Explicit StopIteration to return error information, see comment
    # in raise_if_nothing_inferred.
    raise StopIteration(dict(node=self, unknown=node,
                             assign_path=asspath, context=context))
nodes.ExceptHandler.assigned_stmts = excepthandler_assigned_stmts
def _infer_context_manager(self, mgr, context):
    """Infer the value(s) bound by entering context manager *mgr*.

    For a contextlib.contextmanager generator this is the first yield point's
    value; for an ordinary instance it is the result of calling __enter__.
    """
    try:
        inferred = next(mgr.infer(context=context))
    except exceptions.InferenceError:
        return
    if isinstance(inferred, bases.Generator):
        # Check if it is decorated with contextlib.contextmanager.
        func = inferred.parent
        if not func.decorators:
            return
        for decorator_node in func.decorators.nodes:
            # NOTE(review): this bare next() propagates StopIteration when the
            # decorator can't be inferred; under PEP 479 (Python 3.7+) that
            # becomes RuntimeError — confirm against supported versions.
            decorator = next(decorator_node.infer(context))
            if isinstance(decorator, nodes.FunctionDef):
                if decorator.qname() == _CONTEXTLIB_MGR:
                    break
        else:
            # It doesn't interest us.
            return
        # Get the first yield point. If it has multiple yields,
        # then a RuntimeError will be raised.
        # TODO(cpopa): Handle flows.
        possible_yield_points = func.nodes_of_class(nodes.Yield)
        # Ignore yields in nested functions
        yield_point = next((node for node in possible_yield_points
                            if node.scope() == func), None)
        if yield_point:
            if not yield_point.value:
                # TODO(cpopa): an empty yield. Should be wrapped to Const.
                const = nodes.Const(None)
                const.parent = yield_point
                const.lineno = yield_point.lineno
                yield const
            else:
                for inferred in yield_point.value.infer(context=context):
                    yield inferred
    elif isinstance(inferred, bases.Instance):
        try:
            enter = next(inferred.igetattr('__enter__', context=context))
        except (exceptions.InferenceError, exceptions.AttributeInferenceError):
            return
        if not isinstance(enter, bases.BoundMethod):
            return
        if not context.callcontext:
            # __enter__ is called with the manager instance as its argument.
            context.callcontext = contextmod.CallContext(args=[inferred])
        for result in enter.infer_call_result(self, context):
            yield result
@decorators.raise_if_nothing_inferred
def with_assigned_stmts(self, node=None, context=None, asspath=None):
    """Infer names and other nodes from a *with* statement.

    This enables only inference for name binding in a *with* statement.
    For instance, in the following code, inferring `func` will return
    the `ContextManager` class, not whatever ``__enter__`` returns.
    We are doing this intentionally, because we consider that the context
    manager result is whatever __enter__ returns and what it is bound to
    using the ``as`` keyword.

        class ContextManager(object):
            def __enter__(self):
                return 42
        with ContextManager() as f:
            pass
        # ContextManager().infer() will return ContextManager
        # f.infer() will return 42.

    Arguments:
        self: nodes.With
        node: The target of the assignment, `as (a, b)` in `with foo as (a, b)`.
        context: Inference context passed to intermediary inference steps.
        asspath: Left-hand-side index path when the target is unpacked
            (e.g. [0] for `a` in `with foo as (a, b)`), or None.
    """
    # `vars` here shadows the builtin, but only inside the genexp scope.
    mgr = next(mgr for (mgr, vars) in self.items if vars == node)
    if asspath is None:
        for result in _infer_context_manager(self, mgr, context):
            yield result
    else:
        for result in _infer_context_manager(self, mgr, context):
            # Walk the asspath and get the item at the final index.
            obj = result
            for index in asspath:
                if not hasattr(obj, 'elts'):
                    raise exceptions.InferenceError(
                        'Wrong type ({targets!r}) for {node!r} assignment',
                        node=self, targets=node, assign_path=asspath,
                        context=context)
                try:
                    obj = obj.elts[index]
                except IndexError:
                    util.reraise(exceptions.InferenceError(
                        'Tried to infer a nonexistent target with index {index} '
                        'in {node!r}.', node=self, targets=node,
                        assign_path=asspath, context=context))
                except TypeError:
                    util.reraise(exceptions.InferenceError(
                        'Tried to unpack an non-iterable value '
                        'in {node!r}.', node=self, targets=node,
                        assign_path=asspath, context=context))
            yield obj
    # Explicit StopIteration to return error information, see comment
    # in raise_if_nothing_inferred.
    raise StopIteration(dict(node=self, unknown=node,
                             assign_path=asspath, context=context))
nodes.With.assigned_stmts = with_assigned_stmts
@decorators.yes_if_nothing_inferred
def starred_assigned_stmts(self, node=None, context=None, asspath=None):
    """Infer the list of values packed into a Starred (``*rest``) target.

    Only Assign statements are handled here; a For statement falls through
    and yields nothing (handled by yes_if_nothing_inferred).

    Arguments:
        self: nodes.Starred
        node: TODO
        context: Inference context used when inferring the rhs value.
        asspath: TODO
    """
    stmt = self.statement()
    if not isinstance(stmt, (nodes.Assign, nodes.For)):
        raise exceptions.InferenceError('Statement {stmt!r} enclosing {node!r} '
                                        'must be an Assign or For node.',
                                        node=self, stmt=stmt, unknown=node,
                                        context=context)
    if isinstance(stmt, nodes.Assign):
        value = stmt.value
        lhs = stmt.targets[0]
        # NOTE(review): `node` in the genexp shadows the parameter
        # (generator-expression scope, so the parameter itself is untouched).
        if sum(1 for node in lhs.nodes_of_class(nodes.Starred)) > 1:
            raise exceptions.InferenceError('Too many starred arguments in the '
                                            ' assignment targets {lhs!r}.',
                                            node=self, targets=lhs,
                                            unknown=node, context=context)
        if context is None:
            context = contextmod.InferenceContext()
        try:
            rhs = next(value.infer(context))
        except exceptions.InferenceError:
            yield util.Uninferable
            return
        if rhs is util.Uninferable or not hasattr(rhs, 'elts'):
            # Not interested in inferred values without elts.
            yield util.Uninferable
            return
        elts = collections.deque(rhs.elts[:])
        if len(lhs.elts) > len(rhs.elts):
            raise exceptions.InferenceError('More targets, {targets!r}, than '
                                            'values to unpack, {values!r}.',
                                            node=self, targets=lhs,
                                            values=rhs, unknown=node,
                                            context=context)
        # Unpack iteratively the values from the rhs of the assignment,
        # until the find the starred node. What will remain will
        # be the list of values which the Starred node will represent
        # This is done in two steps, from left to right to remove
        # anything before the starred node and from right to left
        # to remove anything after the starred node.
        for index, left_node in enumerate(lhs.elts):
            if not isinstance(left_node, nodes.Starred):
                elts.popleft()
                continue
            lhs_elts = collections.deque(reversed(lhs.elts[index:]))
            for right_node in lhs_elts:
                if not isinstance(right_node, nodes.Starred):
                    elts.pop()
                    continue
                # We're done: whatever remains in `elts` belongs to the star.
                packed = nodes.List()
                packed.elts = elts
                packed.parent = self
                yield packed
                break
nodes.Starred.assigned_stmts = starred_assigned_stmts
# Copyright (c) 2006-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr> | |||||
# Copyright (c) 2014-2016 Claudiu Popa <pcmanticore@gmail.com> | |||||
# Copyright (c) 2014 Google, Inc. | |||||
# Copyright (c) 2015-2016 Cara Vinson <ceridwenv@gmail.com> | |||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html | |||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER | |||||
"""this module contains a set of functions to create astroid trees from scratch | |||||
(build_* functions) or from living object (object_build_* functions) | |||||
""" | |||||
import inspect | |||||
import logging | |||||
import os | |||||
import sys | |||||
import types | |||||
import six | |||||
from astroid import bases | |||||
from astroid import manager | |||||
from astroid import node_classes | |||||
from astroid import nodes | |||||
# Shared astroid manager instance, used to cache the modules built here.
MANAGER = manager.AstroidManager()
# the keys of CONST_CLS, e.g. the Python builtin types (int, str, ...)
_CONSTANTS = tuple(node_classes.CONST_CLS)
# True when running on Jython (Java-based Python implementation).
_JYTHON = os.name == 'java'
# Mapping of names available in the running interpreter's builtins module.
_BUILTINS = vars(six.moves.builtins)
_LOG = logging.getLogger(__name__)
def _io_discrepancy(member): | |||||
# _io module names itself `io`: http://bugs.python.org/issue18602 | |||||
member_self = getattr(member, '__self__', None) | |||||
return (member_self and | |||||
inspect.ismodule(member_self) and | |||||
member_self.__name__ == '_io' and | |||||
member.__module__ == 'io') | |||||
def _attach_local_node(parent, node, name): | |||||
node.name = name # needed by add_local_node | |||||
parent.add_local_node(node) | |||||
def _add_dunder_class(func, member):
    """Add a __class__ member to the given func node, if we can determine it."""
    runtime_cls = member.__class__
    name = getattr(runtime_cls, '__name__', None)
    if not name:
        # Without a class name there is nothing sensible to build.
        return
    base_names = [ancestor.__name__ for ancestor in runtime_cls.__bases__]
    ast_klass = build_class(name, base_names, runtime_cls.__doc__)
    func.instance_attrs['__class__'] = [ast_klass]
# Sentinel marking EmptyNode instances that carry no underlying runtime object.
_marker = object()
def attach_dummy_node(node, name, runtime_object=_marker):
    """create a dummy node and register it in the locals of the given
    node with the specified name
    """
    enode = nodes.EmptyNode()
    enode.object = runtime_object
    _attach_local_node(node, enode, name)
def _has_underlying_object(self):
    # True when this EmptyNode wraps a real runtime object, not the sentinel.
    return self.object is not None and self.object is not _marker
nodes.EmptyNode.has_underlying_object = _has_underlying_object
def attach_const_node(node, name, value):
    """create a Const node and register it in the locals of the given
    node with the specified name
    """
    # Skip names the node already exposes as special attributes.
    if name not in node.special_attributes:
        _attach_local_node(node, nodes.const_factory(value), name)
def attach_import_node(node, modname, membername):
    """create a ImportFrom node and register it in the locals of the given
    node with the specified name
    """
    # Model the member as `from modname import membername` (no alias).
    from_node = nodes.ImportFrom(modname, [(membername, None)])
    _attach_local_node(node, from_node, membername)
def build_module(name, doc=None):
    """Create and initialize an astroid Module node."""
    module = nodes.Module(name, doc, pure_python=False)
    module.package = False
    module.parent = None
    return module
def build_class(name, basenames=(), doc=None):
    """Create and initialize an astroid ClassDef node with the given bases."""
    klass = nodes.ClassDef(name, doc)
    for base_name in basenames:
        base_node = nodes.Name()
        base_node.name = base_name
        klass.bases.append(base_node)
        base_node.parent = klass
    return klass
def build_function(name, args=None, defaults=None, doc=None):
    """Create and initialize an astroid FunctionDef node.

    *args* is a list of argument names; *defaults* a list of default values.
    """
    # first argument is now a list of decorators
    func = nodes.FunctionDef(name, doc)
    argsnode = nodes.Arguments()
    func.args = argsnode
    argsnode.args = []
    for arg_name in (args or []):
        arg_node = nodes.Name()
        arg_node.name = arg_name
        arg_node.parent = argsnode
        argsnode.args.append(arg_node)
    argsnode.defaults = []
    for default in (defaults or []):
        default_node = nodes.const_factory(default)
        default_node.parent = argsnode
        argsnode.defaults.append(default_node)
    argsnode.kwarg = None
    argsnode.vararg = None
    argsnode.parent = func
    if argsnode.args:
        register_arguments(func)
    return func
def build_from_import(fromname, names):
    """Create and initialize an astroid ImportFrom import statement."""
    name_pairs = [(name, None) for name in names]
    return nodes.ImportFrom(fromname, name_pairs)
def register_arguments(func, args=None):
    """add given arguments to local

    args is a list that may contains nested lists
    (i.e. def func(a, (b, c, d)): ...)
    """
    if args is None:
        # Top-level call: start from the function's own argument list and
        # also register the *args / **kwargs names.
        args = func.args.args
        if func.args.vararg:
            func.set_local(func.args.vararg, func.args)
        if func.args.kwarg:
            func.set_local(func.args.kwarg, func.args)
    for arg in args:
        if isinstance(arg, nodes.Name):
            func.set_local(arg.name, arg)
        else:
            # Python 2 tuple parameter: recurse into the nested elements.
            register_arguments(func, arg.elts)
def object_build_class(node, member, localname):
    """Create an astroid ClassDef for a living class object."""
    ancestor_names = [ancestor.__name__ for ancestor in member.__bases__]
    return _base_class_object_build(
        node, member, ancestor_names, localname=localname)
def object_build_function(node, member, localname):
    """create astroid for a living function object"""
    # getargspec is kept (despite deprecation) for Python 2 compatibility.
    # pylint: disable=deprecated-method; completely removed in 2.0
    args, varargs, varkw, defaults = inspect.getargspec(member)
    # *args / **kwargs names are appended to the plain argument list; the
    # resulting FunctionDef does not distinguish them.
    if varargs is not None:
        args.append(varargs)
    if varkw is not None:
        args.append(varkw)
    func = build_function(getattr(member, '__name__', None) or localname, args,
                          defaults, member.__doc__)
    node.add_local_node(func, localname)
def object_build_datadescriptor(node, member, name):
    """Create astroid for a living data descriptor object."""
    return _base_class_object_build(node, member, [], name=name)
def object_build_methoddescriptor(node, member, localname):
    """create astroid for a living method descriptor object"""
    # FIXME get arguments ?
    func = build_function(getattr(member, '__name__', None) or localname,
                          doc=member.__doc__)
    # set node's arguments to None to notice that we have no information, not
    # an empty argument list
    func.args.args = None
    node.add_local_node(func, localname)
    _add_dunder_class(func, member)
def _base_class_object_build(node, member, basenames, name=None, localname=None):
    """create astroid for a living class object, with a given set of base names
    (e.g. ancestors)
    """
    klass = build_class(name or getattr(member, '__name__', None) or localname,
                        basenames, member.__doc__)
    klass._newstyle = isinstance(member, type)
    node.add_local_node(klass, localname)
    try:
        # limit the instantiation trick since it's too dangerous
        # (such as infinite test execution...)
        # this at least resolves common case such as Exception.args,
        # OSError.errno
        if issubclass(member, Exception):
            instdict = member().__dict__
        else:
            raise TypeError
    except: # pylint: disable=bare-except
        # Deliberately swallow any instantiation failure: the trick is
        # best-effort only.
        pass
    else:
        # Expose the attributes found on a fresh instance as instance_attrs.
        for item_name, obj in instdict.items():
            valnode = nodes.EmptyNode()
            valnode.object = obj
            valnode.parent = klass
            valnode.lineno = 1
            klass.instance_attrs[item_name] = [valnode]
    return klass
def _build_from_function(node, name, member, module):
    """Build the appropriate astroid node for function *member* of *module*."""
    # verify this is not an imported function
    try:
        code = six.get_function_code(member)
    except AttributeError:
        # Some implementations don't provide the code object,
        # such as Jython.
        code = None
    filename = getattr(code, 'co_filename', None)
    if filename is None:
        # No code object available: treat it like a method descriptor.
        assert isinstance(member, object)
        object_build_methoddescriptor(node, member, name)
    elif filename != getattr(module, '__file__', None):
        # Defined in another file, hence imported: register a dummy node only.
        attach_dummy_node(node, name, member)
    else:
        object_build_function(node, member, name)
class InspectBuilder(object):
    """class for building nodes from living object

    this is actually a really minimal representation, including only Module,
    FunctionDef and ClassDef nodes and some others as guessed.
    """
    # astroid from living objects ###############################################
    def __init__(self):
        # Map of already-built living objects -> astroid node, to break cycles.
        self._done = {}
        # The living module currently being introspected.
        self._module = None
    def inspect_build(self, module, modname=None, path=None):
        """build astroid from a living module (i.e. using inspect)
        this is used when there is no python source code available (either
        because it's a built-in module or because the .py is not available)
        """
        self._module = module
        if modname is None:
            modname = module.__name__
        try:
            node = build_module(modname, module.__doc__)
        except AttributeError:
            # in jython, java modules have no __doc__ (see #109562)
            node = build_module(modname)
        node.file = node.path = os.path.abspath(path) if path else path
        node.name = modname
        MANAGER.cache_module(node)
        node.package = hasattr(module, '__path__')
        self._done = {}
        self.object_build(node, module)
        return node
    def object_build(self, node, obj):
        """recursive method which create a partial ast from real objects
        (only function, class, and method are handled)
        """
        if obj in self._done:
            return self._done[obj]
        self._done[obj] = node
        for name in dir(obj):
            try:
                member = getattr(obj, name)
            except AttributeError:
                # damned ExtensionClass.Base, I know you're there !
                attach_dummy_node(node, name)
                continue
            if inspect.ismethod(member):
                # Unwrap bound methods to the underlying function.
                member = six.get_method_function(member)
            if inspect.isfunction(member):
                _build_from_function(node, name, member, self._module)
            elif inspect.isbuiltin(member):
                if (not _io_discrepancy(member) and
                        self.imported_member(node, member, name)):
                    continue
                object_build_methoddescriptor(node, member, name)
            elif inspect.isclass(member):
                if self.imported_member(node, member, name):
                    continue
                if member in self._done:
                    # Already built: just alias it under this name if needed.
                    class_node = self._done[member]
                    if class_node not in node.locals.get(name, ()):
                        node.add_local_node(class_node, name)
                else:
                    class_node = object_build_class(node, member, name)
                    # recursion
                    self.object_build(class_node, member)
                if name == '__class__' and class_node.parent is None:
                    class_node.parent = self._done[self._module]
            elif inspect.ismethoddescriptor(member):
                assert isinstance(member, object)
                object_build_methoddescriptor(node, member, name)
            elif inspect.isdatadescriptor(member):
                assert isinstance(member, object)
                object_build_datadescriptor(node, member, name)
            elif isinstance(member, _CONSTANTS):
                attach_const_node(node, name, member)
            elif inspect.isroutine(member):
                # This should be called for Jython, where some builtin
                # methods aren't caught by isbuiltin branch.
                _build_from_function(node, name, member, self._module)
            else:
                # create an empty node so that the name is actually defined
                attach_dummy_node(node, name, member)
        return None
    def imported_member(self, node, member, name):
        """verify this is not an imported class or handle it"""
        # /!\ some classes like ExtensionClass doesn't have a __module__
        # attribute ! Also, this may trigger an exception on badly built module
        # (see http://www.logilab.org/ticket/57299 for instance)
        try:
            modname = getattr(member, '__module__', None)
        except: # pylint: disable=bare-except
            _LOG.exception('unexpected error while building '
                           'astroid from living object')
            modname = None
        if modname is None:
            if (name in ('__new__', '__subclasshook__')
                    or (name in _BUILTINS and _JYTHON)):
                # Python 2.5.1 (r251:54863, Sep  1 2010, 22:03:14)
                # >>> print object.__new__.__module__
                # None
                modname = six.moves.builtins.__name__
            else:
                attach_dummy_node(node, name, member)
                return True
        # Some modules lie about their name; map them to the public name.
        real_name = {
            'gtk': 'gtk_gtk',
            '_io': 'io',
        }.get(modname, modname)
        if real_name != self._module.__name__:
            # check if it sounds valid and then add an import node, else use a
            # dummy node
            try:
                getattr(sys.modules[modname], name)
            except (KeyError, AttributeError):
                attach_dummy_node(node, name, member)
            else:
                attach_import_node(node, modname, name)
            return True
        return False
### astroid bootstrapping ######################################################
Astroid_BUILDER = InspectBuilder()
# Proxies for Const value types that are filled in during bootstrapping.
_CONST_PROXY = {}
def _astroid_bootstrapping(astroid_builtin=None):
    """astroid boot strapping the builtins module"""
    # this boot strapping is necessary since we need the Const nodes to
    # inspect_build builtins, and then we can proxy Const
    if astroid_builtin is None:
        from six.moves import builtins
        astroid_builtin = Astroid_BUILDER.inspect_build(builtins)
    # pylint: disable=redefined-outer-name
    for cls, node_cls in node_classes.CONST_CLS.items():
        if cls is type(None):
            # NoneType is not exposed in builtins; build a stand-in class.
            proxy = build_class('NoneType')
            proxy.parent = astroid_builtin
        elif cls is type(NotImplemented):
            # Same for NotImplementedType.
            proxy = build_class('NotImplementedType')
            proxy.parent = astroid_builtin
        else:
            proxy = astroid_builtin.getattr(cls.__name__)[0]
        if cls in (dict, list, set, tuple):
            node_cls._proxied = proxy
        else:
            _CONST_PROXY[cls] = proxy
_astroid_bootstrapping()
# TODO : find a nicer way to handle this situation;
# However __proxied introduced an
# infinite recursion (see https://bugs.launchpad.net/pylint/+bug/456870)
def _set_proxied(const):
    # Const nodes resolve their proxied class lazily from their value's type.
    return _CONST_PROXY[const.value.__class__]
nodes.Const._proxied = property(_set_proxied)
# Build a ClassDef proxy for the generator type and attach it to builtins.
_GeneratorType = nodes.ClassDef(types.GeneratorType.__name__, types.GeneratorType.__doc__)
_GeneratorType.parent = MANAGER.astroid_cache[six.moves.builtins.__name__]
bases.Generator._proxied = _GeneratorType
Astroid_BUILDER.object_build(bases.Generator._proxied, types.GeneratorType)
_builtins = MANAGER.astroid_cache[six.moves.builtins.__name__]
BUILTIN_TYPES = (types.GetSetDescriptorType, types.GeneratorType,
                 types.MemberDescriptorType, type(None), type(NotImplemented),
                 types.FunctionType, types.MethodType,
                 types.BuiltinFunctionType, types.ModuleType, types.TracebackType)
# Make sure every builtin type above has at least a minimal ClassDef
# registered in the cached builtins module.
for _type in BUILTIN_TYPES:
    if _type.__name__ not in _builtins:
        cls = nodes.ClassDef(_type.__name__, _type.__doc__)
        cls.parent = MANAGER.astroid_cache[six.moves.builtins.__name__]
        Astroid_BUILDER.object_build(cls, _type)
        _builtins[_type.__name__] = cls