added git and edited settings.py
commit 0bc13ada46
.gitignore (vendored, new file, 5 lines)
@@ -0,0 +1,5 @@
*.pyc
__pycache__
myvenv
db.sqlite3
.DS_Store
.vscode/settings.json (vendored, new file, 3 lines)
@@ -0,0 +1,3 @@
{
    "python.pythonPath": "${workspaceFolder}/thesisenv/bin/python"
}
application/__init__.py (new empty file)
application/admin.py (new file, 3 lines)
@@ -0,0 +1,3 @@
from django.contrib import admin

# Register your models here.
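Note: application/admin.py is left as the stub that `django-admin startapp` generates. A hypothetical follow-up (not part of this commit) that registers the Post model added further down could look like this:

    # hypothetical application/admin.py extension (illustration only, not in this commit)
    from django.contrib import admin

    from .models import Post

    admin.site.register(Post)  # expose Post in the Django admin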
application/apps.py (new file, 5 lines)
@@ -0,0 +1,5 @@
from django.apps import AppConfig


class ApplicationConfig(AppConfig):
    name = 'application'
application/migrations/__init__.py (new empty file)
application/models.py (new file, 19 lines)
@@ -0,0 +1,19 @@
from django.db import models
from django.utils import timezone


class Post(models.Model):
    author = models.ForeignKey('auth.User', on_delete=models.CASCADE)
    title = models.CharField(max_length=200)
    text = models.TextField()
    created_date = models.DateTimeField(
        default=timezone.now)
    published_date = models.DateTimeField(
        blank=True, null=True)

    def publish(self):
        self.published_date = timezone.now()
        self.save()

    def __str__(self):
        return self.title
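A minimal usage sketch of the Post model (illustration only; it assumes the migrations for the application app have already been created and applied with manage.py makemigrations / migrate, which this commit does not yet do):

    # hypothetical session, e.g. inside `python manage.py shell`
    from django.contrib.auth.models import User
    from application.models import Post

    user = User.objects.create_user(username="demo", password="demo-pass")  # hypothetical credentials
    post = Post.objects.create(author=user, title="First post", text="Hello world")
    post.publish()   # sets published_date to timezone.now() and saves
    print(post)      # __str__ returns the title: "First post"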
application/tests.py (new file, 3 lines)
@@ -0,0 +1,3 @@
from django.test import TestCase

# Create your tests here.
application/views.py (new file, 3 lines)
@@ -0,0 +1,3 @@
from django.shortcuts import render

# Create your views here.
manage.py (new executable file, 15 lines)
@@ -0,0 +1,15 @@
#!/usr/bin/env python
import os
import sys

if __name__ == "__main__":
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "mysite.settings")
    try:
        from django.core.management import execute_from_command_line
    except ImportError as exc:
        raise ImportError(
            "Couldn't import Django. Are you sure it's installed and "
            "available on your PYTHONPATH environment variable? Did you "
            "forget to activate a virtual environment?"
        ) from exc
    execute_from_command_line(sys.argv)
mysite/__init__.py (new empty file)
mysite/settings.py (new file, 167 lines)
@@ -0,0 +1,167 @@
"""
Django settings for mysite project.

Generated by 'django-admin startproject' using Django 2.0.6.

For more information on this file, see
https://docs.djangoproject.com/en/2.0/topics/settings/

For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.0/ref/settings/
"""

import os
import re
import socket

# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))


# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.0/howto/deployment/checklist/

# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'rh2cynsps7=3fb-bmb!+6g(!a(j5i3dq54ps08y2^py8z*49ct'

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True

ALLOWED_HOSTS = []

# Development or Production
r = re.search(r'^172.17', socket.gethostbyname(socket.gethostname()))
DEVELOPMENT = (r == None)


# Application definition

INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'application',
]

MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]

ROOT_URLCONF = 'mysite.urls'

TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]

WSGI_APPLICATION = 'mysite.wsgi.application'


# Database
# https://docs.djangoproject.com/en/2.0/ref/settings/#databases

if DEVELOPMENT:
    DATABASES = {
        'default': {
            'ENGINE': 'django.db.backends.sqlite3',
            'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
        }
    }
else:
    DATABASES = {
        'default': {
            'ENGINE': 'django.db.backends.mysql',
            'NAME': 'django-app',
            'USER': 'django-app',
            'PASSWORD': '*******',
            'HOST': 'mysql',
            'PORT': '3306',
            'OPTIONS': {
                'init_command': "SET sql_mode='STRICT_TRANS_TABLES'"
            },
        }
    }


# Password validation
# https://docs.djangoproject.com/en/2.0/ref/settings/#auth-password-validators

AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]


# Internationalization
# https://docs.djangoproject.com/en/2.0/topics/i18n/

LANGUAGE_CODE = 'en-us'

TIME_ZONE = 'Europe/Berlin'

USE_I18N = True

USE_L10N = True

USE_TZ = True


# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.0/howto/static-files/

STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, 'static')

# Configuration of the auth system

LDAP_DOMAIN = 'ADS1'
LDAP_SERVER = 'gso1.ads1.fh-nuernberg.de'

if DEVELOPMENT:
    LOGIN_REDIRECT_URL = '/'
    LOGOUT_REDIRECT_URL = '/'
    LOGIN_URL = "/accounts/login/"
else:
    LOGIN_REDIRECT_URL = '/app/'
    LOGOUT_REDIRECT_URL = '/app/'
    LOGIN_URL = "/app/accounts/login/"

if DEVELOPMENT:
    AUTHENTICATION_BACKENDS = [
        'django.contrib.auth.backends.ModelBackend',
    ]
else:
    AUTHENTICATION_BACKENDS = [
        'django.contrib.auth.backends.ModelBackend',
        'medinf.ldap_backend.LdapBackend',
    ]
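The DEVELOPMENT flag above is derived from the host's IP address: if it does not start with 172.17, the settings fall back to the local development configuration (SQLite, ModelBackend only). The 172.17 prefix matches Docker's default bridge subnet (172.17.0.0/16), so the check presumably distinguishes the containerized production deployment from a developer machine; that interpretation is an assumption about this setup. A standalone sketch of the same check, for illustration only:

    # minimal sketch of the environment detection used in mysite/settings.py
    import re
    import socket

    host_ip = socket.gethostbyname(socket.gethostname())

    # any address outside 172.17.* is treated as a development machine
    development = re.search(r'^172.17', host_ip) is None

    print(host_ip, 'development' if development else 'production')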
mysite/urls.py (new file, 21 lines)
@@ -0,0 +1,21 @@
"""mysite URL Configuration

The `urlpatterns` list routes URLs to views. For more information please see:
    https://docs.djangoproject.com/en/2.0/topics/http/urls/
Examples:
Function views
    1. Add an import: from my_app import views
    2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
    1. Add an import: from other_app.views import Home
    2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
    1. Import the include() function: from django.urls import include, path
    2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path

urlpatterns = [
    path('admin/', admin.site.urls),
]
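Only the admin route is wired up in this commit. Following the recipe in the docstring, a hypothetical later step that routes requests to the application app (it would also require an application/urls.py, which this commit does not add) might look like:

    # hypothetical extension of mysite/urls.py (not part of this commit)
    from django.contrib import admin
    from django.urls import include, path

    urlpatterns = [
        path('admin/', admin.site.urls),
        path('', include('application.urls')),  # assumes application/urls.py exists
    ]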
mysite/wsgi.py (new file, 16 lines)
@@ -0,0 +1,16 @@
"""
WSGI config for mysite project.

It exposes the WSGI callable as a module-level variable named ``application``.

For more information on this file, see
https://docs.djangoproject.com/en/2.0/howto/deployment/wsgi/
"""

import os

from django.core.wsgi import get_wsgi_application

os.environ.setdefault("DJANGO_SETTINGS_MODULE", "mysite.settings")

application = get_wsgi_application()
thesisenv/bin/activate (new file, 76 lines)
@@ -0,0 +1,76 @@
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    # reset old environment variables
    if [ -n "${_OLD_VIRTUAL_PATH:-}" ] ; then
        PATH="${_OLD_VIRTUAL_PATH:-}"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if [ -n "${_OLD_VIRTUAL_PYTHONHOME:-}" ] ; then
        PYTHONHOME="${_OLD_VIRTUAL_PYTHONHOME:-}"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands. Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then
        hash -r
    fi

    if [ -n "${_OLD_VIRTUAL_PS1:-}" ] ; then
        PS1="${_OLD_VIRTUAL_PS1:-}"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "$1" = "nondestructive" ] ; then
        # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="/Users/Esthi/thesis_ek/thesisenv"
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
# this will fail if PYTHONHOME is set to the empty string (which is bad anyway)
# could use `if (set -u; : $PYTHONHOME) ;` in bash
if [ -n "${PYTHONHOME:-}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="${PYTHONHOME:-}"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT:-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1:-}"
    if [ "x(thesisenv) " != x ] ; then
        PS1="(thesisenv) ${PS1:-}"
    else
        if [ "`basename \"$VIRTUAL_ENV\"`" = "__" ] ; then
            # special case for Aspen magic directories
            # see http://www.zetadev.com/software/aspen/
            PS1="[`basename \`dirname \"$VIRTUAL_ENV\"\``] $PS1"
        else
            PS1="(`basename \"$VIRTUAL_ENV\"`)$PS1"
        fi
    fi
    export PS1
fi

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands. Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then
    hash -r
fi
thesisenv/bin/activate.csh (new file, 37 lines)
@@ -0,0 +1,37 @@
# This file must be used with "source bin/activate.csh" *from csh*.
# You cannot run it directly.
# Created by Davide Di Blasi <davidedb@gmail.com>.
# Ported to Python 3.3 venv by Andrew Svetlov <andrew.svetlov@gmail.com>

alias deactivate 'test $?_OLD_VIRTUAL_PATH != 0 && setenv PATH "$_OLD_VIRTUAL_PATH" && unset _OLD_VIRTUAL_PATH; rehash; test $?_OLD_VIRTUAL_PROMPT != 0 && set prompt="$_OLD_VIRTUAL_PROMPT" && unset _OLD_VIRTUAL_PROMPT; unsetenv VIRTUAL_ENV; test "\!:*" != "nondestructive" && unalias deactivate'

# Unset irrelevant variables.
deactivate nondestructive

setenv VIRTUAL_ENV "/Users/Esthi/thesis_ek/thesisenv"

set _OLD_VIRTUAL_PATH="$PATH"
setenv PATH "$VIRTUAL_ENV/bin:$PATH"


set _OLD_VIRTUAL_PROMPT="$prompt"

if (! "$?VIRTUAL_ENV_DISABLE_PROMPT") then
    if ("thesisenv" != "") then
        set env_name = "thesisenv"
    else
        if (`basename "VIRTUAL_ENV"` == "__") then
            # special case for Aspen magic directories
            # see http://www.zetadev.com/software/aspen/
            set env_name = `basename \`dirname "$VIRTUAL_ENV"\``
        else
            set env_name = `basename "$VIRTUAL_ENV"`
        endif
    endif
    set prompt = "[$env_name] $prompt"
    unset env_name
endif

alias pydoc python -m pydoc

rehash
thesisenv/bin/activate.fish (new file, 75 lines)
@@ -0,0 +1,75 @@
# This file must be used with ". bin/activate.fish" *from fish* (http://fishshell.org)
# you cannot run it directly

function deactivate -d "Exit virtualenv and return to normal shell environment"
    # reset old environment variables
    if test -n "$_OLD_VIRTUAL_PATH"
        set -gx PATH $_OLD_VIRTUAL_PATH
        set -e _OLD_VIRTUAL_PATH
    end
    if test -n "$_OLD_VIRTUAL_PYTHONHOME"
        set -gx PYTHONHOME $_OLD_VIRTUAL_PYTHONHOME
        set -e _OLD_VIRTUAL_PYTHONHOME
    end

    if test -n "$_OLD_FISH_PROMPT_OVERRIDE"
        functions -e fish_prompt
        set -e _OLD_FISH_PROMPT_OVERRIDE
        functions -c _old_fish_prompt fish_prompt
        functions -e _old_fish_prompt
    end

    set -e VIRTUAL_ENV
    if test "$argv[1]" != "nondestructive"
        # Self destruct!
        functions -e deactivate
    end
end

# unset irrelevant variables
deactivate nondestructive

set -gx VIRTUAL_ENV "/Users/Esthi/thesis_ek/thesisenv"

set -gx _OLD_VIRTUAL_PATH $PATH
set -gx PATH "$VIRTUAL_ENV/bin" $PATH

# unset PYTHONHOME if set
if set -q PYTHONHOME
    set -gx _OLD_VIRTUAL_PYTHONHOME $PYTHONHOME
    set -e PYTHONHOME
end

if test -z "$VIRTUAL_ENV_DISABLE_PROMPT"
    # fish uses a function instead of an env var to generate the prompt.

    # save the current fish_prompt function as the function _old_fish_prompt
    functions -c fish_prompt _old_fish_prompt

    # with the original prompt function renamed, we can override with our own.
    function fish_prompt
        # Save the return status of the last command
        set -l old_status $status

        # Prompt override?
        if test -n "(thesisenv) "
            printf "%s%s" "(thesisenv) " (set_color normal)
        else
            # ...Otherwise, prepend env
            set -l _checkbase (basename "$VIRTUAL_ENV")
            if test $_checkbase = "__"
                # special case for Aspen magic directories
                # see http://www.zetadev.com/software/aspen/
                printf "%s[%s]%s " (set_color -b blue white) (basename (dirname "$VIRTUAL_ENV")) (set_color normal)
            else
                printf "%s(%s)%s" (set_color -b blue white) (basename "$VIRTUAL_ENV") (set_color normal)
            end
        end

        # Restore the return status of the previous command.
        echo "exit $old_status" | .
        _old_fish_prompt
    end

    set -gx _OLD_FISH_PROMPT_OVERRIDE "$VIRTUAL_ENV"
end
thesisenv/bin/django-admin (new executable file, 11 lines)
@@ -0,0 +1,11 @@
#!/Users/Esthi/thesis_ek/thesisenv/bin/python3

# -*- coding: utf-8 -*-
import re
import sys

from django.core.management import execute_from_command_line

if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(execute_from_command_line())
thesisenv/bin/django-admin.py (new executable file, 5 lines)
@@ -0,0 +1,5 @@
#!/Users/Esthi/thesis_ek/thesisenv/bin/python3
from django.core import management

if __name__ == "__main__":
    management.execute_from_command_line()
thesisenv/bin/easy_install (new executable file, 11 lines)
@@ -0,0 +1,11 @@
#!/Users/Esthi/thesis_ek/thesisenv/bin/python3

# -*- coding: utf-8 -*-
import re
import sys

from setuptools.command.easy_install import main

if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
thesisenv/bin/easy_install-3.6 (new executable file, 11 lines)
@@ -0,0 +1,11 @@
#!/Users/Esthi/thesis_ek/thesisenv/bin/python3

# -*- coding: utf-8 -*-
import re
import sys

from setuptools.command.easy_install import main

if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
thesisenv/bin/epylint (new executable file, 11 lines)
@@ -0,0 +1,11 @@
#!/Users/Esthi/thesis_ek/thesisenv/bin/python

# -*- coding: utf-8 -*-
import re
import sys

from pylint import run_epylint

if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(run_epylint())
thesisenv/bin/isort (new executable file, 11 lines)
@@ -0,0 +1,11 @@
#!/Users/Esthi/thesis_ek/thesisenv/bin/python

# -*- coding: utf-8 -*-
import re
import sys

from isort.main import main

if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
thesisenv/bin/pip (new executable file, 11 lines)
@@ -0,0 +1,11 @@
#!/Users/Esthi/thesis_ek/thesisenv/bin/python3

# -*- coding: utf-8 -*-
import re
import sys

from pip._internal import main

if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
thesisenv/bin/pip3 (new executable file, 11 lines)
@@ -0,0 +1,11 @@
#!/Users/Esthi/thesis_ek/thesisenv/bin/python3

# -*- coding: utf-8 -*-
import re
import sys

from pip._internal import main

if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
thesisenv/bin/pip3.6 (new executable file, 11 lines)
@@ -0,0 +1,11 @@
#!/Users/Esthi/thesis_ek/thesisenv/bin/python3

# -*- coding: utf-8 -*-
import re
import sys

from pip._internal import main

if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
thesisenv/bin/pylint (new executable file, 11 lines)
@@ -0,0 +1,11 @@
#!/Users/Esthi/thesis_ek/thesisenv/bin/python

# -*- coding: utf-8 -*-
import re
import sys

from pylint import run_pylint

if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(run_pylint())
thesisenv/bin/pyreverse (new executable file, 11 lines)
@@ -0,0 +1,11 @@
#!/Users/Esthi/thesis_ek/thesisenv/bin/python

# -*- coding: utf-8 -*-
import re
import sys

from pylint import run_pyreverse

if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(run_pyreverse())
thesisenv/bin/python (new symbolic link)
@@ -0,0 +1 @@
python3
thesisenv/bin/python3 (new symbolic link)
@@ -0,0 +1 @@
/usr/local/bin/python3
thesisenv/bin/symilar (new executable file, 11 lines)
@@ -0,0 +1,11 @@
#!/Users/Esthi/thesis_ek/thesisenv/bin/python

# -*- coding: utf-8 -*-
import re
import sys

from pylint import run_symilar

if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(run_symilar())
@ -0,0 +1,45 @@
|
||||
Django is a high-level Python Web framework that encourages rapid development
|
||||
and clean, pragmatic design. Thanks for checking it out.
|
||||
|
||||
All documentation is in the "``docs``" directory and online at
|
||||
https://docs.djangoproject.com/en/stable/. If you're just getting started,
|
||||
here's how we recommend you read the docs:
|
||||
|
||||
* First, read ``docs/intro/install.txt`` for instructions on installing Django.
|
||||
|
||||
* Next, work through the tutorials in order (``docs/intro/tutorial01.txt``,
|
||||
``docs/intro/tutorial02.txt``, etc.).
|
||||
|
||||
* If you want to set up an actual deployment server, read
|
||||
``docs/howto/deployment/index.txt`` for instructions.
|
||||
|
||||
* You'll probably want to read through the topical guides (in ``docs/topics``)
|
||||
next; from there you can jump to the HOWTOs (in ``docs/howto``) for specific
|
||||
problems, and check out the reference (``docs/ref``) for gory details.
|
||||
|
||||
* See ``docs/README`` for instructions on building an HTML version of the docs.
|
||||
|
||||
Docs are updated rigorously. If you find any problems in the docs, or think
|
||||
they should be clarified in any way, please take 30 seconds to fill out a
|
||||
ticket here: https://code.djangoproject.com/newticket
|
||||
|
||||
To get more help:
|
||||
|
||||
* Join the ``#django`` channel on irc.freenode.net. Lots of helpful people hang out
|
||||
there. Read the archives at https://botbot.me/freenode/django/.
|
||||
|
||||
* Join the django-users mailing list, or read the archives, at
|
||||
https://groups.google.com/group/django-users.
|
||||
|
||||
To contribute to Django:
|
||||
|
||||
* Check out https://docs.djangoproject.com/en/dev/internals/contributing/ for
|
||||
information about getting involved.
|
||||
|
||||
To run Django's test suite:
|
||||
|
||||
* Follow the instructions in the "Unit tests" section of
|
||||
``docs/internals/contributing/writing-code/unit-tests.txt``, published online at
|
||||
https://docs.djangoproject.com/en/dev/internals/contributing/writing-code/unit-tests/#running-the-unit-tests
|
||||
|
||||
|
@ -0,0 +1 @@
|
||||
pip
|
@ -0,0 +1,27 @@
|
||||
Copyright (c) Django Software Foundation and individual contributors.
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without modification,
|
||||
are permitted provided that the following conditions are met:
|
||||
|
||||
1. Redistributions of source code must retain the above copyright notice,
|
||||
this list of conditions and the following disclaimer.
|
||||
|
||||
2. Redistributions in binary form must reproduce the above copyright
|
||||
notice, this list of conditions and the following disclaimer in the
|
||||
documentation and/or other materials provided with the distribution.
|
||||
|
||||
3. Neither the name of Django nor the names of its contributors may be used
|
||||
to endorse or promote products derived from this software without
|
||||
specific prior written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
|
||||
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
|
||||
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
@ -0,0 +1,83 @@
|
||||
Metadata-Version: 2.0
|
||||
Name: Django
|
||||
Version: 2.0.6
|
||||
Summary: A high-level Python Web framework that encourages rapid development and clean, pragmatic design.
|
||||
Home-page: https://www.djangoproject.com/
|
||||
Author: Django Software Foundation
|
||||
Author-email: foundation@djangoproject.com
|
||||
License: BSD
|
||||
Project-URL: Documentation, https://docs.djangoproject.com/
|
||||
Project-URL: Funding, https://www.djangoproject.com/fundraising/
|
||||
Project-URL: Source, https://github.com/django/django
|
||||
Project-URL: Tracker, https://code.djangoproject.com/
|
||||
Description-Content-Type: UNKNOWN
|
||||
Platform: UNKNOWN
|
||||
Classifier: Development Status :: 5 - Production/Stable
|
||||
Classifier: Environment :: Web Environment
|
||||
Classifier: Framework :: Django
|
||||
Classifier: Intended Audience :: Developers
|
||||
Classifier: License :: OSI Approved :: BSD License
|
||||
Classifier: Operating System :: OS Independent
|
||||
Classifier: Programming Language :: Python
|
||||
Classifier: Programming Language :: Python :: 3
|
||||
Classifier: Programming Language :: Python :: 3.4
|
||||
Classifier: Programming Language :: Python :: 3.5
|
||||
Classifier: Programming Language :: Python :: 3.6
|
||||
Classifier: Programming Language :: Python :: 3 :: Only
|
||||
Classifier: Topic :: Internet :: WWW/HTTP
|
||||
Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content
|
||||
Classifier: Topic :: Internet :: WWW/HTTP :: WSGI
|
||||
Classifier: Topic :: Software Development :: Libraries :: Application Frameworks
|
||||
Classifier: Topic :: Software Development :: Libraries :: Python Modules
|
||||
Requires-Python: >=3.4
|
||||
Requires-Dist: pytz
|
||||
Provides-Extra: argon2
|
||||
Requires-Dist: argon2-cffi (>=16.1.0); extra == 'argon2'
|
||||
Provides-Extra: bcrypt
|
||||
Requires-Dist: bcrypt; extra == 'bcrypt'
|
||||
|
||||
Django is a high-level Python Web framework that encourages rapid development
|
||||
and clean, pragmatic design. Thanks for checking it out.
|
||||
|
||||
All documentation is in the "``docs``" directory and online at
|
||||
https://docs.djangoproject.com/en/stable/. If you're just getting started,
|
||||
here's how we recommend you read the docs:
|
||||
|
||||
* First, read ``docs/intro/install.txt`` for instructions on installing Django.
|
||||
|
||||
* Next, work through the tutorials in order (``docs/intro/tutorial01.txt``,
|
||||
``docs/intro/tutorial02.txt``, etc.).
|
||||
|
||||
* If you want to set up an actual deployment server, read
|
||||
``docs/howto/deployment/index.txt`` for instructions.
|
||||
|
||||
* You'll probably want to read through the topical guides (in ``docs/topics``)
|
||||
next; from there you can jump to the HOWTOs (in ``docs/howto``) for specific
|
||||
problems, and check out the reference (``docs/ref``) for gory details.
|
||||
|
||||
* See ``docs/README`` for instructions on building an HTML version of the docs.
|
||||
|
||||
Docs are updated rigorously. If you find any problems in the docs, or think
|
||||
they should be clarified in any way, please take 30 seconds to fill out a
|
||||
ticket here: https://code.djangoproject.com/newticket
|
||||
|
||||
To get more help:
|
||||
|
||||
* Join the ``#django`` channel on irc.freenode.net. Lots of helpful people hang out
|
||||
there. Read the archives at https://botbot.me/freenode/django/.
|
||||
|
||||
* Join the django-users mailing list, or read the archives, at
|
||||
https://groups.google.com/group/django-users.
|
||||
|
||||
To contribute to Django:
|
||||
|
||||
* Check out https://docs.djangoproject.com/en/dev/internals/contributing/ for
|
||||
information about getting involved.
|
||||
|
||||
To run Django's test suite:
|
||||
|
||||
* Follow the instructions in the "Unit tests" section of
|
||||
``docs/internals/contributing/writing-code/unit-tests.txt``, published online at
|
||||
https://docs.djangoproject.com/en/dev/internals/contributing/writing-code/unit-tests/#running-the-unit-tests
|
||||
|
||||
|
thesisenv/lib/python3.6/site-packages/Django-2.0.6.dist-info/RECORD (new file, 4174 lines)
(file diff suppressed because it is too large)
@ -0,0 +1,5 @@
|
||||
Wheel-Version: 1.0
|
||||
Generator: bdist_wheel (0.30.0)
|
||||
Root-Is-Purelib: true
|
||||
Tag: py3-none-any
|
||||
|
@ -0,0 +1,3 @@
|
||||
[console_scripts]
|
||||
django-admin = django.core.management:execute_from_command_line
|
||||
|
@ -0,0 +1 @@
|
||||
{"classifiers": ["Development Status :: 5 - Production/Stable", "Environment :: Web Environment", "Framework :: Django", "Intended Audience :: Developers", "License :: OSI Approved :: BSD License", "Operating System :: OS Independent", "Programming Language :: Python", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3 :: Only", "Topic :: Internet :: WWW/HTTP", "Topic :: Internet :: WWW/HTTP :: Dynamic Content", "Topic :: Internet :: WWW/HTTP :: WSGI", "Topic :: Software Development :: Libraries :: Application Frameworks", "Topic :: Software Development :: Libraries :: Python Modules"], "description_content_type": "UNKNOWN", "extensions": {"python.commands": {"wrap_console": {"django-admin": "django.core.management:execute_from_command_line"}}, "python.details": {"contacts": [{"email": "foundation@djangoproject.com", "name": "Django Software Foundation", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst", "license": "LICENSE.txt"}, "project_urls": {"Home": "https://www.djangoproject.com/"}}, "python.exports": {"console_scripts": {"django-admin": "django.core.management:execute_from_command_line"}}}, "extras": ["argon2", "bcrypt"], "generator": "bdist_wheel (0.30.0)", "license": "BSD", "metadata_version": "2.0", "name": "Django", "project_url": "Documentation, https://docs.djangoproject.com/", "requires_python": ">=3.4", "run_requires": [{"extra": "argon2", "requires": ["argon2-cffi (>=16.1.0)"]}, {"extra": "bcrypt", "requires": ["bcrypt"]}, {"requires": ["pytz"]}], "summary": "A high-level Python Web framework that encourages rapid development and clean, pragmatic design.", "version": "2.0.6"}
|
@ -0,0 +1 @@
|
||||
django
|
@ -0,0 +1,69 @@
|
||||
Astroid
|
||||
=======
|
||||
|
||||
.. image:: https://travis-ci.org/PyCQA/astroid.svg?branch=master
|
||||
:target: https://travis-ci.org/PyCQA/astroid
|
||||
|
||||
.. image:: https://ci.appveyor.com/api/projects/status/co3u42kunguhbh6l/branch/master?svg=true
|
||||
:alt: AppVeyor Build Status
|
||||
:target: https://ci.appveyor.com/project/PCManticore/astroid
|
||||
|
||||
.. image:: https://coveralls.io/repos/github/PyCQA/astroid/badge.svg?branch=master
|
||||
:target: https://coveralls.io/github/PyCQA/astroid?branch=master
|
||||
|
||||
.. image:: https://readthedocs.org/projects/astroid/badge/?version=latest
|
||||
:target: http://astroid.readthedocs.io/en/latest/?badge=latest
|
||||
:alt: Documentation Status
|
||||
|
||||
|
||||
|
||||
What's this?
|
||||
------------
|
||||
|
||||
The aim of this module is to provide a common base representation of
|
||||
python source code for projects such as pychecker, pyreverse,
|
||||
pylint... Well, actually the development of this library is essentially
|
||||
governed by pylint's needs. It used to be called logilab-astng.
|
||||
|
||||
It provides a compatible representation which comes from the `_ast`
|
||||
module. It rebuilds the tree generated by the builtin _ast module by
|
||||
recursively walking down the AST and building an extended ast. The new
|
||||
node classes have additional methods and attributes for different
|
||||
usages. They include some support for static inference and local name
|
||||
scopes. Furthermore, astroid builds partial trees by inspecting living
|
||||
objects.
|
||||
|
||||
|
||||
Installation
|
||||
------------
|
||||
|
||||
Extract the tarball, jump into the created directory and run::
|
||||
|
||||
python setup.py install
|
||||
|
||||
For installation options, see::
|
||||
|
||||
python setup.py install --help
|
||||
|
||||
|
||||
If you have any questions, please mail the code-quality@python.org
|
||||
mailing list for support. See
|
||||
http://mail.python.org/mailman/listinfo/code-quality for subscription
|
||||
information and archives. You may find older archives at
|
||||
http://lists.logilab.org/mailman/listinfo/python-projects .
|
||||
|
||||
Python Versions
|
||||
---------------
|
||||
|
||||
astroid is compatible with Python 2.7 as well as 3.4 and later. astroid uses
|
||||
the same code base for both Python versions, using six.
|
||||
|
||||
Test
|
||||
----
|
||||
|
||||
Tests are in the 'test' subdirectory. To launch the whole tests suite
|
||||
at once, you can use unittest discover::
|
||||
|
||||
python -m unittest discover -p "unittest*.py"
|
||||
|
||||
|
@ -0,0 +1 @@
|
||||
pip
|
@ -0,0 +1,97 @@
|
||||
Metadata-Version: 2.0
|
||||
Name: astroid
|
||||
Version: 1.6.5
|
||||
Summary: A abstract syntax tree for Python with inference support.
|
||||
Home-page: https://github.com/PyCQA/astroid
|
||||
Author: Python Code Quality Authority
|
||||
Author-email: code-quality@python.org
|
||||
License: LGPL
|
||||
Platform: UNKNOWN
|
||||
Classifier: Topic :: Software Development :: Libraries :: Python Modules
|
||||
Classifier: Topic :: Software Development :: Quality Assurance
|
||||
Classifier: Programming Language :: Python
|
||||
Classifier: Programming Language :: Python :: 2
|
||||
Classifier: Programming Language :: Python :: 2.7
|
||||
Classifier: Programming Language :: Python :: 3
|
||||
Classifier: Programming Language :: Python :: 3.4
|
||||
Classifier: Programming Language :: Python :: 3.5
|
||||
Classifier: Programming Language :: Python :: 3.6
|
||||
Classifier: Programming Language :: Python :: Implementation :: CPython
|
||||
Classifier: Programming Language :: Python :: Implementation :: PyPy
|
||||
Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*
|
||||
Requires-Dist: lazy-object-proxy
|
||||
Requires-Dist: six
|
||||
Requires-Dist: wrapt
|
||||
Requires-Dist: enum34 (>=1.1.3); python_version<"3.4"
|
||||
Requires-Dist: singledispatch; python_version<"3.4"
|
||||
Requires-Dist: backports.functools-lru-cache; python_version<"3.4"
|
||||
|
||||
Astroid
|
||||
=======
|
||||
|
||||
.. image:: https://travis-ci.org/PyCQA/astroid.svg?branch=master
|
||||
:target: https://travis-ci.org/PyCQA/astroid
|
||||
|
||||
.. image:: https://ci.appveyor.com/api/projects/status/co3u42kunguhbh6l/branch/master?svg=true
|
||||
:alt: AppVeyor Build Status
|
||||
:target: https://ci.appveyor.com/project/PCManticore/astroid
|
||||
|
||||
.. image:: https://coveralls.io/repos/github/PyCQA/astroid/badge.svg?branch=master
|
||||
:target: https://coveralls.io/github/PyCQA/astroid?branch=master
|
||||
|
||||
.. image:: https://readthedocs.org/projects/astroid/badge/?version=latest
|
||||
:target: http://astroid.readthedocs.io/en/latest/?badge=latest
|
||||
:alt: Documentation Status
|
||||
|
||||
|
||||
|
||||
What's this?
|
||||
------------
|
||||
|
||||
The aim of this module is to provide a common base representation of
|
||||
python source code for projects such as pychecker, pyreverse,
|
||||
pylint... Well, actually the development of this library is essentially
|
||||
governed by pylint's needs. It used to be called logilab-astng.
|
||||
|
||||
It provides a compatible representation which comes from the `_ast`
|
||||
module. It rebuilds the tree generated by the builtin _ast module by
|
||||
recursively walking down the AST and building an extended ast. The new
|
||||
node classes have additional methods and attributes for different
|
||||
usages. They include some support for static inference and local name
|
||||
scopes. Furthermore, astroid builds partial trees by inspecting living
|
||||
objects.
|
||||
|
||||
|
||||
Installation
|
||||
------------
|
||||
|
||||
Extract the tarball, jump into the created directory and run::
|
||||
|
||||
python setup.py install
|
||||
|
||||
For installation options, see::
|
||||
|
||||
python setup.py install --help
|
||||
|
||||
|
||||
If you have any questions, please mail the code-quality@python.org
|
||||
mailing list for support. See
|
||||
http://mail.python.org/mailman/listinfo/code-quality for subscription
|
||||
information and archives. You may find older archives at
|
||||
http://lists.logilab.org/mailman/listinfo/python-projects .
|
||||
|
||||
Python Versions
|
||||
---------------
|
||||
|
||||
astroid is compatible with Python 2.7 as well as 3.4 and later. astroid uses
|
||||
the same code base for both Python versions, using six.
|
||||
|
||||
Test
|
||||
----
|
||||
|
||||
Tests are in the 'test' subdirectory. To launch the whole tests suite
|
||||
at once, you can use unittest discover::
|
||||
|
||||
python -m unittest discover -p "unittest*.py"
|
||||
|
||||
|
@ -0,0 +1,387 @@
|
||||
astroid/__init__.py,sha256=nJa-PtJVjGLGS3llvtbRxCyUVQevswmcDZe8wF8Wndw,5628
|
||||
astroid/__pkginfo__.py,sha256=X5WIIY-hxNeAfmC4zGhyG2dpsbFYDZiA5jllJI67hqg,2603
|
||||
astroid/arguments.py,sha256=47OYPbIe1llGi-bWxuFDpMrhgnjWMTM_1h4KXbLwNwQ,11305
|
||||
astroid/as_string.py,sha256=eGy2-yU4tGYUiUWkfJhnvx8gGDPFifcOdSMqBXggqT4,19917
|
||||
astroid/astpeephole.py,sha256=N-vMldr_kuvu_gJ4gG6vXdr6CYYL2svI1V3ctRX8i0I,2446
|
||||
astroid/bases.py,sha256=2CbjwGgIHYeHkRICtXwVjHkkbeQ8hGQsEHJezDY3hO8,17127
|
||||
astroid/builder.py,sha256=B8x4wix1pcvDL0A1YcxReZJWUQc0zetHHEeGJfFzDxo,16324
|
||||
astroid/context.py,sha256=gquYYshu40royBm4KdFvQEfzsedZY-jkDLRyaRzUjSY,3327
|
||||
astroid/decorators.py,sha256=wsNx_s7YCDgM90cFhgTkwIpl6ZtBQQj_PcIx4p5wHJE,5174
|
||||
astroid/exceptions.py,sha256=aO6KMw78I0RhjlHgy9zCmLnni0_HsTyRvZcGaaEZG5Y,6925
|
||||
astroid/helpers.py,sha256=f-3GWhh9mNXtG1BDa6pCxPoAURHNOTdMORnA2ZIbsXs,5480
|
||||
astroid/inference.py,sha256=f8uK0QTGBi7mHl2Vof5_a6CQi5wedx_qAZVHVU9HSRQ,30515
|
||||
astroid/manager.py,sha256=buJqHYjz3UJ4OlwtsKUoXlHn8T_x9vh7Qi86CeaXaQU,12332
|
||||
astroid/mixins.py,sha256=jSmxJRasbIU_7dX2VXLMYEbEbiIhTvOnq5UJlXH2GJg,4832
|
||||
astroid/modutils.py,sha256=Gz1Apy25EjWAVJe8i9O6ZxuvFuLTVWVyD9rYk50GbyI,21858
|
||||
astroid/node_classes.py,sha256=8fg0A-oU_bERV8IdgwpKJbpWk6oeGgsrXrHFLC-17QM,129539
|
||||
astroid/nodes.py,sha256=2NctPYDrplpV1Iy0Ze2S5YH13B2bZi7f3tuxX-B_t0w,2400
|
||||
astroid/objects.py,sha256=cMOSw957M3l86OBRmnxnLUq8c91dJS5br5LN7nRmxnU,8062
|
||||
astroid/protocols.py,sha256=NMmtzstAJpDI7KYjmZUqZS4ddbnIfW7pGuZX9bJOrxk,22996
|
||||
astroid/raw_building.py,sha256=LAR3Wt5GgNf-9CaMHFNCaUmwY1tHt71HpTgYiLPBhpM,15753
|
||||
astroid/rebuilder.py,sha256=Cij4R5eehuOwV1LOcNLpjYwuG42bPf5wG0YM4wYYdis,39135
|
||||
astroid/scoped_nodes.py,sha256=fyO6aBhyQmCb-9ARipDq_4AyB2blX5CT9URZdwyuGZE,91147
|
||||
astroid/test_utils.py,sha256=MYum03eaauNc1XCJKoFzzj4Z2FeoOjVGzoISYiaISMk,2046
|
||||
astroid/transforms.py,sha256=rOnQae4Zz21Rk5j6tUiBJWoJ2WmGJ5e0iBO9wkMDEe0,3227
|
||||
astroid/util.py,sha256=Yx1qPfK1bf7CCE3I4X8nysRtXv1XyYsIBKEgoia4kyc,4288
|
||||
astroid/brain/brain_attrs.py,sha256=bLgG9gB5mLhvDnj6OZnjnI21gZJjBy2mcUO1_pbAW_U,1766
|
||||
astroid/brain/brain_builtin_inference.py,sha256=nfJqjeaC3zBL_GtuXy2gAf20eTDWtxQ7N0knxEobJ78,17469
|
||||
astroid/brain/brain_collections.py,sha256=kiAyoVhoOLV2gjiX1DNcDn9IRmCKpysekMSe49uHxrI,2227
|
||||
astroid/brain/brain_curses.py,sha256=P9Ay_ZZqCtZND7Q1t3PLuhLGaaHbo6pBYtcL8Pc8U5E,3289
|
||||
astroid/brain/brain_dateutil.py,sha256=ZflUChhczpnxIWeKrWLYXqZdEe_3ktT-Ay_elaexlWg,714
|
||||
astroid/brain/brain_fstrings.py,sha256=_Y0sap2S1J7nrW9YSl6SaXGUvTL0Y1Q6-BJTTODQH_w,1963
|
||||
astroid/brain/brain_functools.py,sha256=CmaRIplk_6G7xbLyyCIXd2ULUm2pDkFRXUpZH5qzHpY,2323
|
||||
astroid/brain/brain_gi.py,sha256=EjyjllMJ3EQP0NPulpIko0Hclr_4U-txjjYkWFMe_0w,6326
|
||||
astroid/brain/brain_hashlib.py,sha256=c230J0Cdnyav341dDxboxCKuwMBUM2f46k4xjGeuT_A,1056
|
||||
astroid/brain/brain_io.py,sha256=veIF0061yjZyAinn7ILkOt7GTPmEzcIRNUybm2e-hsA,1589
|
||||
astroid/brain/brain_mechanize.py,sha256=afG7eL64YvviwRgGWvJqSTgDck2huAD_w4v4s9t3CWQ,712
|
||||
astroid/brain/brain_multiprocessing.py,sha256=XGVujYTqNpttuhhsj_wV-L5ovJRDusRagZtiBo8Jmns,3128
|
||||
astroid/brain/brain_namedtuple_enum.py,sha256=5ZjyxEcoCBtIHfcHviI1ry-L89qmekAotxwl23UzFHQ,10543
|
||||
astroid/brain/brain_nose.py,sha256=NwqOAv_2-eZu11J_jbHN4k_xn1Y6mrQhnbDYjCERM_Q,2179
|
||||
astroid/brain/brain_numpy.py,sha256=W3hQPZx81EcakGZapqX5Wlr5H-UjPrz8Zq1hooYDhQI,7208
|
||||
astroid/brain/brain_pkg_resources.py,sha256=rtSzNUy775IIWONW-Oa3aqr1QNuzD76ew_iKG1NrbxA,2164
|
||||
astroid/brain/brain_pytest.py,sha256=BtfAfrbf4KA1So5N9XEPeA_eQpbJgi38Bf-OkEmRKBE,2224
|
||||
astroid/brain/brain_qt.py,sha256=FwzpsjGGrhCtbW39X9dRvpO0a-z-cvQQBEeBxv0fQGM,1634
|
||||
astroid/brain/brain_random.py,sha256=MM4lhpkad1aEXlser7W2GWFHtQ76C55LAj4vVUcFk4Y,2678
|
||||
astroid/brain/brain_re.py,sha256=v2Ul259C-xKfYcu1Tw5gHCifzbA8Beur6gRwVGWVVwo,1106
|
||||
astroid/brain/brain_six.py,sha256=U4X7Y1JF5dIJtEMdKwTpu4F8fjn19CfFdC2avMyk9sQ,11251
|
||||
astroid/brain/brain_ssl.py,sha256=dYNvkEAIV5TsByufeUyUfy5UW8ledXCYaUY5WJoujPw,3492
|
||||
astroid/brain/brain_subprocess.py,sha256=PxwnCOwQlOQvt6e94AwZBDUEHLPaLhGYlSzOumU3Rf4,3314
|
||||
astroid/brain/brain_threading.py,sha256=sqyPOgIqoFnNfthxzyRsaSaGNFON4I9BM7pY5jvOPCk,656
|
||||
astroid/brain/brain_typing.py,sha256=6CvVNjeriY4NIUz4jgu3h7odqz1l4emEP6IMu0PipJY,2602
|
||||
astroid/brain/brain_uuid.py,sha256=o6_9_TjFSJBbiMX2YU2_cLyNmqoCLgiWsERUH8dghss,572
|
||||
astroid/interpreter/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
astroid/interpreter/dunder_lookup.py,sha256=zCbbcMOuFDmTwM9fKbY9Ov31shU4pvUyD5VAZup73v8,2372
|
||||
astroid/interpreter/objectmodel.py,sha256=UmNRaibOIbeYnIjL_OvWFHeHKk2hKp-87ebkr1T0c68,19988
|
||||
astroid/interpreter/_import/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
astroid/interpreter/_import/spec.py,sha256=VevLd8YyjUMD_N-nXxdKNlGUWVXh72Zl5iCdidtJ2f4,10402
|
||||
astroid/interpreter/_import/util.py,sha256=oYFwYfp56jeq4HJmvukgIypg7imJqx5fhm2-fePoGjE,346
|
||||
astroid/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
astroid/tests/resources.py,sha256=gOCYQkxYk0FhAMjBtvvHXpW1eQhU7uUWw9v1Veh3Q10,1950
|
||||
astroid/tests/unittest_brain.py,sha256=_82F52B-YwO6PhOah5bMUFrofErqOEBZ92cmN2Q27UY,30911
|
||||
astroid/tests/unittest_brain_numpy.py,sha256=qyeciUNKLj9wsXNeatjgo-5AUYyNAP2MklXczJrnIhs,10948
|
||||
astroid/tests/unittest_builder.py,sha256=FvwPTnFJHIAPC-Y4F4yyEUqkDqgg8SxonhbI6PPvTkU,28928
|
||||
astroid/tests/unittest_helpers.py,sha256=Wx1UP8rKjRhdY4GzObKXzSJnK5tYQCBqDKHuMvFL67Y,9201
|
||||
astroid/tests/unittest_inference.py,sha256=UP4zFkvGdYLhri-QzGtqaxlAQt1dSdRuqZlB54oIGoA,147345
|
||||
astroid/tests/unittest_lookup.py,sha256=N51VbkAH6sKgjGeMlWPAZPmfIyAaZHHsoEOXV-40oKI,12521
|
||||
astroid/tests/unittest_manager.py,sha256=v4WsPD49vB7RLJIGBosA9WPB-Sj5B_9B0a98zmo9yOM,11818
|
||||
astroid/tests/unittest_modutils.py,sha256=8-qDLAKj46YOScA6QXqlwYMZx7ZMgc2RBAFm5JdfxjI,10566
|
||||
astroid/tests/unittest_nodes.py,sha256=sL6vlDMHYmCDxQRq3GRzCoM8wmqmDxQLsXibgiWoW8Q,30227
|
||||
astroid/tests/unittest_object_model.py,sha256=vu2gNDiqsaqhe4zehkcEx91xbBdzEBvB-ZaYTuaUbQs,20504
|
||||
astroid/tests/unittest_objects.py,sha256=jkY9rXp66dL1Ox468sgHhqm7N4_DnXnTXznOnlhsGh8,19235
|
||||
astroid/tests/unittest_peephole.py,sha256=asVwyvtf2Gc3CcIjL_lXHJtrp3OdAW1HkrpMNImv5Dg,3276
|
||||
astroid/tests/unittest_protocols.py,sha256=LY2K4NPDFYoJo7zggjU5YZPtZ0ISdOdmehCA5rDTWn8,7587
|
||||
astroid/tests/unittest_python3.py,sha256=uE0firQjt4XFvjZt0bmHddR4jkjPEPTMnu8PCcXkN-w,13502
|
||||
astroid/tests/unittest_raw_building.py,sha256=EoblIBaKFxwEnUCCs83ERpm22MXDTug5IwXrAneFtC0,3425
|
||||
astroid/tests/unittest_regrtest.py,sha256=IXwRCmsbOTE6RQpAZ4m5ZEhIal33YCUpyMvHENXMplE,11079
|
||||
astroid/tests/unittest_scoped_nodes.py,sha256=3KJ9a7hUwqpB8yCY8IIbT03uSzMc25e4me1snkgvOeU,65436
|
||||
astroid/tests/unittest_transforms.py,sha256=V2c5H18k7sKJU_cbVaiCcwvuvN17XSxrS9hM5SevL1w,8122
|
||||
astroid/tests/unittest_utils.py,sha256=kAApZ25LzvXJbVSW4fMjfp--9BnLDzUPyRf_WapcYls,3866
|
||||
astroid/tests/testdata/python2/data/MyPyPa-0.1.0-py2.5.egg,sha256=hZPSxlunnE-5kTg-yII7E10WG-nsV3DELfKv_xYnBeI,1222
|
||||
astroid/tests/testdata/python2/data/MyPyPa-0.1.0-py2.5.zip,sha256=hZPSxlunnE-5kTg-yII7E10WG-nsV3DELfKv_xYnBeI,1222
|
||||
astroid/tests/testdata/python2/data/__init__.py,sha256=UUgQFilI5GXd3tVo42wvC99xr-OWdiFwd4AToVWMKJg,68
|
||||
astroid/tests/testdata/python2/data/absimport.py,sha256=Dl1v3sCTUuy5NjWsvk6xfXDGqG8dJxYky66oH_16y1U,78
|
||||
astroid/tests/testdata/python2/data/all.py,sha256=9hzh93N-w2OoWmuWFFPe4NfLPtN0CcQUWyJU9G2kki8,106
|
||||
astroid/tests/testdata/python2/data/descriptor_crash.py,sha256=c9dmcN0XSB1WiDINWLjfA0SYY87UzMIpETXHBdcgJ0Y,217
|
||||
astroid/tests/testdata/python2/data/email.py,sha256=bA18WU0kAWGxsPlWJjD6LgXj9NK4RDLjqaN5-EievLw,70
|
||||
astroid/tests/testdata/python2/data/foogle_fax-0.12.5-py2.7-nspkg.pth,sha256=kv-1ZyITNtu-FjiACe5_1ZhKcQnVSKROTwzfHyeIwPA,629
|
||||
astroid/tests/testdata/python2/data/format.py,sha256=Se18tU4br95nCnBg7DIYonoRIXAZi3u2RvyoKwupAXk,421
|
||||
astroid/tests/testdata/python2/data/invalid_encoding.py,sha256=m1_U4_CIbs71SvS2kGVVYDdqYGizzXm9yrXjDWZsO2g,22
|
||||
astroid/tests/testdata/python2/data/joined_strings.py,sha256=5nO3HMS9TAB0jZml1cSBv_b-1m4GTJ_12hD8WYMugBw,72168
|
||||
astroid/tests/testdata/python2/data/module.py,sha256=jaS47E_rOtpGIECwWYYl3ZBzBUZt0fvyCs7tG99SxgU,1804
|
||||
astroid/tests/testdata/python2/data/module2.py,sha256=gNaybt93hMTRFCnOh3gjW0niEDP5nVO8TrpixkHWW5o,1960
|
||||
astroid/tests/testdata/python2/data/noendingnewline.py,sha256=cVu_K7C5NnjnEvmMUxVGeeguyFcHBuNFEO3ueF9X9LI,503
|
||||
astroid/tests/testdata/python2/data/nonregr.py,sha256=0M3kW2tiTQdfuIUU9CNZHDBd1qC6Sxms6b_QZLLGtro,1150
|
||||
astroid/tests/testdata/python2/data/notall.py,sha256=lOzkx4qf1Gm6SmTSXnrCT9C13WDF1UkzvStAnTSkjU0,74
|
||||
astroid/tests/testdata/python2/data/recursion.py,sha256=ZuYyd9K4DyZxXg3L-B1Dl7k9q8OpIfVDwN9kJ52xLDk,52
|
||||
astroid/tests/testdata/python2/data/tmp__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
astroid/tests/testdata/python2/data/SSL1/Connection1.py,sha256=rOKmOG_JTouiVawzB5kty493I64pBM9WJDinQn-_Y5c,343
|
||||
astroid/tests/testdata/python2/data/SSL1/__init__.py,sha256=ZlvNUty1pEZy7wHMAM83YwYcdE4ypNHh0W2ijB3mqO8,35
|
||||
astroid/tests/testdata/python2/data/absimp/__init__.py,sha256=CTlFm8G4kKecaa0NpFb4X25NNZ9FNorigSG65GAvvYA,89
|
||||
astroid/tests/testdata/python2/data/absimp/string.py,sha256=liyEyorFV0OJFr-HcECPPRfVmLd0lO4YrGFnZz0_T0M,83
|
||||
astroid/tests/testdata/python2/data/absimp/sidepackage/__init__.py,sha256=9E8Vj_jbaQ7tm80sIxyruqZPjzlVLNbd3qQxbvj39rI,42
|
||||
astroid/tests/testdata/python2/data/appl/__init__.py,sha256=9OoDa7y4MPXKZenN5CA2wmwsG7vUqiO4ImtTjsNs6YY,13
|
||||
astroid/tests/testdata/python2/data/appl/myConnection.py,sha256=Zc3RQ_GjoZ91k3LkaIfV4_1SePpwKUU2cOFAzN5Iq6Y,303
|
||||
astroid/tests/testdata/python2/data/contribute_to_namespace/namespace_pep_420/submodule.py,sha256=5rdq57wop6pxdC9TpMnh8L7gNANMMsNFujBhL9rGhBw,8
|
||||
astroid/tests/testdata/python2/data/find_test/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
astroid/tests/testdata/python2/data/find_test/module.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
astroid/tests/testdata/python2/data/find_test/module2.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
astroid/tests/testdata/python2/data/find_test/noendingnewline.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
astroid/tests/testdata/python2/data/find_test/nonregr.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
astroid/tests/testdata/python2/data/foogle/fax/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
astroid/tests/testdata/python2/data/foogle/fax/a.py,sha256=j_Q23vFFEoVZmhsa1wgASTuNyv3ikS4aODRWMwVOTCY,5
|
||||
astroid/tests/testdata/python2/data/lmfp/__init__.py,sha256=JmYecBTypWpPdKNy76pDWYliy-gWg3PPOOMcUdMAvzo,51
|
||||
astroid/tests/testdata/python2/data/lmfp/foo.py,sha256=ePynel7303gG6wq8wb6kRmaV75Q7mR9A_X7SZVP0YWM,170
|
||||
astroid/tests/testdata/python2/data/module1abs/__init__.py,sha256=RTMiBz8OgkD3dy2Sehwv6am35Xzlf6X8SQJcfo-m2sA,113
|
||||
astroid/tests/testdata/python2/data/module1abs/core.py,sha256=xRdXeFHEieRauuJZElbEBASgXG0ZzU1a5_0isAhM7Gw,11
|
||||
astroid/tests/testdata/python2/data/namespace_pep_420/module.py,sha256=1NhbQAf4THdW94o7nrSzPkEJFy0XvISnlF_-OTTiwRk,43
|
||||
astroid/tests/testdata/python2/data/notamodule/file.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
astroid/tests/testdata/python2/data/package/__init__.py,sha256=U50oVo2CraRtPYheia534Z0iPVQMDT2C6Qwj2ZWAmO0,57
|
||||
astroid/tests/testdata/python2/data/package/absimport.py,sha256=cTkLoSR4oIJtQ8yVLAgdopJXro0qFsehlMGYLCfiPvo,172
|
||||
astroid/tests/testdata/python2/data/package/hello.py,sha256=sTddKXRfLNAysty0r625S8QysSDOmtF8oXDvbl3Cywk,20
|
||||
astroid/tests/testdata/python2/data/package/import_package_subpackage_module.py,sha256=U6BsMb_ygFb8RqImsTrWEGJihU7nJgELPH6AvWM-zaU,2242
|
||||
astroid/tests/testdata/python2/data/package/subpackage/__init__.py,sha256=XtKilaAqziUI-ImaSw4V6Aic40domt4v_If7lAZYhSE,25
|
||||
astroid/tests/testdata/python2/data/package/subpackage/module.py,sha256=WAtPIk13pW6tYI6rSgNHcCgTu0EXhX6i5CugdHPH8N0,32
|
||||
astroid/tests/testdata/python2/data/path_pkg_resources_1/package/__init__.py,sha256=1kQs82-WyvhNmr6z4VEklhVXMJulr_OO2RXRAqT40Z4,55
|
||||
astroid/tests/testdata/python2/data/path_pkg_resources_1/package/foo.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
astroid/tests/testdata/python2/data/path_pkg_resources_2/package/__init__.py,sha256=1kQs82-WyvhNmr6z4VEklhVXMJulr_OO2RXRAqT40Z4,55
|
||||
astroid/tests/testdata/python2/data/path_pkg_resources_2/package/bar.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
astroid/tests/testdata/python2/data/path_pkg_resources_3/package/__init__.py,sha256=1kQs82-WyvhNmr6z4VEklhVXMJulr_OO2RXRAqT40Z4,55
|
||||
astroid/tests/testdata/python2/data/path_pkg_resources_3/package/baz.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
astroid/tests/testdata/python2/data/path_pkgutil_1/package/__init__.py,sha256=doH9fwYoYwbugpsdT3QT3lEja4mw1-USY24J1TSboCs,74
|
||||
astroid/tests/testdata/python2/data/path_pkgutil_1/package/foo.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
astroid/tests/testdata/python2/data/path_pkgutil_2/package/__init__.py,sha256=doH9fwYoYwbugpsdT3QT3lEja4mw1-USY24J1TSboCs,74
|
||||
astroid/tests/testdata/python2/data/path_pkgutil_2/package/bar.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
astroid/tests/testdata/python2/data/path_pkgutil_3/package/__init__.py,sha256=doH9fwYoYwbugpsdT3QT3lEja4mw1-USY24J1TSboCs,74
|
||||
astroid/tests/testdata/python2/data/path_pkgutil_3/package/baz.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
astroid/tests/testdata/python2/data/unicode_package/__init__.py,sha256=Qq8Rv1-47xfh9UMnDqtU6MYCoZbK2DF1zxAvxlkhCNU,17
|
||||
astroid/tests/testdata/python2/data/unicode_package/core/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
astroid/tests/testdata/python3/data/MyPyPa-0.1.0-py2.5.egg,sha256=hZPSxlunnE-5kTg-yII7E10WG-nsV3DELfKv_xYnBeI,1222
|
||||
astroid/tests/testdata/python3/data/MyPyPa-0.1.0-py2.5.zip,sha256=hZPSxlunnE-5kTg-yII7E10WG-nsV3DELfKv_xYnBeI,1222
|
||||
astroid/tests/testdata/python3/data/__init__.py,sha256=UUgQFilI5GXd3tVo42wvC99xr-OWdiFwd4AToVWMKJg,68
|
||||
astroid/tests/testdata/python3/data/absimport.py,sha256=-CKa6uxNJwTox5JoeWFe_hnxPcp1BT_vgPrXjsk4c-w,40
|
||||
astroid/tests/testdata/python3/data/all.py,sha256=96OFTf0wN5cad6Zt4WvJ6OxHTUncQyPyghPMRxGV9B8,107
|
||||
astroid/tests/testdata/python3/data/descriptor_crash.py,sha256=c9dmcN0XSB1WiDINWLjfA0SYY87UzMIpETXHBdcgJ0Y,217
|
||||
astroid/tests/testdata/python3/data/email.py,sha256=bA18WU0kAWGxsPlWJjD6LgXj9NK4RDLjqaN5-EievLw,70
|
||||
astroid/tests/testdata/python3/data/foogle_fax-0.12.5-py2.7-nspkg.pth,sha256=kv-1ZyITNtu-FjiACe5_1ZhKcQnVSKROTwzfHyeIwPA,629
|
||||
astroid/tests/testdata/python3/data/format.py,sha256=Se18tU4br95nCnBg7DIYonoRIXAZi3u2RvyoKwupAXk,421
|
||||
astroid/tests/testdata/python3/data/invalid_encoding.py,sha256=m1_U4_CIbs71SvS2kGVVYDdqYGizzXm9yrXjDWZsO2g,22
|
||||
astroid/tests/testdata/python3/data/joined_strings.py,sha256=5nO3HMS9TAB0jZml1cSBv_b-1m4GTJ_12hD8WYMugBw,72168
|
||||
astroid/tests/testdata/python3/data/module.py,sha256=gmtEr1dRdtYP5oyUwvl-Bmk498D3q9fpPSMcEGeoPPc,1799
|
||||
astroid/tests/testdata/python3/data/module2.py,sha256=VOuXghmJXG0kxFfMufQV55G8vcd-f9qVorvd2CTRjLo,2016
|
||||
astroid/tests/testdata/python3/data/noendingnewline.py,sha256=PaqOTMH1fn703GRn8_lZox2ByExWci0LiXfEKZjKgGU,506
|
||||
astroid/tests/testdata/python3/data/nonregr.py,sha256=oCCrE6UTcDUmFcLnde2N34Fxv1PQ8Ck3WqE0or1Jqqk,1101
|
||||
astroid/tests/testdata/python3/data/notall.py,sha256=DftFceOP1cQfe2imrwTWcsbuxugJx9mDFFM57cCPUnA,75
|
||||
astroid/tests/testdata/python3/data/recursion.py,sha256=ZuYyd9K4DyZxXg3L-B1Dl7k9q8OpIfVDwN9kJ52xLDk,52
|
||||
astroid/tests/testdata/python3/data/tmp__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
astroid/tests/testdata/python3/data/SSL1/Connection1.py,sha256=bvnJLQ3Ey3FzNDCR2mEeU8G44-c4iw9vOHBKOXHuGJM,306
|
||||
astroid/tests/testdata/python3/data/SSL1/__init__.py,sha256=3Flw6M01FPCVMhiVC_yk-NQbOaQW6K4H_H9wqx6c1do,36
|
||||
astroid/tests/testdata/python3/data/absimp/__init__.py,sha256=CTlFm8G4kKecaa0NpFb4X25NNZ9FNorigSG65GAvvYA,89
|
||||
astroid/tests/testdata/python3/data/absimp/string.py,sha256=liyEyorFV0OJFr-HcECPPRfVmLd0lO4YrGFnZz0_T0M,83
|
||||
astroid/tests/testdata/python3/data/absimp/sidepackage/__init__.py,sha256=9E8Vj_jbaQ7tm80sIxyruqZPjzlVLNbd3qQxbvj39rI,42
|
||||
astroid/tests/testdata/python3/data/appl/__init__.py,sha256=9OoDa7y4MPXKZenN5CA2wmwsG7vUqiO4ImtTjsNs6YY,13
|
||||
astroid/tests/testdata/python3/data/appl/myConnection.py,sha256=mWi72c6yYuIXoyRXo-uKFwY7NSj-lok_NRlNc9N2hfM,261
|
||||
astroid/tests/testdata/python3/data/contribute_to_namespace/namespace_pep_420/submodule.py,sha256=5rdq57wop6pxdC9TpMnh8L7gNANMMsNFujBhL9rGhBw,8
|
||||
astroid/tests/testdata/python3/data/find_test/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
astroid/tests/testdata/python3/data/find_test/module.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
astroid/tests/testdata/python3/data/find_test/module2.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
astroid/tests/testdata/python3/data/find_test/noendingnewline.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
astroid/tests/testdata/python3/data/find_test/nonregr.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
astroid/tests/testdata/python3/data/foogle/fax/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
astroid/tests/testdata/python3/data/foogle/fax/a.py,sha256=j_Q23vFFEoVZmhsa1wgASTuNyv3ikS4aODRWMwVOTCY,5
|
||||
astroid/tests/testdata/python3/data/lmfp/__init__.py,sha256=JmYecBTypWpPdKNy76pDWYliy-gWg3PPOOMcUdMAvzo,51
|
||||
astroid/tests/testdata/python3/data/lmfp/foo.py,sha256=ePynel7303gG6wq8wb6kRmaV75Q7mR9A_X7SZVP0YWM,170
|
||||
astroid/tests/testdata/python3/data/module1abs/__init__.py,sha256=qeBmkE-gZ07oAuq_fgcaMP8217AdA-FGOR73iB5lltg,59
|
||||
astroid/tests/testdata/python3/data/module1abs/core.py,sha256=xRdXeFHEieRauuJZElbEBASgXG0ZzU1a5_0isAhM7Gw,11
|
||||
astroid/tests/testdata/python3/data/namespace_pep_420/module.py,sha256=1NhbQAf4THdW94o7nrSzPkEJFy0XvISnlF_-OTTiwRk,43
|
||||
astroid/tests/testdata/python3/data/notamodule/file.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
astroid/tests/testdata/python3/data/package/__init__.py,sha256=U50oVo2CraRtPYheia534Z0iPVQMDT2C6Qwj2ZWAmO0,57
|
||||
astroid/tests/testdata/python3/data/package/absimport.py,sha256=cTkLoSR4oIJtQ8yVLAgdopJXro0qFsehlMGYLCfiPvo,172
|
||||
astroid/tests/testdata/python3/data/package/hello.py,sha256=sTddKXRfLNAysty0r625S8QysSDOmtF8oXDvbl3Cywk,20
|
||||
astroid/tests/testdata/python3/data/package/import_package_subpackage_module.py,sha256=U6BsMb_ygFb8RqImsTrWEGJihU7nJgELPH6AvWM-zaU,2242
|
||||
astroid/tests/testdata/python3/data/package/subpackage/__init__.py,sha256=XtKilaAqziUI-ImaSw4V6Aic40domt4v_If7lAZYhSE,25
|
||||
astroid/tests/testdata/python3/data/package/subpackage/module.py,sha256=WAtPIk13pW6tYI6rSgNHcCgTu0EXhX6i5CugdHPH8N0,32
|
||||
astroid/tests/testdata/python3/data/path_pkg_resources_1/package/__init__.py,sha256=1kQs82-WyvhNmr6z4VEklhVXMJulr_OO2RXRAqT40Z4,55
|
||||
astroid/tests/testdata/python3/data/path_pkg_resources_1/package/foo.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
astroid/tests/testdata/python3/data/path_pkg_resources_2/package/__init__.py,sha256=1kQs82-WyvhNmr6z4VEklhVXMJulr_OO2RXRAqT40Z4,55
|
||||
astroid/tests/testdata/python3/data/path_pkg_resources_2/package/bar.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
astroid/tests/testdata/python3/data/path_pkg_resources_3/package/__init__.py,sha256=1kQs82-WyvhNmr6z4VEklhVXMJulr_OO2RXRAqT40Z4,55
|
||||
astroid/tests/testdata/python3/data/path_pkg_resources_3/package/baz.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
astroid/tests/testdata/python3/data/path_pkgutil_1/package/__init__.py,sha256=doH9fwYoYwbugpsdT3QT3lEja4mw1-USY24J1TSboCs,74
|
||||
astroid/tests/testdata/python3/data/path_pkgutil_1/package/foo.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
astroid/tests/testdata/python3/data/path_pkgutil_2/package/__init__.py,sha256=doH9fwYoYwbugpsdT3QT3lEja4mw1-USY24J1TSboCs,74
|
||||
astroid/tests/testdata/python3/data/path_pkgutil_2/package/bar.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
astroid/tests/testdata/python3/data/path_pkgutil_3/package/__init__.py,sha256=doH9fwYoYwbugpsdT3QT3lEja4mw1-USY24J1TSboCs,74
|
||||
astroid/tests/testdata/python3/data/path_pkgutil_3/package/baz.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
astroid/tests/testdata/python3/data/unicode_package/__init__.py,sha256=Qq8Rv1-47xfh9UMnDqtU6MYCoZbK2DF1zxAvxlkhCNU,17
|
||||
astroid/tests/testdata/python3/data/unicode_package/core/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
astroid-1.6.5.dist-info/DESCRIPTION.rst,sha256=XNH2sQxCrEZ-yMkktkQxOBoIoa2-pk1oWK0TDIDLQxo,2186
|
||||
astroid-1.6.5.dist-info/METADATA,sha256=fXGdzK8FVIH9Lu-EpzGGdxAbm-TNhEFLoIgc6uKPq44,3378
|
||||
astroid-1.6.5.dist-info/RECORD,,
|
||||
astroid-1.6.5.dist-info/WHEEL,sha256=kdsN-5OJAZIiHN-iO4Rhl82KyS0bDWf4uBwMbkNafr8,110
|
||||
astroid-1.6.5.dist-info/metadata.json,sha256=Mtualpf3xthECcJ3bCjb3K8RHAWXzHK3xR0i_czO5Wc,1268
|
||||
astroid-1.6.5.dist-info/top_level.txt,sha256=HsdW4O2x7ZXRj6k-agi3RaQybGLobI3VSE-jt4vQUXM,8
|
||||
astroid-1.6.5.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
||||
astroid/interpreter/_import/__pycache__/util.cpython-36.pyc,,
|
||||
astroid/interpreter/_import/__pycache__/__init__.cpython-36.pyc,,
|
||||
astroid/interpreter/_import/__pycache__/spec.cpython-36.pyc,,
|
||||
astroid/interpreter/__pycache__/objectmodel.cpython-36.pyc,,
|
||||
astroid/interpreter/__pycache__/dunder_lookup.cpython-36.pyc,,
|
||||
astroid/interpreter/__pycache__/__init__.cpython-36.pyc,,
|
||||
astroid/tests/testdata/python3/data/absimp/__pycache__/__init__.cpython-36.pyc,,
|
||||
astroid/tests/testdata/python3/data/absimp/__pycache__/string.cpython-36.pyc,,
|
||||
astroid/tests/testdata/python3/data/absimp/sidepackage/__pycache__/__init__.cpython-36.pyc,,
|
||||
astroid/tests/testdata/python3/data/path_pkg_resources_1/package/__pycache__/foo.cpython-36.pyc,,
|
||||
astroid/tests/testdata/python3/data/path_pkg_resources_1/package/__pycache__/__init__.cpython-36.pyc,,
|
||||
astroid/tests/testdata/python3/data/path_pkgutil_3/package/__pycache__/baz.cpython-36.pyc,,
|
||||
astroid/tests/testdata/python3/data/path_pkgutil_3/package/__pycache__/__init__.cpython-36.pyc,,
|
||||
astroid/tests/testdata/python3/data/path_pkgutil_2/package/__pycache__/bar.cpython-36.pyc,,
|
||||
astroid/tests/testdata/python3/data/path_pkgutil_2/package/__pycache__/__init__.cpython-36.pyc,,
|
||||
astroid/tests/testdata/python3/data/lmfp/__pycache__/foo.cpython-36.pyc,,
|
||||
astroid/tests/testdata/python3/data/lmfp/__pycache__/__init__.cpython-36.pyc,,
|
||||
astroid/tests/testdata/python3/data/appl/__pycache__/myConnection.cpython-36.pyc,,
|
||||
astroid/tests/testdata/python3/data/appl/__pycache__/__init__.cpython-36.pyc,,
|
||||
astroid/tests/testdata/python3/data/foogle/fax/__pycache__/a.cpython-36.pyc,,
|
||||
astroid/tests/testdata/python3/data/foogle/fax/__pycache__/__init__.cpython-36.pyc,,
|
||||
astroid/tests/testdata/python3/data/__pycache__/tmp__init__.cpython-36.pyc,,
|
||||
astroid/tests/testdata/python3/data/__pycache__/noendingnewline.cpython-36.pyc,,
|
||||
astroid/tests/testdata/python3/data/__pycache__/joined_strings.cpython-36.pyc,,
|
||||
astroid/tests/testdata/python3/data/__pycache__/notall.cpython-36.pyc,,
|
||||
astroid/tests/testdata/python3/data/__pycache__/recursion.cpython-36.pyc,,
|
||||
astroid/tests/testdata/python3/data/__pycache__/descriptor_crash.cpython-36.pyc,,
|
||||
astroid/tests/testdata/python3/data/__pycache__/module2.cpython-36.pyc,,
|
||||
astroid/tests/testdata/python3/data/__pycache__/absimport.cpython-36.pyc,,
|
||||
astroid/tests/testdata/python3/data/__pycache__/all.cpython-36.pyc,,
|
||||
astroid/tests/testdata/python3/data/__pycache__/module.cpython-36.pyc,,
|
||||
astroid/tests/testdata/python3/data/__pycache__/nonregr.cpython-36.pyc,,
|
||||
astroid/tests/testdata/python3/data/__pycache__/__init__.cpython-36.pyc,,
|
||||
astroid/tests/testdata/python3/data/__pycache__/email.cpython-36.pyc,,
|
||||
astroid/tests/testdata/python3/data/__pycache__/format.cpython-36.pyc,,
|
||||
astroid/tests/testdata/python3/data/unicode_package/core/__pycache__/__init__.cpython-36.pyc,,
|
||||
astroid/tests/testdata/python3/data/unicode_package/__pycache__/__init__.cpython-36.pyc,,
|
||||
astroid/tests/testdata/python3/data/module1abs/__pycache__/core.cpython-36.pyc,,
|
||||
astroid/tests/testdata/python3/data/module1abs/__pycache__/__init__.cpython-36.pyc,,
|
||||
astroid/tests/testdata/python3/data/notamodule/__pycache__/file.cpython-36.pyc,,
|
||||
astroid/tests/testdata/python3/data/SSL1/__pycache__/Connection1.cpython-36.pyc,,
|
||||
astroid/tests/testdata/python3/data/SSL1/__pycache__/__init__.cpython-36.pyc,,
|
||||
astroid/tests/testdata/python3/data/namespace_pep_420/__pycache__/module.cpython-36.pyc,,
|
||||
astroid/tests/testdata/python3/data/path_pkg_resources_2/package/__pycache__/bar.cpython-36.pyc,,
|
||||
astroid/tests/testdata/python3/data/path_pkg_resources_2/package/__pycache__/__init__.cpython-36.pyc,,
|
||||
astroid/tests/testdata/python3/data/find_test/__pycache__/noendingnewline.cpython-36.pyc,,
|
||||
astroid/tests/testdata/python3/data/find_test/__pycache__/module2.cpython-36.pyc,,
|
||||
astroid/tests/testdata/python3/data/find_test/__pycache__/module.cpython-36.pyc,,
|
||||
astroid/tests/testdata/python3/data/find_test/__pycache__/nonregr.cpython-36.pyc,,
|
||||
astroid/tests/testdata/python3/data/find_test/__pycache__/__init__.cpython-36.pyc,,
|
||||
astroid/tests/testdata/python3/data/path_pkgutil_1/package/__pycache__/foo.cpython-36.pyc,,
|
||||
astroid/tests/testdata/python3/data/path_pkgutil_1/package/__pycache__/__init__.cpython-36.pyc,,
|
||||
astroid/tests/testdata/python3/data/path_pkg_resources_3/package/__pycache__/baz.cpython-36.pyc,,
|
||||
astroid/tests/testdata/python3/data/path_pkg_resources_3/package/__pycache__/__init__.cpython-36.pyc,,
|
||||
astroid/tests/testdata/python3/data/contribute_to_namespace/namespace_pep_420/__pycache__/submodule.cpython-36.pyc,,
|
||||
astroid/tests/testdata/python3/data/package/__pycache__/hello.cpython-36.pyc,,
|
||||
astroid/tests/testdata/python3/data/package/__pycache__/import_package_subpackage_module.cpython-36.pyc,,
|
||||
astroid/tests/testdata/python3/data/package/__pycache__/absimport.cpython-36.pyc,,
|
||||
astroid/tests/testdata/python3/data/package/__pycache__/__init__.cpython-36.pyc,,
|
||||
astroid/tests/testdata/python3/data/package/subpackage/__pycache__/module.cpython-36.pyc,,
|
||||
astroid/tests/testdata/python3/data/package/subpackage/__pycache__/__init__.cpython-36.pyc,,
|
||||
astroid/tests/testdata/python2/data/absimp/__pycache__/__init__.cpython-36.pyc,,
|
||||
astroid/tests/testdata/python2/data/absimp/__pycache__/string.cpython-36.pyc,,
|
||||
astroid/tests/testdata/python2/data/absimp/sidepackage/__pycache__/__init__.cpython-36.pyc,,
|
||||
astroid/tests/testdata/python2/data/path_pkg_resources_1/package/__pycache__/foo.cpython-36.pyc,,
|
||||
astroid/tests/testdata/python2/data/path_pkg_resources_1/package/__pycache__/__init__.cpython-36.pyc,,
|
||||
astroid/tests/testdata/python2/data/path_pkgutil_3/package/__pycache__/baz.cpython-36.pyc,,
|
||||
astroid/tests/testdata/python2/data/path_pkgutil_3/package/__pycache__/__init__.cpython-36.pyc,,
|
||||
astroid/tests/testdata/python2/data/path_pkgutil_2/package/__pycache__/bar.cpython-36.pyc,,
|
||||
astroid/tests/testdata/python2/data/path_pkgutil_2/package/__pycache__/__init__.cpython-36.pyc,,
|
||||
astroid/tests/testdata/python2/data/lmfp/__pycache__/foo.cpython-36.pyc,,
|
||||
astroid/tests/testdata/python2/data/lmfp/__pycache__/__init__.cpython-36.pyc,,
|
||||
astroid/tests/testdata/python2/data/appl/__pycache__/myConnection.cpython-36.pyc,,
|
||||
astroid/tests/testdata/python2/data/appl/__pycache__/__init__.cpython-36.pyc,,
|
||||
astroid/tests/testdata/python2/data/foogle/fax/__pycache__/a.cpython-36.pyc,,
|
||||
astroid/tests/testdata/python2/data/foogle/fax/__pycache__/__init__.cpython-36.pyc,,
|
||||
astroid/tests/testdata/python2/data/__pycache__/tmp__init__.cpython-36.pyc,,
|
||||
astroid/tests/testdata/python2/data/__pycache__/joined_strings.cpython-36.pyc,,
|
||||
astroid/tests/testdata/python2/data/__pycache__/notall.cpython-36.pyc,,
|
||||
astroid/tests/testdata/python2/data/__pycache__/recursion.cpython-36.pyc,,
|
||||
astroid/tests/testdata/python2/data/__pycache__/descriptor_crash.cpython-36.pyc,,
|
||||
astroid/tests/testdata/python2/data/__pycache__/absimport.cpython-36.pyc,,
|
||||
astroid/tests/testdata/python2/data/__pycache__/nonregr.cpython-36.pyc,,
|
||||
astroid/tests/testdata/python2/data/__pycache__/__init__.cpython-36.pyc,,
|
||||
astroid/tests/testdata/python2/data/__pycache__/email.cpython-36.pyc,,
|
||||
astroid/tests/testdata/python2/data/__pycache__/format.cpython-36.pyc,,
|
||||
astroid/tests/testdata/python2/data/unicode_package/core/__pycache__/__init__.cpython-36.pyc,,
|
||||
astroid/tests/testdata/python2/data/unicode_package/__pycache__/__init__.cpython-36.pyc,,
|
||||
astroid/tests/testdata/python2/data/module1abs/__pycache__/core.cpython-36.pyc,,
|
||||
astroid/tests/testdata/python2/data/module1abs/__pycache__/__init__.cpython-36.pyc,,
|
||||
astroid/tests/testdata/python2/data/notamodule/__pycache__/file.cpython-36.pyc,,
|
||||
astroid/tests/testdata/python2/data/SSL1/__pycache__/Connection1.cpython-36.pyc,,
|
||||
astroid/tests/testdata/python2/data/SSL1/__pycache__/__init__.cpython-36.pyc,,
|
||||
astroid/tests/testdata/python2/data/namespace_pep_420/__pycache__/module.cpython-36.pyc,,
|
||||
astroid/tests/testdata/python2/data/path_pkg_resources_2/package/__pycache__/bar.cpython-36.pyc,,
|
||||
astroid/tests/testdata/python2/data/path_pkg_resources_2/package/__pycache__/__init__.cpython-36.pyc,,
|
||||
astroid/tests/testdata/python2/data/find_test/__pycache__/noendingnewline.cpython-36.pyc,,
|
||||
astroid/tests/testdata/python2/data/find_test/__pycache__/module2.cpython-36.pyc,,
|
||||
astroid/tests/testdata/python2/data/find_test/__pycache__/module.cpython-36.pyc,,
|
||||
astroid/tests/testdata/python2/data/find_test/__pycache__/nonregr.cpython-36.pyc,,
|
||||
astroid/tests/testdata/python2/data/find_test/__pycache__/__init__.cpython-36.pyc,,
|
||||
astroid/tests/testdata/python2/data/path_pkgutil_1/package/__pycache__/foo.cpython-36.pyc,,
|
||||
astroid/tests/testdata/python2/data/path_pkgutil_1/package/__pycache__/__init__.cpython-36.pyc,,
|
||||
astroid/tests/testdata/python2/data/path_pkg_resources_3/package/__pycache__/baz.cpython-36.pyc,,
|
||||
astroid/tests/testdata/python2/data/path_pkg_resources_3/package/__pycache__/__init__.cpython-36.pyc,,
|
||||
astroid/tests/testdata/python2/data/contribute_to_namespace/namespace_pep_420/__pycache__/submodule.cpython-36.pyc,,
|
||||
astroid/tests/testdata/python2/data/package/__pycache__/hello.cpython-36.pyc,,
|
||||
astroid/tests/testdata/python2/data/package/__pycache__/import_package_subpackage_module.cpython-36.pyc,,
|
||||
astroid/tests/testdata/python2/data/package/__pycache__/absimport.cpython-36.pyc,,
|
||||
astroid/tests/testdata/python2/data/package/__pycache__/__init__.cpython-36.pyc,,
|
||||
astroid/tests/testdata/python2/data/package/subpackage/__pycache__/module.cpython-36.pyc,,
|
||||
astroid/tests/testdata/python2/data/package/subpackage/__pycache__/__init__.cpython-36.pyc,,
|
||||
astroid/tests/__pycache__/unittest_transforms.cpython-36.pyc,,
|
||||
astroid/tests/__pycache__/unittest_brain_numpy.cpython-36.pyc,,
|
||||
astroid/tests/__pycache__/unittest_raw_building.cpython-36.pyc,,
|
||||
astroid/tests/__pycache__/unittest_regrtest.cpython-36.pyc,,
|
||||
astroid/tests/__pycache__/unittest_manager.cpython-36.pyc,,
|
||||
astroid/tests/__pycache__/unittest_scoped_nodes.cpython-36.pyc,,
|
||||
astroid/tests/__pycache__/resources.cpython-36.pyc,,
|
||||
astroid/tests/__pycache__/unittest_protocols.cpython-36.pyc,,
|
||||
astroid/tests/__pycache__/unittest_python3.cpython-36.pyc,,
|
||||
astroid/tests/__pycache__/unittest_helpers.cpython-36.pyc,,
|
||||
astroid/tests/__pycache__/unittest_brain.cpython-36.pyc,,
|
||||
astroid/tests/__pycache__/unittest_modutils.cpython-36.pyc,,
|
||||
astroid/tests/__pycache__/unittest_peephole.cpython-36.pyc,,
|
||||
astroid/tests/__pycache__/unittest_inference.cpython-36.pyc,,
|
||||
astroid/tests/__pycache__/unittest_objects.cpython-36.pyc,,
|
||||
astroid/tests/__pycache__/unittest_builder.cpython-36.pyc,,
|
||||
astroid/tests/__pycache__/__init__.cpython-36.pyc,,
|
||||
astroid/tests/__pycache__/unittest_lookup.cpython-36.pyc,,
|
||||
astroid/tests/__pycache__/unittest_object_model.cpython-36.pyc,,
|
||||
astroid/tests/__pycache__/unittest_nodes.cpython-36.pyc,,
|
||||
astroid/tests/__pycache__/unittest_utils.cpython-36.pyc,,
|
||||
astroid/__pycache__/inference.cpython-36.pyc,,
|
||||
astroid/__pycache__/util.cpython-36.pyc,,
|
||||
astroid/__pycache__/exceptions.cpython-36.pyc,,
|
||||
astroid/__pycache__/raw_building.cpython-36.pyc,,
|
||||
astroid/__pycache__/bases.cpython-36.pyc,,
|
||||
astroid/__pycache__/__pkginfo__.cpython-36.pyc,,
|
||||
astroid/__pycache__/test_utils.cpython-36.pyc,,
|
||||
astroid/__pycache__/transforms.cpython-36.pyc,,
|
||||
astroid/__pycache__/astpeephole.cpython-36.pyc,,
|
||||
astroid/__pycache__/objects.cpython-36.pyc,,
|
||||
astroid/__pycache__/builder.cpython-36.pyc,,
|
||||
astroid/__pycache__/mixins.cpython-36.pyc,,
|
||||
astroid/__pycache__/scoped_nodes.cpython-36.pyc,,
|
||||
astroid/__pycache__/as_string.cpython-36.pyc,,
|
||||
astroid/__pycache__/node_classes.cpython-36.pyc,,
|
||||
astroid/__pycache__/modutils.cpython-36.pyc,,
|
||||
astroid/__pycache__/protocols.cpython-36.pyc,,
|
||||
astroid/__pycache__/rebuilder.cpython-36.pyc,,
|
||||
astroid/__pycache__/nodes.cpython-36.pyc,,
|
||||
astroid/__pycache__/helpers.cpython-36.pyc,,
|
||||
astroid/__pycache__/arguments.cpython-36.pyc,,
|
||||
astroid/__pycache__/context.cpython-36.pyc,,
|
||||
astroid/__pycache__/manager.cpython-36.pyc,,
|
||||
astroid/__pycache__/__init__.cpython-36.pyc,,
|
||||
astroid/__pycache__/decorators.cpython-36.pyc,,
|
||||
astroid/brain/__pycache__/brain_mechanize.cpython-36.pyc,,
|
||||
astroid/brain/__pycache__/brain_pkg_resources.cpython-36.pyc,,
|
||||
astroid/brain/__pycache__/brain_typing.cpython-36.pyc,,
|
||||
astroid/brain/__pycache__/brain_attrs.cpython-36.pyc,,
|
||||
astroid/brain/__pycache__/brain_collections.cpython-36.pyc,,
|
||||
astroid/brain/__pycache__/brain_pytest.cpython-36.pyc,,
|
||||
astroid/brain/__pycache__/brain_random.cpython-36.pyc,,
|
||||
astroid/brain/__pycache__/brain_functools.cpython-36.pyc,,
|
||||
astroid/brain/__pycache__/brain_subprocess.cpython-36.pyc,,
|
||||
astroid/brain/__pycache__/brain_multiprocessing.cpython-36.pyc,,
|
||||
astroid/brain/__pycache__/brain_six.cpython-36.pyc,,
|
||||
astroid/brain/__pycache__/brain_dateutil.cpython-36.pyc,,
|
||||
astroid/brain/__pycache__/brain_uuid.cpython-36.pyc,,
|
||||
astroid/brain/__pycache__/brain_fstrings.cpython-36.pyc,,
|
||||
astroid/brain/__pycache__/brain_re.cpython-36.pyc,,
|
||||
astroid/brain/__pycache__/brain_curses.cpython-36.pyc,,
|
||||
astroid/brain/__pycache__/brain_io.cpython-36.pyc,,
|
||||
astroid/brain/__pycache__/brain_ssl.cpython-36.pyc,,
|
||||
astroid/brain/__pycache__/brain_qt.cpython-36.pyc,,
|
||||
astroid/brain/__pycache__/brain_threading.cpython-36.pyc,,
|
||||
astroid/brain/__pycache__/brain_hashlib.cpython-36.pyc,,
|
||||
astroid/brain/__pycache__/brain_builtin_inference.cpython-36.pyc,,
|
||||
astroid/brain/__pycache__/brain_gi.cpython-36.pyc,,
|
||||
astroid/brain/__pycache__/brain_nose.cpython-36.pyc,,
|
||||
astroid/brain/__pycache__/brain_numpy.cpython-36.pyc,,
|
||||
astroid/brain/__pycache__/brain_namedtuple_enum.cpython-36.pyc,,
|
@ -0,0 +1,6 @@
|
||||
Wheel-Version: 1.0
|
||||
Generator: bdist_wheel (0.30.0)
|
||||
Root-Is-Purelib: true
|
||||
Tag: py2-none-any
|
||||
Tag: py3-none-any
|
||||
|
@@ -0,0 +1 @@
{"classifiers": ["Topic :: Software Development :: Libraries :: Python Modules", "Topic :: Software Development :: Quality Assurance", "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: Implementation :: CPython", "Programming Language :: Python :: Implementation :: PyPy"], "extensions": {"python.details": {"contacts": [{"email": "code-quality@python.org", "name": "Python Code Quality Authority", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst"}, "project_urls": {"Home": "https://github.com/PyCQA/astroid"}}}, "extras": [], "generator": "bdist_wheel (0.30.0)", "license": "LGPL", "metadata_version": "2.0", "name": "astroid", "requires_python": ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*", "run_requires": [{"requires": ["lazy-object-proxy", "six", "wrapt"]}, {"environment": "python_version<\"3.4\"", "requires": ["backports.functools-lru-cache", "enum34 (>=1.1.3)", "singledispatch"]}], "summary": "A abstract syntax tree for Python with inference support.", "version": "1.6.5"}
@@ -0,0 +1 @@
astroid
156
thesisenv/lib/python3.6/site-packages/astroid/__init__.py
Normal file
@@ -0,0 +1,156 @@
# Copyright (c) 2006-2013, 2015 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
|
||||
# Copyright (c) 2014 Google, Inc.
|
||||
# Copyright (c) 2015-2016 Claudiu Popa <pcmanticore@gmail.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
|
||||
|
||||
"""Python Abstract Syntax Tree New Generation
|
||||
|
||||
The aim of this module is to provide a common base representation of
|
||||
python source code for projects such as pychecker, pyreverse,
|
||||
pylint... Well, actually the development of this library is essentially
|
||||
governed by pylint's needs.
|
||||
|
||||
It extends class defined in the python's _ast module with some
|
||||
additional methods and attributes. Instance attributes are added by a
|
||||
builder object, which can either generate extended ast (let's call
|
||||
them astroid ;) by visiting an existent ast tree or by inspecting living
|
||||
object. Methods are added by monkey patching ast classes.
|
||||
|
||||
Main modules are:
|
||||
|
||||
* nodes and scoped_nodes for more information about methods and
|
||||
attributes added to different node classes
|
||||
|
||||
* the manager contains a high level object to get astroid trees from
|
||||
source files and living objects. It maintains a cache of previously
|
||||
constructed tree for quick access
|
||||
|
||||
* builder contains the class responsible to build astroid trees
|
||||
"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
import re
|
||||
from operator import attrgetter
|
||||
|
||||
import enum
|
||||
|
||||
|
||||
_Context = enum.Enum('Context', 'Load Store Del')
|
||||
Load = _Context.Load
|
||||
Store = _Context.Store
|
||||
Del = _Context.Del
|
||||
del _Context
|
||||
|
||||
|
||||
from .__pkginfo__ import version as __version__
|
||||
# WARNING: internal imports order matters !
|
||||
|
||||
# pylint: disable=redefined-builtin, wildcard-import
|
||||
|
||||
# make all exception classes accessible from astroid package
|
||||
from astroid.exceptions import *
|
||||
|
||||
# make all node classes accessible from astroid package
|
||||
from astroid.nodes import *
|
||||
|
||||
# trigger extra monkey-patching
|
||||
from astroid import inference
|
||||
|
||||
# more stuff available
|
||||
from astroid import raw_building
|
||||
from astroid.bases import BaseInstance, Instance, BoundMethod, UnboundMethod
|
||||
from astroid.node_classes import are_exclusive, unpack_infer
|
||||
from astroid.scoped_nodes import builtin_lookup
|
||||
from astroid.builder import parse, extract_node
|
||||
from astroid.util import Uninferable, YES
|
||||
|
||||
# make a manager instance (borg) accessible from astroid package
|
||||
from astroid.manager import AstroidManager
|
||||
MANAGER = AstroidManager()
|
||||
del AstroidManager
|
||||
|
||||
# transform utilities (filters and decorator)
|
||||
|
||||
class AsStringRegexpPredicate(object):
|
||||
"""ClassDef to be used as predicate that may be given to `register_transform`
|
||||
|
||||
First argument is a regular expression that will be searched against the `as_string`
|
||||
representation of the node onto which it's applied.
|
||||
|
||||
If specified, the second argument is an `attrgetter` expression that will be
|
||||
applied on the node first to get the actual node on which `as_string` should
|
||||
be called.
|
||||
|
||||
WARNING: This can be fairly slow, as it has to convert every AST node back
|
||||
to Python code; you should consider examining the AST directly instead.
|
||||
"""
|
||||
def __init__(self, regexp, expression=None):
|
||||
self.regexp = re.compile(regexp)
|
||||
self.expression = expression
|
||||
|
||||
def __call__(self, node):
|
||||
if self.expression is not None:
|
||||
node = attrgetter(self.expression)(node)
|
||||
# pylint: disable=no-member; github.com/pycqa/astroid/126
|
||||
return self.regexp.search(node.as_string())
|
||||
|
||||
def inference_tip(infer_function, raise_on_overwrite=False):
|
||||
"""Given an instance specific inference function, return a function to be
|
||||
given to MANAGER.register_transform to set this inference function.
|
||||
|
||||
:param bool raise_on_overwrite: Raise an `InferenceOverwriteError`
|
||||
if the inference tip will overwrite another. Used for debugging
|
||||
|
||||
Typical usage
|
||||
|
||||
.. sourcecode:: python
|
||||
|
||||
MANAGER.register_transform(Call, inference_tip(infer_named_tuple),
|
||||
predicate)
|
||||
|
||||
.. Note::
|
||||
|
||||
Using an inference tip will override
|
||||
any previously set inference tip for the given
|
||||
node. Use a predicate in the transform to prevent
|
||||
excess overwrites.
|
||||
"""
|
||||
def transform(node, infer_function=infer_function):
|
||||
if (raise_on_overwrite
|
||||
and node._explicit_inference is not None
|
||||
and node._explicit_inference is not infer_function):
|
||||
raise InferenceOverwriteError(
|
||||
"Inference already set to {existing_inference}. "
|
||||
"Trying to overwrite with {new_inference} for {node}"
|
||||
.format(existing_inference=infer_function,
|
||||
new_inference=node._explicit_inference,
|
||||
node=node))
|
||||
node._explicit_inference = infer_function
|
||||
return node
|
||||
return transform
|
||||
|
||||
|
||||
def register_module_extender(manager, module_name, get_extension_mod):
|
||||
def transform(node):
|
||||
extension_module = get_extension_mod()
|
||||
for name, objs in extension_module.locals.items():
|
||||
node.locals[name] = objs
|
||||
for obj in objs:
|
||||
if obj.parent is extension_module:
|
||||
obj.parent = node
|
||||
|
||||
manager.register_transform(Module, transform, lambda n: n.name == module_name)
|
||||
|
||||
|
||||
# load brain plugins
|
||||
BRAIN_MODULES_DIR = os.path.join(os.path.dirname(__file__), 'brain')
|
||||
if BRAIN_MODULES_DIR not in sys.path:
|
||||
# add it to the end of the list so user path take precedence
|
||||
sys.path.append(BRAIN_MODULES_DIR)
|
||||
# load modules in this directory
|
||||
for module in os.listdir(BRAIN_MODULES_DIR):
|
||||
if module.endswith('.py'):
|
||||
__import__(module[:-3])
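
Note: this __init__.py wires up the public surface (parse/extract_node, the shared MANAGER, inference_tip, register_module_extender, and the brain plugin loader). A quick illustrative sketch of how that surface is typically used; it is not part of the committed file, and only uses names imported above:

import astroid

# Build an astroid tree from source text and inspect it.
tree = astroid.parse(
    "def greet(name):\n"
    "    return 'hello ' + name\n"
)
func = tree.body[0]            # the FunctionDef node
print(func.name)               # -> greet

# extract_node grabs a single node, e.g. a call site, from a snippet.
call = astroid.extract_node("greet('world')")
print(call.func.name)          # -> greet

# MANAGER is the shared (borg) manager that caches previously built trees.
print(astroid.MANAGER)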
68
thesisenv/lib/python3.6/site-packages/astroid/__pkginfo__.py
Normal file
@@ -0,0 +1,68 @@
# Copyright (c) 2006-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
|
||||
# Copyright (c) 2014-2016 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2014 Google, Inc.
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
|
||||
|
||||
"""astroid packaging information"""
|
||||
|
||||
from sys import version_info as py_version
|
||||
|
||||
from pkg_resources import parse_version
|
||||
from setuptools import __version__ as setuptools_version
|
||||
|
||||
distname = 'astroid'
|
||||
|
||||
modname = 'astroid'
|
||||
|
||||
version = '1.6.5'
|
||||
numversion = tuple(map(int, version.split('.')))
|
||||
|
||||
extras_require = {}
|
||||
install_requires = ['lazy_object_proxy', 'six', 'wrapt']
|
||||
|
||||
|
||||
def has_environment_marker_range_operators_support():
|
||||
"""Code extracted from 'pytest/setup.py'
|
||||
https://github.com/pytest-dev/pytest/blob/7538680c/setup.py#L31
|
||||
|
||||
The first known release to support environment marker with range operators
|
||||
it is 17.1, see: https://setuptools.readthedocs.io/en/latest/history.html#id113
|
||||
"""
|
||||
return parse_version(setuptools_version) >= parse_version('17.1')
|
||||
|
||||
|
||||
if has_environment_marker_range_operators_support():
|
||||
extras_require[':python_version<"3.4"'] = ['enum34>=1.1.3',
|
||||
'singledispatch',
|
||||
'backports.functools_lru_cache']
|
||||
else:
|
||||
if py_version < (3, 4):
|
||||
install_requires.extend(['enum34',
|
||||
'singledispatch',
|
||||
'backports.functools_lru_cache'])
|
||||
|
||||
|
||||
# pylint: disable=redefined-builtin; why license is a builtin anyway?
|
||||
license = 'LGPL'
|
||||
|
||||
author = 'Python Code Quality Authority'
|
||||
author_email = 'code-quality@python.org'
|
||||
mailinglist = "mailto://%s" % author_email
|
||||
web = 'https://github.com/PyCQA/astroid'
|
||||
|
||||
description = "A abstract syntax tree for Python with inference support."
|
||||
|
||||
classifiers = ["Topic :: Software Development :: Libraries :: Python Modules",
|
||||
"Topic :: Software Development :: Quality Assurance",
|
||||
"Programming Language :: Python",
|
||||
"Programming Language :: Python :: 2",
|
||||
"Programming Language :: Python :: 2.7",
|
||||
"Programming Language :: Python :: 3",
|
||||
"Programming Language :: Python :: 3.4",
|
||||
"Programming Language :: Python :: 3.5",
|
||||
"Programming Language :: Python :: 3.6",
|
||||
"Programming Language :: Python :: Implementation :: CPython",
|
||||
"Programming Language :: Python :: Implementation :: PyPy",
|
||||
]
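
For reference, a tiny sketch of what these packaging constants evaluate to (illustrative only, not part of the commit):

from astroid import __pkginfo__

assert __pkginfo__.version == '1.6.5'
assert __pkginfo__.numversion == (1, 6, 5)
print(__pkginfo__.install_requires)   # ['lazy_object_proxy', 'six', 'wrapt']
print(__pkginfo__.extras_require)     # {':python_version<"3.4"': [...]} on setuptools >= 17.1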
257
thesisenv/lib/python3.6/site-packages/astroid/arguments.py
Normal file
@@ -0,0 +1,257 @@
# Copyright (c) 2015-2016 Cara Vinson <ceridwenv@gmail.com>
|
||||
# Copyright (c) 2015-2016 Claudiu Popa <pcmanticore@gmail.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
|
||||
|
||||
|
||||
import six
|
||||
|
||||
from astroid import bases
|
||||
from astroid import context as contextmod
|
||||
from astroid import exceptions
|
||||
from astroid import nodes
|
||||
from astroid import util
|
||||
|
||||
|
||||
|
||||
class CallSite(object):
|
||||
"""Class for understanding arguments passed into a call site
|
||||
|
||||
It needs a call context, which contains the arguments and the
|
||||
keyword arguments that were passed into a given call site.
|
||||
In order to infer what an argument represents, call
|
||||
:meth:`infer_argument` with the corresponding function node
|
||||
and the argument name.
|
||||
"""
|
||||
|
||||
def __init__(self, callcontext):
|
||||
args = callcontext.args
|
||||
keywords = callcontext.keywords
|
||||
self.duplicated_keywords = set()
|
||||
self._unpacked_args = self._unpack_args(args)
|
||||
self._unpacked_kwargs = self._unpack_keywords(keywords)
|
||||
|
||||
self.positional_arguments = [
|
||||
arg for arg in self._unpacked_args
|
||||
if arg is not util.Uninferable
|
||||
]
|
||||
self.keyword_arguments = {
|
||||
key: value for key, value in self._unpacked_kwargs.items()
|
||||
if value is not util.Uninferable
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def from_call(cls, call_node):
|
||||
"""Get a CallSite object from the given Call node."""
|
||||
callcontext = contextmod.CallContext(call_node.args,
|
||||
call_node.keywords)
|
||||
return cls(callcontext)
|
||||
|
||||
def has_invalid_arguments(self):
|
||||
"""Check if in the current CallSite were passed *invalid* arguments
|
||||
|
||||
This can mean multiple things. For instance, if an unpacking
|
||||
of an invalid object was passed, then this method will return True.
|
||||
Other cases can be when the arguments can't be inferred by astroid,
|
||||
for example, by passing objects which aren't known statically.
|
||||
"""
|
||||
return len(self.positional_arguments) != len(self._unpacked_args)
|
||||
|
||||
def has_invalid_keywords(self):
|
||||
"""Check if in the current CallSite were passed *invalid* keyword arguments
|
||||
|
||||
For instance, unpacking a dictionary with integer keys is invalid
|
||||
(**{1:2}), because the keys must be strings, which will make this
|
||||
method to return True. Other cases where this might return True if
|
||||
objects which can't be inferred were passed.
|
||||
"""
|
||||
return len(self.keyword_arguments) != len(self._unpacked_kwargs)
|
||||
|
||||
def _unpack_keywords(self, keywords):
|
||||
values = {}
|
||||
context = contextmod.InferenceContext()
|
||||
for name, value in keywords:
|
||||
if name is None:
|
||||
# Then it's an unpacking operation (**)
|
||||
try:
|
||||
inferred = next(value.infer(context=context))
|
||||
except exceptions.InferenceError:
|
||||
values[name] = util.Uninferable
|
||||
continue
|
||||
|
||||
if not isinstance(inferred, nodes.Dict):
|
||||
# Not something we can work with.
|
||||
values[name] = util.Uninferable
|
||||
continue
|
||||
|
||||
for dict_key, dict_value in inferred.items:
|
||||
try:
|
||||
dict_key = next(dict_key.infer(context=context))
|
||||
except exceptions.InferenceError:
|
||||
values[name] = util.Uninferable
|
||||
continue
|
||||
if not isinstance(dict_key, nodes.Const):
|
||||
values[name] = util.Uninferable
|
||||
continue
|
||||
if not isinstance(dict_key.value, six.string_types):
|
||||
values[name] = util.Uninferable
|
||||
continue
|
||||
if dict_key.value in values:
|
||||
# The name is already in the dictionary
|
||||
values[dict_key.value] = util.Uninferable
|
||||
self.duplicated_keywords.add(dict_key.value)
|
||||
continue
|
||||
values[dict_key.value] = dict_value
|
||||
else:
|
||||
values[name] = value
|
||||
return values
|
||||
|
||||
@staticmethod
|
||||
def _unpack_args(args):
|
||||
values = []
|
||||
context = contextmod.InferenceContext()
|
||||
for arg in args:
|
||||
if isinstance(arg, nodes.Starred):
|
||||
try:
|
||||
inferred = next(arg.value.infer(context=context))
|
||||
except exceptions.InferenceError:
|
||||
values.append(util.Uninferable)
|
||||
continue
|
||||
|
||||
if inferred is util.Uninferable:
|
||||
values.append(util.Uninferable)
|
||||
continue
|
||||
if not hasattr(inferred, 'elts'):
|
||||
values.append(util.Uninferable)
|
||||
continue
|
||||
values.extend(inferred.elts)
|
||||
else:
|
||||
values.append(arg)
|
||||
return values
|
||||
|
||||
def infer_argument(self, funcnode, name, context):
|
||||
"""infer a function argument value according to the call context
|
||||
|
||||
Arguments:
|
||||
funcnode: The function being called.
|
||||
name: The name of the argument whose value is being inferred.
|
||||
context: TODO
|
||||
"""
|
||||
if name in self.duplicated_keywords:
|
||||
raise exceptions.InferenceError('The arguments passed to {func!r} '
|
||||
' have duplicate keywords.',
|
||||
call_site=self, func=funcnode,
|
||||
arg=name, context=context)
|
||||
|
||||
# Look into the keywords first, maybe it's already there.
|
||||
try:
|
||||
return self.keyword_arguments[name].infer(context)
|
||||
except KeyError:
|
||||
pass
|
||||
|
||||
# Too many arguments given and no variable arguments.
|
||||
if len(self.positional_arguments) > len(funcnode.args.args):
|
||||
if not funcnode.args.vararg:
|
||||
raise exceptions.InferenceError('Too many positional arguments '
|
||||
'passed to {func!r} that does '
|
||||
'not have *args.',
|
||||
call_site=self, func=funcnode,
|
||||
arg=name, context=context)
|
||||
|
||||
positional = self.positional_arguments[:len(funcnode.args.args)]
|
||||
vararg = self.positional_arguments[len(funcnode.args.args):]
|
||||
argindex = funcnode.args.find_argname(name)[0]
|
||||
kwonlyargs = set(arg.name for arg in funcnode.args.kwonlyargs)
|
||||
kwargs = {
|
||||
key: value for key, value in self.keyword_arguments.items()
|
||||
if key not in kwonlyargs
|
||||
}
|
||||
# If there are too few positionals compared to
|
||||
# what the function expects to receive, check to see
|
||||
# if the missing positional arguments were passed
|
||||
# as keyword arguments and if so, place them into the
|
||||
# positional args list.
|
||||
if len(positional) < len(funcnode.args.args):
|
||||
for func_arg in funcnode.args.args:
|
||||
if func_arg.name in kwargs:
|
||||
arg = kwargs.pop(func_arg.name)
|
||||
positional.append(arg)
|
||||
|
||||
if argindex is not None:
|
||||
# 2. first argument of instance/class method
|
||||
if argindex == 0 and funcnode.type in ('method', 'classmethod'):
|
||||
if context.boundnode is not None:
|
||||
boundnode = context.boundnode
|
||||
else:
|
||||
# XXX can do better ?
|
||||
boundnode = funcnode.parent.frame()
|
||||
|
||||
if isinstance(boundnode, nodes.ClassDef):
|
||||
# Verify that we're accessing a method
|
||||
# of the metaclass through a class, as in
|
||||
# `cls.metaclass_method`. In this case, the
|
||||
# first argument is always the class.
|
||||
method_scope = funcnode.parent.scope()
|
||||
if method_scope is boundnode.metaclass():
|
||||
return iter((boundnode, ))
|
||||
|
||||
if funcnode.type == 'method':
|
||||
if not isinstance(boundnode, bases.Instance):
|
||||
boundnode = bases.Instance(boundnode)
|
||||
return iter((boundnode,))
|
||||
if funcnode.type == 'classmethod':
|
||||
return iter((boundnode,))
|
||||
# if we have a method, extract one position
|
||||
# from the index, so we'll take in account
|
||||
# the extra parameter represented by `self` or `cls`
|
||||
if funcnode.type in ('method', 'classmethod'):
|
||||
argindex -= 1
|
||||
# 2. search arg index
|
||||
try:
|
||||
return self.positional_arguments[argindex].infer(context)
|
||||
except IndexError:
|
||||
pass
|
||||
|
||||
if funcnode.args.kwarg == name:
|
||||
# It wants all the keywords that were passed into
|
||||
# the call site.
|
||||
if self.has_invalid_keywords():
|
||||
raise exceptions.InferenceError(
|
||||
"Inference failed to find values for all keyword arguments "
|
||||
"to {func!r}: {unpacked_kwargs!r} doesn't correspond to "
|
||||
"{keyword_arguments!r}.",
|
||||
keyword_arguments=self.keyword_arguments,
|
||||
unpacked_kwargs=self._unpacked_kwargs,
|
||||
call_site=self, func=funcnode, arg=name, context=context)
|
||||
kwarg = nodes.Dict(lineno=funcnode.args.lineno,
|
||||
col_offset=funcnode.args.col_offset,
|
||||
parent=funcnode.args)
|
||||
kwarg.postinit([(nodes.const_factory(key), value)
|
||||
for key, value in kwargs.items()])
|
||||
return iter((kwarg, ))
|
||||
elif funcnode.args.vararg == name:
|
||||
# It wants all the args that were passed into
|
||||
# the call site.
|
||||
if self.has_invalid_arguments():
|
||||
raise exceptions.InferenceError(
|
||||
"Inference failed to find values for all positional "
|
||||
"arguments to {func!r}: {unpacked_args!r} doesn't "
|
||||
"correspond to {positional_arguments!r}.",
|
||||
positional_arguments=self.positional_arguments,
|
||||
unpacked_args=self._unpacked_args,
|
||||
call_site=self, func=funcnode, arg=name, context=context)
|
||||
args = nodes.Tuple(lineno=funcnode.args.lineno,
|
||||
col_offset=funcnode.args.col_offset,
|
||||
parent=funcnode.args)
|
||||
args.postinit(vararg)
|
||||
return iter((args, ))
|
||||
|
||||
# Check if it's a default parameter.
|
||||
try:
|
||||
return funcnode.args.default_value(name).infer(context)
|
||||
except exceptions.NoDefault:
|
||||
pass
|
||||
raise exceptions.InferenceError('No value found for argument {name} to '
|
||||
'{func!r}', call_site=self,
|
||||
func=funcnode, arg=name, context=context)
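
CallSite is what downstream consumers use to reason about a concrete call. A minimal sketch, assuming the top-level astroid helpers shown earlier in this diff (illustrative, not part of the commit):

import astroid
from astroid.arguments import CallSite

call = astroid.extract_node("max([1, 2], [3], key=len)")
site = CallSite.from_call(call)

print(site.positional_arguments)     # the two List nodes passed positionally
print(site.keyword_arguments)        # {'key': <Name node for len>}
print(site.has_invalid_arguments())  # False: every argument unpacked cleanly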
527
thesisenv/lib/python3.6/site-packages/astroid/as_string.py
Normal file
@@ -0,0 +1,527 @@
# Copyright (c) 2009-2011, 2013-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
|
||||
# Copyright (c) 2013-2016 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2013-2014 Google, Inc.
|
||||
# Copyright (c) 2015-2016 Cara Vinson <ceridwenv@gmail.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
|
||||
|
||||
"""This module renders Astroid nodes as string:
|
||||
|
||||
* :func:`to_code` function return equivalent (hopefully valid) python string
|
||||
|
||||
* :func:`dump` function return an internal representation of nodes found
|
||||
in the tree, useful for debugging or understanding the tree structure
|
||||
"""
|
||||
import sys
|
||||
|
||||
import six
|
||||
|
||||
|
||||
# pylint: disable=unused-argument
|
||||
|
||||
|
||||
class AsStringVisitor(object):
|
||||
"""Visitor to render an Astroid node as a valid python code string"""
|
||||
|
||||
def __init__(self, indent):
|
||||
self.indent = indent
|
||||
|
||||
def __call__(self, node):
|
||||
"""Makes this visitor behave as a simple function"""
|
||||
return node.accept(self)
|
||||
|
||||
def _stmt_list(self, stmts):
|
||||
"""return a list of nodes to string"""
|
||||
stmts = '\n'.join([nstr for nstr in [n.accept(self) for n in stmts] if nstr])
|
||||
return self.indent + stmts.replace('\n', '\n'+self.indent)
|
||||
|
||||
|
||||
## visit_<node> methods ###########################################
|
||||
|
||||
def visit_arguments(self, node):
|
||||
"""return an astroid.Function node as string"""
|
||||
return node.format_args()
|
||||
|
||||
def visit_assignattr(self, node):
|
||||
"""return an astroid.AssAttr node as string"""
|
||||
return self.visit_attribute(node)
|
||||
|
||||
def visit_assert(self, node):
|
||||
"""return an astroid.Assert node as string"""
|
||||
if node.fail:
|
||||
return 'assert %s, %s' % (node.test.accept(self),
|
||||
node.fail.accept(self))
|
||||
return 'assert %s' % node.test.accept(self)
|
||||
|
||||
def visit_assignname(self, node):
|
||||
"""return an astroid.AssName node as string"""
|
||||
return node.name
|
||||
|
||||
def visit_assign(self, node):
|
||||
"""return an astroid.Assign node as string"""
|
||||
lhs = ' = '.join([n.accept(self) for n in node.targets])
|
||||
return '%s = %s' % (lhs, node.value.accept(self))
|
||||
|
||||
def visit_augassign(self, node):
|
||||
"""return an astroid.AugAssign node as string"""
|
||||
return '%s %s %s' % (node.target.accept(self), node.op, node.value.accept(self))
|
||||
|
||||
def visit_annassign(self, node):
|
||||
"""Return an astroid.AugAssign node as string"""
|
||||
|
||||
target = node.target.accept(self)
|
||||
annotation = node.annotation.accept(self)
|
||||
if node.value is None:
|
||||
return '%s: %s' % (target, annotation)
|
||||
return '%s: %s = %s' % (target, annotation, node.value.accept(self))
|
||||
|
||||
def visit_repr(self, node):
|
||||
"""return an astroid.Repr node as string"""
|
||||
return '`%s`' % node.value.accept(self)
|
||||
|
||||
def visit_binop(self, node):
|
||||
"""return an astroid.BinOp node as string"""
|
||||
return '(%s) %s (%s)' % (node.left.accept(self), node.op, node.right.accept(self))
|
||||
|
||||
def visit_boolop(self, node):
|
||||
"""return an astroid.BoolOp node as string"""
|
||||
return (' %s ' % node.op).join(['(%s)' % n.accept(self)
|
||||
for n in node.values])
|
||||
|
||||
def visit_break(self, node):
|
||||
"""return an astroid.Break node as string"""
|
||||
return 'break'
|
||||
|
||||
def visit_call(self, node):
|
||||
"""return an astroid.Call node as string"""
|
||||
expr_str = node.func.accept(self)
|
||||
args = [arg.accept(self) for arg in node.args]
|
||||
if node.keywords:
|
||||
keywords = [kwarg.accept(self) for kwarg in node.keywords]
|
||||
else:
|
||||
keywords = []
|
||||
|
||||
args.extend(keywords)
|
||||
return '%s(%s)' % (expr_str, ', '.join(args))
|
||||
|
||||
def visit_classdef(self, node):
|
||||
"""return an astroid.ClassDef node as string"""
|
||||
decorate = node.decorators.accept(self) if node.decorators else ''
|
||||
bases = ', '.join([n.accept(self) for n in node.bases])
|
||||
if sys.version_info[0] == 2:
|
||||
bases = '(%s)' % bases if bases else ''
|
||||
else:
|
||||
metaclass = node.metaclass()
|
||||
if metaclass and not node.has_metaclass_hack():
|
||||
if bases:
|
||||
bases = '(%s, metaclass=%s)' % (bases, metaclass.name)
|
||||
else:
|
||||
bases = '(metaclass=%s)' % metaclass.name
|
||||
else:
|
||||
bases = '(%s)' % bases if bases else ''
|
||||
docs = '\n%s"""%s"""' % (self.indent, node.doc) if node.doc else ''
|
||||
return '\n\n%sclass %s%s:%s\n%s\n' % (decorate, node.name, bases, docs,
|
||||
self._stmt_list(node.body))
|
||||
|
||||
def visit_compare(self, node):
|
||||
"""return an astroid.Compare node as string"""
|
||||
rhs_str = ' '.join(['%s %s' % (op, expr.accept(self))
|
||||
for op, expr in node.ops])
|
||||
return '%s %s' % (node.left.accept(self), rhs_str)
|
||||
|
||||
def visit_comprehension(self, node):
|
||||
"""return an astroid.Comprehension node as string"""
|
||||
ifs = ''.join([' if %s' % n.accept(self) for n in node.ifs])
|
||||
return 'for %s in %s%s' % (node.target.accept(self),
|
||||
node.iter.accept(self), ifs)
|
||||
|
||||
def visit_const(self, node):
|
||||
"""return an astroid.Const node as string"""
|
||||
return repr(node.value)
|
||||
|
||||
def visit_continue(self, node):
|
||||
"""return an astroid.Continue node as string"""
|
||||
return 'continue'
|
||||
|
||||
def visit_delete(self, node): # XXX check if correct
|
||||
"""return an astroid.Delete node as string"""
|
||||
return 'del %s' % ', '.join([child.accept(self)
|
||||
for child in node.targets])
|
||||
|
||||
def visit_delattr(self, node):
|
||||
"""return an astroid.DelAttr node as string"""
|
||||
return self.visit_attribute(node)
|
||||
|
||||
def visit_delname(self, node):
|
||||
"""return an astroid.DelName node as string"""
|
||||
return node.name
|
||||
|
||||
def visit_decorators(self, node):
|
||||
"""return an astroid.Decorators node as string"""
|
||||
return '@%s\n' % '\n@'.join([item.accept(self) for item in node.nodes])
|
||||
|
||||
def visit_dict(self, node):
|
||||
"""return an astroid.Dict node as string"""
|
||||
return '{%s}' % ', '.join(self._visit_dict(node))
|
||||
|
||||
def _visit_dict(self, node):
|
||||
for key, value in node.items:
|
||||
key = key.accept(self)
|
||||
value = value.accept(self)
|
||||
if key == '**':
|
||||
# It can only be a DictUnpack node.
|
||||
yield key + value
|
||||
else:
|
||||
yield '%s: %s' % (key, value)
|
||||
|
||||
def visit_dictunpack(self, node):
|
||||
return '**'
|
||||
|
||||
def visit_dictcomp(self, node):
|
||||
"""return an astroid.DictComp node as string"""
|
||||
return '{%s: %s %s}' % (node.key.accept(self), node.value.accept(self),
|
||||
' '.join([n.accept(self) for n in node.generators]))
|
||||
|
||||
def visit_expr(self, node):
|
||||
"""return an astroid.Discard node as string"""
|
||||
return node.value.accept(self)
|
||||
|
||||
def visit_emptynode(self, node):
|
||||
"""dummy method for visiting an Empty node"""
|
||||
return ''
|
||||
|
||||
def visit_excepthandler(self, node):
|
||||
if node.type:
|
||||
if node.name:
|
||||
excs = 'except %s, %s' % (node.type.accept(self),
|
||||
node.name.accept(self))
|
||||
else:
|
||||
excs = 'except %s' % node.type.accept(self)
|
||||
else:
|
||||
excs = 'except'
|
||||
return '%s:\n%s' % (excs, self._stmt_list(node.body))
|
||||
|
||||
def visit_ellipsis(self, node):
|
||||
"""return an astroid.Ellipsis node as string"""
|
||||
return '...'
|
||||
|
||||
def visit_empty(self, node):
|
||||
"""return an Empty node as string"""
|
||||
return ''
|
||||
|
||||
def visit_exec(self, node):
|
||||
"""return an astroid.Exec node as string"""
|
||||
if node.locals:
|
||||
return 'exec %s in %s, %s' % (node.expr.accept(self),
|
||||
node.locals.accept(self),
|
||||
node.globals.accept(self))
|
||||
if node.globals:
|
||||
return 'exec %s in %s' % (node.expr.accept(self),
|
||||
node.globals.accept(self))
|
||||
return 'exec %s' % node.expr.accept(self)
|
||||
|
||||
def visit_extslice(self, node):
|
||||
"""return an astroid.ExtSlice node as string"""
|
||||
return ','.join([dim.accept(self) for dim in node.dims])
|
||||
|
||||
def visit_for(self, node):
|
||||
"""return an astroid.For node as string"""
|
||||
fors = 'for %s in %s:\n%s' % (node.target.accept(self),
|
||||
node.iter.accept(self),
|
||||
self._stmt_list(node.body))
|
||||
if node.orelse:
|
||||
fors = '%s\nelse:\n%s' % (fors, self._stmt_list(node.orelse))
|
||||
return fors
|
||||
|
||||
def visit_importfrom(self, node):
|
||||
"""return an astroid.ImportFrom node as string"""
|
||||
return 'from %s import %s' % ('.' * (node.level or 0) + node.modname,
|
||||
_import_string(node.names))
|
||||
|
||||
def visit_functiondef(self, node):
|
||||
"""return an astroid.Function node as string"""
|
||||
decorate = node.decorators.accept(self) if node.decorators else ''
|
||||
docs = '\n%s"""%s"""' % (self.indent, node.doc) if node.doc else ''
|
||||
return_annotation = ''
|
||||
if six.PY3 and node.returns:
|
||||
return_annotation = '->' + node.returns.as_string()
|
||||
trailer = return_annotation + ":"
|
||||
else:
|
||||
trailer = ":"
|
||||
def_format = "\n%sdef %s(%s)%s%s\n%s"
|
||||
return def_format % (decorate, node.name,
|
||||
node.args.accept(self),
|
||||
trailer, docs,
|
||||
self._stmt_list(node.body))
|
||||
|
||||
def visit_generatorexp(self, node):
|
||||
"""return an astroid.GeneratorExp node as string"""
|
||||
return '(%s %s)' % (node.elt.accept(self),
|
||||
' '.join([n.accept(self) for n in node.generators]))
|
||||
|
||||
def visit_attribute(self, node):
|
||||
"""return an astroid.Getattr node as string"""
|
||||
return '%s.%s' % (node.expr.accept(self), node.attrname)
|
||||
|
||||
def visit_global(self, node):
|
||||
"""return an astroid.Global node as string"""
|
||||
return 'global %s' % ', '.join(node.names)
|
||||
|
||||
def visit_if(self, node):
|
||||
"""return an astroid.If node as string"""
|
||||
ifs = ['if %s:\n%s' % (node.test.accept(self), self._stmt_list(node.body))]
|
||||
if node.orelse:# XXX use elif ???
|
||||
ifs.append('else:\n%s' % self._stmt_list(node.orelse))
|
||||
return '\n'.join(ifs)
|
||||
|
||||
def visit_ifexp(self, node):
|
||||
"""return an astroid.IfExp node as string"""
|
||||
return '%s if %s else %s' % (node.body.accept(self),
|
||||
node.test.accept(self),
|
||||
node.orelse.accept(self))
|
||||
|
||||
def visit_import(self, node):
|
||||
"""return an astroid.Import node as string"""
|
||||
return 'import %s' % _import_string(node.names)
|
||||
|
||||
def visit_keyword(self, node):
|
||||
"""return an astroid.Keyword node as string"""
|
||||
if node.arg is None:
|
||||
return '**%s' % node.value.accept(self)
|
||||
return '%s=%s' % (node.arg, node.value.accept(self))
|
||||
|
||||
def visit_lambda(self, node):
|
||||
"""return an astroid.Lambda node as string"""
|
||||
return 'lambda %s: %s' % (node.args.accept(self),
|
||||
node.body.accept(self))
|
||||
|
||||
def visit_list(self, node):
|
||||
"""return an astroid.List node as string"""
|
||||
return '[%s]' % ', '.join([child.accept(self) for child in node.elts])
|
||||
|
||||
def visit_listcomp(self, node):
|
||||
"""return an astroid.ListComp node as string"""
|
||||
return '[%s %s]' % (node.elt.accept(self),
|
||||
' '.join([n.accept(self) for n in node.generators]))
|
||||
|
||||
def visit_module(self, node):
|
||||
"""return an astroid.Module node as string"""
|
||||
docs = '"""%s"""\n\n' % node.doc if node.doc else ''
|
||||
return docs + '\n'.join([n.accept(self) for n in node.body]) + '\n\n'
|
||||
|
||||
def visit_name(self, node):
|
||||
"""return an astroid.Name node as string"""
|
||||
return node.name
|
||||
|
||||
def visit_pass(self, node):
|
||||
"""return an astroid.Pass node as string"""
|
||||
return 'pass'
|
||||
|
||||
def visit_print(self, node):
|
||||
"""return an astroid.Print node as string"""
|
||||
nodes = ', '.join([n.accept(self) for n in node.values])
|
||||
if not node.nl:
|
||||
nodes = '%s,' % nodes
|
||||
if node.dest:
|
||||
return 'print >> %s, %s' % (node.dest.accept(self), nodes)
|
||||
return 'print %s' % nodes
|
||||
|
||||
def visit_raise(self, node):
|
||||
"""return an astroid.Raise node as string"""
|
||||
if node.exc:
|
||||
if node.inst:
|
||||
if node.tback:
|
||||
return 'raise %s, %s, %s' % (node.exc.accept(self),
|
||||
node.inst.accept(self),
|
||||
node.tback.accept(self))
|
||||
return 'raise %s, %s' % (node.exc.accept(self),
|
||||
node.inst.accept(self))
|
||||
return 'raise %s' % node.exc.accept(self)
|
||||
return 'raise'
|
||||
|
||||
def visit_return(self, node):
|
||||
"""return an astroid.Return node as string"""
|
||||
if node.value:
|
||||
return 'return %s' % node.value.accept(self)
|
||||
|
||||
return 'return'
|
||||
|
||||
def visit_index(self, node):
|
||||
"""return a astroid.Index node as string"""
|
||||
return node.value.accept(self)
|
||||
|
||||
def visit_set(self, node):
|
||||
"""return an astroid.Set node as string"""
|
||||
return '{%s}' % ', '.join([child.accept(self) for child in node.elts])
|
||||
|
||||
def visit_setcomp(self, node):
|
||||
"""return an astroid.SetComp node as string"""
|
||||
return '{%s %s}' % (node.elt.accept(self),
|
||||
' '.join([n.accept(self) for n in node.generators]))
|
||||
|
||||
def visit_slice(self, node):
|
||||
"""return a astroid.Slice node as string"""
|
||||
lower = node.lower.accept(self) if node.lower else ''
|
||||
upper = node.upper.accept(self) if node.upper else''
|
||||
step = node.step.accept(self) if node.step else ''
|
||||
if step:
|
||||
return '%s:%s:%s' % (lower, upper, step)
|
||||
return '%s:%s' % (lower, upper)
|
||||
|
||||
def visit_subscript(self, node):
|
||||
"""return an astroid.Subscript node as string"""
|
||||
return '%s[%s]' % (node.value.accept(self), node.slice.accept(self))
|
||||
|
||||
def visit_tryexcept(self, node):
|
||||
"""return an astroid.TryExcept node as string"""
|
||||
trys = ['try:\n%s' % self._stmt_list(node.body)]
|
||||
for handler in node.handlers:
|
||||
trys.append(handler.accept(self))
|
||||
if node.orelse:
|
||||
trys.append('else:\n%s' % self._stmt_list(node.orelse))
|
||||
return '\n'.join(trys)
|
||||
|
||||
def visit_tryfinally(self, node):
|
||||
"""return an astroid.TryFinally node as string"""
|
||||
return 'try:\n%s\nfinally:\n%s' % (self._stmt_list(node.body),
|
||||
self._stmt_list(node.finalbody))
|
||||
|
||||
def visit_tuple(self, node):
|
||||
"""return an astroid.Tuple node as string"""
|
||||
if len(node.elts) == 1:
|
||||
return '(%s, )' % node.elts[0].accept(self)
|
||||
return '(%s)' % ', '.join([child.accept(self) for child in node.elts])
|
||||
|
||||
def visit_unaryop(self, node):
|
||||
"""return an astroid.UnaryOp node as string"""
|
||||
if node.op == 'not':
|
||||
operator = 'not '
|
||||
else:
|
||||
operator = node.op
|
||||
return '%s%s' % (operator, node.operand.accept(self))
|
||||
|
||||
def visit_while(self, node):
|
||||
"""return an astroid.While node as string"""
|
||||
whiles = 'while %s:\n%s' % (node.test.accept(self),
|
||||
self._stmt_list(node.body))
|
||||
if node.orelse:
|
||||
whiles = '%s\nelse:\n%s' % (whiles, self._stmt_list(node.orelse))
|
||||
return whiles
|
||||
|
||||
def visit_with(self, node): # 'with' without 'as' is possible
|
||||
"""return an astroid.With node as string"""
|
||||
items = ', '.join(('(%s)' % expr.accept(self)) +
|
||||
(vars and ' as (%s)' % (vars.accept(self)) or '')
|
||||
for expr, vars in node.items)
|
||||
return 'with %s:\n%s' % (items, self._stmt_list(node.body))
|
||||
|
||||
def visit_yield(self, node):
|
||||
"""yield an ast.Yield node as string"""
|
||||
yi_val = (" " + node.value.accept(self)) if node.value else ""
|
||||
expr = 'yield' + yi_val
|
||||
if node.parent.is_statement:
|
||||
return expr
|
||||
|
||||
return "(%s)" % (expr,)
|
||||
|
||||
def visit_starred(self, node):
|
||||
"""return Starred node as string"""
|
||||
return "*" + node.value.accept(self)
|
||||
|
||||
# These aren't for real AST nodes, but for inference objects.
|
||||
|
||||
def visit_frozenset(self, node):
|
||||
return node.parent.accept(self)
|
||||
|
||||
def visit_super(self, node):
|
||||
return node.parent.accept(self)
|
||||
|
||||
def visit_uninferable(self, node):
|
||||
return str(node)
|
||||
|
||||
|
||||
class AsStringVisitor3(AsStringVisitor):
|
||||
"""AsStringVisitor3 overwrites some AsStringVisitor methods"""
|
||||
|
||||
def visit_excepthandler(self, node):
|
||||
if node.type:
|
||||
if node.name:
|
||||
excs = 'except %s as %s' % (node.type.accept(self),
|
||||
node.name.accept(self))
|
||||
else:
|
||||
excs = 'except %s' % node.type.accept(self)
|
||||
else:
|
||||
excs = 'except'
|
||||
return '%s:\n%s' % (excs, self._stmt_list(node.body))
|
||||
|
||||
def visit_nonlocal(self, node):
|
||||
"""return an astroid.Nonlocal node as string"""
|
||||
return 'nonlocal %s' % ', '.join(node.names)
|
||||
|
||||
def visit_raise(self, node):
|
||||
"""return an astroid.Raise node as string"""
|
||||
if node.exc:
|
||||
if node.cause:
|
||||
return 'raise %s from %s' % (node.exc.accept(self),
|
||||
node.cause.accept(self))
|
||||
return 'raise %s' % node.exc.accept(self)
|
||||
return 'raise'
|
||||
|
||||
def visit_yieldfrom(self, node):
|
||||
""" Return an astroid.YieldFrom node as string. """
|
||||
yi_val = (" " + node.value.accept(self)) if node.value else ""
|
||||
expr = 'yield from' + yi_val
|
||||
if node.parent.is_statement:
|
||||
return expr
|
||||
|
||||
return "(%s)" % (expr,)
|
||||
|
||||
def visit_asyncfunctiondef(self, node):
|
||||
function = super(AsStringVisitor3, self).visit_functiondef(node)
|
||||
return 'async ' + function.strip()
|
||||
|
||||
def visit_await(self, node):
|
||||
return 'await %s' % node.value.accept(self)
|
||||
|
||||
def visit_asyncwith(self, node):
|
||||
return 'async %s' % self.visit_with(node)
|
||||
|
||||
def visit_asyncfor(self, node):
|
||||
return 'async %s' % self.visit_for(node)
|
||||
|
||||
def visit_joinedstr(self, node):
|
||||
# Special treatment for constants,
|
||||
# as we want to join literals not reprs
|
||||
string = ''.join(
|
||||
value.value if type(value).__name__ == 'Const'
|
||||
else value.accept(self)
|
||||
for value in node.values
|
||||
)
|
||||
return "f'%s'" % string
|
||||
|
||||
def visit_formattedvalue(self, node):
|
||||
return '{%s}' % node.value.accept(self)
|
||||
|
||||
def visit_comprehension(self, node):
|
||||
"""return an astroid.Comprehension node as string"""
|
||||
return '%s%s' % ('async ' if node.is_async else '',
|
||||
super(AsStringVisitor3, self).visit_comprehension(node))
|
||||
|
||||
|
||||
def _import_string(names):
|
||||
"""return a list of (name, asname) formatted as a string"""
|
||||
_names = []
|
||||
for name, asname in names:
|
||||
if asname is not None:
|
||||
_names.append('%s as %s' % (name, asname))
|
||||
else:
|
||||
_names.append(name)
|
||||
return ', '.join(_names)
|
||||
|
||||
|
||||
if sys.version_info >= (3, 0):
|
||||
AsStringVisitor = AsStringVisitor3
|
||||
|
||||
# This sets the default indent to 4 spaces.
|
||||
to_code = AsStringVisitor(' ')
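# A minimal sketch, assuming the astroid package in this tree is importable:
# the module-level ``to_code`` visitor is what backs ``node.as_string()``, so
# the visit_* methods above can be exercised by round-tripping a bit of source.
import astroid

module = astroid.parse("while x:\n    do(x)\nelse:\n    done()")
print(module.as_string())  # expected to re-emit equivalent source text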
|
74
thesisenv/lib/python3.6/site-packages/astroid/astpeephole.py
Normal file
@ -0,0 +1,74 @@
|
||||
# Copyright (c) 2015-2016 Claudiu Popa <pcmanticore@gmail.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
|
||||
|
||||
"""Small AST optimizations."""
|
||||
|
||||
import _ast
|
||||
|
||||
from astroid import nodes
|
||||
|
||||
|
||||
__all__ = ('ASTPeepholeOptimizer', )
|
||||
|
||||
|
||||
try:
|
||||
_TYPES = (_ast.Str, _ast.Bytes)
|
||||
except AttributeError:
|
||||
_TYPES = (_ast.Str, )
|
||||
|
||||
|
||||
class ASTPeepholeOptimizer(object):
|
||||
"""Class for applying small optimizations to generate new AST."""
|
||||
|
||||
def optimize_binop(self, node, parent=None):
|
||||
"""Optimize BinOps with string Const nodes on the lhs.
|
||||
|
||||
This fixes an infinite recursion crash, where multiple
|
||||
strings are joined using the addition operator. With a
|
||||
sufficient number of such strings, astroid will fail
|
||||
with a maximum recursion limit exceeded. The
|
||||
function will return a Const node with all the strings
|
||||
already joined.
|
||||
Return ``None`` if no AST node can be obtained
|
||||
through optimization.
|
||||
"""
|
||||
ast_nodes = []
|
||||
current = node
|
||||
while isinstance(current, _ast.BinOp):
|
||||
# lhs must be a BinOp with the addition operand.
|
||||
if not isinstance(current.left, _ast.BinOp):
|
||||
return None
|
||||
if (not isinstance(current.left.op, _ast.Add)
|
||||
or not isinstance(current.op, _ast.Add)):
|
||||
return None
|
||||
|
||||
# rhs must be a str / bytes.
|
||||
if not isinstance(current.right, _TYPES):
|
||||
return None
|
||||
|
||||
ast_nodes.append(current.right.s)
|
||||
current = current.left
|
||||
|
||||
if (isinstance(current, _ast.BinOp)
|
||||
and isinstance(current.left, _TYPES)
|
||||
and isinstance(current.right, _TYPES)):
|
||||
# Stop early if we are at the last BinOp in
|
||||
# the operation
|
||||
ast_nodes.append(current.right.s)
|
||||
ast_nodes.append(current.left.s)
|
||||
break
|
||||
|
||||
if not ast_nodes:
|
||||
return None
|
||||
|
||||
# If we have inconsistent types, bail out.
|
||||
known = type(ast_nodes[0])
|
||||
if any(not isinstance(element, known)
|
||||
for element in ast_nodes[1:]):
|
||||
return None
|
||||
|
||||
value = known().join(reversed(ast_nodes))
|
||||
newnode = nodes.Const(value, node.lineno, node.col_offset, parent)
|
||||
return newnode
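# A minimal usage sketch (assuming this module is importable as
# astroid.astpeephole): feeding a chained string concatenation from the
# builtin ``ast`` module into the optimizer should collapse it into a single
# Const node holding the joined value.
import ast
from astroid.astpeephole import ASTPeepholeOptimizer

binop = ast.parse("'a' + 'b' + 'c' + 'd'", mode='eval').body
optimized = ASTPeepholeOptimizer().optimize_binop(binop)
print(optimized.value)  # expected: 'abcd'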
|
453
thesisenv/lib/python3.6/site-packages/astroid/bases.py
Normal file
@ -0,0 +1,453 @@
|
||||
# Copyright (c) 2009-2011, 2013-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
|
||||
# Copyright (c) 2014-2016 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2014 Google, Inc.
|
||||
# Copyright (c) 2015-2016 Cara Vinson <ceridwenv@gmail.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
|
||||
|
||||
"""This module contains base classes and functions for the nodes and some
|
||||
inference utils.
|
||||
"""
|
||||
|
||||
import collections
|
||||
import sys
|
||||
|
||||
import six
|
||||
|
||||
from astroid import context as contextmod
|
||||
from astroid import exceptions
|
||||
from astroid import util
|
||||
|
||||
objectmodel = util.lazy_import('interpreter.objectmodel')
|
||||
BUILTINS = six.moves.builtins.__name__
|
||||
manager = util.lazy_import('manager')
|
||||
MANAGER = manager.AstroidManager()
|
||||
|
||||
if sys.version_info >= (3, 0):
|
||||
BUILTINS = 'builtins'
|
||||
BOOL_SPECIAL_METHOD = '__bool__'
|
||||
else:
|
||||
BUILTINS = '__builtin__'
|
||||
BOOL_SPECIAL_METHOD = '__nonzero__'
|
||||
PROPERTIES = {BUILTINS + '.property', 'abc.abstractproperty'}
|
||||
# List of possible property names. We use this list in order
|
||||
# to see if a method is a property or not. This should be
|
||||
# pretty reliable and fast, the alternative being to check each
|
||||
# decorator to see if it's a real property-like descriptor, which
|
||||
# can be too complicated.
|
||||
# Also, these aren't qualified, because each project can
|
||||
# define them, we shouldn't expect to know every possible
|
||||
# property-like decorator!
|
||||
# TODO(cpopa): just implement descriptors already.
|
||||
POSSIBLE_PROPERTIES = {"cached_property", "cachedproperty",
|
||||
"lazyproperty", "lazy_property", "reify",
|
||||
"lazyattribute", "lazy_attribute",
|
||||
"LazyProperty", "lazy"}
|
||||
|
||||
|
||||
def _is_property(meth):
|
||||
if PROPERTIES.intersection(meth.decoratornames()):
|
||||
return True
|
||||
stripped = {name.split(".")[-1] for name in meth.decoratornames()
|
||||
if name is not util.Uninferable}
|
||||
return any(name in stripped for name in POSSIBLE_PROPERTIES)
|
||||
|
||||
|
||||
class Proxy(object):
|
||||
"""a simple proxy object"""
|
||||
|
||||
_proxied = None # proxied object may be set by class or by instance
|
||||
|
||||
def __init__(self, proxied=None):
|
||||
if proxied is not None:
|
||||
self._proxied = proxied
|
||||
|
||||
def __getattr__(self, name):
|
||||
if name == '_proxied':
|
||||
return getattr(self.__class__, '_proxied')
|
||||
if name in self.__dict__:
|
||||
return self.__dict__[name]
|
||||
return getattr(self._proxied, name)
|
||||
|
||||
def infer(self, context=None):
|
||||
yield self
|
||||
|
||||
|
||||
def _infer_stmts(stmts, context, frame=None):
|
||||
"""Return an iterator on statements inferred by each statement in *stmts*."""
|
||||
stmt = None
|
||||
inferred = False
|
||||
if context is not None:
|
||||
name = context.lookupname
|
||||
context = context.clone()
|
||||
else:
|
||||
name = None
|
||||
context = contextmod.InferenceContext()
|
||||
|
||||
for stmt in stmts:
|
||||
if stmt is util.Uninferable:
|
||||
yield stmt
|
||||
inferred = True
|
||||
continue
|
||||
context.lookupname = stmt._infer_name(frame, name)
|
||||
try:
|
||||
for inferred in stmt.infer(context=context):
|
||||
yield inferred
|
||||
inferred = True
|
||||
except exceptions.NameInferenceError:
|
||||
continue
|
||||
except exceptions.InferenceError:
|
||||
yield util.Uninferable
|
||||
inferred = True
|
||||
if not inferred:
|
||||
raise exceptions.InferenceError(
|
||||
'Inference failed for all members of {stmts!r}.',
|
||||
stmts=stmts, frame=frame, context=context)
|
||||
|
||||
|
||||
def _infer_method_result_truth(instance, method_name, context):
|
||||
# Get the method from the instance and try to infer
|
||||
# its return's truth value.
|
||||
meth = next(instance.igetattr(method_name, context=context), None)
|
||||
if meth and hasattr(meth, 'infer_call_result'):
|
||||
if not meth.callable():
|
||||
return util.Uninferable
|
||||
for value in meth.infer_call_result(instance, context=context):
|
||||
if value is util.Uninferable:
|
||||
return value
|
||||
|
||||
inferred = next(value.infer(context=context))
|
||||
return inferred.bool_value()
|
||||
return util.Uninferable
|
||||
|
||||
|
||||
class BaseInstance(Proxy):
|
||||
"""An instance base class, which provides lookup methods for potential instances."""
|
||||
|
||||
special_attributes = None
|
||||
|
||||
def display_type(self):
|
||||
return 'Instance of'
|
||||
|
||||
def getattr(self, name, context=None, lookupclass=True):
|
||||
try:
|
||||
values = self._proxied.instance_attr(name, context)
|
||||
except exceptions.AttributeInferenceError:
|
||||
if self.special_attributes and name in self.special_attributes:
|
||||
return [self.special_attributes.lookup(name)]
|
||||
|
||||
if lookupclass:
|
||||
# Class attributes not available through the instance
|
||||
# unless they are explicitly defined.
|
||||
return self._proxied.getattr(name, context,
|
||||
class_context=False)
|
||||
|
||||
util.reraise(exceptions.AttributeInferenceError(target=self,
|
||||
attribute=name,
|
||||
context=context))
|
||||
# since we've no context information, return matching class members as
|
||||
# well
|
||||
if lookupclass:
|
||||
try:
|
||||
return values + self._proxied.getattr(name, context,
|
||||
class_context=False)
|
||||
except exceptions.AttributeInferenceError:
|
||||
pass
|
||||
return values
|
||||
|
||||
def igetattr(self, name, context=None):
|
||||
"""inferred getattr"""
|
||||
if not context:
|
||||
context = contextmod.InferenceContext()
|
||||
try:
|
||||
# avoid recursively inferring the same attr on the same class
|
||||
if context.push((self._proxied, name)):
|
||||
return
|
||||
|
||||
# XXX frame should be self._proxied, or not ?
|
||||
get_attr = self.getattr(name, context, lookupclass=False)
|
||||
for stmt in _infer_stmts(self._wrap_attr(get_attr, context),
|
||||
context, frame=self):
|
||||
yield stmt
|
||||
except exceptions.AttributeInferenceError as error:
|
||||
try:
|
||||
# fallback to class.igetattr since it has some logic to handle
|
||||
# descriptors
|
||||
# But only if the _proxied is the Class.
|
||||
if self._proxied.__class__.__name__ != 'ClassDef':
|
||||
util.reraise(exceptions.InferenceError(**vars(error)))
|
||||
attrs = self._proxied.igetattr(name, context, class_context=False)
|
||||
for stmt in self._wrap_attr(attrs, context):
|
||||
yield stmt
|
||||
except exceptions.AttributeInferenceError as error:
|
||||
util.reraise(exceptions.InferenceError(**vars(error)))
|
||||
|
||||
def _wrap_attr(self, attrs, context=None):
|
||||
"""wrap bound methods of attrs in a InstanceMethod proxies"""
|
||||
for attr in attrs:
|
||||
if isinstance(attr, UnboundMethod):
|
||||
if _is_property(attr):
|
||||
for inferred in attr.infer_call_result(self, context):
|
||||
yield inferred
|
||||
else:
|
||||
yield BoundMethod(attr, self)
|
||||
elif hasattr(attr, 'name') and attr.name == '<lambda>':
|
||||
# This is a lambda function defined at class level,
|
||||
# since its scope is the underlying _proxied class.
|
||||
# Unfortunately, we can't do an isinstance check here,
|
||||
# because of the circular dependency between astroid.bases
|
||||
# and astroid.scoped_nodes.
|
||||
if attr.statement().scope() == self._proxied:
|
||||
if attr.args.args and attr.args.args[0].name == 'self':
|
||||
yield BoundMethod(attr, self)
|
||||
continue
|
||||
yield attr
|
||||
else:
|
||||
yield attr
|
||||
|
||||
def infer_call_result(self, caller, context=None):
|
||||
"""infer what a class instance is returning when called"""
|
||||
inferred = False
|
||||
for node in self._proxied.igetattr('__call__', context):
|
||||
if node is util.Uninferable or not node.callable():
|
||||
continue
|
||||
for res in node.infer_call_result(caller, context):
|
||||
inferred = True
|
||||
yield res
|
||||
if not inferred:
|
||||
raise exceptions.InferenceError(node=self, caller=caller,
|
||||
context=context)
|
||||
|
||||
|
||||
class Instance(BaseInstance):
|
||||
"""A special node representing a class instance."""
|
||||
|
||||
# pylint: disable=unnecessary-lambda
|
||||
special_attributes = util.lazy_descriptor(lambda: objectmodel.InstanceModel())
|
||||
|
||||
def __repr__(self):
|
||||
return '<Instance of %s.%s at 0x%s>' % (self._proxied.root().name,
|
||||
self._proxied.name,
|
||||
id(self))
|
||||
def __str__(self):
|
||||
return 'Instance of %s.%s' % (self._proxied.root().name,
|
||||
self._proxied.name)
|
||||
|
||||
def callable(self):
|
||||
try:
|
||||
self._proxied.getattr('__call__', class_context=False)
|
||||
return True
|
||||
except exceptions.AttributeInferenceError:
|
||||
return False
|
||||
|
||||
def pytype(self):
|
||||
return self._proxied.qname()
|
||||
|
||||
def display_type(self):
|
||||
return 'Instance of'
|
||||
|
||||
def bool_value(self):
|
||||
"""Infer the truth value for an Instance
|
||||
|
||||
The truth value of an instance is determined by these conditions:
|
||||
|
||||
* if it implements __bool__ on Python 3 or __nonzero__
|
||||
on Python 2, then its bool value will be determined by
|
||||
calling this special method and checking its result.
|
||||
* when this method is not defined, __len__() is called, if it
|
||||
is defined, and the object is considered true if its result is
|
||||
nonzero. If a class defines neither __len__() nor __bool__(),
|
||||
all its instances are considered true.
|
||||
"""
|
||||
context = contextmod.InferenceContext()
|
||||
context.callcontext = contextmod.CallContext(args=[])
|
||||
context.boundnode = self
|
||||
|
||||
try:
|
||||
result = _infer_method_result_truth(self, BOOL_SPECIAL_METHOD, context)
|
||||
except (exceptions.InferenceError, exceptions.AttributeInferenceError):
|
||||
# Fallback to __len__.
|
||||
try:
|
||||
result = _infer_method_result_truth(self, '__len__', context)
|
||||
except (exceptions.AttributeInferenceError, exceptions.InferenceError):
|
||||
return True
|
||||
return result
|
||||
|
||||
# TODO(cpopa): this is set in inference.py
|
||||
# The circular dependency hell goes deeper and deeper.
|
||||
def getitem(self, index, context=None):
|
||||
pass
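# A minimal sketch of the truth-value rules documented in bool_value() above
# (assuming astroid is importable): a class defining only __len__ with a zero
# result should make its instances infer as falsy.
import astroid

node = astroid.extract_node('''
class Empty(object):
    def __len__(self):
        return 0
Empty()  #@
''')
print(next(node.infer()).bool_value())  # expected: False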
|
||||
|
||||
|
||||
class UnboundMethod(Proxy):
|
||||
"""a special node representing a method not bound to an instance"""
|
||||
|
||||
# pylint: disable=unnecessary-lambda
|
||||
special_attributes = util.lazy_descriptor(lambda: objectmodel.UnboundMethodModel())
|
||||
|
||||
def __repr__(self):
|
||||
frame = self._proxied.parent.frame()
|
||||
return '<%s %s of %s at 0x%s' % (self.__class__.__name__,
|
||||
self._proxied.name,
|
||||
frame.qname(), id(self))
|
||||
|
||||
def is_bound(self):
|
||||
return False
|
||||
|
||||
def getattr(self, name, context=None):
|
||||
if name in self.special_attributes:
|
||||
return [self.special_attributes.lookup(name)]
|
||||
return self._proxied.getattr(name, context)
|
||||
|
||||
def igetattr(self, name, context=None):
|
||||
if name in self.special_attributes:
|
||||
return iter((self.special_attributes.lookup(name), ))
|
||||
return self._proxied.igetattr(name, context)
|
||||
|
||||
def infer_call_result(self, caller, context):
|
||||
# If we're unbound method __new__ of builtin object, the result is an
|
||||
# instance of the class given as first argument.
|
||||
if (self._proxied.name == '__new__' and
|
||||
self._proxied.parent.frame().qname() == '%s.object' % BUILTINS):
|
||||
infer = caller.args[0].infer() if caller.args else []
|
||||
return (Instance(x) if x is not util.Uninferable else x for x in infer)
|
||||
return self._proxied.infer_call_result(caller, context)
|
||||
|
||||
def bool_value(self):
|
||||
return True
|
||||
|
||||
|
||||
class BoundMethod(UnboundMethod):
|
||||
"""a special node representing a method bound to an instance"""
|
||||
|
||||
# pylint: disable=unnecessary-lambda
|
||||
special_attributes = util.lazy_descriptor(lambda: objectmodel.BoundMethodModel())
|
||||
|
||||
def __init__(self, proxy, bound):
|
||||
UnboundMethod.__init__(self, proxy)
|
||||
self.bound = bound
|
||||
|
||||
def is_bound(self):
|
||||
return True
|
||||
|
||||
def _infer_type_new_call(self, caller, context):
|
||||
"""Try to infer what type.__new__(mcs, name, bases, attrs) returns.
|
||||
|
||||
In order for such call to be valid, the metaclass needs to be
|
||||
a subtype of ``type``, the name needs to be a string, the bases
|
||||
need to be a tuple of classes and the attributes a dictionary
|
||||
of strings to values.
|
||||
"""
|
||||
from astroid import node_classes
|
||||
# Verify the metaclass
|
||||
mcs = next(caller.args[0].infer(context=context))
|
||||
if mcs.__class__.__name__ != 'ClassDef':
|
||||
# Not a valid first argument.
|
||||
return None
|
||||
if not mcs.is_subtype_of("%s.type" % BUILTINS):
|
||||
# Not a valid metaclass.
|
||||
return None
|
||||
|
||||
# Verify the name
|
||||
name = next(caller.args[1].infer(context=context))
|
||||
if name.__class__.__name__ != 'Const':
|
||||
# Not a valid name, needs to be a const.
|
||||
return None
|
||||
if not isinstance(name.value, str):
|
||||
# Needs to be a string.
|
||||
return None
|
||||
|
||||
# Verify the bases
|
||||
bases = next(caller.args[2].infer(context=context))
|
||||
if bases.__class__.__name__ != 'Tuple':
|
||||
# Needs to be a tuple.
|
||||
return None
|
||||
inferred_bases = [next(elt.infer(context=context))
|
||||
for elt in bases.elts]
|
||||
if any(base.__class__.__name__ != 'ClassDef'
|
||||
for base in inferred_bases):
|
||||
# All the bases need to be Classes
|
||||
return None
|
||||
|
||||
# Verify the attributes.
|
||||
attrs = next(caller.args[3].infer(context=context))
|
||||
if attrs.__class__.__name__ != 'Dict':
|
||||
# Needs to be a dictionary.
|
||||
return None
|
||||
cls_locals = collections.defaultdict(list)
|
||||
for key, value in attrs.items:
|
||||
key = next(key.infer(context=context))
|
||||
value = next(value.infer(context=context))
|
||||
if key.__class__.__name__ != 'Const':
|
||||
# Something invalid as an attribute.
|
||||
return None
|
||||
if not isinstance(key.value, str):
|
||||
# Not a proper attribute.
|
||||
return None
|
||||
cls_locals[key.value].append(value)
|
||||
|
||||
# Build the class from now.
|
||||
cls = mcs.__class__(name=name.value, lineno=caller.lineno,
|
||||
col_offset=caller.col_offset,
|
||||
parent=caller)
|
||||
empty = node_classes.Pass()
|
||||
cls.postinit(bases=bases.elts, body=[empty], decorators=[],
|
||||
newstyle=True, metaclass=mcs, keywords=[])
|
||||
cls.locals = cls_locals
|
||||
return cls
|
||||
|
||||
def infer_call_result(self, caller, context=None):
|
||||
if context is None:
|
||||
context = contextmod.InferenceContext()
|
||||
context = context.clone()
|
||||
context.boundnode = self.bound
|
||||
|
||||
if (self.bound.__class__.__name__ == 'ClassDef'
|
||||
and self.bound.name == 'type'
|
||||
and self.name == '__new__'
|
||||
and len(caller.args) == 4
|
||||
# TODO(cpopa): this check shouldn't be needed.
|
||||
and self._proxied.parent.frame().qname() == '%s.object' % BUILTINS):
|
||||
|
||||
# Check if we have an ``type.__new__(mcs, name, bases, attrs)`` call.
|
||||
new_cls = self._infer_type_new_call(caller, context)
|
||||
if new_cls:
|
||||
return iter((new_cls, ))
|
||||
|
||||
return super(BoundMethod, self).infer_call_result(caller, context)
|
||||
|
||||
def bool_value(self):
|
||||
return True
|
||||
|
||||
|
||||
class Generator(BaseInstance):
|
||||
"""a special node representing a generator.
|
||||
|
||||
Proxied class is set once for all in raw_building.
|
||||
"""
|
||||
|
||||
# pylint: disable=unnecessary-lambda
|
||||
special_attributes = util.lazy_descriptor(lambda: objectmodel.GeneratorModel())
|
||||
|
||||
# pylint: disable=super-init-not-called
|
||||
def __init__(self, parent=None):
|
||||
self.parent = parent
|
||||
|
||||
def callable(self):
|
||||
return False
|
||||
|
||||
def pytype(self):
|
||||
return '%s.generator' % BUILTINS
|
||||
|
||||
def display_type(self):
|
||||
return 'Generator'
|
||||
|
||||
def bool_value(self):
|
||||
return True
|
||||
|
||||
def __repr__(self):
|
||||
return '<Generator(%s) l.%s at 0x%s>' % (self._proxied.name, self.lineno, id(self))
|
||||
|
||||
def __str__(self):
|
||||
return 'Generator(%s)' % (self._proxied.name)
|
@ -0,0 +1,58 @@
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
|
||||
"""
|
||||
Astroid hook for the attrs library
|
||||
|
||||
Without this hook pylint reports unsupported-assignment-operation
|
||||
for attrs classes
|
||||
"""
|
||||
|
||||
import astroid
|
||||
from astroid import MANAGER
|
||||
|
||||
|
||||
ATTR_IB = 'attr.ib'
|
||||
|
||||
|
||||
def is_decorated_with_attrs(
|
||||
node, decorator_names=('attr.s', 'attr.attrs', 'attr.attributes')):
|
||||
"""Return True if a decorated node has
|
||||
an attr decorator applied."""
|
||||
if not node.decorators:
|
||||
return False
|
||||
for decorator_attribute in node.decorators.nodes:
|
||||
if decorator_attribute.as_string() in decorator_names:
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
def attr_attributes_transform(node):
|
||||
"""Given that the ClassNode has an attr decorator,
|
||||
rewrite class attributes as instance attributes
|
||||
"""
|
||||
# Astroid can't infer this attribute properly
|
||||
# Prevents https://github.com/PyCQA/pylint/issues/1884
|
||||
node.locals["__attrs_attrs__"] = [astroid.Unknown(parent=node.body)]
|
||||
|
||||
for cdefbodynode in node.body:
|
||||
if not isinstance(cdefbodynode, astroid.Assign):
|
||||
continue
|
||||
if isinstance(cdefbodynode.value, astroid.Call):
|
||||
if cdefbodynode.value.func.as_string() != ATTR_IB:
|
||||
continue
|
||||
else:
|
||||
continue
|
||||
for target in cdefbodynode.targets:
|
||||
|
||||
rhs_node = astroid.Unknown(
|
||||
lineno=cdefbodynode.lineno,
|
||||
col_offset=cdefbodynode.col_offset,
|
||||
parent=cdefbodynode
|
||||
)
|
||||
node.locals[target.name] = [rhs_node]
|
||||
|
||||
|
||||
MANAGER.register_transform(
|
||||
astroid.Class,
|
||||
attr_attributes_transform,
|
||||
is_decorated_with_attrs)
|
@ -0,0 +1,530 @@
|
||||
# Copyright (c) 2014-2016 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2015-2016 Cara Vinson <ceridwenv@gmail.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
|
||||
|
||||
"""Astroid hooks for various builtins."""
|
||||
|
||||
from functools import partial
|
||||
import sys
|
||||
from textwrap import dedent
|
||||
|
||||
import six
|
||||
from astroid import (MANAGER, UseInferenceDefault, AttributeInferenceError,
|
||||
inference_tip, InferenceError, NameInferenceError)
|
||||
from astroid import arguments
|
||||
from astroid.builder import AstroidBuilder
|
||||
from astroid import helpers
|
||||
from astroid import nodes
|
||||
from astroid import objects
|
||||
from astroid import scoped_nodes
|
||||
from astroid import util
|
||||
|
||||
|
||||
OBJECT_DUNDER_NEW = 'object.__new__'
|
||||
|
||||
|
||||
def _extend_str(class_node, rvalue):
|
||||
"""function to extend builtin str/unicode class"""
|
||||
# TODO(cpopa): this approach will make astroid believe
|
||||
# that some arguments can be passed by keyword, but
|
||||
# unfortunately, strings and bytes don't accept keyword arguments.
|
||||
code = dedent('''
|
||||
class whatever(object):
|
||||
def join(self, iterable):
|
||||
return {rvalue}
|
||||
def replace(self, old, new, count=None):
|
||||
return {rvalue}
|
||||
def format(self, *args, **kwargs):
|
||||
return {rvalue}
|
||||
def encode(self, encoding='ascii', errors=None):
|
||||
return ''
|
||||
def decode(self, encoding='ascii', errors=None):
|
||||
return u''
|
||||
def capitalize(self):
|
||||
return {rvalue}
|
||||
def title(self):
|
||||
return {rvalue}
|
||||
def lower(self):
|
||||
return {rvalue}
|
||||
def upper(self):
|
||||
return {rvalue}
|
||||
def swapcase(self):
|
||||
return {rvalue}
|
||||
def index(self, sub, start=None, end=None):
|
||||
return 0
|
||||
def find(self, sub, start=None, end=None):
|
||||
return 0
|
||||
def count(self, sub, start=None, end=None):
|
||||
return 0
|
||||
def strip(self, chars=None):
|
||||
return {rvalue}
|
||||
def lstrip(self, chars=None):
|
||||
return {rvalue}
|
||||
def rstrip(self, chars=None):
|
||||
return {rvalue}
|
||||
def rjust(self, width, fillchar=None):
|
||||
return {rvalue}
|
||||
def center(self, width, fillchar=None):
|
||||
return {rvalue}
|
||||
def ljust(self, width, fillchar=None):
|
||||
return {rvalue}
|
||||
''')
|
||||
code = code.format(rvalue=rvalue)
|
||||
fake = AstroidBuilder(MANAGER).string_build(code)['whatever']
|
||||
for method in fake.mymethods():
|
||||
class_node.locals[method.name] = [method]
|
||||
method.parent = class_node
|
||||
|
||||
|
||||
def extend_builtins(class_transforms):
|
||||
from astroid.bases import BUILTINS
|
||||
builtin_ast = MANAGER.astroid_cache[BUILTINS]
|
||||
for class_name, transform in class_transforms.items():
|
||||
transform(builtin_ast[class_name])
|
||||
|
||||
|
||||
if sys.version_info > (3, 0):
|
||||
extend_builtins({'bytes': partial(_extend_str, rvalue="b''"),
|
||||
'str': partial(_extend_str, rvalue="''")})
|
||||
else:
|
||||
extend_builtins({'str': partial(_extend_str, rvalue="''"),
|
||||
'unicode': partial(_extend_str, rvalue="u''")})
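# A minimal sketch (assuming astroid is importable): with the str/bytes
# extensions registered above, calling a string method infers to a string
# Const instead of something uninferable.
import astroid

node = astroid.extract_node("'hello'.upper()  #@")
print(next(node.infer()))  # expected: a Const wrapping '' (the stubbed rvalue)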
|
||||
|
||||
|
||||
def register_builtin_transform(transform, builtin_name):
|
||||
"""Register a new transform function for the given *builtin_name*.
|
||||
|
||||
The transform function must accept two parameters, a node and
|
||||
an optional context.
|
||||
"""
|
||||
def _transform_wrapper(node, context=None):
|
||||
result = transform(node, context=context)
|
||||
if result:
|
||||
if not result.parent:
|
||||
# Let the transformation function determine
|
||||
# the parent for its result. Otherwise,
|
||||
# we set it to be the node we transformed from.
|
||||
result.parent = node
|
||||
|
||||
result.lineno = node.lineno
|
||||
result.col_offset = node.col_offset
|
||||
return iter([result])
|
||||
|
||||
MANAGER.register_transform(nodes.Call,
|
||||
inference_tip(_transform_wrapper),
|
||||
lambda n: (isinstance(n.func, nodes.Name) and
|
||||
n.func.name == builtin_name))
|
||||
|
||||
|
||||
def _generic_inference(node, context, node_type, transform):
|
||||
args = node.args
|
||||
if not args:
|
||||
return node_type()
|
||||
if len(node.args) > 1:
|
||||
raise UseInferenceDefault()
|
||||
|
||||
arg, = args
|
||||
transformed = transform(arg)
|
||||
if not transformed:
|
||||
try:
|
||||
inferred = next(arg.infer(context=context))
|
||||
except (InferenceError, StopIteration):
|
||||
raise UseInferenceDefault()
|
||||
if inferred is util.Uninferable:
|
||||
raise UseInferenceDefault()
|
||||
transformed = transform(inferred)
|
||||
if not transformed or transformed is util.Uninferable:
|
||||
raise UseInferenceDefault()
|
||||
return transformed
|
||||
|
||||
|
||||
def _generic_transform(arg, klass, iterables, build_elts):
|
||||
if isinstance(arg, klass):
|
||||
return arg
|
||||
elif isinstance(arg, iterables):
|
||||
if not all(isinstance(elt, nodes.Const)
|
||||
for elt in arg.elts):
|
||||
# TODO(cpopa): Don't support heterogeneous elements.
|
||||
# Not yet, though.
|
||||
raise UseInferenceDefault()
|
||||
elts = [elt.value for elt in arg.elts]
|
||||
elif isinstance(arg, nodes.Dict):
|
||||
if not all(isinstance(elt[0], nodes.Const)
|
||||
for elt in arg.items):
|
||||
raise UseInferenceDefault()
|
||||
elts = [item[0].value for item in arg.items]
|
||||
elif (isinstance(arg, nodes.Const) and
|
||||
isinstance(arg.value, (six.string_types, six.binary_type))):
|
||||
elts = arg.value
|
||||
else:
|
||||
return
|
||||
return klass.from_constants(elts=build_elts(elts))
|
||||
|
||||
|
||||
def _infer_builtin(node, context,
|
||||
klass=None, iterables=None,
|
||||
build_elts=None):
|
||||
transform_func = partial(
|
||||
_generic_transform,
|
||||
klass=klass,
|
||||
iterables=iterables,
|
||||
build_elts=build_elts)
|
||||
|
||||
return _generic_inference(node, context, klass, transform_func)
|
||||
|
||||
# pylint: disable=invalid-name
|
||||
infer_tuple = partial(
|
||||
_infer_builtin,
|
||||
klass=nodes.Tuple,
|
||||
iterables=(nodes.List, nodes.Set, objects.FrozenSet,
|
||||
objects.DictItems, objects.DictKeys,
|
||||
objects.DictValues),
|
||||
build_elts=tuple)
|
||||
|
||||
infer_list = partial(
|
||||
_infer_builtin,
|
||||
klass=nodes.List,
|
||||
iterables=(nodes.Tuple, nodes.Set, objects.FrozenSet,
|
||||
objects.DictItems, objects.DictKeys,
|
||||
objects.DictValues),
|
||||
build_elts=list)
|
||||
|
||||
infer_set = partial(
|
||||
_infer_builtin,
|
||||
klass=nodes.Set,
|
||||
iterables=(nodes.List, nodes.Tuple, objects.FrozenSet,
|
||||
objects.DictKeys),
|
||||
build_elts=set)
|
||||
|
||||
infer_frozenset = partial(
|
||||
_infer_builtin,
|
||||
klass=objects.FrozenSet,
|
||||
iterables=(nodes.List, nodes.Tuple, nodes.Set, objects.FrozenSet,
|
||||
objects.DictKeys),
|
||||
build_elts=frozenset)
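# A minimal sketch (assuming astroid is importable) of the container
# transforms defined above: a literal passed to tuple() is rebuilt as a
# Tuple node whose elements mirror the input constants.
import astroid

node = astroid.extract_node("tuple([1, 2, 3])  #@")
print(next(node.infer()).as_string())  # expected: (1, 2, 3)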
|
||||
|
||||
|
||||
def _get_elts(arg, context):
|
||||
is_iterable = lambda n: isinstance(n,
|
||||
(nodes.List, nodes.Tuple, nodes.Set))
|
||||
try:
|
||||
inferred = next(arg.infer(context))
|
||||
except (InferenceError, NameInferenceError):
|
||||
raise UseInferenceDefault()
|
||||
if isinstance(inferred, nodes.Dict):
|
||||
items = inferred.items
|
||||
elif is_iterable(inferred):
|
||||
items = []
|
||||
for elt in inferred.elts:
|
||||
# If an item is not a pair of two items,
|
||||
# then fallback to the default inference.
|
||||
# Also, take into consideration only hashable items,
|
||||
# tuples and consts. We are choosing Names as well.
|
||||
if not is_iterable(elt):
|
||||
raise UseInferenceDefault()
|
||||
if len(elt.elts) != 2:
|
||||
raise UseInferenceDefault()
|
||||
if not isinstance(elt.elts[0],
|
||||
(nodes.Tuple, nodes.Const, nodes.Name)):
|
||||
raise UseInferenceDefault()
|
||||
items.append(tuple(elt.elts))
|
||||
else:
|
||||
raise UseInferenceDefault()
|
||||
return items
|
||||
|
||||
def infer_dict(node, context=None):
|
||||
"""Try to infer a dict call to a Dict node.
|
||||
|
||||
The function treats the following cases:
|
||||
|
||||
* dict()
|
||||
* dict(mapping)
|
||||
* dict(iterable)
|
||||
* dict(iterable, **kwargs)
|
||||
* dict(mapping, **kwargs)
|
||||
* dict(**kwargs)
|
||||
|
||||
If a case can't be inferred, we'll fallback to default inference.
|
||||
"""
|
||||
call = arguments.CallSite.from_call(node)
|
||||
if call.has_invalid_arguments() or call.has_invalid_keywords():
|
||||
raise UseInferenceDefault
|
||||
|
||||
args = call.positional_arguments
|
||||
kwargs = list(call.keyword_arguments.items())
|
||||
|
||||
if not args and not kwargs:
|
||||
# dict()
|
||||
return nodes.Dict()
|
||||
elif kwargs and not args:
|
||||
# dict(a=1, b=2, c=4)
|
||||
items = [(nodes.Const(key), value) for key, value in kwargs]
|
||||
elif len(args) == 1 and kwargs:
|
||||
# dict(some_iterable, b=2, c=4)
|
||||
elts = _get_elts(args[0], context)
|
||||
keys = [(nodes.Const(key), value) for key, value in kwargs]
|
||||
items = elts + keys
|
||||
elif len(args) == 1:
|
||||
items = _get_elts(args[0], context)
|
||||
else:
|
||||
raise UseInferenceDefault()
|
||||
|
||||
value = nodes.Dict(col_offset=node.col_offset,
|
||||
lineno=node.lineno,
|
||||
parent=node.parent)
|
||||
value.postinit(items)
|
||||
return value
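# A minimal sketch (assuming astroid is importable) of the keyword-only case
# handled by infer_dict above:
import astroid

node = astroid.extract_node("dict(a=1, b=2)  #@")
print(next(node.infer()).as_string())  # expected roughly: {'a': 1, 'b': 2}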
|
||||
|
||||
|
||||
def infer_super(node, context=None):
|
||||
"""Understand super calls.
|
||||
|
||||
There are some restrictions for what can be understood:
|
||||
|
||||
* unbounded super (one argument form) is not understood.
|
||||
|
||||
* if the super call is not inside a function (classmethod or method),
|
||||
then the default inference will be used.
|
||||
|
||||
* if the super arguments can't be inferred, the default inference
|
||||
will be used.
|
||||
"""
|
||||
if len(node.args) == 1:
|
||||
# Ignore unbounded super.
|
||||
raise UseInferenceDefault
|
||||
|
||||
scope = node.scope()
|
||||
if not isinstance(scope, nodes.FunctionDef):
|
||||
# Ignore non-method uses of super.
|
||||
raise UseInferenceDefault
|
||||
if scope.type not in ('classmethod', 'method'):
|
||||
# Not interested in staticmethods.
|
||||
raise UseInferenceDefault
|
||||
|
||||
cls = scoped_nodes.get_wrapping_class(scope)
|
||||
if not len(node.args):
|
||||
mro_pointer = cls
|
||||
# If we are in a classmethod, the interpreter will fill
|
||||
# automatically the class as the second argument, not an instance.
|
||||
if scope.type == 'classmethod':
|
||||
mro_type = cls
|
||||
else:
|
||||
mro_type = cls.instantiate_class()
|
||||
else:
|
||||
# TODO(cpopa): support flow control (multiple inference values).
|
||||
try:
|
||||
mro_pointer = next(node.args[0].infer(context=context))
|
||||
except InferenceError:
|
||||
raise UseInferenceDefault
|
||||
try:
|
||||
mro_type = next(node.args[1].infer(context=context))
|
||||
except InferenceError:
|
||||
raise UseInferenceDefault
|
||||
|
||||
if mro_pointer is util.Uninferable or mro_type is util.Uninferable:
|
||||
# No way we could understand this.
|
||||
raise UseInferenceDefault
|
||||
|
||||
super_obj = objects.Super(mro_pointer=mro_pointer,
|
||||
mro_type=mro_type,
|
||||
self_class=cls,
|
||||
scope=scope)
|
||||
super_obj.parent = node
|
||||
return super_obj
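# A minimal sketch (assuming astroid is importable): a two-argument super()
# call inside a method should infer to an astroid Super object.
import astroid

node = astroid.extract_node('''
class A(object):
    def meth(self):
        return 1
class B(A):
    def meth(self):
        super(B, self)  #@
''')
print(next(node.infer()))  # expected: an objects.Super proxy for B/A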
|
||||
|
||||
|
||||
def _infer_getattr_args(node, context):
|
||||
if len(node.args) not in (2, 3):
|
||||
# Not a valid getattr call.
|
||||
raise UseInferenceDefault
|
||||
|
||||
try:
|
||||
# TODO(cpopa): follow all the values of the first argument?
|
||||
obj = next(node.args[0].infer(context=context))
|
||||
attr = next(node.args[1].infer(context=context))
|
||||
except InferenceError:
|
||||
raise UseInferenceDefault
|
||||
|
||||
if obj is util.Uninferable or attr is util.Uninferable:
|
||||
# If one of the arguments is something we can't infer,
|
||||
# then also make the result of the getattr call something
|
||||
# which is unknown.
|
||||
return util.Uninferable, util.Uninferable
|
||||
|
||||
is_string = (isinstance(attr, nodes.Const) and
|
||||
isinstance(attr.value, six.string_types))
|
||||
if not is_string:
|
||||
raise UseInferenceDefault
|
||||
|
||||
return obj, attr.value
|
||||
|
||||
|
||||
def infer_getattr(node, context=None):
|
||||
"""Understand getattr calls
|
||||
|
||||
If one of the arguments is an Uninferable object, then the
|
||||
result will be an Uninferable object. Otherwise, the normal attribute
|
||||
lookup will be done.
|
||||
"""
|
||||
obj, attr = _infer_getattr_args(node, context)
|
||||
if obj is util.Uninferable or attr is util.Uninferable or not hasattr(obj, 'igetattr'):
|
||||
return util.Uninferable
|
||||
|
||||
try:
|
||||
return next(obj.igetattr(attr, context=context))
|
||||
except (StopIteration, InferenceError, AttributeInferenceError):
|
||||
if len(node.args) == 3:
|
||||
# Try to infer the default and return it instead.
|
||||
try:
|
||||
return next(node.args[2].infer(context=context))
|
||||
except InferenceError:
|
||||
raise UseInferenceDefault
|
||||
|
||||
raise UseInferenceDefault
|
||||
|
||||
|
||||
def infer_hasattr(node, context=None):
|
||||
"""Understand hasattr calls
|
||||
|
||||
This always guarantees three possible outcomes for calling
|
||||
hasattr: Const(False) when we are sure that the object
|
||||
doesn't have the intended attribute, Const(True) when
|
||||
we know that the object has the attribute and Uninferable
|
||||
when we are unsure of the outcome of the function call.
|
||||
"""
|
||||
try:
|
||||
obj, attr = _infer_getattr_args(node, context)
|
||||
if obj is util.Uninferable or attr is util.Uninferable or not hasattr(obj, 'getattr'):
|
||||
return util.Uninferable
|
||||
obj.getattr(attr, context=context)
|
||||
except UseInferenceDefault:
|
||||
# Can't infer something from this function call.
|
||||
return util.Uninferable
|
||||
except AttributeInferenceError:
|
||||
# Doesn't have it.
|
||||
return nodes.Const(False)
|
||||
return nodes.Const(True)
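# A minimal sketch (assuming astroid is importable): hasattr() on an instance
# whose class clearly defines the attribute should infer to Const(True).
import astroid

node = astroid.extract_node('''
class A(object):
    def method(self):
        pass
hasattr(A(), 'method')  #@
''')
print(next(node.infer()).value)  # expected: True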
|
||||
|
||||
|
||||
def infer_callable(node, context=None):
|
||||
"""Understand callable calls
|
||||
|
||||
This follows Python's semantics, where an object
|
||||
is callable if it provides an attribute __call__,
|
||||
even though that attribute is something which can't be
|
||||
called.
|
||||
"""
|
||||
if len(node.args) != 1:
|
||||
# Invalid callable call.
|
||||
raise UseInferenceDefault
|
||||
|
||||
argument = node.args[0]
|
||||
try:
|
||||
inferred = next(argument.infer(context=context))
|
||||
except InferenceError:
|
||||
return util.Uninferable
|
||||
if inferred is util.Uninferable:
|
||||
return util.Uninferable
|
||||
return nodes.Const(inferred.callable())
|
||||
|
||||
|
||||
def infer_bool(node, context=None):
|
||||
"""Understand bool calls."""
|
||||
if len(node.args) > 1:
|
||||
# Invalid bool call.
|
||||
raise UseInferenceDefault
|
||||
|
||||
if not node.args:
|
||||
return nodes.Const(False)
|
||||
|
||||
argument = node.args[0]
|
||||
try:
|
||||
inferred = next(argument.infer(context=context))
|
||||
except InferenceError:
|
||||
return util.Uninferable
|
||||
if inferred is util.Uninferable:
|
||||
return util.Uninferable
|
||||
|
||||
bool_value = inferred.bool_value()
|
||||
if bool_value is util.Uninferable:
|
||||
return util.Uninferable
|
||||
return nodes.Const(bool_value)
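# A minimal sketch (assuming astroid is importable): bool() over an empty
# literal should infer to Const(False) via the bool_value() machinery above.
import astroid

node = astroid.extract_node("bool([])  #@")
print(next(node.infer()).value)  # expected: False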
|
||||
|
||||
|
||||
def infer_type(node, context=None):
|
||||
"""Understand the one-argument form of *type*."""
|
||||
if len(node.args) != 1:
|
||||
raise UseInferenceDefault
|
||||
|
||||
return helpers.object_type(node.args[0], context)
|
||||
|
||||
|
||||
def infer_slice(node, context=None):
|
||||
"""Understand `slice` calls."""
|
||||
args = node.args
|
||||
if not 0 < len(args) <= 3:
|
||||
raise UseInferenceDefault
|
||||
|
||||
args = list(map(helpers.safe_infer, args))
|
||||
for arg in args:
|
||||
if not arg or arg is util.Uninferable:
|
||||
raise UseInferenceDefault
|
||||
if not isinstance(arg, nodes.Const):
|
||||
raise UseInferenceDefault
|
||||
if not isinstance(arg.value, (type(None), int)):
|
||||
raise UseInferenceDefault
|
||||
|
||||
if len(args) < 3:
|
||||
# Make sure we have 3 arguments.
|
||||
args.extend([None] * (3 - len(args)))
|
||||
|
||||
slice_node = nodes.Slice(lineno=node.lineno,
|
||||
col_offset=node.col_offset,
|
||||
parent=node.parent)
|
||||
slice_node.postinit(*args)
|
||||
return slice_node
|
||||
|
||||
|
||||
def _infer_object__new__decorator(node, context=None):
|
||||
# Instantiate class immediately
|
||||
# since that's what @object.__new__ does
|
||||
return iter((node.instantiate_class(),))
|
||||
|
||||
|
||||
def _infer_object__new__decorator_check(node):
|
||||
"""Predicate before inference_tip
|
||||
|
||||
Check if the given ClassDef has a @object.__new__ decorator
|
||||
"""
|
||||
if not node.decorators:
|
||||
return False
|
||||
|
||||
for decorator in node.decorators.nodes:
|
||||
if isinstance(decorator, nodes.Attribute):
|
||||
if decorator.as_string() == OBJECT_DUNDER_NEW:
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
# Builtins inference
|
||||
register_builtin_transform(infer_bool, 'bool')
|
||||
register_builtin_transform(infer_super, 'super')
|
||||
register_builtin_transform(infer_callable, 'callable')
|
||||
register_builtin_transform(infer_getattr, 'getattr')
|
||||
register_builtin_transform(infer_hasattr, 'hasattr')
|
||||
register_builtin_transform(infer_tuple, 'tuple')
|
||||
register_builtin_transform(infer_set, 'set')
|
||||
register_builtin_transform(infer_list, 'list')
|
||||
register_builtin_transform(infer_dict, 'dict')
|
||||
register_builtin_transform(infer_frozenset, 'frozenset')
|
||||
register_builtin_transform(infer_type, 'type')
|
||||
register_builtin_transform(infer_slice, 'slice')
|
||||
|
||||
# Infer object.__new__ calls
|
||||
MANAGER.register_transform(
|
||||
nodes.ClassDef,
|
||||
inference_tip(_infer_object__new__decorator),
|
||||
_infer_object__new__decorator_check
|
||||
)
|
@ -0,0 +1,66 @@
|
||||
# Copyright (c) 2016 Claudiu Popa <pcmanticore@gmail.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
|
||||
import sys
|
||||
|
||||
import astroid
|
||||
|
||||
PY35 = sys.version_info >= (3, 5)
|
||||
|
||||
|
||||
def _collections_transform():
|
||||
return astroid.parse('''
|
||||
class defaultdict(dict):
|
||||
default_factory = None
|
||||
def __missing__(self, key): pass
|
||||
def __getitem__(self, key): return default_factory
|
||||
|
||||
''' + _deque_mock() + '''
|
||||
|
||||
class OrderedDict(dict):
|
||||
def __reversed__(self): return self[::-1]
|
||||
''')
|
||||
|
||||
|
||||
def _deque_mock():
|
||||
base_deque_class = '''
|
||||
class deque(object):
|
||||
maxlen = 0
|
||||
def __init__(self, iterable=None, maxlen=None):
|
||||
self.iterable = iterable
|
||||
def append(self, x): pass
|
||||
def appendleft(self, x): pass
|
||||
def clear(self): pass
|
||||
def count(self, x): return 0
|
||||
def extend(self, iterable): pass
|
||||
def extendleft(self, iterable): pass
|
||||
def pop(self): pass
|
||||
def popleft(self): pass
|
||||
def remove(self, value): pass
|
||||
def reverse(self): pass
|
||||
def rotate(self, n=1): pass
|
||||
def __iter__(self): return self
|
||||
def __reversed__(self): return self.iterable[::-1]
|
||||
def __getitem__(self, index): pass
|
||||
def __setitem__(self, index, value): pass
|
||||
def __delitem__(self, index): pass
|
||||
def __bool__(self): return bool(self.iterable)
|
||||
def __nonzero__(self): return bool(self.iterable)
|
||||
def __contains__(self, o): return o in self.iterable
|
||||
def __len__(self): return len(self.iterable)
|
||||
def __copy__(self): return deque(self.iterable)'''
|
||||
if PY35:
|
||||
base_deque_class += '''
|
||||
def copy(self): return deque(self.iterable)
|
||||
def index(self, x, start=0, end=0): return 0
|
||||
def insert(self, x, i): pass
|
||||
def __add__(self, other): pass
|
||||
def __iadd__(self, other): pass
|
||||
def __mul__(self, other): pass
|
||||
def __imul__(self, other): pass
|
||||
def __rmul__(self, other): pass'''
|
||||
return base_deque_class
|
||||
|
||||
astroid.register_module_extender(astroid.MANAGER, 'collections', _collections_transform)
|
||||
|
@ -0,0 +1,177 @@
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
|
||||
import astroid
|
||||
|
||||
|
||||
def _curses_transform():
|
||||
return astroid.parse('''
|
||||
A_ALTCHARSET = 1
|
||||
A_BLINK = 1
|
||||
A_BOLD = 1
|
||||
A_DIM = 1
|
||||
A_INVIS = 1
|
||||
A_ITALIC = 1
|
||||
A_NORMAL = 1
|
||||
A_PROTECT = 1
|
||||
A_REVERSE = 1
|
||||
A_STANDOUT = 1
|
||||
A_UNDERLINE = 1
|
||||
A_HORIZONTAL = 1
|
||||
A_LEFT = 1
|
||||
A_LOW = 1
|
||||
A_RIGHT = 1
|
||||
A_TOP = 1
|
||||
A_VERTICAL = 1
|
||||
A_CHARTEXT = 1
|
||||
A_ATTRIBUTES = 1
|
||||
A_CHARTEXT = 1
|
||||
A_COLOR = 1
|
||||
KEY_MIN = 1
|
||||
KEY_BREAK = 1
|
||||
KEY_DOWN = 1
|
||||
KEY_UP = 1
|
||||
KEY_LEFT = 1
|
||||
KEY_RIGHT = 1
|
||||
KEY_HOME = 1
|
||||
KEY_BACKSPACE = 1
|
||||
KEY_F0 = 1
|
||||
KEY_Fn = 1
|
||||
KEY_DL = 1
|
||||
KEY_IL = 1
|
||||
KEY_DC = 1
|
||||
KEY_IC = 1
|
||||
KEY_EIC = 1
|
||||
KEY_CLEAR = 1
|
||||
KEY_EOS = 1
|
||||
KEY_EOL = 1
|
||||
KEY_SF = 1
|
||||
KEY_SR = 1
|
||||
KEY_NPAGE = 1
|
||||
KEY_PPAGE = 1
|
||||
KEY_STAB = 1
|
||||
KEY_CTAB = 1
|
||||
KEY_CATAB = 1
|
||||
KEY_ENTER = 1
|
||||
KEY_SRESET = 1
|
||||
KEY_RESET = 1
|
||||
KEY_PRINT = 1
|
||||
KEY_LL = 1
|
||||
KEY_A1 = 1
|
||||
KEY_A3 = 1
|
||||
KEY_B2 = 1
|
||||
KEY_C1 = 1
|
||||
KEY_C3 = 1
|
||||
KEY_BTAB = 1
|
||||
KEY_BEG = 1
|
||||
KEY_CANCEL = 1
|
||||
KEY_CLOSE = 1
|
||||
KEY_COMMAND = 1
|
||||
KEY_COPY = 1
|
||||
KEY_CREATE = 1
|
||||
KEY_END = 1
|
||||
KEY_EXIT = 1
|
||||
KEY_FIND = 1
|
||||
KEY_HELP = 1
|
||||
KEY_MARK = 1
|
||||
KEY_MESSAGE = 1
|
||||
KEY_MOVE = 1
|
||||
KEY_NEXT = 1
|
||||
KEY_OPEN = 1
|
||||
KEY_OPTIONS = 1
|
||||
KEY_PREVIOUS = 1
|
||||
KEY_REDO = 1
|
||||
KEY_REFERENCE = 1
|
||||
KEY_REFRESH = 1
|
||||
KEY_REPLACE = 1
|
||||
KEY_RESTART = 1
|
||||
KEY_RESUME = 1
|
||||
KEY_SAVE = 1
|
||||
KEY_SBEG = 1
|
||||
KEY_SCANCEL = 1
|
||||
KEY_SCOMMAND = 1
|
||||
KEY_SCOPY = 1
|
||||
KEY_SCREATE = 1
|
||||
KEY_SDC = 1
|
||||
KEY_SDL = 1
|
||||
KEY_SELECT = 1
|
||||
KEY_SEND = 1
|
||||
KEY_SEOL = 1
|
||||
KEY_SEXIT = 1
|
||||
KEY_SFIND = 1
|
||||
KEY_SHELP = 1
|
||||
KEY_SHOME = 1
|
||||
KEY_SIC = 1
|
||||
KEY_SLEFT = 1
|
||||
KEY_SMESSAGE = 1
|
||||
KEY_SMOVE = 1
|
||||
KEY_SNEXT = 1
|
||||
KEY_SOPTIONS = 1
|
||||
KEY_SPREVIOUS = 1
|
||||
KEY_SPRINT = 1
|
||||
KEY_SREDO = 1
|
||||
KEY_SREPLACE = 1
|
||||
KEY_SRIGHT = 1
|
||||
KEY_SRSUME = 1
|
||||
KEY_SSAVE = 1
|
||||
KEY_SSUSPEND = 1
|
||||
KEY_SUNDO = 1
|
||||
KEY_SUSPEND = 1
|
||||
KEY_UNDO = 1
|
||||
KEY_MOUSE = 1
|
||||
KEY_RESIZE = 1
|
||||
KEY_MAX = 1
|
||||
ACS_BBSS = 1
|
||||
ACS_BLOCK = 1
|
||||
ACS_BOARD = 1
|
||||
ACS_BSBS = 1
|
||||
ACS_BSSB = 1
|
||||
ACS_BSSS = 1
|
||||
ACS_BTEE = 1
|
||||
ACS_BULLET = 1
|
||||
ACS_CKBOARD = 1
|
||||
ACS_DARROW = 1
|
||||
ACS_DEGREE = 1
|
||||
ACS_DIAMOND = 1
|
||||
ACS_GEQUAL = 1
|
||||
ACS_HLINE = 1
|
||||
ACS_LANTERN = 1
|
||||
ACS_LARROW = 1
|
||||
ACS_LEQUAL = 1
|
||||
ACS_LLCORNER = 1
|
||||
ACS_LRCORNER = 1
|
||||
ACS_LTEE = 1
|
||||
ACS_NEQUAL = 1
|
||||
ACS_PI = 1
|
||||
ACS_PLMINUS = 1
|
||||
ACS_PLUS = 1
|
||||
ACS_RARROW = 1
|
||||
ACS_RTEE = 1
|
||||
ACS_S1 = 1
|
||||
ACS_S3 = 1
|
||||
ACS_S7 = 1
|
||||
ACS_S9 = 1
|
||||
ACS_SBBS = 1
|
||||
ACS_SBSB = 1
|
||||
ACS_SBSS = 1
|
||||
ACS_SSBB = 1
|
||||
ACS_SSBS = 1
|
||||
ACS_SSSB = 1
|
||||
ACS_SSSS = 1
|
||||
ACS_STERLING = 1
|
||||
ACS_TTEE = 1
|
||||
ACS_UARROW = 1
|
||||
ACS_ULCORNER = 1
|
||||
ACS_URCORNER = 1
|
||||
ACS_VLINE = 1
|
||||
COLOR_BLACK = 1
|
||||
COLOR_BLUE = 1
|
||||
COLOR_CYAN = 1
|
||||
COLOR_GREEN = 1
|
||||
COLOR_MAGENTA = 1
|
||||
COLOR_RED = 1
|
||||
COLOR_WHITE = 1
|
||||
COLOR_YELLOW = 1
|
||||
''')
|
||||
|
||||
|
||||
astroid.register_module_extender(astroid.MANAGER, 'curses', _curses_transform)
|
@ -0,0 +1,21 @@
|
||||
# Copyright (c) 2015-2016 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2016 Cara Vinson <ceridwenv@gmail.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
|
||||
|
||||
"""Astroid hooks for dateutil"""
|
||||
|
||||
import textwrap
|
||||
|
||||
from astroid import MANAGER, register_module_extender
|
||||
from astroid.builder import AstroidBuilder
|
||||
|
||||
def dateutil_transform():
|
||||
return AstroidBuilder(MANAGER).string_build(textwrap.dedent('''
|
||||
import datetime
|
||||
def parse(timestr, parserinfo=None, **kwargs):
|
||||
return datetime.datetime()
|
||||
'''))
|
||||
|
||||
register_module_extender(MANAGER, 'dateutil.parser', dateutil_transform)
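# A minimal sketch (assuming astroid and python-dateutil are both installed):
# with the extender registered above, dateutil.parser.parse() should infer to
# a datetime.datetime instance instead of remaining uninferable.
import astroid

node = astroid.extract_node('''
from dateutil import parser
parser.parse('2018-01-01')  #@
''')
print(next(node.infer()))  # expected: an Instance of datetime.datetime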
|
@ -0,0 +1,60 @@
|
||||
# Copyright (c) 2017 Claudiu Popa <pcmanticore@gmail.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
|
||||
|
||||
import sys
|
||||
|
||||
import astroid
|
||||
|
||||
|
||||
def _clone_node_with_lineno(node, parent, lineno):
|
||||
cls = node.__class__
|
||||
other_fields = node._other_fields
|
||||
_astroid_fields = node._astroid_fields
|
||||
init_params = {
|
||||
'lineno': lineno,
|
||||
'col_offset': node.col_offset,
|
||||
'parent': parent
|
||||
}
|
||||
postinit_params = {
|
||||
param: getattr(node, param)
|
||||
for param in _astroid_fields
|
||||
}
|
||||
if other_fields:
|
||||
init_params.update({
|
||||
param: getattr(node, param)
|
||||
for param in other_fields
|
||||
})
|
||||
new_node = cls(**init_params)
|
||||
if hasattr(node, 'postinit') and _astroid_fields:
|
||||
new_node.postinit(**postinit_params)
|
||||
return new_node
|
||||
|
||||
|
||||
def _transform_formatted_value(node):
|
||||
if node.value and node.value.lineno == 1:
|
||||
if node.lineno != node.value.lineno:
|
||||
new_node = astroid.FormattedValue(
|
||||
lineno=node.lineno,
|
||||
col_offset=node.col_offset,
|
||||
parent=node.parent
|
||||
)
|
||||
new_value = _clone_node_with_lineno(
|
||||
node=node.value,
|
||||
lineno=node.lineno,
|
||||
parent=new_node
|
||||
)
|
||||
new_node.postinit(value=new_value,
|
||||
format_spec=node.format_spec)
|
||||
return new_node
|
||||
|
||||
|
||||
if sys.version_info[:2] >= (3, 6):
|
||||
# TODO: this fix tries to *patch* http://bugs.python.org/issue29051
|
||||
# The problem is that FormattedValue.value, which is a Name node,
|
||||
# has wrong line numbers, usually 1. This creates problems for pylint,
|
||||
# which expects correct line numbers for things such as message control.
|
||||
astroid.MANAGER.register_transform(
|
||||
astroid.FormattedValue,
|
||||
_transform_formatted_value)
|
@ -0,0 +1,75 @@
|
||||
# Copyright (c) 2016 Claudiu Popa <pcmanticore@gmail.com>
|
||||
|
||||
"""Astroid hooks for understanding functools library module."""
|
||||
|
||||
import astroid
|
||||
from astroid import BoundMethod
|
||||
from astroid import extract_node
|
||||
from astroid import helpers
|
||||
from astroid.interpreter import objectmodel
|
||||
from astroid import MANAGER
|
||||
|
||||
|
||||
LRU_CACHE = 'functools.lru_cache'
|
||||
|
||||
|
||||
class LruWrappedModel(objectmodel.FunctionModel):
|
||||
"""Special attribute model for functions decorated with functools.lru_cache.
|
||||
|
||||
The said decorator patches, at decoration time, some functions onto
|
||||
the decorated function.
|
||||
"""
|
||||
|
||||
@property
|
||||
def py__wrapped__(self):
|
||||
return self._instance
|
||||
|
||||
@property
|
||||
def pycache_info(self):
|
||||
cache_info = extract_node('''
|
||||
from functools import _CacheInfo
|
||||
_CacheInfo(0, 0, 0, 0)
|
||||
''')
|
||||
class CacheInfoBoundMethod(BoundMethod):
|
||||
def infer_call_result(self, caller, context=None):
|
||||
yield helpers.safe_infer(cache_info)
|
||||
|
||||
return CacheInfoBoundMethod(proxy=self._instance, bound=self._instance)
|
||||
|
||||
@property
|
||||
def pycache_clear(self):
|
||||
node = extract_node('''def cache_clear(self): pass''')
|
||||
return BoundMethod(proxy=node, bound=self._instance.parent.scope())
|
||||
|
||||
|
||||
def _transform_lru_cache(node, context=None):
|
||||
# TODO: this is not ideal, since the node should be immutable,
|
||||
# but due to https://github.com/PyCQA/astroid/issues/354,
|
||||
# there's not much we can do now.
|
||||
# Replacing the node would work partially, because,
|
||||
# in pylint, the old node would still be available, leading
|
||||
# to spurious false positives.
|
||||
node.special_attributes = LruWrappedModel()(node)
|
||||
return
|
||||
|
||||
|
||||
def _looks_like_lru_cache(node):
|
||||
"""Check if the given function node is decorated with lru_cache."""
|
||||
if not node.decorators:
|
||||
return False
|
||||
|
||||
for decorator in node.decorators.nodes:
|
||||
if not isinstance(decorator, astroid.Call):
|
||||
continue
|
||||
|
||||
func = helpers.safe_infer(decorator.func)
|
||||
if func in (None, astroid.Uninferable):
|
||||
continue
|
||||
|
||||
if isinstance(func, astroid.FunctionDef) and func.qname() == LRU_CACHE:
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
MANAGER.register_transform(astroid.FunctionDef, _transform_lru_cache,
|
||||
_looks_like_lru_cache)
|
201
thesisenv/lib/python3.6/site-packages/astroid/brain/brain_gi.py
Normal file
@ -0,0 +1,201 @@
|
||||
# Copyright (c) 2015-2016 Claudiu Popa <pcmanticore@gmail.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
|
||||
|
||||
"""Astroid hooks for the Python 2 GObject introspection bindings.
|
||||
|
||||
Helps with understanding everything imported from 'gi.repository'
|
||||
"""
|
||||
|
||||
import inspect
|
||||
import itertools
|
||||
import sys
|
||||
import re
|
||||
import warnings
|
||||
|
||||
from astroid import MANAGER, AstroidBuildingError, nodes
|
||||
from astroid.builder import AstroidBuilder
|
||||
|
||||
|
||||
_inspected_modules = {}
|
||||
|
||||
_identifier_re = r'^[A-Za-z_]\w*$'
|
||||
|
||||
def _gi_build_stub(parent):
|
||||
"""
|
||||
Inspect the passed module recursively and build stubs for functions,
|
||||
classes, etc.
|
||||
"""
|
||||
classes = {}
|
||||
functions = {}
|
||||
constants = {}
|
||||
methods = {}
|
||||
for name in dir(parent):
|
||||
if name.startswith("__"):
|
||||
continue
|
||||
|
||||
# Check if this is a valid name in python
|
||||
if not re.match(_identifier_re, name):
|
||||
continue
|
||||
|
||||
try:
|
||||
obj = getattr(parent, name)
|
||||
except:
|
||||
continue
|
||||
|
||||
if inspect.isclass(obj):
|
||||
classes[name] = obj
|
||||
elif (inspect.isfunction(obj) or
|
||||
inspect.isbuiltin(obj)):
|
||||
functions[name] = obj
|
||||
elif (inspect.ismethod(obj) or
|
||||
inspect.ismethoddescriptor(obj)):
|
||||
methods[name] = obj
|
||||
elif (str(obj).startswith("<flags") or
|
||||
str(obj).startswith("<enum ") or
|
||||
str(obj).startswith("<GType ") or
|
||||
inspect.isdatadescriptor(obj)):
|
||||
constants[name] = 0
|
||||
elif isinstance(obj, (int, str)):
|
||||
constants[name] = obj
|
||||
elif callable(obj):
|
||||
# Fall back to a function for anything callable
|
||||
functions[name] = obj
|
||||
else:
|
||||
# Assume everything else is some manner of constant
|
||||
constants[name] = 0
|
||||
|
||||
ret = ""
|
||||
|
||||
if constants:
|
||||
ret += "# %s constants\n\n" % parent.__name__
|
||||
for name in sorted(constants):
|
||||
if name[0].isdigit():
|
||||
# GDK has some busted constant names like
|
||||
# Gdk.EventType.2BUTTON_PRESS
|
||||
continue
|
||||
|
||||
val = constants[name]
|
||||
|
||||
strval = str(val)
|
||||
if isinstance(val, str):
|
||||
strval = '"%s"' % str(val).replace("\\", "\\\\")
|
||||
ret += "%s = %s\n" % (name, strval)
|
||||
|
||||
if ret:
|
||||
ret += "\n\n"
|
||||
if functions:
|
||||
ret += "# %s functions\n\n" % parent.__name__
|
||||
for name in sorted(functions):
|
||||
ret += "def %s(*args, **kwargs):\n" % name
|
||||
ret += " pass\n"
|
||||
|
||||
if ret:
|
||||
ret += "\n\n"
|
||||
if methods:
|
||||
ret += "# %s methods\n\n" % parent.__name__
|
||||
for name in sorted(methods):
|
||||
ret += "def %s(self, *args, **kwargs):\n" % name
|
||||
ret += " pass\n"
|
||||
|
||||
if ret:
|
||||
ret += "\n\n"
|
||||
if classes:
|
||||
ret += "# %s classes\n\n" % parent.__name__
|
||||
for name in sorted(classes):
|
||||
ret += "class %s(object):\n" % name
|
||||
|
||||
classret = _gi_build_stub(classes[name])
|
||||
if not classret:
|
||||
classret = "pass\n"
|
||||
|
||||
for line in classret.splitlines():
|
||||
ret += " " + line + "\n"
|
||||
ret += "\n"
|
||||
|
||||
return ret
|
||||
|
||||
def _import_gi_module(modname):
|
||||
# we only consider gi.repository submodules
|
||||
if not modname.startswith('gi.repository.'):
|
||||
raise AstroidBuildingError(modname=modname)
|
||||
# build astroid representation unless we already tried so
|
||||
if modname not in _inspected_modules:
|
||||
modnames = [modname]
|
||||
optional_modnames = []
|
||||
|
||||
# GLib and GObject may have some special case handling
|
||||
# in pygobject that we need to cope with. However at
|
||||
# least as of pygobject3-3.13.91 the _glib module doesn't
|
||||
# exist anymore, so we treat these modules as optional.
|
||||
if modname == 'gi.repository.GLib':
|
||||
optional_modnames.append('gi._glib')
|
||||
elif modname == 'gi.repository.GObject':
|
||||
optional_modnames.append('gi._gobject')
|
||||
|
||||
try:
|
||||
modcode = ''
|
||||
for m in itertools.chain(modnames, optional_modnames):
|
||||
try:
|
||||
with warnings.catch_warnings():
|
||||
# Just inspecting the code can raise gi deprecation
|
||||
# warnings, so ignore them.
|
||||
try:
|
||||
from gi import PyGIDeprecationWarning, PyGIWarning
|
||||
warnings.simplefilter("ignore", PyGIDeprecationWarning)
|
||||
warnings.simplefilter("ignore", PyGIWarning)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
__import__(m)
|
||||
modcode += _gi_build_stub(sys.modules[m])
|
||||
except ImportError:
|
||||
if m not in optional_modnames:
|
||||
raise
|
||||
except ImportError:
|
||||
astng = _inspected_modules[modname] = None
|
||||
else:
|
||||
astng = AstroidBuilder(MANAGER).string_build(modcode, modname)
|
||||
_inspected_modules[modname] = astng
|
||||
else:
|
||||
astng = _inspected_modules[modname]
|
||||
if astng is None:
|
||||
raise AstroidBuildingError(modname=modname)
|
||||
return astng
|
||||
|
||||
def _looks_like_require_version(node):
|
||||
# Return whether this looks like a call to gi.require_version(<name>, <version>)
|
||||
# Only accept function calls with two constant arguments
|
||||
if len(node.args) != 2:
|
||||
return False
|
||||
|
||||
if not all(isinstance(arg, nodes.Const) for arg in node.args):
|
||||
return False
|
||||
|
||||
func = node.func
|
||||
if isinstance(func, nodes.Attribute):
|
||||
if func.attrname != 'require_version':
|
||||
return False
|
||||
if isinstance(func.expr, nodes.Name) and func.expr.name == 'gi':
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
if isinstance(func, nodes.Name):
|
||||
return func.name == 'require_version'
|
||||
|
||||
return False
|
||||
|
||||
def _register_require_version(node):
|
||||
# Load the gi.require_version locally
|
||||
try:
|
||||
import gi
|
||||
gi.require_version(node.args[0].value, node.args[1].value)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
return node
|
||||
|
||||
MANAGER.register_failed_import_hook(_import_gi_module)
|
||||
MANAGER.register_transform(nodes.Call, _register_require_version, _looks_like_require_version)
|
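As a rough illustration of what the stub builder above emits (a sketch only, assuming it runs in the same module so that _gi_build_stub and its helpers are in scope), feeding it an ordinary module such as the standard library's string module yields plain Python source of this shape:

import string

# Sketch: the returned text is ordinary Python that AstroidBuilder can parse;
# gi.repository submodules are stubbed the same way via the failed-import hook.
stub_source = _gi_build_stub(string)
print(stub_source)
# e.g.
#   # string constants
#   ascii_letters = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"
#   ...
#   # string functions
#   def capwords(*args, **kwargs):
#       pass
#   ...
#   # string classes
#   class Formatter(object):
#       ...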
@ -0,0 +1,40 @@
|
||||
# Copyright (c) 2016 Claudiu Popa <pcmanticore@gmail.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
|
||||
|
||||
import six
|
||||
|
||||
import astroid
|
||||
|
||||
|
||||
def _hashlib_transform():
|
||||
template = '''
|
||||
class %(name)s(object):
|
||||
def __init__(self, value=''): pass
|
||||
def digest(self):
|
||||
return %(digest)s
|
||||
def copy(self):
|
||||
return self
|
||||
def update(self, value): pass
|
||||
def hexdigest(self):
|
||||
return ''
|
||||
@property
|
||||
def name(self):
|
||||
return %(name)r
|
||||
@property
|
||||
def block_size(self):
|
||||
return 1
|
||||
@property
|
||||
def digest_size(self):
|
||||
return 1
|
||||
'''
|
||||
algorithms = ('md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512')
|
||||
classes = "".join(
|
||||
template % {'name': hashfunc, 'digest': 'b""' if six.PY3 else '""'}
|
||||
for hashfunc in algorithms)
|
||||
return astroid.parse(classes)
|
||||
|
||||
|
||||
astroid.register_module_extender(astroid.MANAGER, 'hashlib', _hashlib_transform)
|
||||
|
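A minimal usage sketch of what the extender above enables, assuming astroid is importable and this brain module has been loaded (illustrative only):

import astroid

# With the hashlib extender registered, hashlib.md5 resolves to the stub
# class generated from the template above.
node = astroid.extract_node('''
import hashlib
hashlib.md5  #@
''')
inferred = next(node.infer())
print(inferred.name)                 # 'md5'
print('digest' in inferred.locals)   # True -- the stubbed methods are present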
@ -0,0 +1,43 @@
|
||||
# Copyright (c) 2016 Claudiu Popa <pcmanticore@gmail.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
|
||||
|
||||
'''Astroid brain hints for some of the _io C objects.'''
|
||||
|
||||
import astroid
|
||||
|
||||
|
||||
BUFFERED = {'BufferedWriter', 'BufferedReader'}
|
||||
TextIOWrapper = 'TextIOWrapper'
|
||||
FileIO = 'FileIO'
|
||||
BufferedWriter = 'BufferedWriter'
|
||||
|
||||
|
||||
def _generic_io_transform(node, name, cls):
|
||||
'''Transform the given name, by adding the given *class* as a member of the node.'''
|
||||
|
||||
io_module = astroid.MANAGER.ast_from_module_name('_io')
|
||||
attribute_object = io_module[cls]
|
||||
instance = attribute_object.instantiate_class()
|
||||
node.locals[name] = [instance]
|
||||
|
||||
|
||||
def _transform_text_io_wrapper(node):
|
||||
# This is not always correct, since it can vary with the type of the descriptor,
|
||||
# being stdout, stderr or stdin. But we cannot get access to the name of the
|
||||
# stream, which is why we are using the BufferedWriter class as a default
|
||||
# value
|
||||
return _generic_io_transform(node, name='buffer', cls=BufferedWriter)
|
||||
|
||||
|
||||
def _transform_buffered(node):
|
||||
return _generic_io_transform(node, name='raw', cls=FileIO)
|
||||
|
||||
|
||||
astroid.MANAGER.register_transform(astroid.ClassDef,
|
||||
_transform_buffered,
|
||||
lambda node: node.name in BUFFERED)
|
||||
astroid.MANAGER.register_transform(astroid.ClassDef,
|
||||
_transform_text_io_wrapper,
|
||||
lambda node: node.name == TextIOWrapper)
|
@ -0,0 +1,23 @@
|
||||
# Copyright (c) 2015-2016 Claudiu Popa <pcmanticore@gmail.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
|
||||
|
||||
from astroid import MANAGER, register_module_extender
|
||||
from astroid.builder import AstroidBuilder
|
||||
|
||||
def mechanize_transform():
|
||||
return AstroidBuilder(MANAGER).string_build('''
|
||||
|
||||
class Browser(object):
|
||||
def open(self, url, data=None, timeout=None):
|
||||
return None
|
||||
def open_novisit(self, url, data=None, timeout=None):
|
||||
return None
|
||||
def open_local_file(self, filename):
|
||||
return None
|
||||
|
||||
''')
|
||||
|
||||
|
||||
register_module_extender(MANAGER, 'mechanize', mechanize_transform)
|
@ -0,0 +1,104 @@
|
||||
# Copyright (c) 2016 Claudiu Popa <pcmanticore@gmail.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
|
||||
|
||||
import sys
|
||||
|
||||
import astroid
|
||||
from astroid import exceptions
|
||||
|
||||
|
||||
PY34 = sys.version_info >= (3, 4)
|
||||
|
||||
|
||||
def _multiprocessing_transform():
|
||||
module = astroid.parse('''
|
||||
from multiprocessing.managers import SyncManager
|
||||
def Manager():
|
||||
return SyncManager()
|
||||
''')
|
||||
if not PY34:
|
||||
return module
|
||||
|
||||
# On Python 3.4, multiprocessing uses a getattr lookup inside contexts,
|
||||
# in order to get the attributes they need. Since it's extremely
|
||||
# dynamic, we use this approach to fake it.
|
||||
node = astroid.parse('''
|
||||
from multiprocessing.context import DefaultContext, BaseContext
|
||||
default = DefaultContext()
|
||||
base = BaseContext()
|
||||
''')
|
||||
try:
|
||||
context = next(node['default'].infer())
|
||||
base = next(node['base'].infer())
|
||||
except exceptions.InferenceError:
|
||||
return module
|
||||
|
||||
for node in (context, base):
|
||||
for key, value in node.locals.items():
|
||||
if key.startswith("_"):
|
||||
continue
|
||||
|
||||
value = value[0]
|
||||
if isinstance(value, astroid.FunctionDef):
|
||||
# We need to rebind this, since otherwise
|
||||
# it will have an extra argument (self).
|
||||
value = astroid.BoundMethod(value, node)
|
||||
module[key] = value
|
||||
return module
|
||||
|
||||
|
||||
def _multiprocessing_managers_transform():
|
||||
return astroid.parse('''
|
||||
import array
|
||||
import threading
|
||||
import multiprocessing.pool as pool
|
||||
|
||||
import six
|
||||
|
||||
class Namespace(object):
|
||||
pass
|
||||
|
||||
class Value(object):
|
||||
def __init__(self, typecode, value, lock=True):
|
||||
self._typecode = typecode
|
||||
self._value = value
|
||||
def get(self):
|
||||
return self._value
|
||||
def set(self, value):
|
||||
self._value = value
|
||||
def __repr__(self):
|
||||
return '%s(%r, %r)'%(type(self).__name__, self._typecode, self._value)
|
||||
value = property(get, set)
|
||||
|
||||
def Array(typecode, sequence, lock=True):
|
||||
return array.array(typecode, sequence)
|
||||
|
||||
class SyncManager(object):
|
||||
Queue = JoinableQueue = six.moves.queue.Queue
|
||||
Event = threading.Event
|
||||
RLock = threading.RLock
|
||||
BoundedSemaphore = threading.BoundedSemaphore
|
||||
Condition = threading.Condition
|
||||
Barrier = threading.Barrier
|
||||
Pool = pool.Pool
|
||||
list = list
|
||||
dict = dict
|
||||
Value = Value
|
||||
Array = Array
|
||||
Namespace = Namespace
|
||||
__enter__ = lambda self: self
|
||||
__exit__ = lambda *args: args
|
||||
|
||||
def start(self, initializer=None, initargs=None):
|
||||
pass
|
||||
def shutdown(self):
|
||||
pass
|
||||
''')
|
||||
|
||||
|
||||
astroid.register_module_extender(astroid.MANAGER, 'multiprocessing.managers',
|
||||
_multiprocessing_managers_transform)
|
||||
astroid.register_module_extender(astroid.MANAGER, 'multiprocessing',
|
||||
_multiprocessing_transform)
|
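A rough sketch of the effect of the transforms above (assuming astroid is importable and these brains are loaded; the exact qualified name may differ between astroid versions):

import astroid

# multiprocessing.Manager() is resolved through the faked module, so the
# call infers to an instance of the (extended) SyncManager class.
node = astroid.extract_node('''
import multiprocessing
multiprocessing.Manager()  #@
''')
inferred = next(node.infer())
print(inferred.pytype())   # roughly 'multiprocessing.managers.SyncManager'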
@ -0,0 +1,285 @@
|
||||
# Copyright (c) 2012-2015 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
|
||||
# Copyright (c) 2014-2016 Claudiu Popa <pcmanticore@gmail.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
|
||||
|
||||
"""Astroid hooks for the Python standard library."""
|
||||
|
||||
import functools
|
||||
import sys
|
||||
import keyword
|
||||
from textwrap import dedent
|
||||
|
||||
from astroid import (
|
||||
MANAGER, UseInferenceDefault, inference_tip,
|
||||
InferenceError)
|
||||
from astroid import arguments
|
||||
from astroid import exceptions
|
||||
from astroid import nodes
|
||||
from astroid.builder import AstroidBuilder, extract_node
|
||||
from astroid import util
|
||||
|
||||
|
||||
def _infer_first(node, context):
|
||||
if node is util.Uninferable:
|
||||
raise UseInferenceDefault
|
||||
try:
|
||||
value = next(node.infer(context=context))
|
||||
if value is util.Uninferable:
|
||||
raise UseInferenceDefault()
|
||||
else:
|
||||
return value
|
||||
except StopIteration:
|
||||
raise InferenceError()
|
||||
|
||||
|
||||
def _find_func_form_arguments(node, context):
|
||||
|
||||
def _extract_namedtuple_arg_or_keyword(position, key_name=None):
|
||||
|
||||
if len(args) > position:
|
||||
return _infer_first(args[position], context)
|
||||
if key_name and key_name in found_keywords:
|
||||
return _infer_first(found_keywords[key_name], context)
|
||||
|
||||
args = node.args
|
||||
keywords = node.keywords
|
||||
found_keywords = {
|
||||
keyword.arg: keyword.value for keyword in keywords
|
||||
} if keywords else {}
|
||||
|
||||
name = _extract_namedtuple_arg_or_keyword(
|
||||
position=0,
|
||||
key_name='typename'
|
||||
)
|
||||
names = _extract_namedtuple_arg_or_keyword(
|
||||
position=1,
|
||||
key_name='field_names'
|
||||
)
|
||||
if name and names:
|
||||
return name.value, names
|
||||
|
||||
raise UseInferenceDefault()
|
||||
|
||||
|
||||
def infer_func_form(node, base_type, context=None, enum=False):
|
||||
"""Specific inference function for namedtuple or Python 3 enum. """
|
||||
# node is a Call node, class name as first argument and generated class
|
||||
# attributes as second argument
|
||||
|
||||
# The namedtuple or enum list of attributes can be a list of strings or a
|
||||
# whitespace-separated string
|
||||
try:
|
||||
name, names = _find_func_form_arguments(node, context)
|
||||
try:
|
||||
attributes = names.value.replace(',', ' ').split()
|
||||
except AttributeError:
|
||||
if not enum:
|
||||
attributes = [_infer_first(const, context).value
|
||||
for const in names.elts]
|
||||
else:
|
||||
# Enums support either an iterable of (name, value) pairs
|
||||
# or mappings.
|
||||
# TODO: support only lists, tuples and mappings.
|
||||
if hasattr(names, 'items') and isinstance(names.items, list):
|
||||
attributes = [_infer_first(const[0], context).value
|
||||
for const in names.items
|
||||
if isinstance(const[0], nodes.Const)]
|
||||
elif hasattr(names, 'elts'):
|
||||
# Enums can support either ["a", "b", "c"]
|
||||
# or [("a", 1), ("b", 2), ...], but they can't
|
||||
# be mixed.
|
||||
if all(isinstance(const, nodes.Tuple)
|
||||
for const in names.elts):
|
||||
attributes = [_infer_first(const.elts[0], context).value
|
||||
for const in names.elts
|
||||
if isinstance(const, nodes.Tuple)]
|
||||
else:
|
||||
attributes = [_infer_first(const, context).value
|
||||
for const in names.elts]
|
||||
else:
|
||||
raise AttributeError
|
||||
if not attributes:
|
||||
raise AttributeError
|
||||
except (AttributeError, exceptions.InferenceError):
|
||||
raise UseInferenceDefault()
|
||||
|
||||
# If we can't infer the name of the class, don't crash; up to this point
|
||||
# we know it is a namedtuple anyway.
|
||||
name = name or 'Uninferable'
|
||||
# we want to return a Class node instance with proper attributes set
|
||||
class_node = nodes.ClassDef(name, 'docstring')
|
||||
class_node.parent = node.parent
|
||||
# set base class=tuple
|
||||
class_node.bases.append(base_type)
|
||||
# XXX add __init__(*attributes) method
|
||||
for attr in attributes:
|
||||
fake_node = nodes.EmptyNode()
|
||||
fake_node.parent = class_node
|
||||
fake_node.attrname = attr
|
||||
class_node.instance_attrs[attr] = [fake_node]
|
||||
return class_node, name, attributes
|
||||
|
||||
|
||||
def _looks_like(node, name):
|
||||
func = node.func
|
||||
if isinstance(func, nodes.Attribute):
|
||||
return func.attrname == name
|
||||
if isinstance(func, nodes.Name):
|
||||
return func.name == name
|
||||
return False
|
||||
|
||||
_looks_like_namedtuple = functools.partial(_looks_like, name='namedtuple')
|
||||
_looks_like_enum = functools.partial(_looks_like, name='Enum')
|
||||
|
||||
|
||||
def infer_named_tuple(node, context=None):
|
||||
"""Specific inference function for namedtuple Call node"""
|
||||
class_node, name, attributes = infer_func_form(node, nodes.Tuple._proxied,
|
||||
context=context)
|
||||
call_site = arguments.CallSite.from_call(node)
|
||||
func = next(extract_node('import collections; collections.namedtuple').infer())
|
||||
try:
|
||||
rename = next(call_site.infer_argument(func, 'rename', context)).bool_value()
|
||||
except InferenceError:
|
||||
rename = False
|
||||
|
||||
if rename:
|
||||
attributes = _get_renamed_namedtuple_atributes(attributes)
|
||||
|
||||
replace_args = ', '.join(
|
||||
'{arg}=None'.format(arg=arg)
|
||||
for arg in attributes
|
||||
)
|
||||
|
||||
field_def = (" {name} = property(lambda self: self[{index:d}], "
|
||||
"doc='Alias for field number {index:d}')")
|
||||
field_defs = '\n'.join(field_def.format(name=name, index=index)
|
||||
for index, name in enumerate(attributes))
|
||||
fake = AstroidBuilder(MANAGER).string_build('''
|
||||
class %(name)s(tuple):
|
||||
__slots__ = ()
|
||||
_fields = %(fields)r
|
||||
def _asdict(self):
|
||||
return self.__dict__
|
||||
@classmethod
|
||||
def _make(cls, iterable, new=tuple.__new__, len=len):
|
||||
return new(cls, iterable)
|
||||
def _replace(self, %(replace_args)s):
|
||||
return self
|
||||
def __getnewargs__(self):
|
||||
return tuple(self)
|
||||
%(field_defs)s
|
||||
''' % {'name': name,
|
||||
'fields': attributes,
|
||||
'field_defs': field_defs,
|
||||
'replace_args': replace_args})
|
||||
class_node.locals['_asdict'] = fake.body[0].locals['_asdict']
|
||||
class_node.locals['_make'] = fake.body[0].locals['_make']
|
||||
class_node.locals['_replace'] = fake.body[0].locals['_replace']
|
||||
class_node.locals['_fields'] = fake.body[0].locals['_fields']
|
||||
for attr in attributes:
|
||||
class_node.locals[attr] = fake.body[0].locals[attr]
|
||||
# since this may raise UseInferenceDefault, it can't be a generator, so return an iterator
|
||||
return iter([class_node])
|
||||
|
||||
|
||||
def _get_renamed_namedtuple_atributes(field_names):
|
||||
names = list(field_names)
|
||||
seen = set()
|
||||
for i, name in enumerate(field_names):
|
||||
if (not all(c.isalnum() or c == '_' for c in name) or keyword.iskeyword(name)
|
||||
or not name or name[0].isdigit() or name.startswith('_') or name in seen):
|
||||
names[i] = '_%d' % i
|
||||
seen.add(name)
|
||||
return tuple(names)
|
||||
|
||||
|
||||
def infer_enum(node, context=None):
|
||||
""" Specific inference function for enum Call node. """
|
||||
enum_meta = extract_node('''
|
||||
class EnumMeta(object):
|
||||
'docstring'
|
||||
def __call__(self, node):
|
||||
class EnumAttribute(object):
|
||||
name = ''
|
||||
value = 0
|
||||
return EnumAttribute()
|
||||
def __iter__(self):
|
||||
class EnumAttribute(object):
|
||||
name = ''
|
||||
value = 0
|
||||
return [EnumAttribute()]
|
||||
def __next__(self):
|
||||
return next(iter(self))
|
||||
def __getitem__(self, attr):
|
||||
class Value(object):
|
||||
@property
|
||||
def name(self):
|
||||
return ''
|
||||
@property
|
||||
def value(self):
|
||||
return attr
|
||||
|
||||
return Value()
|
||||
__members__ = ['']
|
||||
''')
|
||||
class_node = infer_func_form(node, enum_meta,
|
||||
context=context, enum=True)[0]
|
||||
return iter([class_node.instantiate_class()])
|
||||
|
||||
|
||||
def infer_enum_class(node):
|
||||
""" Specific inference for enums. """
|
||||
names = set(('Enum', 'IntEnum', 'enum.Enum', 'enum.IntEnum'))
|
||||
for basename in node.basenames:
|
||||
# TODO: doesn't handle subclasses yet. This implementation
|
||||
# is a hack to support enums.
|
||||
if basename not in names:
|
||||
continue
|
||||
if node.root().name == 'enum':
|
||||
# Skip if the class is directly from enum module.
|
||||
break
|
||||
for local, values in node.locals.items():
|
||||
if any(not isinstance(value, nodes.AssignName)
|
||||
for value in values):
|
||||
continue
|
||||
|
||||
stmt = values[0].statement()
|
||||
if isinstance(stmt, nodes.Assign):
|
||||
if isinstance(stmt.targets[0], nodes.Tuple):
|
||||
targets = stmt.targets[0].itered()
|
||||
else:
|
||||
targets = stmt.targets
|
||||
elif isinstance(stmt, nodes.AnnAssign):
|
||||
targets = [stmt.target]
|
||||
|
||||
new_targets = []
|
||||
for target in targets:
|
||||
# Replace all the assignments with our mocked class.
|
||||
classdef = dedent('''
|
||||
class %(name)s(%(types)s):
|
||||
@property
|
||||
def value(self):
|
||||
# Not the best return.
|
||||
return None
|
||||
@property
|
||||
def name(self):
|
||||
return %(name)r
|
||||
''' % {'name': target.name, 'types': ', '.join(node.basenames)})
|
||||
fake = AstroidBuilder(MANAGER).string_build(classdef)[target.name]
|
||||
fake.parent = target.parent
|
||||
for method in node.mymethods():
|
||||
fake.locals[method.name] = [method]
|
||||
new_targets.append(fake.instantiate_class())
|
||||
node.locals[local] = new_targets
|
||||
break
|
||||
return node
|
||||
|
||||
|
||||
MANAGER.register_transform(nodes.Call, inference_tip(infer_named_tuple),
|
||||
_looks_like_namedtuple)
|
||||
MANAGER.register_transform(nodes.Call, inference_tip(infer_enum),
|
||||
_looks_like_enum)
|
||||
MANAGER.register_transform(nodes.ClassDef, infer_enum_class)
|
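A short usage sketch of the namedtuple inference tip registered above (assuming astroid is importable; the attribute names come from the fake class body built in infer_named_tuple):

import astroid

# A namedtuple() call infers to a synthetic ClassDef carrying the generated
# field properties and helper methods.
call = astroid.extract_node('''
import collections
collections.namedtuple('Point', 'x y')  #@
''')
cls = next(call.infer())
print(cls.name)                                      # 'Point'
print('x' in cls.locals and '_make' in cls.locals)   # True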
@ -0,0 +1,70 @@
|
||||
# Copyright (c) 2015-2016 Claudiu Popa <pcmanticore@gmail.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
|
||||
|
||||
|
||||
"""Hooks for nose library."""
|
||||
|
||||
import re
|
||||
import textwrap
|
||||
|
||||
import astroid
|
||||
import astroid.builder
|
||||
|
||||
_BUILDER = astroid.builder.AstroidBuilder(astroid.MANAGER)
|
||||
|
||||
|
||||
def _pep8(name, caps=re.compile('([A-Z])')):
|
||||
return caps.sub(lambda m: '_' + m.groups()[0].lower(), name)
|
||||
|
||||
|
||||
def _nose_tools_functions():
|
||||
"""Get an iterator of names and bound methods."""
|
||||
module = _BUILDER.string_build(textwrap.dedent('''
|
||||
import unittest
|
||||
|
||||
class Test(unittest.TestCase):
|
||||
pass
|
||||
a = Test()
|
||||
'''))
|
||||
try:
|
||||
case = next(module['a'].infer())
|
||||
except astroid.InferenceError:
|
||||
return
|
||||
for method in case.methods():
|
||||
if method.name.startswith('assert') and '_' not in method.name:
|
||||
pep8_name = _pep8(method.name)
|
||||
yield pep8_name, astroid.BoundMethod(method, case)
|
||||
if method.name == 'assertEqual':
|
||||
# nose also exports assert_equals.
|
||||
yield 'assert_equals', astroid.BoundMethod(method, case)
|
||||
|
||||
|
||||
def _nose_tools_transform(node):
|
||||
for method_name, method in _nose_tools_functions():
|
||||
node.locals[method_name] = [method]
|
||||
|
||||
|
||||
def _nose_tools_trivial_transform():
|
||||
"""Custom transform for the nose.tools module."""
|
||||
stub = _BUILDER.string_build('''__all__ = []''')
|
||||
all_entries = ['ok_', 'eq_']
|
||||
|
||||
for pep8_name, method in _nose_tools_functions():
|
||||
all_entries.append(pep8_name)
|
||||
stub[pep8_name] = method
|
||||
|
||||
# Update the __all__ variable, since nose.tools
|
||||
# does this manually with .append.
|
||||
all_assign = stub['__all__'].parent
|
||||
all_object = astroid.List(all_entries)
|
||||
all_object.parent = all_assign
|
||||
all_assign.value = all_object
|
||||
return stub
|
||||
|
||||
|
||||
astroid.register_module_extender(astroid.MANAGER, 'nose.tools.trivial',
|
||||
_nose_tools_trivial_transform)
|
||||
astroid.MANAGER.register_transform(astroid.Module, _nose_tools_transform,
|
||||
lambda n: n.name == 'nose.tools')
|
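For reference, a small sketch of what _pep8 produces and of the names the transforms above inject (run in the same module so _pep8 is in scope; illustrative only):

# _pep8 snake-cases unittest's camelCase assert names, so nose.tools gains
# pep8-style aliases such as assert_equal and assert_true during inference,
# plus the extra assert_equals alias added explicitly above.
print(_pep8('assertEqual'))   # 'assert_equal'
print(_pep8('assertNotIn'))   # 'assert_not_in'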
@ -0,0 +1,193 @@
|
||||
# Copyright (c) 2015-2016 Claudiu Popa <pcmanticore@gmail.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
|
||||
|
||||
|
||||
"""Astroid hooks for numpy."""
|
||||
|
||||
import astroid
|
||||
|
||||
|
||||
def numpy_random_mtrand_transform():
|
||||
return astroid.parse('''
|
||||
def beta(a, b, size=None): pass
|
||||
def binomial(n, p, size=None): pass
|
||||
def bytes(length): pass
|
||||
def chisquare(df, size=None): pass
|
||||
def choice(a, size=None, replace=True, p=None): pass
|
||||
def dirichlet(alpha, size=None): pass
|
||||
def exponential(scale=1.0, size=None): pass
|
||||
def f(dfnum, dfden, size=None): pass
|
||||
def gamma(shape, scale=1.0, size=None): pass
|
||||
def geometric(p, size=None): pass
|
||||
def get_state(): pass
|
||||
def gumbel(loc=0.0, scale=1.0, size=None): pass
|
||||
def hypergeometric(ngood, nbad, nsample, size=None): pass
|
||||
def laplace(loc=0.0, scale=1.0, size=None): pass
|
||||
def logistic(loc=0.0, scale=1.0, size=None): pass
|
||||
def lognormal(mean=0.0, sigma=1.0, size=None): pass
|
||||
def logseries(p, size=None): pass
|
||||
def multinomial(n, pvals, size=None): pass
|
||||
def multivariate_normal(mean, cov, size=None): pass
|
||||
def negative_binomial(n, p, size=None): pass
|
||||
def noncentral_chisquare(df, nonc, size=None): pass
|
||||
def noncentral_f(dfnum, dfden, nonc, size=None): pass
|
||||
def normal(loc=0.0, scale=1.0, size=None): pass
|
||||
def pareto(a, size=None): pass
|
||||
def permutation(x): pass
|
||||
def poisson(lam=1.0, size=None): pass
|
||||
def power(a, size=None): pass
|
||||
def rand(*args): pass
|
||||
def randint(low, high=None, size=None, dtype='l'): pass
|
||||
def randn(*args): pass
|
||||
def random_integers(low, high=None, size=None): pass
|
||||
def random_sample(size=None): pass
|
||||
def rayleigh(scale=1.0, size=None): pass
|
||||
def seed(seed=None): pass
|
||||
def set_state(state): pass
|
||||
def shuffle(x): pass
|
||||
def standard_cauchy(size=None): pass
|
||||
def standard_exponential(size=None): pass
|
||||
def standard_gamma(shape, size=None): pass
|
||||
def standard_normal(size=None): pass
|
||||
def standard_t(df, size=None): pass
|
||||
def triangular(left, mode, right, size=None): pass
|
||||
def uniform(low=0.0, high=1.0, size=None): pass
|
||||
def vonmises(mu, kappa, size=None): pass
|
||||
def wald(mean, scale, size=None): pass
|
||||
def weibull(a, size=None): pass
|
||||
def zipf(a, size=None): pass
|
||||
''')
|
||||
|
||||
|
||||
def numpy_core_umath_transform():
|
||||
ufunc_optional_keyword_arguments = ("""out=None, where=True, casting='same_kind', order='K', """
|
||||
"""dtype=None, subok=True""")
|
||||
return astroid.parse('''
|
||||
# Constants
|
||||
e = 2.718281828459045
|
||||
euler_gamma = 0.5772156649015329
|
||||
|
||||
# No arg functions
|
||||
def geterrobj(): pass
|
||||
|
||||
# One arg functions
|
||||
def seterrobj(errobj): pass
|
||||
|
||||
# One arg functions with optional kwargs
|
||||
def arccos(x, {opt_args:s}): pass
|
||||
def arccosh(x, {opt_args:s}): pass
|
||||
def arcsin(x, {opt_args:s}): pass
|
||||
def arcsinh(x, {opt_args:s}): pass
|
||||
def arctan(x, {opt_args:s}): pass
|
||||
def arctanh(x, {opt_args:s}): pass
|
||||
def cbrt(x, {opt_args:s}): pass
|
||||
def conj(x, {opt_args:s}): pass
|
||||
def conjugate(x, {opt_args:s}): pass
|
||||
def cosh(x, {opt_args:s}): pass
|
||||
def deg2rad(x, {opt_args:s}): pass
|
||||
def degrees(x, {opt_args:s}): pass
|
||||
def exp2(x, {opt_args:s}): pass
|
||||
def expm1(x, {opt_args:s}): pass
|
||||
def fabs(x, {opt_args:s}): pass
|
||||
def frexp(x, {opt_args:s}): pass
|
||||
def isfinite(x, {opt_args:s}): pass
|
||||
def isinf(x, {opt_args:s}): pass
|
||||
def log(x, {opt_args:s}): pass
|
||||
def log1p(x, {opt_args:s}): pass
|
||||
def log2(x, {opt_args:s}): pass
|
||||
def logical_not(x, {opt_args:s}): pass
|
||||
def modf(x, {opt_args:s}): pass
|
||||
def negative(x, {opt_args:s}): pass
|
||||
def rad2deg(x, {opt_args:s}): pass
|
||||
def radians(x, {opt_args:s}): pass
|
||||
def reciprocal(x, {opt_args:s}): pass
|
||||
def rint(x, {opt_args:s}): pass
|
||||
def sign(x, {opt_args:s}): pass
|
||||
def signbit(x, {opt_args:s}): pass
|
||||
def sinh(x, {opt_args:s}): pass
|
||||
def spacing(x, {opt_args:s}): pass
|
||||
def square(x, {opt_args:s}): pass
|
||||
def tan(x, {opt_args:s}): pass
|
||||
def tanh(x, {opt_args:s}): pass
|
||||
def trunc(x, {opt_args:s}): pass
|
||||
|
||||
# Two args functions with optional kwargs
|
||||
def bitwise_and(x1, x2, {opt_args:s}): pass
|
||||
def bitwise_or(x1, x2, {opt_args:s}): pass
|
||||
def bitwise_xor(x1, x2, {opt_args:s}): pass
|
||||
def copysign(x1, x2, {opt_args:s}): pass
|
||||
def divide(x1, x2, {opt_args:s}): pass
|
||||
def equal(x1, x2, {opt_args:s}): pass
|
||||
def float_power(x1, x2, {opt_args:s}): pass
|
||||
def floor_divide(x1, x2, {opt_args:s}): pass
|
||||
def fmax(x1, x2, {opt_args:s}): pass
|
||||
def fmin(x1, x2, {opt_args:s}): pass
|
||||
def fmod(x1, x2, {opt_args:s}): pass
|
||||
def greater(x1, x2, {opt_args:s}): pass
|
||||
def hypot(x1, x2, {opt_args:s}): pass
|
||||
def ldexp(x1, x2, {opt_args:s}): pass
|
||||
def left_shift(x1, x2, {opt_args:s}): pass
|
||||
def less(x1, x2, {opt_args:s}): pass
|
||||
def logaddexp(x1, x2, {opt_args:s}): pass
|
||||
def logaddexp2(x1, x2, {opt_args:s}): pass
|
||||
def logical_and(x1, x2, {opt_args:s}): pass
|
||||
def logical_or(x1, x2, {opt_args:s}): pass
|
||||
def logical_xor(x1, x2, {opt_args:s}): pass
|
||||
def maximum(x1, x2, {opt_args:s}): pass
|
||||
def minimum(x1, x2, {opt_args:s}): pass
|
||||
def nextafter(x1, x2, {opt_args:s}): pass
|
||||
def not_equal(x1, x2, {opt_args:s}): pass
|
||||
def power(x1, x2, {opt_args:s}): pass
|
||||
def remainder(x1, x2, {opt_args:s}): pass
|
||||
def right_shift(x1, x2, {opt_args:s}): pass
|
||||
def subtract(x1, x2, {opt_args:s}): pass
|
||||
def true_divide(x1, x2, {opt_args:s}): pass
|
||||
'''.format(opt_args=ufunc_optional_keyword_arguments))
|
||||
|
||||
|
||||
def numpy_core_numerictypes_transform():
|
||||
return astroid.parse('''
|
||||
# different types defined in numerictypes.py
|
||||
uint16 = type('uint16')
|
||||
uint32 = type('uint32')
|
||||
uint64 = type('uint64')
|
||||
int128 = type('int128')
|
||||
uint128 = type('uint128')
|
||||
float16 = type('float16')
|
||||
float32 = type('float32')
|
||||
float64 = type('float64')
|
||||
float80 = type('float80')
|
||||
float96 = type('float96')
|
||||
float128 = type('float128')
|
||||
float256 = type('float256')
|
||||
complex32 = type('complex32')
|
||||
complex64 = type('complex64')
|
||||
complex128 = type('complex128')
|
||||
complex160 = type('complex160')
|
||||
complex192 = type('complex192')
|
||||
complex256 = type('complex256')
|
||||
complex512 = type('complex512')
|
||||
timedelta64 = type('timedelta64')
|
||||
datetime64 = type('datetime64')
|
||||
unicode_ = type('unicode_')
|
||||
string_ = type('string_')
|
||||
object_ = type('object_')
|
||||
''')
|
||||
|
||||
|
||||
def numpy_funcs():
|
||||
return astroid.parse('''
|
||||
import builtins
|
||||
def sum(a, axis=None, dtype=None, out=None, keepdims=None):
|
||||
return builtins.sum(a)
|
||||
''')
|
||||
|
||||
|
||||
astroid.register_module_extender(astroid.MANAGER, 'numpy.core.umath', numpy_core_umath_transform)
|
||||
astroid.register_module_extender(astroid.MANAGER, 'numpy.random.mtrand',
|
||||
numpy_random_mtrand_transform)
|
||||
astroid.register_module_extender(astroid.MANAGER, 'numpy.core.numerictypes',
|
||||
numpy_core_numerictypes_transform)
|
||||
astroid.register_module_extender(astroid.MANAGER, 'numpy', numpy_funcs)
|
@ -0,0 +1,70 @@
|
||||
# Copyright (c) 2016 Claudiu Popa <pcmanticore@gmail.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
|
||||
|
||||
|
||||
import astroid
|
||||
from astroid import parse
|
||||
from astroid import inference_tip
|
||||
from astroid import register_module_extender
|
||||
from astroid import MANAGER
|
||||
|
||||
|
||||
def pkg_resources_transform():
|
||||
return parse('''
|
||||
def require(*requirements):
|
||||
return pkg_resources.working_set.require(*requirements)
|
||||
|
||||
def run_script(requires, script_name):
|
||||
return pkg_resources.working_set.run_script(requires, script_name)
|
||||
|
||||
def iter_entry_points(group, name=None):
|
||||
return pkg_resources.working_set.iter_entry_points(group, name)
|
||||
|
||||
def resource_exists(package_or_requirement, resource_name):
|
||||
return get_provider(package_or_requirement).has_resource(resource_name)
|
||||
|
||||
def resource_isdir(package_or_requirement, resource_name):
|
||||
return get_provider(package_or_requirement).resource_isdir(
|
||||
resource_name)
|
||||
|
||||
def resource_filename(package_or_requirement, resource_name):
|
||||
return get_provider(package_or_requirement).get_resource_filename(
|
||||
self, resource_name)
|
||||
|
||||
def resource_stream(package_or_requirement, resource_name):
|
||||
return get_provider(package_or_requirement).get_resource_stream(
|
||||
self, resource_name)
|
||||
|
||||
def resource_string(package_or_requirement, resource_name):
|
||||
return get_provider(package_or_requirement).get_resource_string(
|
||||
self, resource_name)
|
||||
|
||||
def resource_listdir(package_or_requirement, resource_name):
|
||||
return get_provider(package_or_requirement).resource_listdir(
|
||||
resource_name)
|
||||
|
||||
def extraction_error():
|
||||
pass
|
||||
|
||||
def get_cache_path(archive_name, names=()):
|
||||
extract_path = self.extraction_path or get_default_cache()
|
||||
target_path = os.path.join(extract_path, archive_name+'-tmp', *names)
|
||||
return target_path
|
||||
|
||||
def postprocess(tempname, filename):
|
||||
pass
|
||||
|
||||
def set_extraction_path(path):
|
||||
pass
|
||||
|
||||
def cleanup_resources(force=False):
|
||||
pass
|
||||
|
||||
def get_distribution(dist):
|
||||
return Distribution(dist)
|
||||
|
||||
''')
|
||||
|
||||
register_module_extender(MANAGER, 'pkg_resources', pkg_resources_transform)
|
@ -0,0 +1,82 @@
|
||||
# Copyright (c) 2014-2016 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2016 Cara Vinson <ceridwenv@gmail.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
|
||||
|
||||
"""Astroid hooks for pytest."""
|
||||
from __future__ import absolute_import
|
||||
from astroid import MANAGER, register_module_extender
|
||||
from astroid.builder import AstroidBuilder
|
||||
|
||||
|
||||
def pytest_transform():
|
||||
return AstroidBuilder(MANAGER).string_build('''
|
||||
|
||||
try:
|
||||
import _pytest.mark
|
||||
import _pytest.recwarn
|
||||
import _pytest.runner
|
||||
import _pytest.python
|
||||
import _pytest.skipping
|
||||
import _pytest.assertion
|
||||
except ImportError:
|
||||
pass
|
||||
else:
|
||||
deprecated_call = _pytest.recwarn.deprecated_call
|
||||
warns = _pytest.recwarn.warns
|
||||
|
||||
exit = _pytest.runner.exit
|
||||
fail = _pytest.runner.fail
|
||||
skip = _pytest.runner.skip
|
||||
importorskip = _pytest.runner.importorskip
|
||||
|
||||
xfail = _pytest.skipping.xfail
|
||||
mark = _pytest.mark.MarkGenerator()
|
||||
raises = _pytest.python.raises
|
||||
|
||||
# New in pytest 3.0
|
||||
try:
|
||||
approx = _pytest.python.approx
|
||||
register_assert_rewrite = _pytest.assertion.register_assert_rewrite
|
||||
except AttributeError:
|
||||
pass
|
||||
|
||||
|
||||
# Moved in pytest 3.0
|
||||
|
||||
try:
|
||||
import _pytest.freeze_support
|
||||
freeze_includes = _pytest.freeze_support.freeze_includes
|
||||
except ImportError:
|
||||
try:
|
||||
import _pytest.genscript
|
||||
freeze_includes = _pytest.genscript.freeze_includes
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
try:
|
||||
import _pytest.debugging
|
||||
set_trace = _pytest.debugging.pytestPDB().set_trace
|
||||
except ImportError:
|
||||
try:
|
||||
import _pytest.pdb
|
||||
set_trace = _pytest.pdb.pytestPDB().set_trace
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
try:
|
||||
import _pytest.fixtures
|
||||
fixture = _pytest.fixtures.fixture
|
||||
yield_fixture = _pytest.fixtures.yield_fixture
|
||||
except ImportError:
|
||||
try:
|
||||
import _pytest.python
|
||||
fixture = _pytest.python.fixture
|
||||
yield_fixture = _pytest.python.yield_fixture
|
||||
except ImportError:
|
||||
pass
|
||||
''')
|
||||
|
||||
register_module_extender(MANAGER, 'pytest', pytest_transform)
|
||||
register_module_extender(MANAGER, 'py.test', pytest_transform)
|
@ -0,0 +1,53 @@
|
||||
# Copyright (c) 2015-2016 Claudiu Popa <pcmanticore@gmail.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
|
||||
|
||||
"""Astroid hooks for the PyQT library."""
|
||||
|
||||
from astroid import MANAGER, register_module_extender
|
||||
from astroid.builder import AstroidBuilder
|
||||
from astroid import nodes
|
||||
from astroid import parse
|
||||
|
||||
|
||||
def _looks_like_signal(node, signal_name='pyqtSignal'):
|
||||
if '__class__' in node.instance_attrs:
|
||||
try:
|
||||
cls = node.instance_attrs['__class__'][0]
|
||||
return cls.name == signal_name
|
||||
except AttributeError:
|
||||
# return False if the cls does not have a name attribute
|
||||
pass
|
||||
return False
|
||||
|
||||
|
||||
def transform_pyqt_signal(node):
|
||||
module = parse('''
|
||||
class pyqtSignal(object):
|
||||
def connect(self, slot, type=None, no_receiver_check=False):
|
||||
pass
|
||||
def disconnect(self, slot):
|
||||
pass
|
||||
def emit(self, *args):
|
||||
pass
|
||||
''')
|
||||
signal_cls = module['pyqtSignal']
|
||||
node.instance_attrs['emit'] = signal_cls['emit']
|
||||
node.instance_attrs['disconnect'] = signal_cls['disconnect']
|
||||
node.instance_attrs['connect'] = signal_cls['connect']
|
||||
|
||||
|
||||
def pyqt4_qtcore_transform():
|
||||
return AstroidBuilder(MANAGER).string_build('''
|
||||
|
||||
def SIGNAL(signal_name): pass
|
||||
|
||||
class QObject(object):
|
||||
def emit(self, signal): pass
|
||||
''')
|
||||
|
||||
|
||||
register_module_extender(MANAGER, 'PyQt4.QtCore', pyqt4_qtcore_transform)
|
||||
MANAGER.register_transform(nodes.FunctionDef, transform_pyqt_signal,
|
||||
_looks_like_signal)
|
@ -0,0 +1,98 @@
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
|
||||
import random
|
||||
|
||||
import astroid
|
||||
from astroid import helpers
|
||||
from astroid import MANAGER
|
||||
|
||||
|
||||
ACCEPTED_ITERABLES_FOR_SAMPLE = (
|
||||
astroid.List,
|
||||
astroid.Set,
|
||||
astroid.Tuple,
|
||||
)
|
||||
|
||||
|
||||
def _clone_node_with_lineno(node, parent, lineno):
|
||||
cls = node.__class__
|
||||
other_fields = node._other_fields
|
||||
_astroid_fields = node._astroid_fields
|
||||
init_params = {
|
||||
'lineno': lineno,
|
||||
'col_offset': node.col_offset,
|
||||
'parent': parent
|
||||
}
|
||||
postinit_params = {
|
||||
param: getattr(node, param)
|
||||
for param in _astroid_fields
|
||||
}
|
||||
if other_fields:
|
||||
init_params.update({
|
||||
param: getattr(node, param)
|
||||
for param in other_fields
|
||||
})
|
||||
new_node = cls(**init_params)
|
||||
if hasattr(node, 'postinit') and _astroid_fields:
|
||||
new_node.postinit(**postinit_params)
|
||||
return new_node
|
||||
|
||||
|
||||
def infer_random_sample(node, context=None):
|
||||
if len(node.args) != 2:
|
||||
raise astroid.UseInferenceDefault
|
||||
|
||||
length = node.args[1]
|
||||
if not isinstance(length, astroid.Const):
|
||||
raise astroid.UseInferenceDefault
|
||||
if not isinstance(length.value, int):
|
||||
raise astroid.UseInferenceDefault
|
||||
|
||||
inferred_sequence = helpers.safe_infer(node.args[0], context=context)
|
||||
if inferred_sequence in (None, astroid.Uninferable):
|
||||
raise astroid.UseInferenceDefault
|
||||
|
||||
# TODO: might need to support more cases
|
||||
if not isinstance(inferred_sequence, ACCEPTED_ITERABLES_FOR_SAMPLE):
|
||||
raise astroid.UseInferenceDefault
|
||||
|
||||
if length.value > len(inferred_sequence.elts):
|
||||
# In this case, this will raise a ValueError
|
||||
raise astroid.UseInferenceDefault
|
||||
|
||||
try:
|
||||
elts = random.sample(inferred_sequence.elts, length.value)
|
||||
except ValueError:
|
||||
raise astroid.UseInferenceDefault
|
||||
|
||||
new_node = astroid.List(
|
||||
lineno=node.lineno,
|
||||
col_offset=node.col_offset,
|
||||
parent=node.scope(),
|
||||
)
|
||||
new_elts = [
|
||||
_clone_node_with_lineno(
|
||||
elt,
|
||||
parent=new_node,
|
||||
lineno=new_node.lineno
|
||||
)
|
||||
for elt in elts
|
||||
]
|
||||
new_node.postinit(new_elts)
|
||||
return iter((new_node, ))
|
||||
|
||||
|
||||
def _looks_like_random_sample(node):
|
||||
func = node.func
|
||||
if isinstance(func, astroid.Attribute):
|
||||
return func.attrname == 'sample'
|
||||
if isinstance(func, astroid.Name):
|
||||
return func.name == 'sample'
|
||||
return False
|
||||
|
||||
|
||||
MANAGER.register_transform(
|
||||
astroid.Call,
|
||||
astroid.inference_tip(infer_random_sample),
|
||||
_looks_like_random_sample,
|
||||
)
|
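A brief sketch of the inference behaviour added above (assuming astroid is importable; which elements end up in the result is random by design):

import astroid

# random.sample over a literal sequence with a constant length infers to a
# new List node whose elements are cloned from the literal.
node = astroid.extract_node('''
import random
random.sample([10, 20, 30], 2)  #@
''')
inferred = next(node.infer())
print(type(inferred).__name__)   # 'List'
print(len(inferred.elts))        # 2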
@ -0,0 +1,34 @@
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
|
||||
import sys
|
||||
import astroid
|
||||
|
||||
PY36 = sys.version_info >= (3, 6)
|
||||
|
||||
if PY36:
|
||||
# Since Python 3.6 there is the RegexFlag enum
|
||||
# whose entries are exposed at module level by updating globals()
|
||||
|
||||
def _re_transform():
|
||||
return astroid.parse('''
|
||||
import sre_compile
|
||||
ASCII = sre_compile.SRE_FLAG_ASCII
|
||||
IGNORECASE = sre_compile.SRE_FLAG_IGNORECASE
|
||||
LOCALE = sre_compile.SRE_FLAG_LOCALE
|
||||
UNICODE = sre_compile.SRE_FLAG_UNICODE
|
||||
MULTILINE = sre_compile.SRE_FLAG_MULTILINE
|
||||
DOTALL = sre_compile.SRE_FLAG_DOTALL
|
||||
VERBOSE = sre_compile.SRE_FLAG_VERBOSE
|
||||
A = ASCII
|
||||
I = IGNORECASE
|
||||
L = LOCALE
|
||||
U = UNICODE
|
||||
M = MULTILINE
|
||||
S = DOTALL
|
||||
X = VERBOSE
|
||||
TEMPLATE = sre_compile.SRE_FLAG_TEMPLATE
|
||||
T = TEMPLATE
|
||||
DEBUG = sre_compile.SRE_FLAG_DEBUG
|
||||
''')
|
||||
|
||||
astroid.register_module_extender(astroid.MANAGER, 're', _re_transform)
|
308
thesisenv/lib/python3.6/site-packages/astroid/brain/brain_six.py
Normal file
@ -0,0 +1,308 @@
|
||||
# Copyright (c) 2014-2016 Claudiu Popa <pcmanticore@gmail.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
|
||||
|
||||
|
||||
"""Astroid hooks for six module."""
|
||||
|
||||
import sys
|
||||
from textwrap import dedent
|
||||
|
||||
from astroid import MANAGER, register_module_extender
|
||||
from astroid.builder import AstroidBuilder
|
||||
from astroid.exceptions import AstroidBuildingError, InferenceError, AttributeInferenceError
|
||||
from astroid import nodes
|
||||
|
||||
|
||||
SIX_ADD_METACLASS = 'six.add_metaclass'
|
||||
|
||||
|
||||
def _indent(text, prefix, predicate=None):
|
||||
"""Adds 'prefix' to the beginning of selected lines in 'text'.
|
||||
|
||||
If 'predicate' is provided, 'prefix' will only be added to the lines
|
||||
where 'predicate(line)' is True. If 'predicate' is not provided,
|
||||
it will default to adding 'prefix' to all non-empty lines that do not
|
||||
consist solely of whitespace characters.
|
||||
"""
|
||||
if predicate is None:
|
||||
predicate = lambda line: line.strip()
|
||||
|
||||
def prefixed_lines():
|
||||
for line in text.splitlines(True):
|
||||
yield prefix + line if predicate(line) else line
|
||||
return ''.join(prefixed_lines())
|
||||
|
||||
|
||||
if sys.version_info[0] == 2:
|
||||
_IMPORTS_2 = """
|
||||
import BaseHTTPServer
|
||||
import CGIHTTPServer
|
||||
import SimpleHTTPServer
|
||||
|
||||
from StringIO import StringIO
|
||||
from cStringIO import StringIO as cStringIO
|
||||
from UserDict import UserDict
|
||||
from UserList import UserList
|
||||
from UserString import UserString
|
||||
|
||||
import __builtin__ as builtins
|
||||
import thread as _thread
|
||||
import dummy_thread as _dummy_thread
|
||||
import ConfigParser as configparser
|
||||
import copy_reg as copyreg
|
||||
from itertools import (imap as map,
|
||||
ifilter as filter,
|
||||
ifilterfalse as filterfalse,
|
||||
izip_longest as zip_longest,
|
||||
izip as zip)
|
||||
import htmlentitydefs as html_entities
|
||||
import HTMLParser as html_parser
|
||||
import httplib as http_client
|
||||
import cookielib as http_cookiejar
|
||||
import Cookie as http_cookies
|
||||
import Queue as queue
|
||||
import repr as reprlib
|
||||
from pipes import quote as shlex_quote
|
||||
import SocketServer as socketserver
|
||||
import SimpleXMLRPCServer as xmlrpc_server
|
||||
import xmlrpclib as xmlrpc_client
|
||||
import _winreg as winreg
|
||||
import robotparser as urllib_robotparser
|
||||
import Tkinter as tkinter
|
||||
import tkFileDialog as tkinter_tkfiledialog
|
||||
|
||||
input = raw_input
|
||||
intern = intern
|
||||
range = xrange
|
||||
xrange = xrange
|
||||
reduce = reduce
|
||||
reload_module = reload
|
||||
|
||||
class UrllibParse(object):
|
||||
def __init__(self):
|
||||
import urlparse as _urlparse
|
||||
import urllib as _urllib
|
||||
|
||||
self.ParseResult = _urlparse.ParseResult
|
||||
self.SplitResult = _urlparse.SplitResult
|
||||
self.parse_qs = _urlparse.parse_qs
|
||||
self.parse_qsl = _urlparse.parse_qsl
|
||||
self.urldefrag = _urlparse.urldefrag
|
||||
self.urljoin = _urlparse.urljoin
|
||||
self.urlparse = _urlparse.urlparse
|
||||
self.urlsplit = _urlparse.urlsplit
|
||||
self.urlunparse = _urlparse.urlunparse
|
||||
self.urlunsplit = _urlparse.urlunsplit
|
||||
self.quote = _urllib.quote
|
||||
self.quote_plus = _urllib.quote_plus
|
||||
self.unquote = _urllib.unquote
|
||||
self.unquote_plus = _urllib.unquote_plus
|
||||
self.urlencode = _urllib.urlencode
|
||||
self.splitquery = _urllib.splitquery
|
||||
self.splittag = _urllib.splittag
|
||||
self.splituser = _urllib.splituser
|
||||
self.uses_fragment = _urlparse.uses_fragment
|
||||
self.uses_netloc = _urlparse.uses_netloc
|
||||
self.uses_params = _urlparse.uses_params
|
||||
self.uses_query = _urlparse.uses_query
|
||||
self.uses_relative = _urlparse.uses_relative
|
||||
|
||||
class UrllibError(object):
|
||||
import urllib2 as _urllib2
|
||||
import urllib as _urllib
|
||||
URLError = _urllib2.URLError
|
||||
HTTPError = _urllib2.HTTPError
|
||||
ContentTooShortError = _urllib.ContentTooShortError
|
||||
|
||||
class DummyModule(object):
|
||||
pass
|
||||
|
||||
class UrllibRequest(object):
|
||||
def __init__(self):
|
||||
import urlparse as _urlparse
|
||||
import urllib2 as _urllib2
|
||||
import urllib as _urllib
|
||||
self.urlopen = _urllib2.urlopen
|
||||
self.install_opener = _urllib2.install_opener
|
||||
self.build_opener = _urllib2.build_opener
|
||||
self.pathname2url = _urllib.pathname2url
|
||||
self.url2pathname = _urllib.url2pathname
|
||||
self.getproxies = _urllib.getproxies
|
||||
self.Request = _urllib2.Request
|
||||
self.OpenerDirector = _urllib2.OpenerDirector
|
||||
self.HTTPDefaultErrorHandler = _urllib2.HTTPDefaultErrorHandler
|
||||
self.HTTPRedirectHandler = _urllib2.HTTPRedirectHandler
|
||||
self.HTTPCookieProcessor = _urllib2.HTTPCookieProcessor
|
||||
self.ProxyHandler = _urllib2.ProxyHandler
|
||||
self.BaseHandler = _urllib2.BaseHandler
|
||||
self.HTTPPasswordMgr = _urllib2.HTTPPasswordMgr
|
||||
self.HTTPPasswordMgrWithDefaultRealm = _urllib2.HTTPPasswordMgrWithDefaultRealm
|
||||
self.AbstractBasicAuthHandler = _urllib2.AbstractBasicAuthHandler
|
||||
self.HTTPBasicAuthHandler = _urllib2.HTTPBasicAuthHandler
|
||||
self.ProxyBasicAuthHandler = _urllib2.ProxyBasicAuthHandler
|
||||
self.AbstractDigestAuthHandler = _urllib2.AbstractDigestAuthHandler
|
||||
self.HTTPDigestAuthHandler = _urllib2.HTTPDigestAuthHandler
|
||||
self.ProxyDigestAuthHandler = _urllib2.ProxyDigestAuthHandler
|
||||
self.HTTPHandler = _urllib2.HTTPHandler
|
||||
self.HTTPSHandler = _urllib2.HTTPSHandler
|
||||
self.FileHandler = _urllib2.FileHandler
|
||||
self.FTPHandler = _urllib2.FTPHandler
|
||||
self.CacheFTPHandler = _urllib2.CacheFTPHandler
|
||||
self.UnknownHandler = _urllib2.UnknownHandler
|
||||
self.HTTPErrorProcessor = _urllib2.HTTPErrorProcessor
|
||||
self.urlretrieve = _urllib.urlretrieve
|
||||
self.urlcleanup = _urllib.urlcleanup
|
||||
self.proxy_bypass = _urllib.proxy_bypass
|
||||
|
||||
urllib_parse = UrllibParse()
|
||||
urllib_error = UrllibError()
|
||||
urllib = DummyModule()
|
||||
urllib.request = UrllibRequest()
|
||||
urllib.parse = UrllibParse()
|
||||
urllib.error = UrllibError()
|
||||
"""
|
||||
else:
|
||||
_IMPORTS_3 = """
|
||||
import _io
|
||||
cStringIO = _io.StringIO
|
||||
filter = filter
|
||||
from itertools import filterfalse
|
||||
input = input
|
||||
from sys import intern
|
||||
map = map
|
||||
range = range
|
||||
from imp import reload as reload_module
|
||||
from functools import reduce
|
||||
from shlex import quote as shlex_quote
|
||||
from io import StringIO
|
||||
from collections import UserDict, UserList, UserString
|
||||
xrange = range
|
||||
zip = zip
|
||||
from itertools import zip_longest
|
||||
import builtins
|
||||
import configparser
|
||||
import copyreg
|
||||
import _dummy_thread
|
||||
import http.cookiejar as http_cookiejar
|
||||
import http.cookies as http_cookies
|
||||
import html.entities as html_entities
|
||||
import html.parser as html_parser
|
||||
import http.client as http_client
|
||||
import http.server as http_server
|
||||
BaseHTTPServer = CGIHTTPServer = SimpleHTTPServer = http.server
|
||||
import pickle as cPickle
|
||||
import queue
|
||||
import reprlib
|
||||
import socketserver
|
||||
import _thread
|
||||
import winreg
|
||||
import xmlrpc.server as xmlrpc_server
|
||||
import xmlrpc.client as xmlrpc_client
|
||||
import urllib.robotparser as urllib_robotparser
|
||||
import email.mime.multipart as email_mime_multipart
|
||||
import email.mime.nonmultipart as email_mime_nonmultipart
|
||||
import email.mime.text as email_mime_text
|
||||
import email.mime.base as email_mime_base
|
||||
import urllib.parse as urllib_parse
|
||||
import urllib.error as urllib_error
|
||||
import tkinter
|
||||
import tkinter.dialog as tkinter_dialog
|
||||
import tkinter.filedialog as tkinter_filedialog
|
||||
import tkinter.scrolledtext as tkinter_scrolledtext
|
||||
import tkinter.simpledialog as tkinder_simpledialog
|
||||
import tkinter.tix as tkinter_tix
|
||||
import tkinter.ttk as tkinter_ttk
|
||||
import tkinter.constants as tkinter_constants
|
||||
import tkinter.dnd as tkinter_dnd
|
||||
import tkinter.colorchooser as tkinter_colorchooser
|
||||
import tkinter.commondialog as tkinter_commondialog
|
||||
import tkinter.filedialog as tkinter_tkfiledialog
|
||||
import tkinter.font as tkinter_font
|
||||
import tkinter.messagebox as tkinter_messagebox
|
||||
import urllib
|
||||
import urllib.request as urllib_request
|
||||
import urllib.robotparser as urllib_robotparser
|
||||
import urllib.parse as urllib_parse
|
||||
import urllib.error as urllib_error
|
||||
"""
|
||||
if sys.version_info[0] == 2:
|
||||
_IMPORTS = dedent(_IMPORTS_2)
|
||||
else:
|
||||
_IMPORTS = dedent(_IMPORTS_3)
|
||||
|
||||
|
||||
def six_moves_transform():
|
||||
code = dedent('''
|
||||
class Moves(object):
|
||||
{}
|
||||
moves = Moves()
|
||||
''').format(_indent(_IMPORTS, " "))
|
||||
module = AstroidBuilder(MANAGER).string_build(code)
|
||||
module.name = 'six.moves'
|
||||
return module
|
||||
|
||||
|
||||
def _six_fail_hook(modname):
|
||||
"""Fix six.moves imports due to the dynamic nature of this
|
||||
class.
|
||||
|
||||
Construct a pseudo-module which contains all the necessary imports
|
||||
for six
|
||||
|
||||
:param modname: Name of failed module
|
||||
:type modname: str
|
||||
|
||||
:return: An astroid module
|
||||
:rtype: nodes.Module
|
||||
"""
|
||||
|
||||
attribute_of = (modname != "six.moves" and
|
||||
modname.startswith("six.moves"))
|
||||
if modname != 'six.moves' and not attribute_of:
|
||||
raise AstroidBuildingError(modname=modname)
|
||||
module = AstroidBuilder(MANAGER).string_build(_IMPORTS)
|
||||
module.name = 'six.moves'
|
||||
if attribute_of:
|
||||
# Facilitate import of submodules in Moves
|
||||
start_index = len(module.name)
|
||||
attribute = modname[start_index:].lstrip(".").replace(".", "_")
|
||||
try:
|
||||
import_attr = module.getattr(attribute)[0]
|
||||
except AttributeInferenceError:
|
||||
raise AstroidBuildingError(modname=modname)
|
||||
if isinstance(import_attr, nodes.Import):
|
||||
submodule = MANAGER.ast_from_module_name(import_attr.names[0][0])
|
||||
return submodule
|
||||
# Let dummy submodule imports pass through
|
||||
# This will cause an Uninferable result, which is okay
|
||||
return module
|
||||
|
||||
def transform_six_add_metaclass(node):
|
||||
"""Check if the given class node is decorated with *six.add_metaclass*
|
||||
|
||||
If so, inject its argument as the metaclass of the underlying class.
|
||||
"""
|
||||
if not node.decorators:
|
||||
return
|
||||
|
||||
for decorator in node.decorators.nodes:
|
||||
if not isinstance(decorator, nodes.Call):
|
||||
continue
|
||||
|
||||
try:
|
||||
func = next(decorator.func.infer())
|
||||
except InferenceError:
|
||||
continue
|
||||
if func.qname() == SIX_ADD_METACLASS and decorator.args:
|
||||
metaclass = decorator.args[0]
|
||||
node._metaclass = metaclass
|
||||
return node
|
||||
|
||||
|
||||
register_module_extender(MANAGER, 'six', six_moves_transform)
|
||||
register_module_extender(MANAGER, 'requests.packages.urllib3.packages.six',
|
||||
six_moves_transform)
|
||||
MANAGER.register_failed_import_hook(_six_fail_hook)
|
||||
MANAGER.register_transform(nodes.ClassDef, transform_six_add_metaclass)
|
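A compact sketch of the class transform registered above (assuming astroid and the real six package are importable, so that six.add_metaclass can be inferred):

import astroid

# Classes decorated with six.add_metaclass get their metaclass injected,
# so astroid reports it as if a metaclass= keyword had been used.
cls = astroid.extract_node('''
import six

class Meta(type):
    pass

@six.add_metaclass(Meta)
class Widget(object):  #@
    pass
''')
print(cls.metaclass().name)   # 'Meta'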
@ -0,0 +1,70 @@
|
||||
# Copyright (c) 2016 Claudiu Popa <pcmanticore@gmail.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
|
||||
|
||||
"""Astroid hooks for the ssl library."""
|
||||
|
||||
from astroid import MANAGER, register_module_extender
|
||||
from astroid.builder import AstroidBuilder
|
||||
from astroid import nodes
|
||||
from astroid import parse
|
||||
|
||||
|
||||
def ssl_transform():
|
||||
return parse('''
|
||||
from _ssl import OPENSSL_VERSION_NUMBER, OPENSSL_VERSION_INFO, OPENSSL_VERSION
|
||||
from _ssl import _SSLContext, MemoryBIO
|
||||
from _ssl import (
|
||||
SSLError, SSLZeroReturnError, SSLWantReadError, SSLWantWriteError,
|
||||
SSLSyscallError, SSLEOFError,
|
||||
)
|
||||
from _ssl import CERT_NONE, CERT_OPTIONAL, CERT_REQUIRED
|
||||
from _ssl import txt2obj as _txt2obj, nid2obj as _nid2obj
|
||||
from _ssl import RAND_status, RAND_add, RAND_bytes, RAND_pseudo_bytes
|
||||
try:
|
||||
from _ssl import RAND_egd
|
||||
except ImportError:
|
||||
# LibreSSL does not provide RAND_egd
|
||||
pass
|
||||
from _ssl import (OP_ALL, OP_CIPHER_SERVER_PREFERENCE,
|
||||
OP_NO_COMPRESSION, OP_NO_SSLv2, OP_NO_SSLv3,
|
||||
OP_NO_TLSv1, OP_NO_TLSv1_1, OP_NO_TLSv1_2,
|
||||
OP_SINGLE_DH_USE, OP_SINGLE_ECDH_USE)
|
||||
|
||||
from _ssl import (ALERT_DESCRIPTION_ACCESS_DENIED, ALERT_DESCRIPTION_BAD_CERTIFICATE,
|
||||
ALERT_DESCRIPTION_BAD_CERTIFICATE_HASH_VALUE,
|
||||
ALERT_DESCRIPTION_BAD_CERTIFICATE_STATUS_RESPONSE,
|
||||
ALERT_DESCRIPTION_BAD_RECORD_MAC,
|
||||
ALERT_DESCRIPTION_CERTIFICATE_EXPIRED,
|
||||
ALERT_DESCRIPTION_CERTIFICATE_REVOKED,
|
||||
ALERT_DESCRIPTION_CERTIFICATE_UNKNOWN,
|
||||
ALERT_DESCRIPTION_CERTIFICATE_UNOBTAINABLE,
|
||||
ALERT_DESCRIPTION_CLOSE_NOTIFY, ALERT_DESCRIPTION_DECODE_ERROR,
|
||||
ALERT_DESCRIPTION_DECOMPRESSION_FAILURE,
|
||||
ALERT_DESCRIPTION_DECRYPT_ERROR,
|
||||
ALERT_DESCRIPTION_HANDSHAKE_FAILURE,
|
||||
ALERT_DESCRIPTION_ILLEGAL_PARAMETER,
|
||||
ALERT_DESCRIPTION_INSUFFICIENT_SECURITY,
|
||||
ALERT_DESCRIPTION_INTERNAL_ERROR,
|
||||
ALERT_DESCRIPTION_NO_RENEGOTIATION,
|
||||
ALERT_DESCRIPTION_PROTOCOL_VERSION,
|
||||
ALERT_DESCRIPTION_RECORD_OVERFLOW,
|
||||
ALERT_DESCRIPTION_UNEXPECTED_MESSAGE,
|
||||
ALERT_DESCRIPTION_UNKNOWN_CA,
|
||||
ALERT_DESCRIPTION_UNKNOWN_PSK_IDENTITY,
|
||||
ALERT_DESCRIPTION_UNRECOGNIZED_NAME,
|
||||
ALERT_DESCRIPTION_UNSUPPORTED_CERTIFICATE,
|
||||
ALERT_DESCRIPTION_UNSUPPORTED_EXTENSION,
|
||||
ALERT_DESCRIPTION_USER_CANCELLED)
|
||||
from _ssl import (SSL_ERROR_EOF, SSL_ERROR_INVALID_ERROR_CODE, SSL_ERROR_SSL,
|
||||
SSL_ERROR_SYSCALL, SSL_ERROR_WANT_CONNECT, SSL_ERROR_WANT_READ,
|
||||
SSL_ERROR_WANT_WRITE, SSL_ERROR_WANT_X509_LOOKUP, SSL_ERROR_ZERO_RETURN)
|
||||
from _ssl import VERIFY_CRL_CHECK_CHAIN, VERIFY_CRL_CHECK_LEAF, VERIFY_DEFAULT, VERIFY_X509_STRICT
|
||||
from _ssl import HAS_SNI, HAS_ECDH, HAS_NPN, HAS_ALPN
|
||||
from _ssl import _OPENSSL_API_VERSION
|
||||
from _ssl import PROTOCOL_SSLv23, PROTOCOL_TLSv1, PROTOCOL_TLSv1_1, PROTOCOL_TLSv1_2
|
||||
''')
|
||||
|
||||
|
||||
register_module_extender(MANAGER, 'ssl', ssl_transform)
|
@ -0,0 +1,94 @@
|
||||
# Copyright (c) 2016 Claudiu Popa <pcmanticore@gmail.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
|
||||
|
||||
import sys
|
||||
import textwrap
|
||||
|
||||
import six
|
||||
|
||||
import astroid
|
||||
|
||||
|
||||
PY34 = sys.version_info >= (3, 4)
|
||||
PY36 = sys.version_info >= (3, 6)
|
||||
|
||||
|
||||
def _subprocess_transform():
|
||||
if six.PY3:
|
||||
communicate = (bytes('string', 'ascii'), bytes('string', 'ascii'))
|
||||
communicate_signature = 'def communicate(self, input=None, timeout=None)'
|
||||
if PY36:
|
||||
init = """
|
||||
def __init__(self, args, bufsize=0, executable=None,
|
||||
stdin=None, stdout=None, stderr=None,
|
||||
preexec_fn=None, close_fds=False, shell=False,
|
||||
cwd=None, env=None, universal_newlines=False,
|
||||
startupinfo=None, creationflags=0, restore_signals=True,
|
||||
start_new_session=False, pass_fds=(), *,
|
||||
encoding=None, errors=None):
|
||||
pass
|
||||
"""
|
||||
else:
|
||||
init = """
|
||||
def __init__(self, args, bufsize=0, executable=None,
|
||||
stdin=None, stdout=None, stderr=None,
|
||||
preexec_fn=None, close_fds=False, shell=False,
|
||||
cwd=None, env=None, universal_newlines=False,
|
||||
startupinfo=None, creationflags=0, restore_signals=True,
|
||||
start_new_session=False, pass_fds=()):
|
||||
pass
|
||||
"""
|
||||
else:
|
||||
communicate = ('string', 'string')
|
||||
communicate_signature = 'def communicate(self, input=None)'
|
||||
init = """
|
||||
def __init__(self, args, bufsize=0, executable=None,
|
||||
stdin=None, stdout=None, stderr=None,
|
||||
preexec_fn=None, close_fds=False, shell=False,
|
||||
cwd=None, env=None, universal_newlines=False,
|
||||
startupinfo=None, creationflags=0):
|
||||
pass
|
||||
"""
|
||||
if PY34:
|
||||
wait_signature = 'def wait(self, timeout=None)'
|
||||
else:
|
||||
wait_signature = 'def wait(self)'
|
||||
if six.PY3:
|
||||
ctx_manager = '''
|
||||
def __enter__(self): return self
|
||||
def __exit__(self, *args): pass
|
||||
'''
|
||||
else:
|
||||
ctx_manager = ''
|
||||
code = textwrap.dedent('''
|
||||
class Popen(object):
|
||||
returncode = pid = 0
|
||||
stdin = stdout = stderr = file()
|
||||
|
||||
%(communicate_signature)s:
|
||||
return %(communicate)r
|
||||
%(wait_signature)s:
|
||||
return self.returncode
|
||||
def poll(self):
|
||||
return self.returncode
|
||||
def send_signal(self, signal):
|
||||
pass
|
||||
def terminate(self):
|
||||
pass
|
||||
def kill(self):
|
||||
pass
|
||||
%(ctx_manager)s
|
||||
''' % {'communicate': communicate,
|
||||
'communicate_signature': communicate_signature,
|
||||
'wait_signature': wait_signature,
|
||||
'ctx_manager': ctx_manager})
|
||||
|
||||
init_lines = textwrap.dedent(init).splitlines()
|
||||
indented_init = '\n'.join([' ' * 4 + line for line in init_lines])
|
||||
code += indented_init
|
||||
return astroid.parse(code)
|
||||
|
||||
|
||||
astroid.register_module_extender(astroid.MANAGER, 'subprocess', _subprocess_transform)
|
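
A hedged sketch of what the subprocess brain above buys: because astroid's view of Popen is the stub class built in _subprocess_transform, the return value of communicate() can be inferred statically. Only APIs shown elsewhere in this commit (extract_node, infer, as_string) are used.

import astroid

call = astroid.extract_node('''
import subprocess
subprocess.Popen(['ls']).communicate()  #@
''')
# The stub's communicate() returns a literal two-element tuple, so the
# inferred node should be a Tuple rather than Uninferable.
result = next(call.infer())
print(result.as_string())
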
@ -0,0 +1,26 @@
|
||||
# Copyright (c) 2016 Claudiu Popa <pcmanticore@gmail.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
|
||||
|
||||
import astroid
|
||||
|
||||
|
||||
def _thread_transform():
|
||||
return astroid.parse('''
|
||||
class lock(object):
|
||||
def acquire(self, blocking=True):
|
||||
pass
|
||||
def release(self):
|
||||
pass
|
||||
def __enter__(self):
|
||||
return True
|
||||
def __exit__(self, *args):
|
||||
pass
|
||||
|
||||
def Lock():
|
||||
return lock()
|
||||
''')
|
||||
|
||||
|
||||
astroid.register_module_extender(astroid.MANAGER, 'threading', _thread_transform)
|
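
The threading hook above is an instance of the general register_module_extender pattern: parse a small stub module and merge its locals into the real module at inference time. A sketch of the same pattern for a hypothetical module name ('mymodule' is illustrative only, not part of this commit):

import astroid

def _mymodule_transform():
    # Stub definitions that get merged into the real module's locals.
    return astroid.parse('''
def helper():
    return 0
''')

# 'mymodule' is a placeholder; real brain plugins pass the actual module name.
astroid.register_module_extender(astroid.MANAGER, 'mymodule', _mymodule_transform)
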
@ -0,0 +1,97 @@
|
||||
# Copyright (c) 2016 David Euresti <david@dropbox.com>
|
||||
|
||||
"""Astroid hooks for typing.py support."""
|
||||
import textwrap
|
||||
|
||||
from astroid import (
|
||||
MANAGER, UseInferenceDefault, extract_node, inference_tip,
|
||||
nodes, InferenceError)
|
||||
from astroid.nodes import List, Tuple
|
||||
|
||||
|
||||
TYPING_NAMEDTUPLE_BASENAMES = {
|
||||
'NamedTuple',
|
||||
'typing.NamedTuple'
|
||||
}
|
||||
|
||||
|
||||
def infer_typing_namedtuple(node, context=None):
|
||||
"""Infer a typing.NamedTuple(...) call."""
|
||||
# This is essentially a namedtuple with different arguments
|
||||
# so we extract the args and infer a named tuple.
|
||||
try:
|
||||
func = next(node.func.infer())
|
||||
except InferenceError:
|
||||
raise UseInferenceDefault
|
||||
|
||||
if func.qname() != 'typing.NamedTuple':
|
||||
raise UseInferenceDefault
|
||||
|
||||
if len(node.args) != 2:
|
||||
raise UseInferenceDefault
|
||||
|
||||
if not isinstance(node.args[1], (List, Tuple)):
|
||||
raise UseInferenceDefault
|
||||
|
||||
names = []
|
||||
for elt in node.args[1].elts:
|
||||
if not isinstance(elt, (List, Tuple)):
|
||||
raise UseInferenceDefault
|
||||
if len(elt.elts) != 2:
|
||||
raise UseInferenceDefault
|
||||
names.append(elt.elts[0].as_string())
|
||||
|
||||
typename = node.args[0].as_string()
|
||||
node = extract_node('namedtuple(%(typename)s, (%(fields)s,)) ' %
|
||||
{'typename': typename, 'fields': ",".join(names)})
|
||||
return node.infer(context=context)
|
||||
|
||||
|
||||
def infer_typing_namedtuple_class(node, context=None):
|
||||
"""Infer a subclass of typing.NamedTuple"""
|
||||
|
||||
# Check if it has the corresponding bases
|
||||
annassigns_fields = [
|
||||
annassign.target.name for annassign in node.body
|
||||
if isinstance(annassign, nodes.AnnAssign)
|
||||
]
|
||||
code = textwrap.dedent('''
|
||||
from collections import namedtuple
|
||||
namedtuple({typename!r}, {fields!r})
|
||||
''').format(
|
||||
typename=node.name,
|
||||
fields=",".join(annassigns_fields)
|
||||
)
|
||||
node = extract_node(code)
|
||||
return node.infer(context=context)
|
||||
|
||||
|
||||
def has_namedtuple_base(node):
|
||||
"""Predicate for class inference tip
|
||||
|
||||
:type node: ClassDef
|
||||
:rtype: bool
|
||||
"""
|
||||
return set(node.basenames) & TYPING_NAMEDTUPLE_BASENAMES
|
||||
|
||||
|
||||
def looks_like_typing_namedtuple(node):
|
||||
func = node.func
|
||||
if isinstance(func, nodes.Attribute):
|
||||
return func.attrname == 'NamedTuple'
|
||||
if isinstance(func, nodes.Name):
|
||||
return func.name == 'NamedTuple'
|
||||
return False
|
||||
|
||||
|
||||
MANAGER.register_transform(
|
||||
nodes.Call,
|
||||
inference_tip(infer_typing_namedtuple),
|
||||
looks_like_typing_namedtuple
|
||||
)
|
||||
|
||||
MANAGER.register_transform(
|
||||
nodes.ClassDef,
|
||||
inference_tip(infer_typing_namedtuple_class),
|
||||
has_namedtuple_base
|
||||
)
|
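
A hedged sketch of the two spellings the hooks above cover; both should infer to a namedtuple-like class rather than Uninferable (the exact node types depend on the astroid version vendored here).

import astroid

functional = astroid.extract_node('''
import typing
typing.NamedTuple('Point', [('x', int), ('y', int)])  #@
''')

class_based = astroid.extract_node('''
import typing
class Point(typing.NamedTuple):  #@
    x: int
    y: int
''')

# Both forms are rewritten to a collections.namedtuple call before inference.
print(next(functional.infer()))
print(next(class_based.infer()))
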
@ -0,0 +1,22 @@
|
||||
# Copyright (c) 2017 Claudiu Popa <pcmanticore@gmail.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
|
||||
|
||||
"""Astroid hooks for the UUID module."""
|
||||
|
||||
|
||||
from astroid import MANAGER
|
||||
from astroid import nodes
|
||||
|
||||
|
||||
def _patch_uuid_class(node):
|
||||
# The .int member is patched using __dict__
|
||||
node.locals['int'] = [nodes.Const(0, parent=node)]
|
||||
|
||||
|
||||
MANAGER.register_transform(
|
||||
nodes.ClassDef,
|
||||
_patch_uuid_class,
|
||||
lambda node: node.qname() == 'uuid.UUID'
|
||||
)
|
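
A small sketch of the effect of the uuid hook above: uuid.UUID assigns .int at runtime, so the transform adds a Const(0) placeholder to the class, letting attribute inference succeed. Assumes a standard uuid module on the path.

import astroid

node = astroid.extract_node('''
import uuid
uuid.UUID('12345678123456781234567812345678').int  #@
''')
# Resolves via the patched class local instead of raising
# AttributeInferenceError.
print(next(node.infer()))
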
438
thesisenv/lib/python3.6/site-packages/astroid/builder.py
Normal file
@ -0,0 +1,438 @@
|
||||
# Copyright (c) 2006-2011, 2013-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
|
||||
# Copyright (c) 2014-2015 Google, Inc.
|
||||
# Copyright (c) 2014-2016 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2015-2016 Cara Vinson <ceridwenv@gmail.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
|
||||
|
||||
"""The AstroidBuilder makes astroid from living object and / or from _ast
|
||||
|
||||
The builder is not thread safe and can't be used to parse different sources
|
||||
at the same time.
|
||||
"""
|
||||
|
||||
import re
|
||||
import os
|
||||
import sys
|
||||
import textwrap
|
||||
import _ast
|
||||
|
||||
from astroid import bases
|
||||
from astroid import exceptions
|
||||
from astroid import manager
|
||||
from astroid import modutils
|
||||
from astroid import raw_building
|
||||
from astroid import rebuilder
|
||||
from astroid import nodes
|
||||
from astroid import util
|
||||
|
||||
# The name of the transient function that is used to
|
||||
# wrap expressions to be extracted when calling
|
||||
# extract_node.
|
||||
_TRANSIENT_FUNCTION = '__'
|
||||
|
||||
# The comment used to select a statement to be extracted
|
||||
# when calling extract_node.
|
||||
_STATEMENT_SELECTOR = '#@'
|
||||
|
||||
|
||||
def _parse(string):
|
||||
return compile(string, "<string>", 'exec', _ast.PyCF_ONLY_AST)
|
||||
|
||||
|
||||
if sys.version_info >= (3, 0):
|
||||
from tokenize import detect_encoding
|
||||
|
||||
def open_source_file(filename):
|
||||
with open(filename, 'rb') as byte_stream:
|
||||
encoding = detect_encoding(byte_stream.readline)[0]
|
||||
stream = open(filename, 'r', newline=None, encoding=encoding)
|
||||
data = stream.read()
|
||||
return stream, encoding, data
|
||||
|
||||
else:
|
||||
_ENCODING_RGX = re.compile(r"\s*#+.*coding[:=]\s*([-\w.]+)")
|
||||
|
||||
def _guess_encoding(string):
|
||||
"""get encoding from a python file as string or return None if not found"""
|
||||
# check for UTF-8 byte-order mark
|
||||
if string.startswith('\xef\xbb\xbf'):
|
||||
return 'UTF-8'
|
||||
for line in string.split('\n', 2)[:2]:
|
||||
# check for encoding declaration
|
||||
match = _ENCODING_RGX.match(line)
|
||||
if match is not None:
|
||||
return match.group(1)
|
||||
return None
|
||||
|
||||
def open_source_file(filename):
|
||||
"""get data for parsing a file"""
|
||||
stream = open(filename, 'U')
|
||||
data = stream.read()
|
||||
encoding = _guess_encoding(data)
|
||||
return stream, encoding, data
|
||||
|
||||
|
||||
MANAGER = manager.AstroidManager()
|
||||
|
||||
|
||||
def _can_assign_attr(node, attrname):
|
||||
try:
|
||||
slots = node.slots()
|
||||
except NotImplementedError:
|
||||
pass
|
||||
else:
|
||||
if slots and attrname not in set(slot.value for slot in slots):
|
||||
return False
|
||||
return True
|
||||
|
||||
|
||||
class AstroidBuilder(raw_building.InspectBuilder):
|
||||
"""Class for building an astroid tree from source code or from a live module.
|
||||
|
||||
The param *manager* specifies the manager class which should be used.
|
||||
If no manager is given, then the default one will be used. The
|
||||
param *apply_transforms* determines if the transforms should be
|
||||
applied after the tree was built from source or from a live object,
|
||||
by default being True.
|
||||
"""
|
||||
# pylint: disable=redefined-outer-name
|
||||
def __init__(self, manager=None, apply_transforms=True):
|
||||
super(AstroidBuilder, self).__init__()
|
||||
self._manager = manager or MANAGER
|
||||
self._apply_transforms = apply_transforms
|
||||
|
||||
def module_build(self, module, modname=None):
|
||||
"""Build an astroid from a living module instance."""
|
||||
node = None
|
||||
path = getattr(module, '__file__', None)
|
||||
if path is not None:
|
||||
path_, ext = os.path.splitext(modutils._path_from_filename(path))
|
||||
if ext in ('.py', '.pyc', '.pyo') and os.path.exists(path_ + '.py'):
|
||||
node = self.file_build(path_ + '.py', modname)
|
||||
if node is None:
|
||||
# this is a built-in module
|
||||
# get a partial representation by introspection
|
||||
node = self.inspect_build(module, modname=modname, path=path)
|
||||
if self._apply_transforms:
|
||||
# We have to handle transformation by ourselves since the
|
||||
# rebuilder isn't called for builtin nodes
|
||||
node = self._manager.visit_transforms(node)
|
||||
return node
|
||||
|
||||
def file_build(self, path, modname=None):
|
||||
"""Build astroid from a source code file (i.e. from an ast)
|
||||
|
||||
*path* is expected to be a python source file
|
||||
"""
|
||||
try:
|
||||
stream, encoding, data = open_source_file(path)
|
||||
except IOError as exc:
|
||||
util.reraise(exceptions.AstroidBuildingError(
|
||||
'Unable to load file {path}:\n{error}',
|
||||
modname=modname, path=path, error=exc))
|
||||
except (SyntaxError, LookupError) as exc:
|
||||
util.reraise(exceptions.AstroidSyntaxError(
|
||||
'Python 3 encoding specification error or unknown encoding:\n'
|
||||
'{error}', modname=modname, path=path, error=exc))
|
||||
except UnicodeError: # wrong encoding
|
||||
# detect_encoding returns utf-8 if no encoding specified
|
||||
util.reraise(exceptions.AstroidBuildingError(
|
||||
'Wrong or no encoding specified for {filename}.',
|
||||
filename=path))
|
||||
with stream:
|
||||
# get module name if necessary
|
||||
if modname is None:
|
||||
try:
|
||||
modname = '.'.join(modutils.modpath_from_file(path))
|
||||
except ImportError:
|
||||
modname = os.path.splitext(os.path.basename(path))[0]
|
||||
# build astroid representation
|
||||
module = self._data_build(data, modname, path)
|
||||
return self._post_build(module, encoding)
|
||||
|
||||
def string_build(self, data, modname='', path=None):
|
||||
"""Build astroid from source code string."""
|
||||
module = self._data_build(data, modname, path)
|
||||
module.file_bytes = data.encode('utf-8')
|
||||
return self._post_build(module, 'utf-8')
|
||||
|
||||
def _post_build(self, module, encoding):
|
||||
"""Handles encoding and delayed nodes after a module has been built"""
|
||||
module.file_encoding = encoding
|
||||
self._manager.cache_module(module)
|
||||
# post tree building steps after we stored the module in the cache:
|
||||
for from_node in module._import_from_nodes:
|
||||
if from_node.modname == '__future__':
|
||||
for symbol, _ in from_node.names:
|
||||
module.future_imports.add(symbol)
|
||||
self.add_from_names_to_locals(from_node)
|
||||
# handle delayed assattr nodes
|
||||
for delayed in module._delayed_assattr:
|
||||
self.delayed_assattr(delayed)
|
||||
|
||||
# Visit the transforms
|
||||
if self._apply_transforms:
|
||||
module = self._manager.visit_transforms(module)
|
||||
return module
|
||||
|
||||
def _data_build(self, data, modname, path):
|
||||
"""Build tree node from data and add some informations"""
|
||||
try:
|
||||
node = _parse(data + '\n')
|
||||
except (TypeError, ValueError, SyntaxError) as exc:
|
||||
util.reraise(exceptions.AstroidSyntaxError(
|
||||
'Parsing Python code failed:\n{error}',
|
||||
source=data, modname=modname, path=path, error=exc))
|
||||
if path is not None:
|
||||
node_file = os.path.abspath(path)
|
||||
else:
|
||||
node_file = '<?>'
|
||||
if modname.endswith('.__init__'):
|
||||
modname = modname[:-9]
|
||||
package = True
|
||||
else:
|
||||
package = path is not None and os.path.splitext(os.path.basename(path))[0] == '__init__'
|
||||
builder = rebuilder.TreeRebuilder(self._manager)
|
||||
module = builder.visit_module(node, modname, node_file, package)
|
||||
module._import_from_nodes = builder._import_from_nodes
|
||||
module._delayed_assattr = builder._delayed_assattr
|
||||
return module
|
||||
|
||||
def add_from_names_to_locals(self, node):
|
||||
"""Store imported names to the locals
|
||||
|
||||
Re-sort the locals if coming from a delayed node
|
||||
"""
|
||||
_key_func = lambda node: node.fromlineno
|
||||
def sort_locals(my_list):
|
||||
my_list.sort(key=_key_func)
|
||||
|
||||
for (name, asname) in node.names:
|
||||
if name == '*':
|
||||
try:
|
||||
imported = node.do_import_module()
|
||||
except exceptions.AstroidBuildingError:
|
||||
continue
|
||||
for name in imported.public_names():
|
||||
node.parent.set_local(name, node)
|
||||
sort_locals(node.parent.scope().locals[name])
|
||||
else:
|
||||
node.parent.set_local(asname or name, node)
|
||||
sort_locals(node.parent.scope().locals[asname or name])
|
||||
|
||||
def delayed_assattr(self, node):
|
||||
"""Visit a AssAttr node
|
||||
|
||||
This adds the name to the locals and handles member definitions.
|
||||
"""
|
||||
try:
|
||||
frame = node.frame()
|
||||
for inferred in node.expr.infer():
|
||||
if inferred is util.Uninferable:
|
||||
continue
|
||||
try:
|
||||
if inferred.__class__ is bases.Instance:
|
||||
inferred = inferred._proxied
|
||||
iattrs = inferred.instance_attrs
|
||||
if not _can_assign_attr(inferred, node.attrname):
|
||||
continue
|
||||
elif isinstance(inferred, bases.Instance):
|
||||
# Const, Tuple, ... we may be wrong, may be not, but
|
||||
# anyway we don't want to pollute builtin's namespace
|
||||
continue
|
||||
elif inferred.is_function:
|
||||
iattrs = inferred.instance_attrs
|
||||
else:
|
||||
iattrs = inferred.locals
|
||||
except AttributeError:
|
||||
# XXX log error
|
||||
continue
|
||||
values = iattrs.setdefault(node.attrname, [])
|
||||
if node in values:
|
||||
continue
|
||||
# get assign in __init__ first XXX useful ?
|
||||
if (frame.name == '__init__' and values and
|
||||
values[0].frame().name != '__init__'):
|
||||
values.insert(0, node)
|
||||
else:
|
||||
values.append(node)
|
||||
except exceptions.InferenceError:
|
||||
pass
|
||||
|
||||
|
||||
def build_namespace_package_module(name, path):
|
||||
return nodes.Module(name, doc='', path=path, package=True)
|
||||
|
||||
|
||||
def parse(code, module_name='', path=None, apply_transforms=True):
|
||||
"""Parses a source string in order to obtain an astroid AST from it
|
||||
|
||||
:param str code: The code for the module.
|
||||
:param str module_name: The name for the module, if any
|
||||
:param str path: The path for the module
|
||||
:param bool apply_transforms:
|
||||
Apply the transforms for the given code. Set this to False if you
|
||||
don't want the default transforms to be applied.
|
||||
"""
|
||||
code = textwrap.dedent(code)
|
||||
builder = AstroidBuilder(manager=MANAGER,
|
||||
apply_transforms=apply_transforms)
|
||||
return builder.string_build(code, modname=module_name, path=path)
|
||||
|
||||
|
||||
def _extract_expressions(node):
|
||||
"""Find expressions in a call to _TRANSIENT_FUNCTION and extract them.
|
||||
|
||||
The function walks the AST recursively to search for expressions that
|
||||
are wrapped into a call to _TRANSIENT_FUNCTION. If it finds such an
|
||||
expression, it completely removes the function call node from the tree,
|
||||
replacing it by the wrapped expression inside the parent.
|
||||
|
||||
:param node: An astroid node.
|
||||
:type node: astroid.bases.NodeNG
|
||||
:yields: The sequence of wrapped expressions found in the modified tree.
|
||||
"""
|
||||
if (isinstance(node, nodes.Call)
|
||||
and isinstance(node.func, nodes.Name)
|
||||
and node.func.name == _TRANSIENT_FUNCTION):
|
||||
real_expr = node.args[0]
|
||||
real_expr.parent = node.parent
|
||||
# Search for node in all _astng_fields (the fields checked when
|
||||
# get_children is called) of its parent. Some of those fields may
|
||||
# be lists or tuples, in which case the elements need to be checked.
|
||||
# When we find it, replace it by real_expr, so that the AST looks
|
||||
# like no call to _TRANSIENT_FUNCTION ever took place.
|
||||
for name in node.parent._astroid_fields:
|
||||
child = getattr(node.parent, name)
|
||||
if isinstance(child, (list, tuple)):
|
||||
for idx, compound_child in enumerate(child):
|
||||
if compound_child is node:
|
||||
child[idx] = real_expr
|
||||
elif child is node:
|
||||
setattr(node.parent, name, real_expr)
|
||||
yield real_expr
|
||||
else:
|
||||
for child in node.get_children():
|
||||
for result in _extract_expressions(child):
|
||||
yield result
|
||||
|
||||
|
||||
def _find_statement_by_line(node, line):
|
||||
"""Extracts the statement on a specific line from an AST.
|
||||
|
||||
If the line number of node matches line, it will be returned;
|
||||
otherwise its children are iterated and the function is called
|
||||
recursively.
|
||||
|
||||
:param node: An astroid node.
|
||||
:type node: astroid.bases.NodeNG
|
||||
:param line: The line number of the statement to extract.
|
||||
:type line: int
|
||||
:returns: The statement on the line, or None if no statement for the line
|
||||
can be found.
|
||||
:rtype: astroid.bases.NodeNG or None
|
||||
"""
|
||||
if isinstance(node, (nodes.ClassDef, nodes.FunctionDef)):
|
||||
# This is an inaccuracy in the AST: the nodes that can be
|
||||
# decorated do not carry explicit information on which line
|
||||
# the actual definition (class/def) starts, but .fromlineno seems to
|
||||
# be close enough.
|
||||
node_line = node.fromlineno
|
||||
else:
|
||||
node_line = node.lineno
|
||||
|
||||
if node_line == line:
|
||||
return node
|
||||
|
||||
for child in node.get_children():
|
||||
result = _find_statement_by_line(child, line)
|
||||
if result:
|
||||
return result
|
||||
|
||||
return None
|
||||
|
||||
|
||||
def extract_node(code, module_name=''):
|
||||
"""Parses some Python code as a module and extracts a designated AST node.
|
||||
|
||||
Statements:
|
||||
To extract one or more statement nodes, append #@ to the end of the line
|
||||
|
||||
Examples:
|
||||
>>> def x():
|
||||
>>> def y():
|
||||
>>> return 1 #@
|
||||
|
||||
The return statement will be extracted.
|
||||
|
||||
>>> class X(object):
|
||||
>>> def meth(self): #@
|
||||
>>> pass
|
||||
|
||||
The function object 'meth' will be extracted.
|
||||
|
||||
Expressions:
|
||||
To extract arbitrary expressions, surround them with the fake
|
||||
function call __(...). After parsing, the surrounded expression
|
||||
will be returned and the whole AST (accessible via the returned
|
||||
node's parent attribute) will look like the function call was
|
||||
never there in the first place.
|
||||
|
||||
Examples:
|
||||
>>> a = __(1)
|
||||
|
||||
The const node will be extracted.
|
||||
|
||||
>>> def x(d=__(foo.bar)): pass
|
||||
|
||||
The node containing the default argument will be extracted.
|
||||
|
||||
>>> def foo(a, b):
|
||||
>>> return 0 < __(len(a)) < b
|
||||
|
||||
The node containing the function call 'len' will be extracted.
|
||||
|
||||
If no statements or expressions are selected, the last toplevel
|
||||
statement will be returned.
|
||||
|
||||
If the selected statement is a discard statement, (i.e. an expression
|
||||
turned into a statement), the wrapped expression is returned instead.
|
||||
|
||||
For convenience, singleton lists are unpacked.
|
||||
|
||||
:param str code: A piece of Python code that is parsed as
|
||||
a module. Will be passed through textwrap.dedent first.
|
||||
:param str module_name: The name of the module.
|
||||
:returns: The designated node from the parse tree, or a list of nodes.
|
||||
:rtype: astroid.bases.NodeNG, or a list of nodes.
|
||||
"""
|
||||
def _extract(node):
|
||||
if isinstance(node, nodes.Expr):
|
||||
return node.value
|
||||
|
||||
return node
|
||||
|
||||
requested_lines = []
|
||||
for idx, line in enumerate(code.splitlines()):
|
||||
if line.strip().endswith(_STATEMENT_SELECTOR):
|
||||
requested_lines.append(idx + 1)
|
||||
|
||||
tree = parse(code, module_name=module_name)
|
||||
extracted = []
|
||||
if requested_lines:
|
||||
for line in requested_lines:
|
||||
extracted.append(_find_statement_by_line(tree, line))
|
||||
|
||||
# Modifies the tree.
|
||||
extracted.extend(_extract_expressions(tree))
|
||||
|
||||
if not extracted:
|
||||
extracted.append(tree.body[-1])
|
||||
|
||||
extracted = [_extract(node) for node in extracted]
|
||||
if len(extracted) == 1:
|
||||
return extracted[0]
|
||||
return extracted
|
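
A runnable sketch of the two selection mechanisms documented in extract_node above: the trailing '#@' statement selector and the __() expression wrapper.

import astroid

stmt = astroid.extract_node('''
def f():
    return 42  #@
''')
print(type(stmt).__name__)  # the selected return statement

expr = astroid.extract_node('x = __(1 + 2)')
print(expr.as_string())     # only the wrapped expression, '1 + 2'
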
108
thesisenv/lib/python3.6/site-packages/astroid/context.py
Normal file
@ -0,0 +1,108 @@
|
||||
# Copyright (c) 2015-2016 Cara Vinson <ceridwenv@gmail.com>
|
||||
# Copyright (c) 2015-2016 Claudiu Popa <pcmanticore@gmail.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
|
||||
|
||||
"""Various context related utilities, including inference and call contexts."""
|
||||
|
||||
import contextlib
|
||||
import copy
|
||||
import pprint
|
||||
|
||||
|
||||
class InferenceContext(object):
|
||||
"""Provide context for inference
|
||||
|
||||
Store already inferred nodes to save time
|
||||
Account for already visited nodes to stop infinite recursion
|
||||
"""
|
||||
|
||||
__slots__ = ('path', 'lookupname', 'callcontext', 'boundnode', 'inferred')
|
||||
|
||||
def __init__(self, path=None, inferred=None):
|
||||
self.path = path or set()
|
||||
"""Path of visited nodes and their lookupname
|
||||
:type: set(tuple(NodeNG, optional(str)))"""
|
||||
self.lookupname = None
|
||||
self.callcontext = None
|
||||
self.boundnode = None
|
||||
self.inferred = inferred or {}
|
||||
"""
|
||||
:type: dict(seq, seq)
|
||||
|
||||
Inferred node contexts to their mapped results
|
||||
Currently the key is (node, lookupname, callcontext, boundnode)
|
||||
and the value is a tuple of the inferred results
|
||||
"""
|
||||
|
||||
def push(self, node):
|
||||
"""Push node into inference path
|
||||
|
||||
:return: True if node is already in context path else False
|
||||
:rtype: bool
|
||||
|
||||
Allows one to see if the given node has already
|
||||
been looked at for this inference context"""
|
||||
name = self.lookupname
|
||||
if (node, name) in self.path:
|
||||
return True
|
||||
|
||||
self.path.add((node, name))
|
||||
return False
|
||||
|
||||
def clone(self):
|
||||
"""Clone inference path
|
||||
|
||||
For example, each side of a binary operation (BinOp)
|
||||
starts with the same context but diverges as each side is inferred,
|
||||
so the InferenceContext will need to be cloned"""
|
||||
# XXX copy lookupname/callcontext ?
|
||||
clone = InferenceContext(copy.copy(self.path), inferred=self.inferred)
|
||||
clone.callcontext = self.callcontext
|
||||
clone.boundnode = self.boundnode
|
||||
return clone
|
||||
|
||||
def cache_generator(self, key, generator):
|
||||
"""Cache result of generator into dictionary
|
||||
|
||||
Used to cache inference results"""
|
||||
results = []
|
||||
for result in generator:
|
||||
results.append(result)
|
||||
yield result
|
||||
|
||||
self.inferred[key] = tuple(results)
|
||||
|
||||
@contextlib.contextmanager
|
||||
def restore_path(self):
|
||||
path = set(self.path)
|
||||
yield
|
||||
self.path = path
|
||||
|
||||
def __str__(self):
|
||||
state = ('%s=%s' % (field, pprint.pformat(getattr(self, field),
|
||||
width=80 - len(field)))
|
||||
for field in self.__slots__)
|
||||
return '%s(%s)' % (type(self).__name__, ',\n '.join(state))
|
||||
|
||||
|
||||
class CallContext(object):
|
||||
"""Holds information for a call site."""
|
||||
|
||||
__slots__ = ('args', 'keywords')
|
||||
|
||||
def __init__(self, args, keywords=None):
|
||||
self.args = args
|
||||
if keywords:
|
||||
keywords = [(arg.arg, arg.value) for arg in keywords]
|
||||
else:
|
||||
keywords = []
|
||||
self.keywords = keywords
|
||||
|
||||
|
||||
def copy_context(context):
|
||||
if context is not None:
|
||||
return context.clone()
|
||||
|
||||
return InferenceContext()
|
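
A short sketch of how InferenceContext.push() cuts off recursion, which is what decorators.path_wrapper in the next file relies on.

import astroid
from astroid import context as contextmod

node = astroid.extract_node('x = 1')
ctx = contextmod.InferenceContext()
print(ctx.push(node))  # False: first visit of (node, lookupname)
print(ctx.push(node))  # True: already on the path, caller should stop
clone = ctx.clone()    # shares the inferred cache, copies the path
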
151
thesisenv/lib/python3.6/site-packages/astroid/decorators.py
Normal file
@ -0,0 +1,151 @@
|
||||
# Copyright (c) 2015-2016 Cara Vinson <ceridwenv@gmail.com>
|
||||
# Copyright (c) 2015 Florian Bruhin <me@the-compiler.org>
|
||||
# Copyright (c) 2015-2016 Claudiu Popa <pcmanticore@gmail.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
|
||||
|
||||
""" A few useful function/method decorators."""
|
||||
|
||||
import functools
|
||||
|
||||
import wrapt
|
||||
|
||||
from astroid import context as contextmod
|
||||
from astroid import exceptions
|
||||
from astroid import util
|
||||
|
||||
|
||||
@wrapt.decorator
|
||||
def cached(func, instance, args, kwargs):
|
||||
"""Simple decorator to cache result of method calls without args."""
|
||||
cache = getattr(instance, '__cache', None)
|
||||
if cache is None:
|
||||
instance.__cache = cache = {}
|
||||
try:
|
||||
return cache[func]
|
||||
except KeyError:
|
||||
cache[func] = result = func(*args, **kwargs)
|
||||
return result
|
||||
|
||||
|
||||
class cachedproperty(object):
|
||||
""" Provides a cached property equivalent to the stacking of
|
||||
@cached and @property, but more efficient.
|
||||
|
||||
After first usage, the <property_name> becomes part of the object's
|
||||
__dict__. Doing:
|
||||
|
||||
del obj.<property_name> empties the cache.
|
||||
|
||||
Idea taken from the pyramid_ framework and the mercurial_ project.
|
||||
|
||||
.. _pyramid: http://pypi.python.org/pypi/pyramid
|
||||
.. _mercurial: http://pypi.python.org/pypi/Mercurial
|
||||
"""
|
||||
__slots__ = ('wrapped',)
|
||||
|
||||
def __init__(self, wrapped):
|
||||
try:
|
||||
wrapped.__name__
|
||||
except AttributeError:
|
||||
util.reraise(TypeError('%s must have a __name__ attribute'
|
||||
% wrapped))
|
||||
self.wrapped = wrapped
|
||||
|
||||
@property
|
||||
def __doc__(self):
|
||||
doc = getattr(self.wrapped, '__doc__', None)
|
||||
return ('<wrapped by the cachedproperty decorator>%s'
|
||||
% ('\n%s' % doc if doc else ''))
|
||||
|
||||
def __get__(self, inst, objtype=None):
|
||||
if inst is None:
|
||||
return self
|
||||
val = self.wrapped(inst)
|
||||
setattr(inst, self.wrapped.__name__, val)
|
||||
return val
|
||||
|
||||
|
||||
def path_wrapper(func):
|
||||
"""return the given infer function wrapped to handle the path
|
||||
|
||||
Used to stop inference if the node has already been looked
|
||||
at for a given `InferenceContext` to prevent infinite recursion
|
||||
"""
|
||||
# TODO: switch this to wrapt after the monkey-patching is fixed (ceridwen)
|
||||
@functools.wraps(func)
|
||||
def wrapped(node, context=None, _func=func, **kwargs):
|
||||
"""wrapper function handling context"""
|
||||
if context is None:
|
||||
context = contextmod.InferenceContext()
|
||||
if context.push(node):
|
||||
return
|
||||
|
||||
yielded = set()
|
||||
generator = _func(node, context, **kwargs)
|
||||
try:
|
||||
while True:
|
||||
res = next(generator)
|
||||
# unproxy only true instance, not const, tuple, dict...
|
||||
if res.__class__.__name__ == 'Instance':
|
||||
ares = res._proxied
|
||||
else:
|
||||
ares = res
|
||||
if ares not in yielded:
|
||||
yield res
|
||||
yielded.add(ares)
|
||||
except StopIteration as error:
|
||||
# Explicit StopIteration to return error information, see
|
||||
# comment in raise_if_nothing_inferred.
|
||||
if error.args:
|
||||
raise StopIteration(error.args[0])
|
||||
else:
|
||||
raise StopIteration
|
||||
|
||||
return wrapped
|
||||
|
||||
|
||||
@wrapt.decorator
|
||||
def yes_if_nothing_inferred(func, instance, args, kwargs):
|
||||
inferred = False
|
||||
for node in func(*args, **kwargs):
|
||||
inferred = True
|
||||
yield node
|
||||
if not inferred:
|
||||
yield util.Uninferable
|
||||
|
||||
|
||||
@wrapt.decorator
|
||||
def raise_if_nothing_inferred(func, instance, args, kwargs):
|
||||
'''All generators wrapped with raise_if_nothing_inferred *must*
|
||||
explicitly raise StopIteration with information to create an
|
||||
appropriate structured InferenceError.
|
||||
|
||||
'''
|
||||
# TODO: Explicitly raising StopIteration in a generator will cause
|
||||
# a RuntimeError in Python >=3.7, as per
|
||||
# http://legacy.python.org/dev/peps/pep-0479/ . Before 3.7 is
|
||||
# released, this code will need to use one of four possible
|
||||
# solutions: a decorator that restores the current behavior as
|
||||
# described in
|
||||
# http://legacy.python.org/dev/peps/pep-0479/#sub-proposal-decorator-to-explicitly-request-current-behaviour
|
||||
# , dynamic imports or exec to generate different code for
|
||||
# different versions, drop support for all Python versions <3.3,
|
||||
# or refactoring to change how these decorators work. In any
|
||||
# event, after dropping support for Python <3.3 this code should
|
||||
# be refactored to use `yield from`.
|
||||
inferred = False
|
||||
try:
|
||||
generator = func(*args, **kwargs)
|
||||
while True:
|
||||
yield next(generator)
|
||||
inferred = True
|
||||
except StopIteration as error:
|
||||
if not inferred:
|
||||
if error.args:
|
||||
# pylint: disable=not-a-mapping
|
||||
raise exceptions.InferenceError(**error.args[0])
|
||||
else:
|
||||
raise exceptions.InferenceError(
|
||||
'StopIteration raised without any error information.')
|
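
A sketch of the cachedproperty descriptor defined above: the wrapped method runs once, then its result shadows the descriptor in the instance __dict__ until it is deleted.

from astroid.decorators import cachedproperty

class Demo(object):
    calls = 0

    @cachedproperty
    def expensive(self):
        Demo.calls += 1
        return 42

d = Demo()
print(d.expensive, d.expensive, Demo.calls)  # 42 42 1 -- computed once
del d.expensive  # empties the cache, as the docstring describes
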
216
thesisenv/lib/python3.6/site-packages/astroid/exceptions.py
Normal file
@ -0,0 +1,216 @@
|
||||
# Copyright (c) 2007, 2009-2010, 2013 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
|
||||
# Copyright (c) 2014 Google, Inc.
|
||||
# Copyright (c) 2015-2016 Cara Vinson <ceridwenv@gmail.com>
|
||||
# Copyright (c) 2015-2016 Claudiu Popa <pcmanticore@gmail.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
|
||||
|
||||
"""this module contains exceptions used in the astroid library
|
||||
"""
|
||||
from astroid import util
|
||||
|
||||
|
||||
class AstroidError(Exception):
|
||||
"""base exception class for all astroid related exceptions
|
||||
|
||||
AstroidError and its subclasses are structured, intended to hold
|
||||
objects representing state when the exception is thrown. Field
|
||||
values are passed to the constructor as keyword-only arguments.
|
||||
Each subclass has its own set of standard fields, but use your
|
||||
best judgment to decide whether a specific exception instance
|
||||
needs more or fewer fields for debugging. Field values may be
|
||||
used to lazily generate the error message: self.message.format()
|
||||
will be called with the field names and values supplied as keyword
|
||||
arguments.
|
||||
"""
|
||||
def __init__(self, message='', **kws):
|
||||
super(AstroidError, self).__init__(message)
|
||||
self.message = message
|
||||
for key, value in kws.items():
|
||||
setattr(self, key, value)
|
||||
|
||||
def __str__(self):
|
||||
return self.message.format(**vars(self))
|
||||
|
||||
|
||||
class AstroidBuildingError(AstroidError):
|
||||
"""exception class when we are unable to build an astroid representation
|
||||
|
||||
Standard attributes:
|
||||
modname: Name of the module that AST construction failed for.
|
||||
error: Exception raised during construction.
|
||||
"""
|
||||
|
||||
def __init__(self, message='Failed to import module {modname}.', **kws):
|
||||
super(AstroidBuildingError, self).__init__(message, **kws)
|
||||
|
||||
|
||||
class AstroidImportError(AstroidBuildingError):
|
||||
"""Exception class used when a module can't be imported by astroid."""
|
||||
|
||||
|
||||
class TooManyLevelsError(AstroidImportError):
|
||||
"""Exception class which is raised when a relative import was beyond the top-level.
|
||||
|
||||
Standard attributes:
|
||||
level: The level which was attempted.
|
||||
name: the name of the module on which the relative import was attempted.
|
||||
"""
|
||||
level = None
|
||||
name = None
|
||||
|
||||
def __init__(self, message='Relative import with too many levels '
|
||||
'({level}) for module {name!r}', **kws):
|
||||
super(TooManyLevelsError, self).__init__(message, **kws)
|
||||
|
||||
|
||||
class AstroidSyntaxError(AstroidBuildingError):
|
||||
"""Exception class used when a module can't be parsed."""
|
||||
|
||||
|
||||
class NoDefault(AstroidError):
|
||||
"""raised by function's `default_value` method when an argument has
|
||||
no default value
|
||||
|
||||
Standard attributes:
|
||||
func: Function node.
|
||||
name: Name of argument without a default.
|
||||
"""
|
||||
func = None
|
||||
name = None
|
||||
|
||||
def __init__(self, message='{func!r} has no default for {name!r}.', **kws):
|
||||
super(NoDefault, self).__init__(message, **kws)
|
||||
|
||||
|
||||
class ResolveError(AstroidError):
|
||||
"""Base class of astroid resolution/inference error.
|
||||
|
||||
ResolveError is not intended to be raised.
|
||||
|
||||
Standard attributes:
|
||||
context: InferenceContext object.
|
||||
"""
|
||||
context = None
|
||||
|
||||
|
||||
class MroError(ResolveError):
|
||||
"""Error raised when there is a problem with method resolution of a class.
|
||||
|
||||
Standard attributes:
|
||||
mros: A sequence of sequences containing ClassDef nodes.
|
||||
cls: ClassDef node whose MRO resolution failed.
|
||||
context: InferenceContext object.
|
||||
"""
|
||||
mros = ()
|
||||
cls = None
|
||||
|
||||
def __str__(self):
|
||||
mro_names = ", ".join("({})".format(", ".join(b.name for b in m))
|
||||
for m in self.mros)
|
||||
return self.message.format(mros=mro_names, cls=self.cls)
|
||||
|
||||
|
||||
class DuplicateBasesError(MroError):
|
||||
"""Error raised when there are duplicate bases in the same class bases."""
|
||||
|
||||
|
||||
class InconsistentMroError(MroError):
|
||||
"""Error raised when a class's MRO is inconsistent."""
|
||||
|
||||
|
||||
class SuperError(ResolveError):
|
||||
|
||||
"""Error raised when there is a problem with a super call.
|
||||
|
||||
Standard attributes:
|
||||
super_: The Super instance that raised the exception.
|
||||
context: InferenceContext object.
|
||||
"""
|
||||
super_ = None
|
||||
|
||||
def __str__(self):
|
||||
return self.message.format(**vars(self.super_))
|
||||
|
||||
|
||||
class InferenceError(ResolveError):
|
||||
"""raised when we are unable to infer a node
|
||||
|
||||
Standard attributes:
|
||||
node: The node inference was called on.
|
||||
context: InferenceContext object.
|
||||
"""
|
||||
node = None
|
||||
context = None
|
||||
|
||||
def __init__(self, message='Inference failed for {node!r}.', **kws):
|
||||
super(InferenceError, self).__init__(message, **kws)
|
||||
|
||||
|
||||
# Why does this inherit from InferenceError rather than ResolveError?
|
||||
# Changing it causes some inference tests to fail.
|
||||
class NameInferenceError(InferenceError):
|
||||
"""Raised when a name lookup fails, corresponds to NameError.
|
||||
|
||||
Standard attributes:
|
||||
name: The name for which lookup failed, as a string.
|
||||
scope: The node representing the scope in which the lookup occurred.
|
||||
context: InferenceContext object.
|
||||
"""
|
||||
name = None
|
||||
scope = None
|
||||
|
||||
def __init__(self, message='{name!r} not found in {scope!r}.', **kws):
|
||||
super(NameInferenceError, self).__init__(message, **kws)
|
||||
|
||||
|
||||
class AttributeInferenceError(ResolveError):
|
||||
"""Raised when an attribute lookup fails, corresponds to AttributeError.
|
||||
|
||||
Standard attributes:
|
||||
target: The node for which lookup failed.
|
||||
attribute: The attribute for which lookup failed, as a string.
|
||||
context: InferenceContext object.
|
||||
"""
|
||||
target = None
|
||||
attribute = None
|
||||
|
||||
def __init__(self, message='{attribute!r} not found on {target!r}.', **kws):
|
||||
super(AttributeInferenceError, self).__init__(message, **kws)
|
||||
|
||||
|
||||
class UseInferenceDefault(Exception):
|
||||
"""exception to be raised in custom inference function to indicate that it
|
||||
should go back to the default behaviour
|
||||
"""
|
||||
|
||||
|
||||
class _NonDeducibleTypeHierarchy(Exception):
|
||||
"""Raised when is_subtype / is_supertype can't deduce the relation between two types."""
|
||||
|
||||
|
||||
class AstroidIndexError(AstroidError):
|
||||
"""Raised when an Indexable / Mapping does not have an index / key."""
|
||||
|
||||
|
||||
class AstroidTypeError(AstroidError):
|
||||
"""Raised when a TypeError would be expected in Python code."""
|
||||
|
||||
|
||||
class InferenceOverwriteError(AstroidError):
|
||||
"""Raised when an inference tip is overwritten
|
||||
|
||||
Currently only used for debugging.
|
||||
"""
|
||||
|
||||
|
||||
# Backwards-compatibility aliases
|
||||
OperationError = util.BadOperationMessage
|
||||
UnaryOperationError = util.BadUnaryOperationMessage
|
||||
BinaryOperationError = util.BadBinaryOperationMessage
|
||||
|
||||
SuperArgumentTypeError = SuperError
|
||||
UnresolvableName = NameInferenceError
|
||||
NotFoundError = AttributeInferenceError
|
||||
AstroidBuildingException = AstroidBuildingError
|
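
A sketch of the structured-exception convention described in AstroidError's docstring above: keyword fields are stored on the instance and the message is formatted lazily.

from astroid import exceptions

try:
    raise exceptions.AstroidBuildingError(modname='some_missing_module')
except exceptions.AstroidBuildingError as exc:
    # str() calls message.format(**vars(exc)), so the module name appears here.
    print(str(exc))
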
174
thesisenv/lib/python3.6/site-packages/astroid/helpers.py
Normal file
@ -0,0 +1,174 @@
|
||||
# Copyright (c) 2015-2016 Cara Vinson <ceridwenv@gmail.com>
|
||||
# Copyright (c) 2015-2016 Claudiu Popa <pcmanticore@gmail.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
|
||||
|
||||
|
||||
"""
|
||||
Various helper utilities.
|
||||
"""
|
||||
|
||||
import six
|
||||
|
||||
from astroid import bases
|
||||
from astroid import context as contextmod
|
||||
from astroid import exceptions
|
||||
from astroid import manager
|
||||
from astroid import nodes
|
||||
from astroid import raw_building
|
||||
from astroid import scoped_nodes
|
||||
from astroid import util
|
||||
|
||||
|
||||
BUILTINS = six.moves.builtins.__name__
|
||||
|
||||
|
||||
def _build_proxy_class(cls_name, builtins):
|
||||
proxy = raw_building.build_class(cls_name)
|
||||
proxy.parent = builtins
|
||||
return proxy
|
||||
|
||||
|
||||
def _function_type(function, builtins):
|
||||
if isinstance(function, scoped_nodes.Lambda):
|
||||
if function.root().name == BUILTINS:
|
||||
cls_name = 'builtin_function_or_method'
|
||||
else:
|
||||
cls_name = 'function'
|
||||
elif isinstance(function, bases.BoundMethod):
|
||||
if six.PY2:
|
||||
cls_name = 'instancemethod'
|
||||
else:
|
||||
cls_name = 'method'
|
||||
elif isinstance(function, bases.UnboundMethod):
|
||||
if six.PY2:
|
||||
cls_name = 'instancemethod'
|
||||
else:
|
||||
cls_name = 'function'
|
||||
return _build_proxy_class(cls_name, builtins)
|
||||
|
||||
|
||||
def _object_type(node, context=None):
|
||||
astroid_manager = manager.AstroidManager()
|
||||
builtins = astroid_manager.astroid_cache[BUILTINS]
|
||||
context = context or contextmod.InferenceContext()
|
||||
|
||||
for inferred in node.infer(context=context):
|
||||
if isinstance(inferred, scoped_nodes.ClassDef):
|
||||
if inferred.newstyle:
|
||||
metaclass = inferred.metaclass()
|
||||
if metaclass:
|
||||
yield metaclass
|
||||
continue
|
||||
yield builtins.getattr('type')[0]
|
||||
elif isinstance(inferred, (scoped_nodes.Lambda, bases.UnboundMethod)):
|
||||
yield _function_type(inferred, builtins)
|
||||
elif isinstance(inferred, scoped_nodes.Module):
|
||||
yield _build_proxy_class('module', builtins)
|
||||
else:
|
||||
yield inferred._proxied
|
||||
|
||||
|
||||
def object_type(node, context=None):
|
||||
"""Obtain the type of the given node
|
||||
|
||||
This is used to implement the ``type`` builtin, which means that it's
|
||||
used for inferring type calls, as well as used in a couple of other places
|
||||
in the inference.
|
||||
The node will be inferred first, so this function can support all
|
||||
sorts of objects, as long as they support inference.
|
||||
"""
|
||||
|
||||
try:
|
||||
types = set(_object_type(node, context))
|
||||
except exceptions.InferenceError:
|
||||
return util.Uninferable
|
||||
if len(types) > 1 or not types:
|
||||
return util.Uninferable
|
||||
return list(types)[0]
|
||||
|
||||
|
||||
def safe_infer(node, context=None):
|
||||
"""Return the inferred value for the given node.
|
||||
|
||||
Return None if inference failed or if there is some ambiguity (more than
|
||||
one node has been inferred).
|
||||
"""
|
||||
try:
|
||||
inferit = node.infer(context=context)
|
||||
value = next(inferit)
|
||||
except exceptions.InferenceError:
|
||||
return None
|
||||
try:
|
||||
next(inferit)
|
||||
return None # None if there is ambiguity on the inferred node
|
||||
except exceptions.InferenceError:
|
||||
return None  # there is some kind of ambiguity
|
||||
except StopIteration:
|
||||
return value
|
||||
|
||||
|
||||
def has_known_bases(klass, context=None):
|
||||
"""Return true if all base classes of a class could be inferred."""
|
||||
try:
|
||||
return klass._all_bases_known
|
||||
except AttributeError:
|
||||
pass
|
||||
for base in klass.bases:
|
||||
result = safe_infer(base, context=context)
|
||||
# TODO: check for A->B->A->B pattern in class structure too?
|
||||
if (not isinstance(result, scoped_nodes.ClassDef) or
|
||||
result is klass or
|
||||
not has_known_bases(result, context=context)):
|
||||
klass._all_bases_known = False
|
||||
return False
|
||||
klass._all_bases_known = True
|
||||
return True
|
||||
|
||||
|
||||
def _type_check(type1, type2):
|
||||
if not all(map(has_known_bases, (type1, type2))):
|
||||
raise exceptions._NonDeducibleTypeHierarchy
|
||||
|
||||
if not all([type1.newstyle, type2.newstyle]):
|
||||
return False
|
||||
try:
|
||||
return type1 in type2.mro()[:-1]
|
||||
except exceptions.MroError:
|
||||
# The MRO is invalid.
|
||||
raise exceptions._NonDeducibleTypeHierarchy
|
||||
|
||||
|
||||
def is_subtype(type1, type2):
|
||||
"""Check if *type1* is a subtype of *typ2*."""
|
||||
return _type_check(type2, type1)
|
||||
|
||||
|
||||
def is_supertype(type1, type2):
|
||||
"""Check if *type2* is a supertype of *type1*."""
|
||||
return _type_check(type1, type2)
|
||||
|
||||
|
||||
def class_instance_as_index(node):
|
||||
"""Get the value as an index for the given instance.
|
||||
|
||||
If an instance provides an __index__ method, then it can
|
||||
be used in some scenarios where an integer is expected,
|
||||
for instance when multiplying or subscripting a list.
|
||||
"""
|
||||
context = contextmod.InferenceContext()
|
||||
context.callcontext = contextmod.CallContext(args=[node])
|
||||
|
||||
try:
|
||||
for inferred in node.igetattr('__index__', context=context):
|
||||
if not isinstance(inferred, bases.BoundMethod):
|
||||
continue
|
||||
|
||||
for result in inferred.infer_call_result(node, context=context):
|
||||
if (isinstance(result, nodes.Const)
|
||||
and isinstance(result.value, int)):
|
||||
return result
|
||||
except exceptions.InferenceError:
|
||||
pass
|
||||
return None
|
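
A sketch of the two most commonly used helpers defined above: safe_infer returns a single unambiguous result or None, and object_type mirrors the type() builtin on astroid nodes.

import astroid
from astroid import helpers

node = astroid.extract_node('1 + 1')
print(helpers.safe_infer(node))   # a Const node, or None if inference is ambiguous
print(helpers.object_type(node))  # the ClassDef for int, or Uninferable
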
812
thesisenv/lib/python3.6/site-packages/astroid/inference.py
Normal file
@ -0,0 +1,812 @@
|
||||
# Copyright (c) 2006-2011, 2013-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
|
||||
# Copyright (c) 2013-2014 Google, Inc.
|
||||
# Copyright (c) 2014-2016 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2015-2016 Cara Vinson <ceridwenv@gmail.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
|
||||
|
||||
"""this module contains a set of functions to handle inference on astroid trees
|
||||
"""
|
||||
|
||||
import functools
|
||||
import itertools
|
||||
import operator
|
||||
|
||||
from astroid import bases
|
||||
from astroid import context as contextmod
|
||||
from astroid import exceptions
|
||||
from astroid import decorators
|
||||
from astroid import helpers
|
||||
from astroid import manager
|
||||
from astroid import nodes
|
||||
from astroid.interpreter import dunder_lookup
|
||||
from astroid import protocols
|
||||
from astroid import util
|
||||
|
||||
|
||||
MANAGER = manager.AstroidManager()
|
||||
|
||||
|
||||
# .infer method ###############################################################
|
||||
|
||||
|
||||
def infer_end(self, context=None):
|
||||
"""inference's end for node such as Module, ClassDef, FunctionDef,
|
||||
Const...
|
||||
|
||||
"""
|
||||
yield self
|
||||
nodes.Module._infer = infer_end
|
||||
nodes.ClassDef._infer = infer_end
|
||||
nodes.FunctionDef._infer = infer_end
|
||||
nodes.Lambda._infer = infer_end
|
||||
nodes.Const._infer = infer_end
|
||||
nodes.Slice._infer = infer_end
|
||||
|
||||
|
||||
def infer_seq(self, context=None):
|
||||
if not any(isinstance(e, nodes.Starred) for e in self.elts):
|
||||
yield self
|
||||
else:
|
||||
values = _infer_seq(self, context)
|
||||
new_seq = type(self)(self.lineno, self.col_offset, self.parent)
|
||||
new_seq.postinit(values)
|
||||
yield new_seq
|
||||
|
||||
|
||||
def _infer_seq(node, context=None):
|
||||
"""Infer all values based on _BaseContainer.elts"""
|
||||
values = []
|
||||
|
||||
for elt in node.elts:
|
||||
if isinstance(elt, nodes.Starred):
|
||||
starred = helpers.safe_infer(elt.value, context)
|
||||
if starred in (None, util.Uninferable):
|
||||
raise exceptions.InferenceError(node=node,
|
||||
context=context)
|
||||
if not hasattr(starred, 'elts'):
|
||||
raise exceptions.InferenceError(node=node,
|
||||
context=context)
|
||||
values.extend(_infer_seq(starred))
|
||||
else:
|
||||
values.append(elt)
|
||||
return values
|
||||
|
||||
|
||||
nodes.List._infer = infer_seq
|
||||
nodes.Tuple._infer = infer_seq
|
||||
nodes.Set._infer = infer_seq
|
||||
|
||||
|
||||
def infer_map(self, context=None):
|
||||
if not any(isinstance(k, nodes.DictUnpack) for k, _ in self.items):
|
||||
yield self
|
||||
else:
|
||||
items = _infer_map(self, context)
|
||||
new_seq = type(self)(self.lineno, self.col_offset, self.parent)
|
||||
new_seq.postinit(list(items.items()))
|
||||
yield new_seq
|
||||
|
||||
|
||||
def _infer_map(node, context):
|
||||
"""Infer all values based on Dict.items"""
|
||||
values = {}
|
||||
for name, value in node.items:
|
||||
if isinstance(name, nodes.DictUnpack):
|
||||
double_starred = helpers.safe_infer(value, context)
|
||||
if double_starred in (None, util.Uninferable):
|
||||
raise exceptions.InferenceError
|
||||
if not isinstance(double_starred, nodes.Dict):
|
||||
raise exceptions.InferenceError(node=node,
|
||||
context=context)
|
||||
values.update(_infer_map(double_starred, context))
|
||||
else:
|
||||
key = helpers.safe_infer(name, context=context)
|
||||
value = helpers.safe_infer(value, context=context)
|
||||
if any(elem in (None, util.Uninferable) for elem in (key, value)):
|
||||
raise exceptions.InferenceError(node=node,
|
||||
context=context)
|
||||
values[key] = value
|
||||
return values
|
||||
|
||||
|
||||
nodes.Dict._infer = infer_map
|
||||
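
A sketch of what infer_seq/infer_map above do with unpacking inside literals: the starred and double-starred parts are flattened into a new container node during inference.

import astroid

seq = astroid.extract_node('[1, *[2, 3]]')
print(next(seq.infer()).as_string())      # list with the starred part expanded

mapping = astroid.extract_node("{'a': 1, **{'b': 2}}")
print(next(mapping.infer()).as_string())  # dict with the unpacked items merged
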
|
||||
|
||||
def _higher_function_scope(node):
|
||||
""" Search for the first function which encloses the given
|
||||
scope. This can be used for looking up in that function's
|
||||
scope, in case looking up in a lower scope for a particular
|
||||
name fails.
|
||||
|
||||
:param node: A scope node.
|
||||
:returns:
|
||||
``None``, if no parent function scope was found,
|
||||
otherwise an instance of :class:`astroid.scoped_nodes.Function`,
|
||||
which encloses the given node.
|
||||
"""
|
||||
current = node
|
||||
while current.parent and not isinstance(current.parent, nodes.FunctionDef):
|
||||
current = current.parent
|
||||
if current and current.parent:
|
||||
return current.parent
|
||||
return None
|
||||
|
||||
def infer_name(self, context=None):
|
||||
"""infer a Name: use name lookup rules"""
|
||||
frame, stmts = self.lookup(self.name)
|
||||
if not stmts:
|
||||
# Try to see if the name is enclosed in a nested function
|
||||
# and use the higher (first function) scope for searching.
|
||||
# TODO: should this be promoted to other nodes as well?
|
||||
parent_function = _higher_function_scope(self.scope())
|
||||
if parent_function:
|
||||
_, stmts = parent_function.lookup(self.name)
|
||||
|
||||
if not stmts:
|
||||
raise exceptions.NameInferenceError(name=self.name,
|
||||
scope=self.scope(),
|
||||
context=context)
|
||||
context = context.clone()
|
||||
context.lookupname = self.name
|
||||
return bases._infer_stmts(stmts, context, frame)
|
||||
nodes.Name._infer = decorators.path_wrapper(infer_name)
|
||||
nodes.AssignName.infer_lhs = infer_name # won't work with a path wrapper
|
||||
|
||||
|
||||
@decorators.raise_if_nothing_inferred
|
||||
@decorators.path_wrapper
|
||||
def infer_call(self, context=None):
|
||||
"""infer a Call node by trying to guess what the function returns"""
|
||||
callcontext = context.clone()
|
||||
callcontext.callcontext = contextmod.CallContext(args=self.args,
|
||||
keywords=self.keywords)
|
||||
callcontext.boundnode = None
|
||||
for callee in self.func.infer(context):
|
||||
if callee is util.Uninferable:
|
||||
yield callee
|
||||
continue
|
||||
try:
|
||||
if hasattr(callee, 'infer_call_result'):
|
||||
for inferred in callee.infer_call_result(self, callcontext):
|
||||
yield inferred
|
||||
except exceptions.InferenceError:
|
||||
## XXX log error ?
|
||||
continue
|
||||
# Explicit StopIteration to return error information, see comment
|
||||
# in raise_if_nothing_inferred.
|
||||
raise StopIteration(dict(node=self, context=context))
|
||||
nodes.Call._infer = infer_call
|
||||
|
||||
|
||||
@decorators.path_wrapper
|
||||
def infer_import(self, context=None, asname=True):
|
||||
"""infer an Import node: return the imported module/object"""
|
||||
name = context.lookupname
|
||||
if name is None:
|
||||
raise exceptions.InferenceError(node=self, context=context)
|
||||
|
||||
try:
|
||||
if asname:
|
||||
yield self.do_import_module(self.real_name(name))
|
||||
else:
|
||||
yield self.do_import_module(name)
|
||||
except exceptions.AstroidBuildingError as exc:
|
||||
util.reraise(exceptions.InferenceError(node=self, error=exc,
|
||||
context=context))
|
||||
|
||||
nodes.Import._infer = infer_import
|
||||
|
||||
|
||||
def infer_name_module(self, name):
|
||||
context = contextmod.InferenceContext()
|
||||
context.lookupname = name
|
||||
return self.infer(context, asname=False)
|
||||
nodes.Import.infer_name_module = infer_name_module
|
||||
|
||||
|
||||
@decorators.path_wrapper
|
||||
def infer_import_from(self, context=None, asname=True):
|
||||
"""infer a ImportFrom node: return the imported module/object"""
|
||||
name = context.lookupname
|
||||
if name is None:
|
||||
raise exceptions.InferenceError(node=self, context=context)
|
||||
if asname:
|
||||
name = self.real_name(name)
|
||||
|
||||
try:
|
||||
module = self.do_import_module()
|
||||
except exceptions.AstroidBuildingError as exc:
|
||||
util.reraise(exceptions.InferenceError(node=self, error=exc,
|
||||
context=context))
|
||||
|
||||
try:
|
||||
context = contextmod.copy_context(context)
|
||||
context.lookupname = name
|
||||
stmts = module.getattr(name, ignore_locals=module is self.root())
|
||||
return bases._infer_stmts(stmts, context)
|
||||
except exceptions.AttributeInferenceError as error:
|
||||
util.reraise(exceptions.InferenceError(
|
||||
error.message, target=self, attribute=name, context=context))
|
||||
nodes.ImportFrom._infer = infer_import_from
|
||||
|
||||
|
||||
@decorators.raise_if_nothing_inferred
|
||||
def infer_attribute(self, context=None):
|
||||
"""infer an Attribute node by using getattr on the associated object"""
|
||||
for owner in self.expr.infer(context):
|
||||
if owner is util.Uninferable:
|
||||
yield owner
|
||||
continue
|
||||
|
||||
if context and context.boundnode:
|
||||
# This handles the situation where the attribute is accessed through a subclass
|
||||
# of a base class and the attribute is defined at the base class's level,
|
||||
# by taking in consideration a redefinition in the subclass.
|
||||
if (isinstance(owner, bases.Instance)
|
||||
and isinstance(context.boundnode, bases.Instance)):
|
||||
try:
|
||||
if helpers.is_subtype(helpers.object_type(context.boundnode),
|
||||
helpers.object_type(owner)):
|
||||
owner = context.boundnode
|
||||
except exceptions._NonDeducibleTypeHierarchy:
|
||||
# Can't determine anything useful.
|
||||
pass
|
||||
|
||||
try:
|
||||
context.boundnode = owner
|
||||
for obj in owner.igetattr(self.attrname, context):
|
||||
yield obj
|
||||
context.boundnode = None
|
||||
except (exceptions.AttributeInferenceError, exceptions.InferenceError):
|
||||
context.boundnode = None
|
||||
except AttributeError:
|
||||
# XXX method / function
|
||||
context.boundnode = None
|
||||
# Explicit StopIteration to return error information, see comment
|
||||
# in raise_if_nothing_inferred.
|
||||
raise StopIteration(dict(node=self, context=context))
|
||||
nodes.Attribute._infer = decorators.path_wrapper(infer_attribute)
|
||||
nodes.AssignAttr.infer_lhs = infer_attribute  # won't work with a path wrapper
|
||||
|
||||
|
||||
@decorators.path_wrapper
|
||||
def infer_global(self, context=None):
|
||||
if context.lookupname is None:
|
||||
raise exceptions.InferenceError(node=self, context=context)
|
||||
try:
|
||||
return bases._infer_stmts(self.root().getattr(context.lookupname),
|
||||
context)
|
||||
except exceptions.AttributeInferenceError as error:
|
||||
util.reraise(exceptions.InferenceError(
|
||||
error.message, target=self, attribute=context.lookupname,
|
||||
context=context))
|
||||
nodes.Global._infer = infer_global
|
||||
|
||||
|
||||
_SUBSCRIPT_SENTINEL = object()
|
||||
|
||||
|
||||
@decorators.raise_if_nothing_inferred
|
||||
def infer_subscript(self, context=None):
|
||||
"""Inference for subscripts
|
||||
|
||||
We first infer whether the index is a Const
|
||||
or a slice, passing the result of inference
|
||||
to the value's `getitem` method, which should
|
||||
handle each supported index type accordingly.
|
||||
"""
|
||||
|
||||
value = next(self.value.infer(context))
|
||||
if value is util.Uninferable:
|
||||
yield util.Uninferable
|
||||
return
|
||||
|
||||
index = next(self.slice.infer(context))
|
||||
if index is util.Uninferable:
|
||||
yield util.Uninferable
|
||||
return
|
||||
|
||||
# Try to deduce the index value.
|
||||
index_value = _SUBSCRIPT_SENTINEL
|
||||
if value.__class__ == bases.Instance:
|
||||
index_value = index
|
||||
else:
|
||||
if index.__class__ == bases.Instance:
|
||||
instance_as_index = helpers.class_instance_as_index(index)
|
||||
if instance_as_index:
|
||||
index_value = instance_as_index
|
||||
else:
|
||||
index_value = index
|
||||
if index_value is _SUBSCRIPT_SENTINEL:
|
||||
raise exceptions.InferenceError(node=self, context=context)
|
||||
|
||||
try:
|
||||
assigned = value.getitem(index_value, context)
|
||||
except (exceptions.AstroidTypeError,
|
||||
exceptions.AstroidIndexError,
|
||||
exceptions.AttributeInferenceError,
|
||||
AttributeError) as exc:
|
||||
util.reraise(exceptions.InferenceError(node=self, error=exc,
|
||||
context=context))
|
||||
|
||||
# Prevent inferring if the inferred subscript
|
||||
# is the same as the original subscripted object.
|
||||
if self is assigned or assigned is util.Uninferable:
|
||||
yield util.Uninferable
|
||||
return
|
||||
for inferred in assigned.infer(context):
|
||||
yield inferred
|
||||
|
||||
# Explicit StopIteration to return error information, see comment
|
||||
# in raise_if_nothing_inferred.
|
||||
raise StopIteration(dict(node=self, context=context))
|
||||
|
||||
nodes.Subscript._infer = decorators.path_wrapper(infer_subscript)
|
||||
nodes.Subscript.infer_lhs = infer_subscript
|
||||
|
||||
|
||||
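# Illustrative sketch (hypothetical helper, not part of astroid itself):
# subscript inference exercised through astroid's public API. Only documented
# entry points (astroid.extract_node, inferred()) are assumed.
def _example_subscript_inference():  # hypothetical, never called by the library
    import astroid
    node = astroid.extract_node('["a", "b", "c"][1]  #@')
    # Expected: a single Const node whose value is "b", obtained by handing
    # the inferred index to the list node's getitem().
    return node.inferred()

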
@decorators.raise_if_nothing_inferred
@decorators.path_wrapper
def _infer_boolop(self, context=None):
    """Infer a boolean operation (and / or / not).

    The function computes the boolean operation for every combination of
    values inferred for the component nodes.
    """
    values = self.values
    if self.op == 'or':
        predicate = operator.truth
    else:
        predicate = operator.not_

    try:
        values = [value.infer(context=context) for value in values]
    except exceptions.InferenceError:
        yield util.Uninferable
        return

    for pair in itertools.product(*values):
        if any(item is util.Uninferable for item in pair):
            # Can't infer the final result, just yield Uninferable.
            yield util.Uninferable
            continue

        bool_values = [item.bool_value() for item in pair]
        if any(item is util.Uninferable for item in bool_values):
            # Can't infer the final result, just yield Uninferable.
            yield util.Uninferable
            continue

        # Since the boolean operations are short-circuit operations,
        # this code yields the first value for which the predicate is True
        # and, if no value satisfies the predicate, the last value is
        # yielded instead (or Uninferable if there was no last value).
        # This conforms to the semantics of `and` and `or`:
        # 1 and 0 -> 0
        # 0 and 1 -> 0
        # 1 or 0 -> 1
        # 0 or 1 -> 1
        value = util.Uninferable
        for value, bool_value in zip(pair, bool_values):
            if predicate(bool_value):
                yield value
                break
        else:
            yield value

    # Explicit StopIteration to return error information, see comment
    # in raise_if_nothing_inferred.
    raise StopIteration(dict(node=self, context=context))

nodes.BoolOp._infer = _infer_boolop


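# Illustrative sketch (hypothetical helper, not part of astroid itself):
# boolean-operation inference mirrors Python's short-circuit semantics, as
# described in the comment block above. Only documented astroid entry points
# are assumed.
def _example_boolop_inference():  # hypothetical, never called by the library
    import astroid
    node = astroid.extract_node('0 or "fallback"  #@')
    # Expected: a single Const node whose value is "fallback", because the
    # first operand is falsy and `or` yields the first truthy value
    # (or the last value when none is truthy).
    return node.inferred()

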
# UnaryOp, BinOp and AugAssign inferences

def _filter_operation_errors(self, infer_callable, context, error):
    for result in infer_callable(self, context):
        if isinstance(result, error):
            # For the sake of .infer(), we don't care about operation
            # errors, which is the job of pylint. So return something
            # which shows that we can't infer the result.
            yield util.Uninferable
        else:
            yield result


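# Illustrative sketch (hypothetical helper, not part of astroid itself):
# operation errors are swallowed by .infer() and surface as Uninferable;
# reporting them is left to pylint, as the comment above explains. Only
# documented astroid entry points are assumed.
def _example_operation_error_filtering():  # hypothetical, never called by the library
    import astroid
    node = astroid.extract_node('-"text"  #@')  # unary minus on a str is invalid
    # Expected: [Uninferable] rather than a raised exception, because the
    # BadUnaryOperationMessage produced internally is filtered out here.
    return node.inferred()

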
def _infer_unaryop(self, context=None):
    """Infer what an UnaryOp should return when evaluated."""
    for operand in self.operand.infer(context):
        try:
            yield operand.infer_unary_op(self.op)
        except TypeError as exc:
            # The operand doesn't support this operation.
            yield util.BadUnaryOperationMessage(operand, self.op, exc)
        except AttributeError as exc:
            meth = protocols.UNARY_OP_METHOD[self.op]
            if meth is None:
                # `not node`. Determine node's boolean
                # value and negate its result, unless it is
                # Uninferable, which will be returned as is.
                bool_value = operand.bool_value()
                if bool_value is not util.Uninferable:
                    yield nodes.const_factory(not bool_value)
                else:
                    yield util.Uninferable
            else:
                if not isinstance(operand, (bases.Instance, nodes.ClassDef)):
                    # The operation was used on something which
                    # doesn't support it.
                    yield util.BadUnaryOperationMessage(operand, self.op, exc)
                    continue

                try:
                    try:
                        methods = dunder_lookup.lookup(operand, meth)
                    except exceptions.AttributeInferenceError:
                        yield util.BadUnaryOperationMessage(operand, self.op, exc)
                        continue

                    meth = methods[0]
                    inferred = next(meth.infer(context=context))
                    if inferred is util.Uninferable or not inferred.callable():
                        continue

                    context = contextmod.copy_context(context)
                    context.callcontext = contextmod.CallContext(args=[operand])
                    call_results = inferred.infer_call_result(self, context=context)
                    result = next(call_results, None)
                    if result is None:
                        # Failed to infer, return the same type.
                        yield operand
                    else:
                        yield result
                except exceptions.AttributeInferenceError as exc:
                    # The unary operation special method was not found.
                    yield util.BadUnaryOperationMessage(operand, self.op, exc)
                except exceptions.InferenceError:
                    yield util.Uninferable


@decorators.raise_if_nothing_inferred
@decorators.path_wrapper
def infer_unaryop(self, context=None):
    """Infer what an UnaryOp should return when evaluated."""
    for inferred in _filter_operation_errors(self, _infer_unaryop, context,
                                             util.BadUnaryOperationMessage):
        yield inferred
    # Explicit StopIteration to return error information, see comment
    # in raise_if_nothing_inferred.
    raise StopIteration(dict(node=self, context=context))

nodes.UnaryOp._infer_unaryop = _infer_unaryop
nodes.UnaryOp._infer = infer_unaryop


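# Illustrative sketch (hypothetical helper, not part of astroid itself):
# unary-operation inference for a supported operand. Only documented astroid
# entry points are assumed.
def _example_unaryop_inference():  # hypothetical, never called by the library
    import astroid
    node = astroid.extract_node('-3  #@')
    # Expected: a single Const node whose value is -3, produced by the
    # operand's infer_unary_op protocol method.
    return node.inferred()

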
def _is_not_implemented(const):
    """Check if the given const node is NotImplemented."""
    return isinstance(const, nodes.Const) and const.value is NotImplemented


def _invoke_binop_inference(instance, opnode, op, other, context, method_name):
    """Invoke binary operation inference on the given instance."""
    methods = dunder_lookup.lookup(instance, method_name)
    method = methods[0]
    inferred = next(method.infer(context=context))
    if inferred is util.Uninferable:
        raise exceptions.InferenceError
    return instance.infer_binary_op(opnode, op, o