
updated Python versions and wrote helper libraries (Hilfsbibliotheken)

master
Esther Kleinhenz 5 years ago
parent
commit
a577e47acc
100 changed files with 13143 additions and 566 deletions
  1. +34 -7  Dockerfile
  2. +2 -2  application/forms.py
  3. +2 -12  application/templates/student_page.html
  4. +9 -5  application/views.py
  5. +0 -0  busybox.tar
  6. +36 -0  doc/bachelorarbeit_EstherKleinhenz/.texpadtmp/AcknowledgmentsDedicationSentence/acknowledgements.aux
  7. +4 -1  doc/bachelorarbeit_EstherKleinhenz/.texpadtmp/bachelorabeit_EstherKleinhenz.aux
  8. +18 -0  doc/bachelorarbeit_EstherKleinhenz/.texpadtmp/bachelorabeit_EstherKleinhenz.bbl
  9. +41 -34  doc/bachelorarbeit_EstherKleinhenz/.texpadtmp/bachelorabeit_EstherKleinhenz.blg
  10. +3 -3  doc/bachelorarbeit_EstherKleinhenz/.texpadtmp/bachelorabeit_EstherKleinhenz.lof
  11. +160 -117  doc/bachelorarbeit_EstherKleinhenz/.texpadtmp/bachelorabeit_EstherKleinhenz.log
  12. +20 -16  doc/bachelorarbeit_EstherKleinhenz/.texpadtmp/bachelorabeit_EstherKleinhenz.out
  13. BIN  doc/bachelorarbeit_EstherKleinhenz/.texpadtmp/bachelorabeit_EstherKleinhenz.synctex.gz
  14. +8 -4  doc/bachelorarbeit_EstherKleinhenz/.texpadtmp/bachelorabeit_EstherKleinhenz.toc
  15. +1 -1  doc/bachelorarbeit_EstherKleinhenz/.texpadtmp/chapters/ausblick.aux
  16. +1 -1  doc/bachelorarbeit_EstherKleinhenz/.texpadtmp/chapters/ergebnis.aux
  17. +1 -1  doc/bachelorarbeit_EstherKleinhenz/.texpadtmp/chapters/fazit.aux
  18. +12 -8  doc/bachelorarbeit_EstherKleinhenz/.texpadtmp/chapters/framework.aux
  19. +2 -2  doc/bachelorarbeit_EstherKleinhenz/.texpadtmp/chapters/prototyp.aux
  20. BIN  doc/bachelorarbeit_EstherKleinhenz/bachelorabeit_EstherKleinhenz.pdf
  21. +4 -1  doc/bachelorarbeit_EstherKleinhenz/chapters/einleitung.tex
  22. +13 -0  doc/bachelorarbeit_EstherKleinhenz/chapters/ergebnis.tex
  23. +34 -37  doc/bachelorarbeit_EstherKleinhenz/chapters/framework.tex
  24. +1 -1  doc/bachelorarbeit_EstherKleinhenz/chapters/prototyp.tex
  25. BIN  doc/bachelorarbeit_EstherKleinhenz/figures/decorator_example(unused).png
  26. +31 -0  doc/bachelorarbeit_EstherKleinhenz/references/References_2.bib
  27. +6 -9  doc/bachelorarbeit_EstherKleinhenz/titlepage/titlepage.tex
  28. +93 -0  log.txt
  29. +24 -20  requirements.txt
  30. +4 -0  start.sh
  31. +11 -0  thesisenv/bin/gunicorn
  32. +11 -0  thesisenv/bin/gunicorn_django
  33. +11 -0  thesisenv/bin/gunicorn_paster
  34. +59 -0  thesisenv/lib/python3.6/site-packages/gunicorn-19.6.0.dist-info/DESCRIPTION.rst
  35. +0 -0  thesisenv/lib/python3.6/site-packages/gunicorn-19.6.0.dist-info/INSTALLER
  36. +90 -0  thesisenv/lib/python3.6/site-packages/gunicorn-19.6.0.dist-info/METADATA
  37. +99 -0  thesisenv/lib/python3.6/site-packages/gunicorn-19.6.0.dist-info/RECORD
  38. +6 -0  thesisenv/lib/python3.6/site-packages/gunicorn-19.6.0.dist-info/WHEEL
  39. +9 -0  thesisenv/lib/python3.6/site-packages/gunicorn-19.6.0.dist-info/entry_points.txt
  40. +1 -0  thesisenv/lib/python3.6/site-packages/gunicorn-19.6.0.dist-info/metadata.json
  41. +1 -0  thesisenv/lib/python3.6/site-packages/gunicorn-19.6.0.dist-info/top_level.txt
  42. +8 -0  thesisenv/lib/python3.6/site-packages/gunicorn/__init__.py
  43. +264 -0  thesisenv/lib/python3.6/site-packages/gunicorn/_compat.py
  44. +4 -0  thesisenv/lib/python3.6/site-packages/gunicorn/app/__init__.py
  45. +192 -0  thesisenv/lib/python3.6/site-packages/gunicorn/app/base.py
  46. +120 -0  thesisenv/lib/python3.6/site-packages/gunicorn/app/django_wsgi.py
  47. +160 -0  thesisenv/lib/python3.6/site-packages/gunicorn/app/djangoapp.py
  48. +210 -0  thesisenv/lib/python3.6/site-packages/gunicorn/app/pasterapp.py
  49. +78 -0  thesisenv/lib/python3.6/site-packages/gunicorn/app/wsgiapp.py
  50. +620 -0  thesisenv/lib/python3.6/site-packages/gunicorn/arbiter.py
  51. +2362 -0  thesisenv/lib/python3.6/site-packages/gunicorn/argparse_compat.py
  52. +1749 -0  thesisenv/lib/python3.6/site-packages/gunicorn/config.py
  53. +70 -0  thesisenv/lib/python3.6/site-packages/gunicorn/debug.py
  54. +23 -0  thesisenv/lib/python3.6/site-packages/gunicorn/errors.py
  55. +452 -0  thesisenv/lib/python3.6/site-packages/gunicorn/glogging.py
  56. +9 -0  thesisenv/lib/python3.6/site-packages/gunicorn/http/__init__.py
  57. +68 -0  thesisenv/lib/python3.6/site-packages/gunicorn/http/_sendfile.py
  58. +259 -0  thesisenv/lib/python3.6/site-packages/gunicorn/http/body.py
  59. +109 -0  thesisenv/lib/python3.6/site-packages/gunicorn/http/errors.py
  60. +343 -0  thesisenv/lib/python3.6/site-packages/gunicorn/http/message.py
  61. +51 -0  thesisenv/lib/python3.6/site-packages/gunicorn/http/parser.py
  62. +80 -0  thesisenv/lib/python3.6/site-packages/gunicorn/http/unreader.py
  63. +420 -0  thesisenv/lib/python3.6/site-packages/gunicorn/http/wsgi.py
  64. +0 -0  thesisenv/lib/python3.6/site-packages/gunicorn/instrument/__init__.py
  65. +124 -0  thesisenv/lib/python3.6/site-packages/gunicorn/instrument/statsd.py
  66. +0 -0  thesisenv/lib/python3.6/site-packages/gunicorn/management/__init__.py
  67. +0 -0  thesisenv/lib/python3.6/site-packages/gunicorn/management/commands/__init__.py
  68. +113 -0  thesisenv/lib/python3.6/site-packages/gunicorn/management/commands/run_gunicorn.py
  69. +84 -0  thesisenv/lib/python3.6/site-packages/gunicorn/pidfile.py
  70. +53 -0  thesisenv/lib/python3.6/site-packages/gunicorn/reloader.py
  71. +592 -0  thesisenv/lib/python3.6/site-packages/gunicorn/selectors.py
  72. +762 -0  thesisenv/lib/python3.6/site-packages/gunicorn/six.py
  73. +233 -0  thesisenv/lib/python3.6/site-packages/gunicorn/sock.py
  74. +548 -0  thesisenv/lib/python3.6/site-packages/gunicorn/util.py
  75. +22 -0  thesisenv/lib/python3.6/site-packages/gunicorn/workers/__init__.py
  76. +168 -0  thesisenv/lib/python3.6/site-packages/gunicorn/workers/_gaiohttp.py
  77. +143 -0  thesisenv/lib/python3.6/site-packages/gunicorn/workers/async.py
  78. +257 -0  thesisenv/lib/python3.6/site-packages/gunicorn/workers/base.py
  79. +17 -0  thesisenv/lib/python3.6/site-packages/gunicorn/workers/gaiohttp.py
  80. +135 -0  thesisenv/lib/python3.6/site-packages/gunicorn/workers/geventlet.py
  81. +233 -0  thesisenv/lib/python3.6/site-packages/gunicorn/workers/ggevent.py
  82. +371 -0  thesisenv/lib/python3.6/site-packages/gunicorn/workers/gthread.py
  83. +130 -0  thesisenv/lib/python3.6/site-packages/gunicorn/workers/gtornado.py
  84. +208 -0  thesisenv/lib/python3.6/site-packages/gunicorn/workers/sync.py
  85. +56 -0  thesisenv/lib/python3.6/site-packages/gunicorn/workers/workertmp.py
  86. +0 -8  thesisenv/lib/python3.6/site-packages/hitcount/models.py
  87. +72 -37  thesisenv/lib/python3.6/site-packages/pkg_resources/__init__.py
  88. +71 -15  thesisenv/lib/python3.6/site-packages/pkg_resources/_vendor/appdirs.py
  89. +71 -25  thesisenv/lib/python3.6/site-packages/pkg_resources/_vendor/pyparsing.py
  90. +1 -1  thesisenv/lib/python3.6/site-packages/pkg_resources/extern/__init__.py
  91. +3 -2  thesisenv/lib/python3.6/site-packages/pkg_resources/py31compat.py
  92. +0 -36  thesisenv/lib/python3.6/site-packages/setuptools-39.0.1.dist-info/DESCRIPTION.rst
  93. +0 -1  thesisenv/lib/python3.6/site-packages/setuptools-39.0.1.dist-info/metadata.json
  94. +1 -0  thesisenv/lib/python3.6/site-packages/setuptools-40.5.0.dist-info/INSTALLER
  95. +0 -0  thesisenv/lib/python3.6/site-packages/setuptools-40.5.0.dist-info/LICENSE
  96. +15 -10  thesisenv/lib/python3.6/site-packages/setuptools-40.5.0.dist-info/METADATA
  97. +146 -148  thesisenv/lib/python3.6/site-packages/setuptools-40.5.0.dist-info/RECORD
  98. +1 -1  thesisenv/lib/python3.6/site-packages/setuptools-40.5.0.dist-info/WHEEL
  99. +0 -0  thesisenv/lib/python3.6/site-packages/setuptools-40.5.0.dist-info/dependency_links.txt
  100. +0 -0  thesisenv/lib/python3.6/site-packages/setuptools-40.5.0.dist-info/entry_points.txt

Dockerfile  +34 -7

# The first instruction is what image we want to base our container on
# We Use an official Python runtime as a parent image
FROM python:3.6
#FROM python:3.6

# FROM directive instructing base image to build upon
FROM python:3.6.6


# The enviroment variable ensures that the python output is set straight
# to the terminal with out buffering it first
ENV PYTHONUNBUFFERED 1
#ENV PYTHONUNBUFFERED 1

# App directory
RUN mkdir -p /usr/src/app
WORKDIR /usr/src/app

# Install azure event hub client dependencies
#COPY p /usr/src/app/

# Bundle app source
COPY . /usr/src/app

# COPY startup script into known file location in container
COPY start.sh /start.sh


# create root directory for our project in the container
RUN mkdir /esther_kleinhenz_ba
#RUN mkdir /esther_kleinhenz_ba

# EXPOSE port 8000 to allow communication to/from server
#EXPOSE 8000

RUN set -x \
&& buildDeps='curl gcc libc6-dev libsqlite3-dev libssl-dev make xz-utils zlib1g-dev'

# Install any needed packages specified in requirements.txt
RUN pip install -r requirements.txt


# Set the working directory to /esther_kleinhenz_ba
WORKDIR /esther_kleinhenz_ba
#WORKDIR /esther_kleinhenz_ba

# CMD specifcies the command to execute to start the server running.
CMD ["/start.sh"]
# done!


# Copy the current directory contents into the container at /esther_kleinhenz_ba
ADD . /esther_kleinhenz_ba/
#ADD . /esther_kleinhenz_ba/


# Install any needed packages specified in requirements.txt
RUN pip install -r requirements.txt
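
The committed Dockerfile keeps the old instructions around as comments, defines a buildDeps variable that is never installed, and copies the full source tree before running pip install, so the dependency layer is rebuilt on every code change. A minimal consolidated sketch (an illustration, not the file in this commit) that keeps the same python:3.6.6 base and /start.sh entry point could look like this:

    # Base image and unbuffered Python output
    FROM python:3.6.6
    ENV PYTHONUNBUFFERED 1

    WORKDIR /usr/src/app

    # Install dependencies first so code changes do not invalidate this layer
    COPY requirements.txt ./
    RUN pip install -r requirements.txt

    # Bundle the app source and the startup script (assumed here to launch gunicorn)
    COPY . .
    COPY start.sh /start.sh

    EXPOSE 8000
    CMD ["/start.sh"]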

application/forms.py  +2 -2

fields = ('title', 'text', 'published_date','tags')


class NewTagForm(forms.ModelForm):
m_tags = TagField()
tags = TagField()
class Meta:
model = CustomUser
fields = ['m_tags']
fields = ['tags']
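
The change above renames the form's tag field from m_tags to tags. A minimal sketch of the resulting form, assuming CustomUser lives in application/models.py and TagField comes from django-taggit (the imports are not shown in the excerpt):

    from django import forms
    from taggit.forms import TagField

    from .models import CustomUser


    class NewTagForm(forms.ModelForm):
        # Free-form tag input; django-taggit parses the comma-separated string
        tags = TagField()

        class Meta:
            model = CustomUser
            fields = ['tags']

Views that consume this form then read form.cleaned_data['tags'], as the views.py change below does.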

application/templates/student_page.html  +2 -12

{% extends "base.html" %} {% block content %} {% load taggit_templatetags2_tags %} {% get_taglist as tags for 'application.post'%}
{% extends "base.html" %} {% block content %} {% load taggit_templatetags2_tags %}
{% get_taglist as tags for 'application.post'%}


<div id="">
<ul>
{% for tag in tags %}
<li>{{tag}}
<a class="btn btn-outline-dark" href="{% url 'tag_remove' tag.slug %}">
<span class="glyphicon glyphicon-remove">Remove</span>
</a>
</li>
{{ result }} {% endfor %}
</ul>
</div>
<div>
<form class="post-form" method="post">
{% csrf_token %} {{form.as_p}}

application/views.py  +9 -5



@login_required
def tag_remove(request, slug=None):
log = logging.getLogger('mysite')
user_instance = get_object_or_404(CustomUser, user=request.user)
log.info(u)
tag = Tag.get_object_or_404(Tag, slug = slug)
log.info(tag)
if slug:
tag = get_object_or_404(Tag, slug=slug)
tag.delete()
user_instance.tags.remove(tag)
save_m2m()
return redirect('student_page')



@login_required
def student_page(request):
log = logging.getLogger('mysite')
user_instance = get_object_or_404(CustomUser, user=request.user)
log.info(user_instance)
if request.method == "POST":
log.info('post method')
form = NewTagForm(request.POST, instance=user_instance)
obj.save()
tag_names = [tag.name for tag in Tag.objects.all()]
log.info(tag_names)
m_tags = form.cleaned_data['m_tags']
m_tags = form.cleaned_data['tags']
m_tags = ' '.join(str(m_tags) for m_tags in m_tags)
log.info(m_tags)
if m_tags in tag_names:
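
One plausible reading of the tag_remove change is that a tag is now detached from the requesting user rather than deleted globally. A minimal sketch of such a view, assuming CustomUser.tags is a django-taggit TaggableManager and reusing the logger name and redirect target from the excerpt; note that the bare save_m2m() call shown above would raise a NameError unless it is defined elsewhere, and taggit's remove() persists immediately, so no extra save is needed:

    import logging

    from django.contrib.auth.decorators import login_required
    from django.shortcuts import get_object_or_404, redirect
    from taggit.models import Tag

    from .models import CustomUser

    log = logging.getLogger('mysite')


    @login_required
    def tag_remove(request, slug=None):
        # Fetch the profile of the logged-in user; 404 if it does not exist
        user_instance = get_object_or_404(CustomUser, user=request.user)
        if slug:
            tag = get_object_or_404(Tag, slug=slug)
            # Detach the tag from this user's subscriptions only;
            # the Tag row itself stays available to other users
            user_instance.tags.remove(tag)
            log.info('removed tag %s for %s', tag, user_instance)
        return redirect('student_page')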

busybox.tar  +0 -0


doc/bachelorarbeit_EstherKleinhenz/.texpadtmp/AcknowledgmentsDedicationSentence/acknowledgements.aux  +36 -0

\relax
\providecommand\hyper@newdestlabel[2]{}
\@writefile{toc}{\contentsline {chapter}{Acknowledgements}{iii}{chapter*.2}}
\@setckpt{AcknowledgmentsDedicationSentence/acknowledgements}{
\setcounter{page}{4}
\setcounter{equation}{0}
\setcounter{enumi}{0}
\setcounter{enumii}{0}
\setcounter{enumiii}{0}
\setcounter{enumiv}{0}
\setcounter{footnote}{0}
\setcounter{mpfootnote}{0}
\setcounter{part}{0}
\setcounter{chapter}{0}
\setcounter{section}{0}
\setcounter{subsection}{0}
\setcounter{subsubsection}{0}
\setcounter{paragraph}{0}
\setcounter{subparagraph}{0}
\setcounter{figure}{0}
\setcounter{table}{0}
\setcounter{float@type}{8}
\setcounter{parentequation}{0}
\setcounter{lstnumber}{1}
\setcounter{ContinuedFloat}{0}
\setcounter{subfigure}{0}
\setcounter{subtable}{0}
\setcounter{r@tfl@t}{0}
\setcounter{Item}{0}
\setcounter{Hfootnote}{0}
\setcounter{Hy@AnnotLevel}{0}
\setcounter{bookmark@seq@number}{2}
\setcounter{NAT@ctr}{0}
\setcounter{lstlisting}{0}
\setcounter{section@level}{0}
}

doc/bachelorarbeit_EstherKleinhenz/.texpadtmp/bachelorabeit_EstherKleinhenz.aux  +4 -1

\bibcite{King}{{Kin17}{}{{}}{{}}}
\bibcite{Leipner}{{Lei13}{}{{}}{{}}}
\bibcite{Ndukwe}{{Ndu17}{}{{}}{{}}}
\bibcite{Ong}{{Ong18}{}{{}}{{}}}
\bibcite{Shabda}{{Sha09}{}{{}}{{}}}
\bibcite{Shelest}{{She09}{}{{}}{{}}}
\providecommand\NAT@force@numbers{}\NAT@force@numbers
\@writefile{toc}{\contentsline {chapter}{Referenzen}{19}{chapter*.11}}
\bibcite{Timm}{{Tim15}{}{{}}{{}}}
\providecommand\NAT@force@numbers{}\NAT@force@numbers

doc/bachelorarbeit_EstherKleinhenz/.texpadtmp/bachelorabeit_EstherKleinhenz.bbl  +18 -0

\newblock
https://medium.com/@nnennahacks/https-medium-com-nnennandukwe-python-is-the-back-end-programming-language-of-the-future-heres-why.


\bibitem[Ong18]{Ong}
Selwin Ong.
\newblock django-post\_office git repository.
\newblock 2018.
\newblock https://github.com/ui/django-post\_office/blob/master/AUTHORS.rst.

\bibitem[Sha09]{Shabda}
Shabda.
\newblock Understanding decorators.
\newblock 2009.
\newblock https://www.agiliq.com/blog/2009/06/understanding-decorators/.

\bibitem[She09]{Shelest}
Alexy Shelest.
\newblock Model view controller, model view presenter, and model view viewmodel
\newblock
https://www.codeproject.com/Articles/42830/Model-View-Controller-Model-View-Presenter-and-Mod.


\bibitem[Tim15]{Timm}
Damon Timm.
\newblock django-hitcount documentation.
\newblock 2015.
\newblock https://django-hitcount.readthedocs.io/en/latest/overview.html.

\end{thebibliography}

doc/bachelorarbeit_EstherKleinhenz/.texpadtmp/bachelorabeit_EstherKleinhenz.blg  +41 -34

A level-1 auxiliary file: chapters/fazit.aux A level-1 auxiliary file: chapters/fazit.aux
The style file: alpha.bst The style file: alpha.bst
Database file #1: ../references/References_2.bib Database file #1: ../references/References_2.bib
Repeated entry---line 78 of file ../references/References_2.bib
: @article{Ong
: ,
I'm skipping whatever remains of this entry
Warning--empty journal in Dixit Warning--empty journal in Dixit
Warning--empty journal in Python Warning--empty journal in Python
Warning--empty journal in Gaynor Warning--empty journal in Gaynor
Warning--empty journal in King Warning--empty journal in King
Warning--empty journal in Leipner Warning--empty journal in Leipner
Warning--empty journal in Ndukwe Warning--empty journal in Ndukwe
Warning--empty journal in Ong
Warning--empty journal in Shabda
Warning--empty journal in Shelest Warning--empty journal in Shelest
You've used 8 entries,
Warning--empty journal in Timm
You've used 11 entries,
2543 wiz_defined-function locations, 2543 wiz_defined-function locations,
611 strings with 6085 characters,
and the built_in function-call counts, 2196 in all, are:
= -- 216
> -- 64
< -- 8
+ -- 16
- -- 16
* -- 106
:= -- 390
add.period$ -- 32
call.type$ -- 8
change.case$ -- 40
chr.to.int$ -- 8
cite$ -- 16
duplicate$ -- 112
empty$ -- 161
format.name$ -- 32
if$ -- 426
626 strings with 6410 characters,
and the built_in function-call counts, 3011 in all, are:
= -- 297
> -- 88
< -- 11
+ -- 22
- -- 22
* -- 145
:= -- 533
add.period$ -- 44
call.type$ -- 11
change.case$ -- 55
chr.to.int$ -- 11
cite$ -- 22
duplicate$ -- 154
empty$ -- 221
format.name$ -- 44
if$ -- 585
int.to.chr$ -- 1 int.to.chr$ -- 1
int.to.str$ -- 0 int.to.str$ -- 0
missing$ -- 8
newline$ -- 51
num.names$ -- 24
pop$ -- 48
missing$ -- 11
newline$ -- 69
num.names$ -- 33
pop$ -- 66
preamble$ -- 1 preamble$ -- 1
purify$ -- 48
purify$ -- 66
quote$ -- 0 quote$ -- 0
skip$ -- 88
skip$ -- 120
stack$ -- 0 stack$ -- 0
substring$ -- 56
substring$ -- 77
swap$ -- 0 swap$ -- 0
text.length$ -- 8
text.prefix$ -- 8
text.length$ -- 11
text.prefix$ -- 11
top$ -- 0 top$ -- 0
type$ -- 64
warning$ -- 8
while$ -- 16
width$ -- 10
write$ -- 106
(There were 8 warnings)
type$ -- 88
warning$ -- 11
while$ -- 22
width$ -- 14
write$ -- 145
(There was 1 error message)

doc/bachelorarbeit_EstherKleinhenz/.texpadtmp/bachelorabeit_EstherKleinhenz.lof  +3 -3

\babel@toc {german}{}
\addvspace {10\p@ }
\addvspace {10\p@ }
\contentsline {figure}{\numberline {2.1}{\ignorespaces Vereinfachter MVP\relax }}{6}{figure.caption.5}
\contentsline {figure}{\numberline {2.2}{\ignorespaces Request-Response-Kreislauf des Django Frameworks\relax }}{7}{figure.caption.6}
\contentsline {figure}{\numberline {2.1}{\ignorespaces Vereinfachter MVP ([She09])\relax }}{6}{figure.caption.5}
\contentsline {figure}{\numberline {2.2}{\ignorespaces Request-Response-Kreislauf des Django Frameworks ([Nev15])\relax }}{7}{figure.caption.6}
\contentsline {figure}{\numberline {2.3}{\ignorespaces Erstellen der virtuelle Umgebung im Terminal\relax }}{8}{figure.caption.7}
\contentsline {figure}{\numberline {2.4}{\ignorespaces Beispiel eines LDAP-Trees\relax }}{9}{figure.caption.8}
\contentsline {figure}{\numberline {2.5}{\ignorespaces Einbindung von Bootstrap in einer HTML-Datei\relax }}{11}{figure.caption.9}
\contentsline {figure}{\numberline {2.5}{\ignorespaces Einbindung von Bootstrap in einer HTML-Datei\relax }}{12}{figure.caption.9}
\contentsline {figure}{\numberline {2.6}{\ignorespaces Bootstrap-Klassen in HTML-Tag\relax }}{12}{figure.caption.10}
\addvspace {10\p@ }
\addvspace {10\p@ }

doc/bachelorarbeit_EstherKleinhenz/.texpadtmp/bachelorabeit_EstherKleinhenz.log  +160 -117

This is XeTeX, Version 3.14159265-2.6-0.99999 (TeX Live 2018) (preloaded format=xelatex 2018.6.7) 15 OCT 2018 21:49
This is XeTeX, Version 3.14159265-2.6-0.99999 (TeX Live 2018) (preloaded format=xelatex 2018.6.7) 2 NOV 2018 22:59
entering extended mode entering extended mode
\write18 enabled. \write18 enabled.
file:line:error style messages enabled. file:line:error style messages enabled.
[] []


Package babel Info: Redefining german shorthand "f Package babel Info: Redefining german shorthand "f
(babel) in language on input line 85.
(babel) in language on input line 82.
Package babel Info: Redefining german shorthand "| Package babel Info: Redefining german shorthand "|
(babel) in language on input line 85.
(babel) in language on input line 82.
Package babel Info: Redefining german shorthand "~ Package babel Info: Redefining german shorthand "~
(babel) in language on input line 85.
(babel) in language on input line 82.
Package babel Info: Redefining german shorthand "f Package babel Info: Redefining german shorthand "f
(babel) in language on input line 85.
(babel) in language on input line 82.
Package babel Info: Redefining german shorthand "| Package babel Info: Redefining german shorthand "|
(babel) in language on input line 85.
(babel) in language on input line 82.
Package babel Info: Redefining german shorthand "~ Package babel Info: Redefining german shorthand "~
(babel) in language on input line 85.
(babel) in language on input line 82.
[2]) [2])
\openout2 = `abstract/abstract.aux'. \openout2 = `abstract/abstract.aux'.








] (/Users/Esthi/thesis_ek/doc/bachelorarbeit_EstherKleinhenz/.texpadtmp/bachelorabeit_EstherKleinhenz.toc)
\tf@toc=\write6
\openout6 = `bachelorabeit_EstherKleinhenz.toc'.

] (/Users/Esthi/thesis_ek/doc/bachelorarbeit_EstherKleinhenz/.texpadtmp/bachelorabeit_EstherKleinhenz.toc
Package babel Info: Redefining german shorthand "f Package babel Info: Redefining german shorthand "f
(babel) in language on input line 50.
(babel) in language on input line 28.
Package babel Info: Redefining german shorthand "| Package babel Info: Redefining german shorthand "|
(babel) in language on input line 50.
(babel) in language on input line 28.
Package babel Info: Redefining german shorthand "~ Package babel Info: Redefining german shorthand "~
(babel) in language on input line 50.
(babel) in language on input line 28.
Package babel Info: Redefining german shorthand "f Package babel Info: Redefining german shorthand "f
(babel) in language on input line 50.
(babel) in language on input line 28.
Package babel Info: Redefining german shorthand "| Package babel Info: Redefining german shorthand "|
(babel) in language on input line 50.
(babel) in language on input line 28.
Package babel Info: Redefining german shorthand "~ Package babel Info: Redefining german shorthand "~
(babel) in language on input line 50.
[3]
(babel) in language on input line 28.
[3])
\tf@toc=\write6
\openout6 = `bachelorabeit_EstherKleinhenz.toc'.



Package Fancyhdr Warning: \headheight is too small (12.0pt):
Make it at least 14.49998pt.
We now make it that large for the rest of the document.
This may cause the page layout to be inconsistent, however.

Package babel Info: Redefining german shorthand "f Package babel Info: Redefining german shorthand "f
(babel) in language on input line 50. (babel) in language on input line 50.
Package babel Info: Redefining german shorthand "| Package babel Info: Redefining german shorthand "|
(babel) in language on input line 50. (babel) in language on input line 50.
Package babel Info: Redefining german shorthand "~ Package babel Info: Redefining german shorthand "~
(babel) in language on input line 50. (babel) in language on input line 50.
[4


]
[4]
Package babel Info: Redefining german shorthand "f Package babel Info: Redefining german shorthand "f
(babel) in language on input line 52. (babel) in language on input line 52.
Package babel Info: Redefining german shorthand "| Package babel Info: Redefining german shorthand "|
[1 [1






] ]
\openout2 = `chapters/einleitung.aux'. \openout2 = `chapters/einleitung.aux'.


Missing character: There is no ̈ in font aer12! Missing character: There is no ̈ in font aer12!
Missing character: There is no ̈ in font aer12! Missing character: There is no ̈ in font aer12!
Package babel Info: Redefining german shorthand "f Package babel Info: Redefining german shorthand "f
(babel) in language on input line 11.
(babel) in language on input line 12.
Package babel Info: Redefining german shorthand "| Package babel Info: Redefining german shorthand "|
(babel) in language on input line 11.
(babel) in language on input line 12.
Package babel Info: Redefining german shorthand "~ Package babel Info: Redefining german shorthand "~
(babel) in language on input line 11.
(babel) in language on input line 12.
Package babel Info: Redefining german shorthand "f Package babel Info: Redefining german shorthand "f
(babel) in language on input line 11.
(babel) in language on input line 12.
Package babel Info: Redefining german shorthand "| Package babel Info: Redefining german shorthand "|
(babel) in language on input line 11.
(babel) in language on input line 12.
Package babel Info: Redefining german shorthand "~ Package babel Info: Redefining german shorthand "~
(babel) in language on input line 11.
(babel) in language on input line 12.
[3] [3]
Package babel Info: Redefining german shorthand "f Package babel Info: Redefining german shorthand "f
(babel) in language on input line 11.
(babel) in language on input line 13.
Package babel Info: Redefining german shorthand "| Package babel Info: Redefining german shorthand "|
(babel) in language on input line 11.
(babel) in language on input line 13.
Package babel Info: Redefining german shorthand "~ Package babel Info: Redefining german shorthand "~
(babel) in language on input line 11.
(babel) in language on input line 13.
Missing character: There is no ̈ in font aer12! Missing character: There is no ̈ in font aer12!
Missing character: There is no ̈ in font aer12! Missing character: There is no ̈ in font aer12!
) )

Package Fancyhdr Warning: \headheight is too small (12.0pt):
Make it at least 14.49998pt.
We now make it that large for the rest of the document.
This may cause the page layout to be inconsistent, however.

Package babel Info: Redefining german shorthand "f Package babel Info: Redefining german shorthand "f
(babel) in language on input line 57. (babel) in language on input line 57.
Package babel Info: Redefining german shorthand "| Package babel Info: Redefining german shorthand "|
(babel) in language on input line 57. (babel) in language on input line 57.
Package babel Info: Redefining german shorthand "~ Package babel Info: Redefining german shorthand "~
(babel) in language on input line 57. (babel) in language on input line 57.
[4]
[4]
\openout2 = `chapters/framework.aux'. \openout2 = `chapters/framework.aux'.


(./chapters/framework.tex (./chapters/framework.tex
Package babel Info: Redefining german shorthand "~ Package babel Info: Redefining german shorthand "~
(babel) in language on input line 1. (babel) in language on input line 1.
Package babel Info: Redefining german shorthand "f Package babel Info: Redefining german shorthand "f
(babel) in language on input line 7.
(babel) in language on input line 8.
Package babel Info: Redefining german shorthand "| Package babel Info: Redefining german shorthand "|
(babel) in language on input line 7.
(babel) in language on input line 8.
Package babel Info: Redefining german shorthand "~ Package babel Info: Redefining german shorthand "~
(babel) in language on input line 7.
(babel) in language on input line 8.
File: figures/MVP.png Graphic file (type bmp) File: figures/MVP.png Graphic file (type bmp)
<figures/MVP.png> <figures/MVP.png>


LaTeX Warning: `!h' float specifier changed to `!ht'. LaTeX Warning: `!h' float specifier changed to `!ht'.


LaTeX Font Info: Try loading font information for TS1+aer on input line 21.
LaTeX Font Info: No file TS1aer.fd. on input line 21.

LaTeX Font Warning: Font shape `TS1/aer/m/n' undefined
(Font) using `TS1/cmr/m/n' instead
(Font) for symbol `textbullet' on input line 21.

Package babel Info: Redefining german shorthand "f Package babel Info: Redefining german shorthand "f
(babel) in language on input line 22. (babel) in language on input line 22.
Package babel Info: Redefining german shorthand "| Package babel Info: Redefining german shorthand "|




] ]
LaTeX Font Info: Try loading font information for TS1+aer on input line 22.
LaTeX Font Info: No file TS1aer.fd. on input line 22.


LaTeX Font Warning: Font shape `TS1/aer/m/n' undefined
(Font) using `TS1/cmr/m/n' instead
(Font) for symbol `textbullet' on input line 22.

File: figures/request-response-cycle.png Graphic file (type bmp) File: figures/request-response-cycle.png Graphic file (type bmp)
<figures/request-response-cycle.png> <figures/request-response-cycle.png>



LaTeX Warning: `!h' float specifier changed to `!ht'. LaTeX Warning: `!h' float specifier changed to `!ht'.


Package babel Info: Redefining german shorthand "f Package babel Info: Redefining german shorthand "f
(babel) in language on input line 36.
(babel) in language on input line 37.
Package babel Info: Redefining german shorthand "| Package babel Info: Redefining german shorthand "|
(babel) in language on input line 36.
(babel) in language on input line 37.
Package babel Info: Redefining german shorthand "~ Package babel Info: Redefining german shorthand "~
(babel) in language on input line 36.
(babel) in language on input line 37.
Package babel Info: Redefining german shorthand "f Package babel Info: Redefining german shorthand "f
(babel) in language on input line 51.
(babel) in language on input line 40.
Package babel Info: Redefining german shorthand "| Package babel Info: Redefining german shorthand "|
(babel) in language on input line 51.
(babel) in language on input line 40.
Package babel Info: Redefining german shorthand "~ Package babel Info: Redefining german shorthand "~
(babel) in language on input line 51.
(babel) in language on input line 40.
Package babel Info: Redefining german shorthand "f Package babel Info: Redefining german shorthand "f
(babel) in language on input line 51.
(babel) in language on input line 40.
Package babel Info: Redefining german shorthand "| Package babel Info: Redefining german shorthand "|
(babel) in language on input line 51.
(babel) in language on input line 40.
Package babel Info: Redefining german shorthand "~ Package babel Info: Redefining german shorthand "~
(babel) in language on input line 51.
(babel) in language on input line 40.
[6] [6]
Underfull \vbox (badness 10000) has occurred while \output is active []
Underfull \vbox (badness 2951) has occurred while \output is active []


Package babel Info: Redefining german shorthand "f Package babel Info: Redefining german shorthand "f
(babel) in language on input line 53.
(babel) in language on input line 52.
Package babel Info: Redefining german shorthand "| Package babel Info: Redefining german shorthand "|
(babel) in language on input line 53.
(babel) in language on input line 52.
Package babel Info: Redefining german shorthand "~ Package babel Info: Redefining german shorthand "~
(babel) in language on input line 53.
(babel) in language on input line 52.
Package babel Info: Redefining german shorthand "f Package babel Info: Redefining german shorthand "f
(babel) in language on input line 53.
(babel) in language on input line 52.
Package babel Info: Redefining german shorthand "| Package babel Info: Redefining german shorthand "|
(babel) in language on input line 53.
(babel) in language on input line 52.
Package babel Info: Redefining german shorthand "~ Package babel Info: Redefining german shorthand "~
(babel) in language on input line 53.
(babel) in language on input line 52.
[7] [7]
Package babel Info: Redefining german shorthand "f Package babel Info: Redefining german shorthand "f
(babel) in language on input line 58.
(babel) in language on input line 56.
Package babel Info: Redefining german shorthand "| Package babel Info: Redefining german shorthand "|
(babel) in language on input line 58.
(babel) in language on input line 56.
Package babel Info: Redefining german shorthand "~ Package babel Info: Redefining german shorthand "~
(babel) in language on input line 58.
(babel) in language on input line 56.
File: figures/virt-env-terminal.png Graphic file (type bmp) File: figures/virt-env-terminal.png Graphic file (type bmp)
<figures/virt-env-terminal.png> <figures/virt-env-terminal.png>
Package babel Info: Redefining german shorthand "f Package babel Info: Redefining german shorthand "f
(babel) in language on input line 80.
(babel) in language on input line 70.
Package babel Info: Redefining german shorthand "|
(babel) in language on input line 70.
Package babel Info: Redefining german shorthand "~
(babel) in language on input line 70.
Package babel Info: Redefining german shorthand "f
(babel) in language on input line 73.
Package babel Info: Redefining german shorthand "| Package babel Info: Redefining german shorthand "|
(babel) in language on input line 80.
(babel) in language on input line 73.
Package babel Info: Redefining german shorthand "~ Package babel Info: Redefining german shorthand "~
(babel) in language on input line 80.
(babel) in language on input line 73.
Package babel Info: Redefining german shorthand "f Package babel Info: Redefining german shorthand "f
(babel) in language on input line 80.
(babel) in language on input line 73.
Package babel Info: Redefining german shorthand "| Package babel Info: Redefining german shorthand "|
(babel) in language on input line 80.
(babel) in language on input line 73.
Package babel Info: Redefining german shorthand "~ Package babel Info: Redefining german shorthand "~
(babel) in language on input line 80.
(babel) in language on input line 73.
[8] [8]
File: figures/ldap-tree.png Graphic file (type bmp) File: figures/ldap-tree.png Graphic file (type bmp)
<figures/ldap-tree.png> <figures/ldap-tree.png>
Package babel Info: Redefining german shorthand "f Package babel Info: Redefining german shorthand "f
(babel) in language on input line 95.
(babel) in language on input line 81.
Package babel Info: Redefining german shorthand "| Package babel Info: Redefining german shorthand "|
(babel) in language on input line 95.
(babel) in language on input line 81.
Package babel Info: Redefining german shorthand "~ Package babel Info: Redefining german shorthand "~
(babel) in language on input line 95.
(babel) in language on input line 81.
Package babel Info: Redefining german shorthand "f Package babel Info: Redefining german shorthand "f
(babel) in language on input line 97.
(babel) in language on input line 84.
Package babel Info: Redefining german shorthand "| Package babel Info: Redefining german shorthand "|
(babel) in language on input line 97.
(babel) in language on input line 84.
Package babel Info: Redefining german shorthand "~ Package babel Info: Redefining german shorthand "~
(babel) in language on input line 97.
(babel) in language on input line 84.
Package babel Info: Redefining german shorthand "f Package babel Info: Redefining german shorthand "f
(babel) in language on input line 97.
(babel) in language on input line 86.
Package babel Info: Redefining german shorthand "| Package babel Info: Redefining german shorthand "|
(babel) in language on input line 97.
(babel) in language on input line 86.
Package babel Info: Redefining german shorthand "~ Package babel Info: Redefining german shorthand "~
(babel) in language on input line 97.
(babel) in language on input line 86.
Package babel Info: Redefining german shorthand "f
(babel) in language on input line 86.
Package babel Info: Redefining german shorthand "|
(babel) in language on input line 86.
Package babel Info: Redefining german shorthand "~
(babel) in language on input line 86.
[9] [9]
Package babel Info: Redefining german shorthand "f Package babel Info: Redefining german shorthand "f
(babel) in language on input line 118.
(babel) in language on input line 104.
Package babel Info: Redefining german shorthand "| Package babel Info: Redefining german shorthand "|
(babel) in language on input line 118.
(babel) in language on input line 104.
Package babel Info: Redefining german shorthand "~ Package babel Info: Redefining german shorthand "~
(babel) in language on input line 118.

Underfull \vbox (badness 10000) has occurred while \output is active []

(babel) in language on input line 104.
Package babel Info: Redefining german shorthand "f Package babel Info: Redefining german shorthand "f
(babel) in language on input line 120.
(babel) in language on input line 108.
Package babel Info: Redefining german shorthand "| Package babel Info: Redefining german shorthand "|
(babel) in language on input line 120.
(babel) in language on input line 108.
Package babel Info: Redefining german shorthand "~ Package babel Info: Redefining german shorthand "~
(babel) in language on input line 120.
(babel) in language on input line 108.
Package babel Info: Redefining german shorthand "f Package babel Info: Redefining german shorthand "f
(babel) in language on input line 120.
(babel) in language on input line 108.
Package babel Info: Redefining german shorthand "| Package babel Info: Redefining german shorthand "|
(babel) in language on input line 120.
(babel) in language on input line 108.
Package babel Info: Redefining german shorthand "~ Package babel Info: Redefining german shorthand "~
(babel) in language on input line 120.
(babel) in language on input line 108.
[10] [10]
Package babel Info: Redefining german shorthand "f
(babel) in language on input line 115.
Package babel Info: Redefining german shorthand "|
(babel) in language on input line 115.
Package babel Info: Redefining german shorthand "~
(babel) in language on input line 115.
File: figures/bootstrap-head-tag.png Graphic file (type bmp) File: figures/bootstrap-head-tag.png Graphic file (type bmp)
<figures/bootstrap-head-tag.png> <figures/bootstrap-head-tag.png>
File: figures/bootstrap-class-example.png Graphic file (type bmp)
<figures/bootstrap-class-example.png>




LaTeX Warning: `!h' float specifier changed to `!ht'. LaTeX Warning: `!h' float specifier changed to `!ht'.


)
Package babel Info: Redefining german shorthand "f Package babel Info: Redefining german shorthand "f
(babel) in language on input line 58.
(babel) in language on input line 128.
Package babel Info: Redefining german shorthand "| Package babel Info: Redefining german shorthand "|
(babel) in language on input line 58.
(babel) in language on input line 128.
Package babel Info: Redefining german shorthand "~ Package babel Info: Redefining german shorthand "~
(babel) in language on input line 58.
(babel) in language on input line 128.
Package babel Info: Redefining german shorthand "f Package babel Info: Redefining german shorthand "f
(babel) in language on input line 58.
(babel) in language on input line 128.
Package babel Info: Redefining german shorthand "| Package babel Info: Redefining german shorthand "|
(babel) in language on input line 58.
(babel) in language on input line 128.
Package babel Info: Redefining german shorthand "~ Package babel Info: Redefining german shorthand "~
(babel) in language on input line 58.
[11]
(babel) in language on input line 128.
[11]
File: figures/bootstrap-class-example.png Graphic file (type bmp)
<figures/bootstrap-class-example.png>
Package babel Info: Redefining german shorthand "f
(babel) in language on input line 140.
Package babel Info: Redefining german shorthand "|
(babel) in language on input line 140.
Package babel Info: Redefining german shorthand "~
(babel) in language on input line 140.
)
Package babel Info: Redefining german shorthand "f Package babel Info: Redefining german shorthand "f
(babel) in language on input line 58. (babel) in language on input line 58.
Package babel Info: Redefining german shorthand "| Package babel Info: Redefining german shorthand "|
[] []




Underfull \hbox (badness 10000) in paragraph at lines 54--60
Underfull \hbox (badness 10000) in paragraph at lines 54--58
[]\T1/aer/m/n/12 Selwin Ong. django-post_office git re-po-si-to-ry. 2018.
[]


Underfull \hbox (badness 10000) in paragraph at lines 60--64
[]\T1/aer/m/n/12 Shabda. Un-der-stan-ding de-co-ra-tors. 2009.
[]


Underfull \hbox (badness 10000) in paragraph at lines 66--72
[]\T1/aer/m/n/12 Alexy She-lest. Mo-del view con-trol-ler, mo-del view pre- []\T1/aer/m/n/12 Alexy She-lest. Mo-del view con-trol-ler, mo-del view pre-
[] []




Underfull \hbox (badness 10000) in paragraph at lines 54--60
Underfull \hbox (badness 10000) in paragraph at lines 66--72
\T1/aer/m/n/12 sen-ter, and mo-del view view-mo-del de-sign pat-terns. 2009. \T1/aer/m/n/12 sen-ter, and mo-del view view-mo-del de-sign pat-terns. 2009.
[] []




Underfull \hbox (badness 10000) in paragraph at lines 54--60
Underfull \hbox (badness 10000) in paragraph at lines 66--72
\T1/aer/m/n/12 https://www.codeproject.com/Articles/42830/Model-View-Controller- \T1/aer/m/n/12 https://www.codeproject.com/Articles/42830/Model-View-Controller-
[] []


)
Package babel Info: Redefining german shorthand "f
(babel) in language on input line 72.
Package babel Info: Redefining german shorthand "|
(babel) in language on input line 72.
Package babel Info: Redefining german shorthand "~
(babel) in language on input line 72.
Package babel Info: Redefining german shorthand "f
(babel) in language on input line 72.
Package babel Info: Redefining german shorthand "|
(babel) in language on input line 72.
Package babel Info: Redefining german shorthand "~
(babel) in language on input line 72.
[19])
Package atveryend Info: Empty hook `BeforeClearDocument' on input line 77. Package atveryend Info: Empty hook `BeforeClearDocument' on input line 77.
Package babel Info: Redefining german shorthand "f Package babel Info: Redefining german shorthand "f
(babel) in language on input line 77. (babel) in language on input line 77.
(babel) in language on input line 77. (babel) in language on input line 77.
Package babel Info: Redefining german shorthand "~ Package babel Info: Redefining german shorthand "~
(babel) in language on input line 77. (babel) in language on input line 77.
[19]
[20]
Package atveryend Info: Empty hook `AfterLastShipout' on input line 77. Package atveryend Info: Empty hook `AfterLastShipout' on input line 77.
(/Users/Esthi/thesis_ek/doc/bachelorarbeit_EstherKleinhenz/.texpadtmp/bachelorabeit_EstherKleinhenz.aux (/Users/Esthi/thesis_ek/doc/bachelorarbeit_EstherKleinhenz/.texpadtmp/titlepage/titlepage.aux) (/Users/Esthi/thesis_ek/doc/bachelorarbeit_EstherKleinhenz/.texpadtmp/abstract/abstract.aux) (/Users/Esthi/thesis_ek/doc/bachelorarbeit_EstherKleinhenz/.texpadtmp/chapters/einleitung.aux) (/Users/Esthi/thesis_ek/doc/bachelorarbeit_EstherKleinhenz/.texpadtmp/chapters/framework.aux) (/Users/Esthi/thesis_ek/doc/bachelorarbeit_EstherKleinhenz/.texpadtmp/chapters/prototyp.aux) (/Users/Esthi/thesis_ek/doc/bachelorarbeit_EstherKleinhenz/.texpadtmp/chapters/ergebnis.aux) (/Users/Esthi/thesis_ek/doc/bachelorarbeit_EstherKleinhenz/.texpadtmp/chapters/ausblick.aux) (/Users/Esthi/thesis_ek/doc/bachelorarbeit_EstherKleinhenz/.texpadtmp/chapters/fazit.aux)) (/Users/Esthi/thesis_ek/doc/bachelorarbeit_EstherKleinhenz/.texpadtmp/bachelorabeit_EstherKleinhenz.aux (/Users/Esthi/thesis_ek/doc/bachelorarbeit_EstherKleinhenz/.texpadtmp/titlepage/titlepage.aux) (/Users/Esthi/thesis_ek/doc/bachelorarbeit_EstherKleinhenz/.texpadtmp/abstract/abstract.aux) (/Users/Esthi/thesis_ek/doc/bachelorarbeit_EstherKleinhenz/.texpadtmp/chapters/einleitung.aux) (/Users/Esthi/thesis_ek/doc/bachelorarbeit_EstherKleinhenz/.texpadtmp/chapters/framework.aux) (/Users/Esthi/thesis_ek/doc/bachelorarbeit_EstherKleinhenz/.texpadtmp/chapters/prototyp.aux) (/Users/Esthi/thesis_ek/doc/bachelorarbeit_EstherKleinhenz/.texpadtmp/chapters/ergebnis.aux) (/Users/Esthi/thesis_ek/doc/bachelorarbeit_EstherKleinhenz/.texpadtmp/chapters/ausblick.aux) (/Users/Esthi/thesis_ek/doc/bachelorarbeit_EstherKleinhenz/.texpadtmp/chapters/fazit.aux))
Package atveryend Info: Empty hook `AtVeryEndDocument' on input line 77. Package atveryend Info: Empty hook `AtVeryEndDocument' on input line 77.
) )
(\end occurred when \ifnum on line 5 was incomplete) (\end occurred when \ifnum on line 5 was incomplete)
Here is how much of TeX's memory you used: Here is how much of TeX's memory you used:
26561 strings out of 492970
476474 string characters out of 6133939
547394 words of memory out of 5000000
30175 multiletter control sequences out of 15000+600000
554220 words of font info for 61 fonts, out of 8000000 for 9000
26572 strings out of 492970
476605 string characters out of 6133939
546800 words of memory out of 5000000
30185 multiletter control sequences out of 15000+600000
555998 words of font info for 62 fonts, out of 8000000 for 9000
1348 hyphenation exceptions out of 8191 1348 hyphenation exceptions out of 8191
58i,11n,50p,10437b,892s stack positions out of 5000i,500n,10000p,200000b,80000s
58i,12n,50p,10437b,943s stack positions out of 5000i,500n,10000p,200000b,80000s


Output written on /Users/Esthi/thesis_ek/doc/bachelorarbeit_EstherKleinhenz/.texpadtmp/bachelorabeit_EstherKleinhenz.pdf (25 pages).
Output written on /Users/Esthi/thesis_ek/doc/bachelorarbeit_EstherKleinhenz/.texpadtmp/bachelorabeit_EstherKleinhenz.pdf (26 pages).

doc/bachelorarbeit_EstherKleinhenz/.texpadtmp/bachelorabeit_EstherKleinhenz.out  +20 -16

\BOOKMARK [1][-]{section.1.2}{Ziel\040der\040Arbeit}{chapter.1}% 5
\BOOKMARK [0][-]{chapter.2}{Framework}{}% 6
\BOOKMARK [1][-]{section.2.1}{Django}{chapter.2}% 7
\BOOKMARK [2][-]{subsection.2.1.1}{Besonderheiten}{section.2.1}% 8
\BOOKMARK [1][-]{section.2.2}{Erweiterungen}{chapter.2}% 9
\BOOKMARK [2][-]{subsection.2.2.1}{Taggable-Manager}{section.2.2}% 10
\BOOKMARK [1][-]{section.2.3}{Bootstrap}{chapter.2}% 11
\BOOKMARK [0][-]{chapter.3}{Prototyp}{}% 12
\BOOKMARK [1][-]{section.3.1}{Organisation}{chapter.3}% 13
\BOOKMARK [2][-]{subsection.3.1.1}{Verwaltung\040im\040Administrator-Backend}{section.3.1}% 14
\BOOKMARK [2][-]{subsection.3.1.2}{Berechtigung\040der\040User}{section.3.1}% 15
\BOOKMARK [1][-]{section.3.2}{Funktion}{chapter.3}% 16
\BOOKMARK [2][-]{subsection.3.2.1}{Abonnieren}{section.3.2}% 17
\BOOKMARK [2][-]{subsection.3.2.2}{Filtern}{section.3.2}% 18
\BOOKMARK [2][-]{subsection.3.2.3}{Benachrichtigung}{section.3.2}% 19
\BOOKMARK [0][-]{chapter.4}{Ergebnis}{}% 20
\BOOKMARK [1][-]{subsection.4.0.1}{Evaluierung}{chapter.4}% 21
\BOOKMARK [0][-]{chapter.5}{Zusammenfassung\040und\040Ausblick}{}% 22
\BOOKMARK [0][-]{chapter*.11}{Referenzen}{}% 23
\BOOKMARK [2][-]{subsection.2.1.1}{Besonderheiten\040Django's}{section.2.1}% 8
\BOOKMARK [2][-]{subsection.2.1.2}{Virtuelle\040Umgebung}{section.2.1}% 9
\BOOKMARK [2][-]{subsection.2.1.3}{Lightweight\040Directory\040Access\040Protocol}{section.2.1}% 10
\BOOKMARK [1][-]{section.2.2}{Erweiterungen}{chapter.2}% 11
\BOOKMARK [2][-]{subsection.2.2.1}{Taggable-Manager}{section.2.2}% 12
\BOOKMARK [2][-]{subsection.2.2.2}{Hilfsbibliotheken}{section.2.2}% 13
\BOOKMARK [1][-]{section.2.3}{Bootstrap}{chapter.2}% 14
\BOOKMARK [1][-]{section.2.4}{Cron}{chapter.2}% 15
\BOOKMARK [0][-]{chapter.3}{Prototyp}{}% 16
\BOOKMARK [1][-]{section.3.1}{Organisation}{chapter.3}% 17
\BOOKMARK [2][-]{subsection.3.1.1}{Verwaltung\040im\040Administrator-Backend}{section.3.1}% 18
\BOOKMARK [2][-]{subsection.3.1.2}{Berechtigung\040der\040User}{section.3.1}% 19
\BOOKMARK [1][-]{section.3.2}{Funktionen}{chapter.3}% 20
\BOOKMARK [2][-]{subsection.3.2.1}{Abonnieren}{section.3.2}% 21
\BOOKMARK [2][-]{subsection.3.2.2}{Filtern}{section.3.2}% 22
\BOOKMARK [2][-]{subsection.3.2.3}{Benachrichtigung}{section.3.2}% 23
\BOOKMARK [0][-]{chapter.4}{Ergebnis}{}% 24
\BOOKMARK [1][-]{subsection.4.0.1}{Evaluierung}{chapter.4}% 25
\BOOKMARK [0][-]{chapter.5}{Zusammenfassung\040und\040Ausblick}{}% 26
\BOOKMARK [0][-]{chapter*.11}{Referenzen}{}% 27

doc/bachelorarbeit_EstherKleinhenz/.texpadtmp/bachelorabeit_EstherKleinhenz.synctex.gz  BIN


doc/bachelorarbeit_EstherKleinhenz/.texpadtmp/bachelorabeit_EstherKleinhenz.toc  +8 -4

\contentsline {section}{\numberline {1.2}Ziel der Arbeit}{4}{section.1.2}
\contentsline {chapter}{\numberline {2}Framework}{5}{chapter.2}
\contentsline {section}{\numberline {2.1}Django}{5}{section.2.1}
\contentsline {subsection}{\numberline {2.1.1}Besonderheiten}{6}{subsection.2.1.1}
\contentsline {section}{\numberline {2.2}Erweiterungen}{8}{section.2.2}
\contentsline {subsection}{\numberline {2.2.1}Taggable-Manager}{10}{subsection.2.2.1}
\contentsline {subsection}{\numberline {2.1.1}Besonderheiten Django's}{7}{subsection.2.1.1}
\contentsline {subsection}{\numberline {2.1.2}Virtuelle Umgebung}{8}{subsection.2.1.2}
\contentsline {subsection}{\numberline {2.1.3}Lightweight Directory Access Protocol}{8}{subsection.2.1.3}
\contentsline {section}{\numberline {2.2}Erweiterungen}{9}{section.2.2}
\contentsline {subsection}{\numberline {2.2.1}Taggable-Manager}{9}{subsection.2.2.1}
\contentsline {subsection}{\numberline {2.2.2}Hilfsbibliotheken}{10}{subsection.2.2.2}
\contentsline {section}{\numberline {2.3}Bootstrap}{11}{section.2.3}
\contentsline {section}{\numberline {2.4}Cron}{12}{section.2.4}
\contentsline {chapter}{\numberline {3}Prototyp}{13}{chapter.3}
\contentsline {section}{\numberline {3.1}Organisation}{13}{section.3.1}
\contentsline {subsection}{\numberline {3.1.1}Verwaltung im Administrator-Backend}{13}{subsection.3.1.1}
\contentsline {subsection}{\numberline {3.1.2}Berechtigung der User}{13}{subsection.3.1.2}
\contentsline {section}{\numberline {3.2}Funktion}{13}{section.3.2}
\contentsline {section}{\numberline {3.2}Funktionen}{13}{section.3.2}
\contentsline {subsection}{\numberline {3.2.1}Abonnieren}{13}{subsection.3.2.1}
\contentsline {subsection}{\numberline {3.2.2}Filtern}{13}{subsection.3.2.2}
\contentsline {subsection}{\numberline {3.2.3}Benachrichtigung}{14}{subsection.3.2.3}

doc/bachelorarbeit_EstherKleinhenz/.texpadtmp/chapters/ausblick.aux  +1 -1

\setcounter{Item}{0}
\setcounter{Hfootnote}{0}
\setcounter{Hy@AnnotLevel}{0}
\setcounter{bookmark@seq@number}{21}
\setcounter{bookmark@seq@number}{25}
\setcounter{NAT@ctr}{0}
\setcounter{lstlisting}{0}
\setcounter{section@level}{0}

doc/bachelorarbeit_EstherKleinhenz/.texpadtmp/chapters/ergebnis.aux  +1 -1

\setcounter{Item}{0}
\setcounter{Hfootnote}{0}
\setcounter{Hy@AnnotLevel}{0}
\setcounter{bookmark@seq@number}{21}
\setcounter{bookmark@seq@number}{25}
\setcounter{NAT@ctr}{0}
\setcounter{lstlisting}{0}
\setcounter{section@level}{0}

doc/bachelorarbeit_EstherKleinhenz/.texpadtmp/chapters/fazit.aux  +1 -1

\setcounter{Item}{0}
\setcounter{Hfootnote}{0}
\setcounter{Hy@AnnotLevel}{0}
\setcounter{bookmark@seq@number}{22}
\setcounter{bookmark@seq@number}{26}
\setcounter{NAT@ctr}{0}
\setcounter{lstlisting}{0}
\setcounter{section@level}{0}

doc/bachelorarbeit_EstherKleinhenz/.texpadtmp/chapters/framework.aux  +12 -8

\@writefile{lot}{\addvspace {10\p@ }} \@writefile{lot}{\addvspace {10\p@ }}
\newlabel{ch:framework}{{2}{5}{Framework}{chapter.2}{}} \newlabel{ch:framework}{{2}{5}{Framework}{chapter.2}{}}
\@writefile{toc}{\contentsline {section}{\numberline {2.1}Django}{5}{section.2.1}} \@writefile{toc}{\contentsline {section}{\numberline {2.1}Django}{5}{section.2.1}}
\@writefile{lof}{\contentsline {figure}{\numberline {2.1}{\ignorespaces Vereinfachter MVP\relax }}{6}{figure.caption.5}}
\@writefile{toc}{\contentsline {subsection}{\numberline {2.1.1}Besonderheiten}{6}{subsection.2.1.1}}
\@writefile{lof}{\contentsline {figure}{\numberline {2.2}{\ignorespaces Request-Response-Kreislauf des Django Frameworks\relax }}{7}{figure.caption.6}}
\@writefile{toc}{\contentsline {section}{\numberline {2.2}Erweiterungen}{8}{section.2.2}}
\@writefile{lof}{\contentsline {figure}{\numberline {2.1}{\ignorespaces Vereinfachter MVP ([She09])\relax }}{6}{figure.caption.5}}
\@writefile{lof}{\contentsline {figure}{\numberline {2.2}{\ignorespaces Request-Response-Kreislauf des Django Frameworks ([Nev15])\relax }}{7}{figure.caption.6}}
\@writefile{toc}{\contentsline {subsection}{\numberline {2.1.1}Besonderheiten Django's}{7}{subsection.2.1.1}}
\@writefile{toc}{\contentsline {subsection}{\numberline {2.1.2}Virtuelle Umgebung}{8}{subsection.2.1.2}}
\@writefile{lof}{\contentsline {figure}{\numberline {2.3}{\ignorespaces Erstellen der virtuelle Umgebung im Terminal\relax }}{8}{figure.caption.7}} \@writefile{lof}{\contentsline {figure}{\numberline {2.3}{\ignorespaces Erstellen der virtuelle Umgebung im Terminal\relax }}{8}{figure.caption.7}}
\@writefile{toc}{\contentsline {subsection}{\numberline {2.1.3}Lightweight Directory Access Protocol}{8}{subsection.2.1.3}}
\@writefile{lof}{\contentsline {figure}{\numberline {2.4}{\ignorespaces Beispiel eines LDAP-Trees\relax }}{9}{figure.caption.8}} \@writefile{lof}{\contentsline {figure}{\numberline {2.4}{\ignorespaces Beispiel eines LDAP-Trees\relax }}{9}{figure.caption.8}}
\@writefile{toc}{\contentsline {subsection}{\numberline {2.2.1}Taggable-Manager}{10}{subsection.2.2.1}}
\@writefile{toc}{\contentsline {section}{\numberline {2.2}Erweiterungen}{9}{section.2.2}}
\@writefile{toc}{\contentsline {subsection}{\numberline {2.2.1}Taggable-Manager}{9}{subsection.2.2.1}}
\@writefile{toc}{\contentsline {subsection}{\numberline {2.2.2}Hilfsbibliotheken}{10}{subsection.2.2.2}}
\@writefile{toc}{\contentsline {section}{\numberline {2.3}Bootstrap}{11}{section.2.3}} \@writefile{toc}{\contentsline {section}{\numberline {2.3}Bootstrap}{11}{section.2.3}}
\@writefile{lof}{\contentsline {figure}{\numberline {2.5}{\ignorespaces Einbindung von Bootstrap in einer HTML-Datei\relax }}{11}{figure.caption.9}}
\@writefile{lof}{\contentsline {figure}{\numberline {2.5}{\ignorespaces Einbindung von Bootstrap in einer HTML-Datei\relax }}{12}{figure.caption.9}}
\@writefile{lof}{\contentsline {figure}{\numberline {2.6}{\ignorespaces Bootstrap-Klassen in HTML-Tag\relax }}{12}{figure.caption.10}} \@writefile{lof}{\contentsline {figure}{\numberline {2.6}{\ignorespaces Bootstrap-Klassen in HTML-Tag\relax }}{12}{figure.caption.10}}
\@writefile{toc}{\contentsline {section}{\numberline {2.4}Cron}{12}{section.2.4}}
\@setckpt{chapters/framework}{ \@setckpt{chapters/framework}{
\setcounter{page}{13} \setcounter{page}{13}
\setcounter{equation}{0} \setcounter{equation}{0}
\setcounter{mpfootnote}{0} \setcounter{mpfootnote}{0}
\setcounter{part}{0} \setcounter{part}{0}
\setcounter{chapter}{2} \setcounter{chapter}{2}
\setcounter{section}{3}
\setcounter{section}{4}
\setcounter{subsection}{0}
\setcounter{subsubsection}{0}
\setcounter{paragraph}{0}
\setcounter{Item}{0}
\setcounter{Hfootnote}{0}
\setcounter{Hy@AnnotLevel}{0}
\setcounter{bookmark@seq@number}{11}
\setcounter{bookmark@seq@number}{15}
\setcounter{NAT@ctr}{0}
\setcounter{lstlisting}{0}
\setcounter{section@level}{0}

+ 2
- 2
doc/bachelorarbeit_EstherKleinhenz/.texpadtmp/chapters/prototyp.aux View File

\@writefile{toc}{\contentsline {section}{\numberline {3.1}Organisation}{13}{section.3.1}}
\@writefile{toc}{\contentsline {subsection}{\numberline {3.1.1}Verwaltung im Administrator-Backend}{13}{subsection.3.1.1}}
\@writefile{toc}{\contentsline {subsection}{\numberline {3.1.2}Berechtigung der User}{13}{subsection.3.1.2}}
\@writefile{toc}{\contentsline {section}{\numberline {3.2}Funktion}{13}{section.3.2}}
\@writefile{toc}{\contentsline {section}{\numberline {3.2}Funktionen}{13}{section.3.2}}
\@writefile{toc}{\contentsline {subsection}{\numberline {3.2.1}Abonnieren}{13}{subsection.3.2.1}}
\@writefile{toc}{\contentsline {subsection}{\numberline {3.2.2}Filtern}{13}{subsection.3.2.2}}
\@writefile{toc}{\contentsline {subsection}{\numberline {3.2.3}Benachrichtigung}{14}{subsection.3.2.3}}
\setcounter{Item}{0}
\setcounter{Hfootnote}{0}
\setcounter{Hy@AnnotLevel}{0}
\setcounter{bookmark@seq@number}{19}
\setcounter{bookmark@seq@number}{23}
\setcounter{NAT@ctr}{0}
\setcounter{lstlisting}{0}
\setcounter{section@level}{0}

BIN
doc/bachelorarbeit_EstherKleinhenz/bachelorabeit_EstherKleinhenz.pdf View File


+ 4
- 1
doc/bachelorarbeit_EstherKleinhenz/chapters/einleitung.tex View File



\section{Initial Situation}
All information of the Fakultät Elektrotechnik Feinwerktechnik Informationstechnik (efi for short) is distributed through the global mailing lists of the university's internal mailbox. Much of this data, however, is relevant to only a small subset of the recipients and is difficult to prioritise. The permanently overloaded mailbox therefore has to be cleaned up regularly, and filtering e-mails and managing them at one's own discretion means considerable administrative effort.
--- TODO: state more precisely how I know that the mailbox is overloaded
The sustainability of the information suffers as well: when recipients want to look up older e-mails, these have usually already been deleted to make room for new incoming mail.
This situation leads to recipients mostly not reading the information at all and deleting it immediately. The authors have no way of checking whether and how many students and lecturers open and read incoming messages.
--- TODO: research question


\section{Goal of the Thesis}
The goal of this thesis is to reduce the storage load of the university mailbox for students of the efi faculty by introducing a social media platform. The flood of e-mails is to be throttled by using a personalised dashboard. The focus is initially placed on the basic functions of the website, which include subscribing as well as adding new and deleting old posts.
In addition, the authors are to be notified to what extent the uploaded information has already been subscribed to and read.
--- TODO: too short

+ 13
- 0
doc/bachelorarbeit_EstherKleinhenz/chapters/ergebnis.tex View File

\label{ch:ergebnis}
\subsection{Evaluation}



Another helpful extension is pylint. The tool not only looks for errors in the code but also tries to keep it clean and consistent by checking it against the PEP 8 code standard [Dix18]. The following list is a short summary of the most important rules; a small example module follows after the list:

\begin{itemize}
\item Indentation, usually 4 spaces
\item A maximum number of characters per line
\item Two blank lines between classes and top-level functions
\item One blank line between methods inside a class
\item Avoiding superfluous whitespace in expressions and statements
\item The order of imports: standard library, third-party libraries, local applications
\item Naming conventions for functions, modules and so on
\end{itemize}
These are, of course, guidelines that can be followed but are not strictly required for the code to run.
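
A hedged illustration of these conventions is the following hypothetical module; it is not a file of the prototype, merely a shape that pylint would accept without complaints:

\begin{lstlisting}[language=Python]
"""Minimal example of several PEP 8 conventions checked by pylint."""

# Standard library imports come first ...
import os
import sys

# ... third-party imports second (e.g. django), local imports last.


class PostCounter:
    """Classes are separated from surrounding code by two blank lines."""

    def count(self, items):
        """Methods inside a class are separated by one blank line."""
        return len(items)

    def describe(self, items):
        # variable and function names use lowercase_with_underscores
        item_count = self.count(items)
        return 'cwd={} items={}'.format(os.getcwd(), item_count)


if __name__ == '__main__':
    print(PostCounter().describe(sys.argv))
\end{lstlisting}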

+ 34
- 37
doc/bachelorarbeit_EstherKleinhenz/chapters/framework.tex View File

\chapter{Framework}
\label{ch:framework}
In order to implement the website extension, it is first decided which programming interfaces will be used. For the web backend the choice falls on the object-oriented language Python, which is used purely on the server side here. Python's program structure keeps the code easy to read, and its simple syntax allows a structured implementation of the website ([Ndu17]). Its many abstract data types, such as dynamic arrays and dictionaries, can be applied in a wide range of situations.
--- TODO: why exactly Python?
A decisive advantage in this context is the accompanying framework Django, which the following section examines in more detail.
--- TODO: is Python really server-side only?


\section{Django}
Django is a web framework that enables fast, structured development while keeping its design simple. The Model-View-Presenter (MVP) pattern it follows can, much like the Model-View-Controller, handle the interactions between model and view, the selection and execution of commands and the triggering of events (see Figure 2.1). Since the view here already takes over most of the controller, the MVP is a revision of that pattern: the part that selects elements of the model, performs operations and encapsulates all events forms the presenter class (cf. [She09]). The direct binding of data and view, managed by the presenter, greatly reduces the amount of application code.


\begin{figure}[!h]
\centering
\includegraphics[width=0.6\textwidth]{figures/MVP}
\caption{Simplified MVP ([She09])}
\hfill
\end{figure}


The process from requesting a URL via the server to the fully rendered website can be outlined in simplified form as follows.


The user enters a URL in the browser and sends it to the web server. The WSGI interface on the web server connects the server to the web framework by forwarding the request to the matching object. At this point a callback function is made available to the application (cf. [Kin17]). In addition, the following steps are carried out:
\begin{itemize}
\item The middleware classes from settings.py are loaded
\item The methods of the request, view, response and exception lists are loaded
\end{itemize}


The WSGI handler therefore acts as gatekeeper and manager between the web server and the Django project.
To resolve the URL as mentioned above, WSGI needs a \textit{urlresolver} (cf. ). Because the existing pages are assigned explicitly, the resolver can iterate over the regular expressions in the urls.py file. If there is a match, the function linked to it in the view (views.py) is called; this is where the entire logic of the website is located. Among other things, it is possible to access the application's database and to process user input submitted through a form. Afterwards the information from the view is passed on to the template, a plain HTML page in which the structural layout of the frontend is defined. The information from the view can be embedded there between double curly braces and, if necessary, adjusted with simple Python expressions. The template can then fill the callback function provided by the WSGI framework and send a response to the web server. The finished page is ready to be rendered in the client's browser window (cf. [Kin17], Figure 2.2); a schematic example of this routing follows after the figure.


\begin{figure}[!h]
\centering
\includegraphics[width=0.5\textwidth]{figures/request-response-cycle}
\caption{Request-response cycle of the Django framework ([Nev15])}
\hfill
\end{figure}
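
As a rough sketch of the cycle described above (and not the prototype's actual code), a view and a URL pattern could be wired together as follows; the app, the Post model and the template name post_list.html are illustrative assumptions:

\begin{lstlisting}[language=Python]
# views.py -- holds the page logic once the urlresolver has found a match
from django.shortcuts import render

from .models import Post          # assumes a simple Post model exists


def post_list(request):
    posts = Post.objects.all()    # read from the application's database
    # the template can print each entry between double curly braces,
    # e.g. {{ post.title }}
    return render(request, 'post_list.html', {'posts': posts})


# urls.py -- the urlresolver iterates over these regular expressions
from django.conf.urls import url

from . import views

urlpatterns = [
    url(r'^posts/$', views.post_list, name='post_list'),
]
\end{lstlisting}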


\subsection{Special Features of Django}
The Django framework comes with a number of special features that matter when implementing the prototype. They are described below.


The administration interface is one of the most helpful tools of the entire framework. It presents the metadata of the models from the code visually; verified users can not only inspect the data quickly but also edit and manage it. The right to use the admin backend without restrictions is reserved for the so-called superuser. The first superuser can only be set up via the command line; once one exists, it can grant the same freedom of action to further users in the admin backend. In addition there are further levels of access rights, the staff and active status, which are suited to a broader group of users.
To enforce these graded access rights on the website as well, Django provides various decorators. If a certain page is only to be visited by logged-in users, the decorators of the authentication system provided by Django are imported with
\\
\noindent\hspace*{10mm}%
from django.contrib.auth.decorators import login\_required
\\
The following line is then added before the definition of the function:
\\
\noindent\hspace*{10mm}%
\makeatletter @login\_required
\\
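Put together, a schematic view protected in this way could look as follows; the view name and the template are placeholders rather than necessarily the prototype's files:

\begin{lstlisting}[language=Python]
# views.py -- schematic single view; the names are placeholders
from django.contrib.auth.decorators import login_required
from django.shortcuts import render


@login_required
def student_page(request):
    # reached only by authenticated users; anonymous visitors are
    # redirected to the login page defined in settings.LOGIN_URL
    return render(request, 'student_page.html')
\end{lstlisting}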
Decorators can of course also be placed in front of functions for other purposes; among other things, views are protected this way against attacks such as cross-site scripting.


Because of their simple structure it is also possible to implement decorators yourself. A simple example would be checking the parameters passed to a function: if only positive numbers are to be processed, the decorator can intercept all other inputs.
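
A minimal sketch of such a self-written decorator, purely for illustration and not part of Django itself, could look like this:

\begin{lstlisting}[language=Python]
# Hypothetical helper: rejects calls whose arguments are not positive numbers.
from functools import wraps


def positive_arguments_required(func):
    @wraps(func)
    def wrapper(*args):
        if any(value <= 0 for value in args):
            raise ValueError('only positive numbers are accepted')
        return func(*args)
    return wrapper


@positive_arguments_required
def add(a, b):
    return a + b


print(add(2, 3))    # 5
# add(-1, 3)        # would raise ValueError
\end{lstlisting}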




\subsection{Virtual Environment}


The Python programming language comes with many helpful extensions; the ones listed below are mainly responsible for the package management of the application.

When a new project is started, it is common to use a wide variety of programs and modules, and their versions play a decisive role in avoiding conflicts [Fou18]. To prevent this problem, a virtual environment is set up. It has its own project path, contains all required packages and libraries, and can be started from the terminal once it has been created. The following figure (2.3) shows the creation of a new directory, the creation of the virtual environment and the activation command; if the name of the environment appears in parentheses at the beginning of the command line, the environment is active.


\begin{figure}[!h]
\centering


The environment is now ready to be populated. To install packages and modules without collisions it is advisable to use a package manager. With pip they can be installed, updated and removed; the manager can also override and optimise dependencies where necessary. If a version other than the latest one is required, that can be installed as well.
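
For completeness, the creation step shown in Figure 2.3 can also be performed from Python itself via the standard library; the environment name thesisenv mirrors the directory used in this repository, and a plain shell setup as in the figure works just as well:

\begin{lstlisting}[language=Python]
# Programmatic equivalent of the terminal commands in Figure 2.3.
import venv

# on the shell: python3 -m venv thesisenv
venv.create('thesisenv', with_pip=True)

# activation still happens in the shell:
#   source thesisenv/bin/activate
\end{lstlisting}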


\subsection{Lightweight Directory Access Protocol}


LDAP, the Lightweight Directory Access Protocol, has to be integrated as an extension for the work described in this thesis. It is an internet protocol that handles the communication with the university's internal Active Directory. It is used to look up and authenticate users as quickly and efficiently as possible against an already existing database. Its structure can be compared to a tree (see Figure 2.4): the root consists of very general information, and the closer one gets to the leaves, the more specific it becomes. An object in the structure is identified by a unique name that is composed of all the information stored along its path. The name for the tree shown in Figure 2.4 would be ``cn=John Doe, ou=People, dc=sun.com''. An illustrative lookup with the ldap3 library is sketched after the figure.


\hfill
\end{figure}
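
Such a lookup could, as a hedged sketch with the ldap3 package from requirements.txt and purely fictitious server data, look as follows:

\begin{lstlisting}[language=Python]
# Illustrative only: server address, DN and password are placeholders,
# not the university's actual Active Directory configuration.
from ldap3 import ALL, Connection, Server

server = Server('ldap://ldap.example.org', get_info=ALL)
conn = Connection(server,
                  user='cn=John Doe,ou=People,dc=sun.com',
                  password='secret')

if conn.bind():     # authenticate the user against the directory
    # look the user up in the existing tree
    conn.search('ou=People,dc=sun.com', '(cn=John Doe)', attributes=['mail'])
    print(conn.entries)
    conn.unbind()
\end{lstlisting}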


\section{Extensions}
Django comes with many helpful extensions that can easily be loaded into the virtual environment with a package manager. To find the right add-on for a project, the platform djangopackages.org lists the available extensions in clear tables with their properties and ratings.

\subsection{Taggable-Manager}
Django-taggit is an extension that simplifies working with tags. The Taggable Manager it contains uses Django's contenttypes framework, which is used by default, to keep track of the application's models and to link them through generic relations. The fields app\_label and model make the models uniquely addressable. Instances of the content type present and store the information and automatically create new instances when models are added. In addition, methods are available that make it easier to retrieve and work with instances of the individual models.


The Taggable Manager is, however, not tied to the contenttypes framework ([Her16]). By using a real foreign key, for example, performance and referential integrity can be achieved; all that is needed is a custom lookup table that links the entities of two tables directly instead of connecting them generically. Going further, models can be combined in a custom model so that access to them is uniform. It is also possible to use primary keys that consist not of integers but of letters and words.


\$ pip install django-taggit
\\
In models.py the new field tag is created and defined as a Taggable Manager, as sketched below.
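
A minimal model definition along these lines might look as follows; the Post model and its fields are placeholders rather than the prototype's exact model:

\begin{lstlisting}[language=Python]
# models.py -- illustrative model, not necessarily the prototype's own
from django.db import models
from taggit.managers import TaggableManager


class Post(models.Model):
    title = models.CharField(max_length=200)
    text = models.TextField()

    tags = TaggableManager()   # the tag field managed by django-taggit
\end{lstlisting}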
Au"serdem muss taggit in der settings.py unter INSTALLED\_APPS ergänzt werden. Um dem Programm zu sagen, dass nun eine neue Liste der Datenbank hinzugefügt werden muss, werden folgende Befehle in die Kommandozeile eingefügt:
Au"serdem muss taggit in der settings.py unter INSTALLED\_APPS ergänzt werden. Um dem Programm mitzuteilen, dass nun eine neue Liste der Datenbank hinzugefügt werden muss, werden folgende Befehle in die Kommandozeile eingefügt:
\\ \\
\noindent\hspace*{10mm}% \noindent\hspace*{10mm}%
\$ python3 manage.py makemigrations \\ \$ python3 manage.py makemigrations \\
\\ \\
Im Admin-Backend kann nun geprüft werden, ob das neue Feld in die Datenbank integriert wurde. Neue Tags können in das Textfeld eingetragen werden. Der Parser verarbeitet jedes Wort, dass durch ein Komma oder ein Leerzeichen getrennt ist als ein Tag. Soll dieses jedoch aus mehreren Wörtern bestehen so müssen diese mit Anführungszeichen umfasst werden. Standardmäßig unterscheidet der Taggable Manager zwischen Groß- und Kleinschreibung, Tags sind also case sensitive. Ändern kann man das, indem der Boolean TAGGIT\_CASE\_INSENSITIVE in der settings.py auf True gestellt wird. Im Admin-Backend kann nun geprüft werden, ob das neue Feld in die Datenbank integriert wurde. Neue Tags können in das Textfeld eingetragen werden. Der Parser verarbeitet jedes Wort, dass durch ein Komma oder ein Leerzeichen getrennt ist als ein Tag. Soll dieses jedoch aus mehreren Wörtern bestehen so müssen diese mit Anführungszeichen umfasst werden. Standardmäßig unterscheidet der Taggable Manager zwischen Groß- und Kleinschreibung, Tags sind also case sensitive. Ändern kann man das, indem der Boolean TAGGIT\_CASE\_INSENSITIVE in der settings.py auf True gestellt wird.
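
The related entries in settings.py could then look roughly like this (only the taggit-specific lines are shown):

\begin{lstlisting}[language=Python]
# settings.py -- only the taggit-related entries are shown here
INSTALLED_APPS = [
    # ... the project's other apps ...
    'taggit',
]

TAGGIT_CASE_INSENSITIVE = True   # treat "Bayern" and "bayern" as the same tag
\end{lstlisting}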


\subsection{Helper Libraries}
Further add-ons are loaded to implement smaller functions of the website with little effort. One of them is django-taggit-templatetags, which, when included in an HTML file, outputs the application's tags as a list. The stored tags can also be visualised as a cloud: tags that occur more often than others are displayed correspondingly larger.

Django-hitcount is used to count the visitors of a page ([Tim15]). It can be integrated into the application in three different ways. The quickest way is to display the hits with the help of a template tag in the HTML file. If the display is to be customised further, the variable hitcount can be used and rendered in the frontend by integrating the HitCountDetailView in views.py. A further option is to extend or newly create a model in the Django backend; the new field can then be accessed in the Django admin backend as well as in the view and in the template. The settings shipped with the add-on, which have to be added to settings.py, make it possible, among other things, to limit the lifetime of the counter before it is reset.
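
The second of these variants could be set up roughly as in the following sketch; PostDetailView and the Post model are again placeholder names:

\begin{lstlisting}[language=Python]
# views.py -- illustrative use of django-hitcount's class-based view
from hitcount.views import HitCountDetailView

from .models import Post


class PostDetailView(HitCountDetailView):
    model = Post            # placeholder model name
    count_hit = True        # count a hit whenever this detail page is opened
\end{lstlisting}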

Django-post-office is well suited for sending and managing e-mails in Django ([Ong18]). After the installation, e-mail templates can be created in the admin backend, attachments can be managed and the sending can be checked in the log. Notifications can be sent asynchronously via an integrated scheduling manager, and the content can be text or HTML based and stored in several languages.
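
A minimal send call with django-post-office might look like this; the addresses and the template name are placeholders:

\begin{lstlisting}[language=Python]
# Illustrative call only: addresses and the template name are made up.
from post_office import mail

mail.send(
    'recipient@example.com',            # recipient
    'sender@example.com',               # sender
    template='new_post_notification',   # template created in the admin backend
    context={'tag': 'bayern'},          # variables rendered into the template
)
\end{lstlisting}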

--- TODO: possibly a logger add-on ---






\section{Bootstrap}
Designing an extensive website consistently is often very complex and time-consuming. The developers at Twitter therefore worked, at first internally, on a new management tool that was meant to bring several libraries together. They realised that the resulting library was not only applicable to their own website but flexible enough for any kind of website to benefit from it. In 2011 they decided to make Bootstrap freely available to the public. The open-source library, available on GitHub, has since been further developed by many programmers and has grown considerably. From version 2.0 onwards it is also able to adapt websites responsively to a wide range of mobile devices.


The Bootstrap package contains ready-made Cascading Style Sheets (CSS) that define colours, fonts and many other style rules. The library also includes extensions of the JavaScript framework jQuery that provide further functionality such as filters or dropdown menus. The package can simply be included in the head tag of an HTML file (see Figure 2.5). As a result, media queries or similar techniques are no longer necessary, not only to make a website mobile-ready but also to keep it compatible with the most diverse browsers.


\begin{figure}[!h]
\centering
\hfill
\end{figure}


By including Bootstrap in an HTML file, some styles are already applied automatically to the tags it contains, such as links and headings. This is, however, only a very small part of what the library provides. To use Bootstrap more extensively, the style definitions can be attached to arbitrary HTML tags via classes or IDs (see Figure 2.6).


\begin{figure}[!h]
\centering






\section{Cron}



+ 1
- 1
doc/bachelorarbeit_EstherKleinhenz/chapters/prototyp.tex View File

Which permissions exist in the prototype, and which ones are taken over from the Active Directory?




\section{Functions}
Necessary functions


\subsection{Subscribing}

BIN
doc/bachelorarbeit_EstherKleinhenz/figures/decorator_example(unused).png View File


+ 31
- 0
doc/bachelorarbeit_EstherKleinhenz/references/References_2.bib View File

year = "2018", year = "2018",
note = "https://www.datacamp.com/community/tutorials/pep8-tutorial-python-code" note = "https://www.datacamp.com/community/tutorials/pep8-tutorial-python-code"
} }
@article{Shabda,
author = "Shabda",
title = "Understanding decorators",
year = "2009",
note = "https://www.agiliq.com/blog/2009/06/understanding-decorators/"
}
@article{Timm,
author = "Damon Timm",
title = "django-hitcount Documentation",
year = "2015",
note = "https://django-hitcount.readthedocs.io/en/latest/overview.html"
}
@article{Ong,
author = "Selwin Ong",
title = "django-post\_office git Repository",
year = "2018",
note = "https://github.com/ui/django-post\_office/blob/master/AUTHORS.rst"
}
@article{Nevius,
author = "Ryan Nevius",
title = "django-post\_office git Repository",
year = "2015",
note = "https://ryannevius.com"
}


+ 6
- 9
doc/bachelorarbeit_EstherKleinhenz/titlepage/titlepage.tex View File

\line(1,0){1}\\


\vspace{2.0cm}
\textbf{\huge{Esther Kleinhenz}}\\
\textbf{\huge{Esther Beate Kleinhenz}}\\
\vspace{0.4cm}
{\Large \textbf{Matrikelnummer: }2649270}\\
\vspace{2.5cm}
%\indent verfasst.
%\vspace{1.5cm}


\indent Hiermit erkläre ich, dass die vorliegende Arbeit von mir selbständig verfasst und nicht\\
\indent anderweitig für Prüfungszwecke vorgelegt wurde, keine anderen als die angegebenen\\
\indent Quellen oder Hilfsmittel benutzt sowie wörtliche oder sinngemäße Zitate als solche\\
\indent gekennzeichnet wurden.
Hiermit erkläre ich, dass die vorliegende Arbeit von mir selbständig verfasst und nicht anderweitig für Prüfungszwecke vorgelegt wurde, keine anderen als die angegebenen Quellen oder Hilfsmittel benutzt sowie wörtliche oder sinngemäße Zitate als solche gekennzeichnet wurden.


\vspace{0.5cm}




\vspace{1.0cm}


\indent Katja Cornelia Hader \\
\indent E-mail: {haderka56442@th-nuernberg.de}
\indent Esther Beate Kleinhenz \\
\indent E-mail: {kleinhenzes60188@th-nuernberg.de}




\vspace{\fill}
\indent Studiengang Wirtschaftsinformatik\\
\indent Studiengang Media Engineering\\
\indent Georg Simon Ohm\\
\indent Technische Hochschule Nürnberg\\
\indent Keßlerplatz 12\\
\indent Ke"slerplatz 12\\
\indent 90489 Nürnberg\\
\indent Deutschland



+ 93
- 0
log.txt View File

[30/Oct/2018 17:01:30] INFO [mysite:140] form is valid
[30/Oct/2018 17:01:30] INFO [mysite:145] ['hi', 'first', 'test', 'bayern', 'second']
[30/Oct/2018 17:01:30] INFO [mysite:148] testag
[31/Oct/2018 14:51:36] INFO [mysite:141] CustomUser object (2)
[31/Oct/2018 14:51:39] INFO [mysite:141] CustomUser object (2)
[31/Oct/2018 14:52:57] INFO [mysite:142] CustomUser object (2)
[31/Oct/2018 14:53:00] INFO [mysite:142] CustomUser object (2)
[31/Oct/2018 14:54:10] INFO [mysite:144] CustomUser object (2)
[31/Oct/2018 14:54:12] INFO [mysite:144] CustomUser object (2)
[31/Oct/2018 14:55:09] INFO [mysite:144] CustomUser object (2)
[31/Oct/2018 14:55:38] INFO [mysite:144] CustomUser object (2)
[31/Oct/2018 14:55:41] INFO [mysite:144] CustomUser object (2)
[31/Oct/2018 14:56:20] INFO [mysite:146] CustomUser object (2)
[31/Oct/2018 14:58:29] INFO [mysite:146] CustomUser object (2)
[31/Oct/2018 14:58:31] INFO [mysite:146] CustomUser object (2)
[31/Oct/2018 14:58:43] INFO [mysite:146] CustomUser object (2)
[31/Oct/2018 14:59:00] INFO [mysite:146] CustomUser object (2)
[31/Oct/2018 17:18:53] INFO [mysite:146] CustomUser object (2)
[31/Oct/2018 17:19:02] INFO [mysite:146] CustomUser object (2)
[31/Oct/2018 17:19:04] INFO [mysite:146] CustomUser object (2)
[31/Oct/2018 17:20:10] INFO [mysite:146] CustomUser object (2)
[31/Oct/2018 17:21:24] INFO [mysite:146] CustomUser object (2)
[31/Oct/2018 17:21:27] INFO [mysite:146] CustomUser object (2)
[31/Oct/2018 17:24:55] INFO [mysite:146] CustomUser object (2)
[31/Oct/2018 17:27:16] INFO [mysite:147] CustomUser object (2)
[31/Oct/2018 17:27:59] INFO [mysite:147] CustomUser object (2)
[31/Oct/2018 17:28:02] INFO [mysite:147] CustomUser object (2)
[31/Oct/2018 17:29:13] INFO [mysite:146] CustomUser object (2)
[31/Oct/2018 17:29:17] INFO [mysite:146] CustomUser object (2)
[31/Oct/2018 17:32:43] INFO [mysite:144] CustomUser object (2)
[31/Oct/2018 17:32:45] INFO [mysite:144] CustomUser object (2)
[31/Oct/2018 17:42:25] INFO [mysite:186] None
[31/Oct/2018 17:43:06] INFO [mysite:186] None
[31/Oct/2018 17:43:07] INFO [mysite:144] CustomUser object (3)
[31/Oct/2018 17:43:13] INFO [mysite:144] CustomUser object (3)
[31/Oct/2018 17:44:14] INFO [mysite:144] CustomUser object (3)
[31/Oct/2018 17:44:17] INFO [mysite:175] <QuerySet [<Tag: hi>]>
[31/Oct/2018 17:44:17] INFO [mysite:178] <QuerySet [<Post: Hi there>]>
[31/Oct/2018 17:45:09] INFO [mysite:175] <QuerySet [<Tag: hi>]>
[31/Oct/2018 17:45:09] INFO [mysite:178] <QuerySet [<Post: Hi there>]>
[31/Oct/2018 17:45:15] INFO [mysite:144] CustomUser object (3)
[31/Oct/2018 17:45:24] INFO [mysite:144] CustomUser object (3)
[31/Oct/2018 17:45:24] INFO [mysite:146] post method
[31/Oct/2018 17:45:24] INFO [mysite:153] ['hi', 'first', 'test', 'bayern', 'second']
[31/Oct/2018 17:46:00] INFO [mysite:144] CustomUser object (3)
[31/Oct/2018 17:46:04] INFO [mysite:144] CustomUser object (3)
[31/Oct/2018 17:46:04] INFO [mysite:146] post method
[31/Oct/2018 17:46:05] INFO [mysite:153] ['hi', 'first', 'test', 'bayern', 'second']
[31/Oct/2018 17:46:05] INFO [mysite:156] hi
[31/Oct/2018 17:46:05] INFO [mysite:158] worked out
[31/Oct/2018 17:46:05] INFO [mysite:144] CustomUser object (3)
[31/Oct/2018 17:50:10] INFO [mysite:144] CustomUser object (3)
[31/Oct/2018 17:50:13] INFO [mysite:144] CustomUser object (3)
[31/Oct/2018 17:50:13] INFO [mysite:146] post method
[31/Oct/2018 17:51:14] INFO [mysite:144] CustomUser object (3)
[31/Oct/2018 17:51:14] INFO [mysite:146] post method
[31/Oct/2018 17:51:24] INFO [mysite:144] CustomUser object (3)
[31/Oct/2018 17:51:24] INFO [mysite:146] post method
[31/Oct/2018 17:51:39] INFO [mysite:144] CustomUser object (3)
[31/Oct/2018 17:56:24] INFO [mysite:144] CustomUser object (3)
[31/Oct/2018 18:04:35] INFO [mysite:146] post method
[31/Oct/2018 18:04:35] INFO [mysite:153] ['hi', 'first', 'test', 'bayern', 'second']
[31/Oct/2018 18:04:35] INFO [mysite:157] worked out
[31/Oct/2018 18:06:17] INFO [mysite:146] post method
[31/Oct/2018 18:06:18] INFO [mysite:153] ['hi', 'first', 'test', 'bayern', 'second']
[31/Oct/2018 18:06:18] INFO [mysite:157] worked out
[31/Oct/2018 18:07:46] INFO [mysite:146] post method
[31/Oct/2018 18:07:47] INFO [mysite:153] ['hi', 'first', 'test', 'bayern', 'second']
[31/Oct/2018 18:07:47] INFO [mysite:157] worked out
[31/Oct/2018 18:09:17] INFO [mysite:146] post method
[31/Oct/2018 18:09:18] INFO [mysite:153] ['hi', 'first', 'test', 'bayern', 'second']
[31/Oct/2018 18:09:40] INFO [mysite:146] post method
[31/Oct/2018 18:09:40] INFO [mysite:153] ['hi', 'first', 'test', 'bayern', 'second']
[31/Oct/2018 18:09:47] INFO [mysite:146] post method
[31/Oct/2018 18:09:48] INFO [mysite:153] ['hi', 'first', 'test', 'bayern', 'second']
[31/Oct/2018 18:10:12] INFO [mysite:146] post method
[31/Oct/2018 18:10:12] INFO [mysite:153] ['hi', 'first', 'test', 'bayern', 'second']
[31/Oct/2018 18:10:12] INFO [mysite:157] worked out
[31/Oct/2018 18:22:34] INFO [mysite:144] post method
[31/Oct/2018 18:22:51] INFO [mysite:145] post method
[31/Oct/2018 18:22:51] INFO [mysite:148] form is valid
[31/Oct/2018 18:22:51] INFO [mysite:153] ['hi', 'first', 'test', 'bayern', 'second']
[31/Oct/2018 18:23:10] INFO [mysite:145] post method
[31/Oct/2018 18:23:10] INFO [mysite:148] form is valid
[31/Oct/2018 18:23:10] INFO [mysite:153] ['hi', 'first', 'test', 'bayern', 'second']
[31/Oct/2018 18:23:10] INFO [mysite:156] hi
[31/Oct/2018 18:23:10] INFO [mysite:158] worked out
[31/Oct/2018 18:23:19] INFO [mysite:145] post method
[31/Oct/2018 18:23:19] INFO [mysite:148] form is valid
[31/Oct/2018 18:23:19] INFO [mysite:153] ['hi', 'first', 'test', 'bayern', 'second']
[31/Oct/2018 18:23:19] INFO [mysite:156] hi
[31/Oct/2018 18:23:19] INFO [mysite:158] worked out
[31/Oct/2018 18:23:23] INFO [mysite:145] post method
[31/Oct/2018 18:23:23] INFO [mysite:148] form is valid
[31/Oct/2018 18:23:23] INFO [mysite:153] ['hi', 'first', 'test', 'bayern', 'second']
[31/Oct/2018 18:23:23] INFO [mysite:156] tehere

+ 24
- 20
requirements.txt View File

amqp==1.4.9
anyjson==0.3.3
astroid==1.6.5
altgraph==0.10.2
astroid==1.6.4
autopep8==1.3.5
billiard==3.3.0.23
celery==3.1.26.post2
croniter==0.3.25
Django==2.1.2
django-celery==3.2.2
backports-abc==0.5
backports.functools-lru-cache==1.5
bdist-mpkg==0.5.0
configparser==3.5.0
Django==1.11.15
django-classy-tags==0.8.0
django-debug-toolbar==1.10.1
django-hitcount==1.3.0
django-post-office==3.1.0
django-taggit==0.23.0
django-taggit-templatetags2==1.6.1
enum34==1.1.6
futures==3.1.1
isort==4.3.4
jsonfield==2.0.2
kombu==3.0.37
lazy-object-proxy==1.3.1
ldap3==2.5
macholib==1.5.1
mccabe==0.6.1
pyasn1==0.4.3
modulegraph==0.10.4
mysql-connector-python==8.0.12
nose==1.3.7
numpy==1.8.2
protobuf==3.6.1
py2app==0.7.3
pyasn1==0.4.4
pyasn1-modules==0.2.2
pycodestyle==2.4.0
pyldap==3.0.0.post1
pylint==1.9.2
python-dateutil==2.7.3
pylint==1.9.1
pyparsing==2.0.1
python-dateutil==1.5
python-ldap==3.1.0
pytz==2018.4
pytz==2013.7
scipy==1.1.0
singledispatch==3.4.0.3
six==1.11.0
sqlparse==0.2.4
tornado==5.1
wrapt==1.10.11

+ 4
- 0
start.sh View File

#!/bin/bash

# Start Gunicorn processes
python3 manage.py runserver

+ 11
- 0
thesisenv/bin/gunicorn View File

#!/Users/Esthi/thesis_ek/thesisenv/bin/python3

# -*- coding: utf-8 -*-
import re
import sys

from gunicorn.app.wsgiapp import run

if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(run())

+ 11
- 0
thesisenv/bin/gunicorn_django View File

#!/Users/Esthi/thesis_ek/thesisenv/bin/python3

# -*- coding: utf-8 -*-
import re
import sys

from gunicorn.app.djangoapp import run

if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(run())

+ 11
- 0
thesisenv/bin/gunicorn_paster View File

#!/Users/Esthi/thesis_ek/thesisenv/bin/python3

# -*- coding: utf-8 -*-
import re
import sys

from gunicorn.app.pasterapp import run

if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(run())

+ 59
- 0
thesisenv/lib/python3.6/site-packages/gunicorn-19.6.0.dist-info/DESCRIPTION.rst View File

Gunicorn
--------

.. image::
https://secure.travis-ci.org/benoitc/gunicorn.png?branch=master
:alt: Build Status
:target: https://travis-ci.org/benoitc/gunicorn

Gunicorn 'Green Unicorn' is a Python WSGI HTTP Server for UNIX. It's a pre-fork
worker model ported from Ruby's Unicorn_ project. The Gunicorn server is broadly
compatible with various web frameworks, simply implemented, light on server
resource usage, and fairly speedy.

Feel free to join us in `#gunicorn`_ on Freenode_.

Documentation
-------------

The documentation is hosted at http://docs.gunicorn.org.

Installation
------------

Gunicorn requires **Python 2.x >= 2.6** or **Python 3.x >= 3.2**.

Install from PyPI::

$ pip install gunicorn


Usage
-----

Basic usage::

$ gunicorn [OPTIONS] APP_MODULE

Where ``APP_MODULE`` is of the pattern ``$(MODULE_NAME):$(VARIABLE_NAME)``. The
module name can be a full dotted path. The variable name refers to a WSGI
callable that should be found in the specified module.

Example with test app::

$ cd examples
$ gunicorn --workers=2 test:app


License
-------

Gunicorn is released under the MIT License. See the LICENSE_ file for more
details.

.. _Unicorn: http://unicorn.bogomips.org/
.. _`#gunicorn`: http://webchat.freenode.net/?channels=gunicorn
.. _Freenode: http://freenode.net
.. _LICENSE: http://github.com/benoitc/gunicorn/blob/master/LICENSE



thesisenv/lib/python3.6/site-packages/setuptools-39.0.1.dist-info/INSTALLER → thesisenv/lib/python3.6/site-packages/gunicorn-19.6.0.dist-info/INSTALLER View File


+ 90
- 0
thesisenv/lib/python3.6/site-packages/gunicorn-19.6.0.dist-info/METADATA View File

Metadata-Version: 2.0
Name: gunicorn
Version: 19.6.0
Summary: WSGI HTTP Server for UNIX
Home-page: http://gunicorn.org
Author: Benoit Chesneau
Author-email: benoitc@e-engura.com
License: MIT
Platform: UNKNOWN
Classifier: Development Status :: 4 - Beta
Classifier: Environment :: Other Environment
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: MIT License
Classifier: Operating System :: MacOS :: MacOS X
Classifier: Operating System :: POSIX
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 2
Classifier: Programming Language :: Python :: 2.6
Classifier: Programming Language :: Python :: 2.7
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.2
Classifier: Programming Language :: Python :: 3.3
Classifier: Programming Language :: Python :: 3.4
Classifier: Topic :: Internet
Classifier: Topic :: Utilities
Classifier: Topic :: Software Development :: Libraries :: Python Modules
Classifier: Topic :: Internet :: WWW/HTTP
Classifier: Topic :: Internet :: WWW/HTTP :: WSGI
Classifier: Topic :: Internet :: WWW/HTTP :: WSGI :: Server
Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content

Gunicorn
--------

.. image::
https://secure.travis-ci.org/benoitc/gunicorn.png?branch=master
:alt: Build Status
:target: https://travis-ci.org/benoitc/gunicorn

Gunicorn 'Green Unicorn' is a Python WSGI HTTP Server for UNIX. It's a pre-fork
worker model ported from Ruby's Unicorn_ project. The Gunicorn server is broadly
compatible with various web frameworks, simply implemented, light on server
resource usage, and fairly speedy.

Feel free to join us in `#gunicorn`_ on Freenode_.

Documentation
-------------

The documentation is hosted at http://docs.gunicorn.org.

Installation
------------

Gunicorn requires **Python 2.x >= 2.6** or **Python 3.x >= 3.2**.

Install from PyPI::

$ pip install gunicorn


Usage
-----

Basic usage::

$ gunicorn [OPTIONS] APP_MODULE

Where ``APP_MODULE`` is of the pattern ``$(MODULE_NAME):$(VARIABLE_NAME)``. The
module name can be a full dotted path. The variable name refers to a WSGI
callable that should be found in the specified module.

Example with test app::

$ cd examples
$ gunicorn --workers=2 test:app


License
-------

Gunicorn is released under the MIT License. See the LICENSE_ file for more
details.

.. _Unicorn: http://unicorn.bogomips.org/
.. _`#gunicorn`: http://webchat.freenode.net/?channels=gunicorn
.. _Freenode: http://freenode.net
.. _LICENSE: http://github.com/benoitc/gunicorn/blob/master/LICENSE



+ 99
- 0
thesisenv/lib/python3.6/site-packages/gunicorn-19.6.0.dist-info/RECORD View File

../../../bin/gunicorn,sha256=QwZgUsXVoI-G5n_rSOnlnDW47F0cn8H6EfcHYM3Bxio,249
../../../bin/gunicorn_django,sha256=IrksqvDXDtGcoAjIGDaWlg-LS07HRWgzjUyieZBLIAY,251
../../../bin/gunicorn_paster,sha256=4Em2ld6n94yvE2d4U1GC9gdFFh0KRHKDbiLdM2a3LSQ,251
gunicorn-19.6.0.dist-info/DESCRIPTION.rst,sha256=gCCsiCS_cxp9BYBUISH3yQgOJ7Qwf1doRlU848N7V9A,1398
gunicorn-19.6.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
gunicorn-19.6.0.dist-info/METADATA,sha256=WLiYNVHetUjDGxLR4TJX37K32PU6hCDZUlA80r8GApE,2614
gunicorn-19.6.0.dist-info/RECORD,,
gunicorn-19.6.0.dist-info/WHEEL,sha256=AvR0WeTpDaxT645bl5FQxUK6NPsTls2ttpcGJg3j1Xg,110
gunicorn-19.6.0.dist-info/entry_points.txt,sha256=O4lN00p02r6nZQ_iK-fMXLjj_JK6fAWdl23_5czAK-I,231
gunicorn-19.6.0.dist-info/metadata.json,sha256=J8wtgZ1uHf39U63vZr-tBK7tivgZI45J5WaIqk2D1ZU,1737
gunicorn-19.6.0.dist-info/top_level.txt,sha256=cdMaa2yhxb8do-WioY9qRHUCfwf55YztjwQCncaInoE,9
gunicorn/__init__.py,sha256=nCZCS-r49yi9ZJS3WLqDEwHpqQIJpwloTFaDXtdeFbE,255
gunicorn/__pycache__/__init__.cpython-36.pyc,,
gunicorn/__pycache__/_compat.cpython-36.pyc,,
gunicorn/__pycache__/arbiter.cpython-36.pyc,,
gunicorn/__pycache__/argparse_compat.cpython-36.pyc,,
gunicorn/__pycache__/config.cpython-36.pyc,,
gunicorn/__pycache__/debug.cpython-36.pyc,,
gunicorn/__pycache__/errors.cpython-36.pyc,,
gunicorn/__pycache__/glogging.cpython-36.pyc,,
gunicorn/__pycache__/pidfile.cpython-36.pyc,,
gunicorn/__pycache__/reloader.cpython-36.pyc,,
gunicorn/__pycache__/selectors.cpython-36.pyc,,
gunicorn/__pycache__/six.cpython-36.pyc,,
gunicorn/__pycache__/sock.cpython-36.pyc,,
gunicorn/__pycache__/util.cpython-36.pyc,,
gunicorn/_compat.py,sha256=IgADLDObcvKcQWExjqm4XgWNULhwigfBC7u8td-xNqQ,8719
gunicorn/app/__init__.py,sha256=GuqstqdkizeV4HRbd8aGMBn0Q8IDOyRU1wMMNqNe5GY,127
gunicorn/app/__pycache__/__init__.cpython-36.pyc,,
gunicorn/app/__pycache__/base.cpython-36.pyc,,
gunicorn/app/__pycache__/django_wsgi.cpython-36.pyc,,
gunicorn/app/__pycache__/djangoapp.cpython-36.pyc,,
gunicorn/app/__pycache__/pasterapp.cpython-36.pyc,,
gunicorn/app/__pycache__/wsgiapp.cpython-36.pyc,,
gunicorn/app/base.py,sha256=PTMXTIbczObiDEcwW2yO1iwQHUCqRJuziv-2RI0uPbk,5746
gunicorn/app/django_wsgi.py,sha256=y57mCZPtg6_bZH8gXO_L5MdWs88kg__-6J8ewszmyLo,4363
gunicorn/app/djangoapp.py,sha256=r1q0BvE7P5Dp4uQpMp60cQU-H5TAM86ieNAM3WjSlWM,5026
gunicorn/app/pasterapp.py,sha256=05rRnbj_UC-vpvUoSrihe7XewtVXt7kEuHpo0ztXHW8,6124
gunicorn/app/wsgiapp.py,sha256=GXVU4rl44bKZp7ZTyRdG5A4l7h_iUtQWM1dQDFQ_JO0,2156
gunicorn/arbiter.py,sha256=8NIWHmdQ6PvNkJjBx4Y7ELdM1WQG8S3PnOLjTp0iTGs,19218
gunicorn/argparse_compat.py,sha256=gsHDGwo4BSJWHdiaEXy0Emr96NKC0LDYmK5nB7PE8Qc,87791
gunicorn/config.py,sha256=xlqyO2KbiMg2dqEkd_Z2gLIr5S9M8KkaXW7c4NibqKA,47330
gunicorn/debug.py,sha256=9z2i59LfELYi3VvbwyrIKezYrQuowVAsWfSLZI75TDI,2303
gunicorn/errors.py,sha256=CNtO7hfBdqJADoEyPGME2kTW8QcxlUWmmA1NfOEJvSQ,632
gunicorn/glogging.py,sha256=zrEWgA8czmyZSj3OefEPbAem4pooS5k57RHtZOI0rBk,14582
gunicorn/http/__init__.py,sha256=b4TF3x5F0VYOPTOeNYwRGR1EYHBaPMhZRMoNeuD5-n0,277
gunicorn/http/__pycache__/__init__.cpython-36.pyc,,
gunicorn/http/__pycache__/_sendfile.cpython-36.pyc,,
gunicorn/http/__pycache__/body.cpython-36.pyc,,
gunicorn/http/__pycache__/errors.cpython-36.pyc,,
gunicorn/http/__pycache__/message.cpython-36.pyc,,
gunicorn/http/__pycache__/parser.cpython-36.pyc,,
gunicorn/http/__pycache__/unreader.cpython-36.pyc,,
gunicorn/http/__pycache__/wsgi.cpython-36.pyc,,
gunicorn/http/_sendfile.py,sha256=lJZV7IsyJIjij9QQNlrSHIDClrcVrI1xD0vmILredwc,2256
gunicorn/http/body.py,sha256=SbFMqhFR_V1AKg1Bm0grZL5gmhJu2zf_8Xz2Kaj-mao,7355
gunicorn/http/errors.py,sha256=57KmM6CA7UldH7ZfYRSF8drsh95iI5sVoTEHoQrlcSI,2446
gunicorn/http/message.py,sha256=YyuWhQ77Ac5_fNOhyyYqO2GO1iqe7nTOzni-TEsDovg,11365
gunicorn/http/parser.py,sha256=IRMvp0veP4wL8Z4vgNV72CPydCNPdNNIy9u-DlDvvSo,1294
gunicorn/http/unreader.py,sha256=1D9E3QD8BBkCrJ4BvIDUdZngT4n7Q1H-X-GLqF19iT4,2024
gunicorn/http/wsgi.py,sha256=nJQZ2yAADyeCkGWj4QFGxxHicaF5wgOp3iqOpGKCntM,13222
gunicorn/instrument/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
gunicorn/instrument/__pycache__/__init__.cpython-36.pyc,,
gunicorn/instrument/__pycache__/statsd.cpython-36.pyc,,
gunicorn/instrument/statsd.py,sha256=Oo1vLvtYzqAwX476tX5IGa0G_5bUE_zIL0NTx_9Y0Ao,4489
gunicorn/management/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
gunicorn/management/__pycache__/__init__.cpython-36.pyc,,
gunicorn/management/commands/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
gunicorn/management/commands/__pycache__/__init__.cpython-36.pyc,,
gunicorn/management/commands/__pycache__/run_gunicorn.cpython-36.pyc,,
gunicorn/management/commands/run_gunicorn.py,sha256=ulaIXOdJWshs4lI7uVTJ2frvRLGtSCktMmRnWRc3yGk,3638
gunicorn/pidfile.py,sha256=_SYX5zNhgpgUgws_R3VwClcqUWRSPOXl-5MDRI9rH74,2272
gunicorn/reloader.py,sha256=Z8XJ581yTqOmrPkheJav-qdsJZS9z2JdYojhocOY6Sc,1533
gunicorn/selectors.py,sha256=14_UESrpE3AQKXWKeeAUG9vBTzJ0yTYDGtEo6xOtlDY,18997
gunicorn/six.py,sha256=6N-6RCENPfBtMpN5UmgDfDKmJebbbuPu_Dk3Zf8ngww,27344
gunicorn/sock.py,sha256=OBZ7u3fNygNEUsM8M1x-bY9b1rQYcj64sjbj8pUu-eE,7124
gunicorn/util.py,sha256=GKpnIaJ_u2ZyJDDJBeHdmOp6xf0iIFDqrg4hEzyjlnA,15723
gunicorn/workers/__init__.py,sha256=Z57G1WjnZDCG52C8PgiXF4mKRKqlv81b2GHkhOJiO6A,774
gunicorn/workers/__pycache__/__init__.cpython-36.pyc,,
gunicorn/workers/__pycache__/_gaiohttp.cpython-36.pyc,,
gunicorn/workers/__pycache__/async.cpython-36.pyc,,
gunicorn/workers/__pycache__/base.cpython-36.pyc,,
gunicorn/workers/__pycache__/gaiohttp.cpython-36.pyc,,
gunicorn/workers/__pycache__/geventlet.cpython-36.pyc,,
gunicorn/workers/__pycache__/ggevent.cpython-36.pyc,,
gunicorn/workers/__pycache__/gthread.cpython-36.pyc,,
gunicorn/workers/__pycache__/gtornado.cpython-36.pyc,,
gunicorn/workers/__pycache__/sync.cpython-36.pyc,,
gunicorn/workers/__pycache__/workertmp.cpython-36.pyc,,
gunicorn/workers/_gaiohttp.py,sha256=CFKiyLNqWqemhDvDovb-JqMRTMNz50gZUwwOpIjrpHw,5071
gunicorn/workers/async.py,sha256=i5JrhOBmXREcoVNJaCXmSKHzYSUeWomsNwmGxlEFHzE,5397
gunicorn/workers/base.py,sha256=FuFHTWOjLBdzel08h39ymFhy1EuGbse1lDUD4HX2gRI,8643
gunicorn/workers/gaiohttp.py,sha256=GIkR9AnyaZ8b1Pt7It35iIK6EPwe2JSUgt55u7nw9qo,500
gunicorn/workers/geventlet.py,sha256=zX5q8vyHCJ2JZ4imlsMHidoMlvK67jl5x-2eD6-VRlQ,3796
gunicorn/workers/ggevent.py,sha256=we70lCiiyHwaW75kqM40KOsX7FKyZiRvjT2EPROKt5c,6838
gunicorn/workers/gthread.py,sha256=YCtzA8SdWY6jeJs-8MTqpes4V-9kxy4D1TJNQ-CxafI,12077
gunicorn/workers/gtornado.py,sha256=kGNYyqwZMpItoLjM_-6wTWGUtThchSNpz9MszqrwlBU,4372
gunicorn/workers/sync.py,sha256=_vd1JATNLG4MgJppNJG5KWBIzLGYqRzhEAQVz9H11LI,7153
gunicorn/workers/workertmp.py,sha256=6QINPBrriLvezgkC_hclOOeXLi_owMt_SOA5KPEIN-A,1459

+ 6
- 0
thesisenv/lib/python3.6/site-packages/gunicorn-19.6.0.dist-info/WHEEL View File

Wheel-Version: 1.0
Generator: bdist_wheel (0.24.0)
Root-Is-Purelib: true
Tag: py2-none-any
Tag: py3-none-any


+ 9
- 0
thesisenv/lib/python3.6/site-packages/gunicorn-19.6.0.dist-info/entry_points.txt View File


[console_scripts]
gunicorn=gunicorn.app.wsgiapp:run
gunicorn_django=gunicorn.app.djangoapp:run
gunicorn_paster=gunicorn.app.pasterapp:run

[paste.server_runner]
main=gunicorn.app.pasterapp:paste_server

+ 1
- 0
thesisenv/lib/python3.6/site-packages/gunicorn-19.6.0.dist-info/metadata.json View File

{"extensions": {"python.exports": {"console_scripts": {"gunicorn_django": "gunicorn.app.djangoapp:run", "gunicorn_paster": "gunicorn.app.pasterapp:run", "gunicorn": "gunicorn.app.wsgiapp:run"}, "paste.server_runner": {"main": "gunicorn.app.pasterapp:paste_server"}}, "python.details": {"contacts": [{"email": "benoitc@e-engura.com", "role": "author", "name": "Benoit Chesneau"}], "project_urls": {"Home": "http://gunicorn.org"}, "document_names": {"description": "DESCRIPTION.rst"}}, "python.commands": {"wrap_console": {"gunicorn_django": "gunicorn.app.djangoapp:run", "gunicorn_paster": "gunicorn.app.pasterapp:run", "gunicorn": "gunicorn.app.wsgiapp:run"}}}, "license": "MIT", "classifiers": ["Development Status :: 4 - Beta", "Environment :: Other Environment", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Operating System :: MacOS :: MacOS X", "Operating System :: POSIX", "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.2", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Topic :: Internet", "Topic :: Utilities", "Topic :: Software Development :: Libraries :: Python Modules", "Topic :: Internet :: WWW/HTTP", "Topic :: Internet :: WWW/HTTP :: WSGI", "Topic :: Internet :: WWW/HTTP :: WSGI :: Server", "Topic :: Internet :: WWW/HTTP :: Dynamic Content"], "name": "gunicorn", "generator": "bdist_wheel (0.24.0)", "version": "19.6.0", "metadata_version": "2.0", "summary": "WSGI HTTP Server for UNIX", "test_requires": [{"requires": ["pytest (==2.8.3)", "pytest-cov (==1.7.0)"]}]}

+ 1
- 0
thesisenv/lib/python3.6/site-packages/gunicorn-19.6.0.dist-info/top_level.txt View File

gunicorn

+ 8
- 0
thesisenv/lib/python3.6/site-packages/gunicorn/__init__.py View File

# -*- coding: utf-8 -
#
# This file is part of gunicorn released under the MIT license.
# See the NOTICE for more information.

version_info = (19, 6, 0)
__version__ = ".".join([str(v) for v in version_info])
SERVER_SOFTWARE = "gunicorn/%s" % __version__

+ 264
- 0
thesisenv/lib/python3.6/site-packages/gunicorn/_compat.py View File

import sys

from gunicorn import six

PY26 = (sys.version_info[:2] == (2, 6))
PY33 = (sys.version_info >= (3, 3))


def _check_if_pyc(fname):
"""Return True if the extension is .pyc, False if .py
and None if otherwise"""
from imp import find_module
from os.path import realpath, dirname, basename, splitext

# Normalize the file-path for the find_module()
filepath = realpath(fname)
dirpath = dirname(filepath)
module_name = splitext(basename(filepath))[0]

# Validate and fetch
try:
fileobj, fullpath, (_, _, pytype) = find_module(module_name, [dirpath])
except ImportError:
raise IOError("Cannot find config file. "
"Path maybe incorrect! : {0}".format(filepath))
return pytype, fileobj, fullpath


def _get_codeobj(pyfile):
""" Returns the code object, given a python file """
from imp import PY_COMPILED, PY_SOURCE

result, fileobj, fullpath = _check_if_pyc(pyfile)

# WARNING:
# fileobj.read() can blow up if the module file is extremely large.
# Look out for overflow errors.
try:
data = fileobj.read()
finally:
fileobj.close()

# This is a .pyc file. Treat accordingly.
if result is PY_COMPILED:
# .pyc format is as follows:
# 0 - 4 bytes: magic number, which changes with each creation of the .pyc file.
# The first 2 bytes change with the marshal format; the last 2 bytes are "\r\n".
# 4 - 8 bytes: Datetime value, when the .py was last changed.
# 8 - EOF: Marshalled code object data.
# So to get code object, just read the 8th byte onwards till EOF, and
# UN-marshal it.
import marshal
code_obj = marshal.loads(data[8:])

elif result is PY_SOURCE:
# This is a .py file.
code_obj = compile(data, fullpath, 'exec')

else:
# Unsupported extension
raise Exception("Input file is unknown format: {0}".format(fullpath))

# Return code object
return code_obj

if six.PY3:
def execfile_(fname, *args):
if fname.endswith(".pyc"):
code = _get_codeobj(fname)
else:
code = compile(open(fname, 'rb').read(), fname, 'exec')
return six.exec_(code, *args)

def bytes_to_str(b):
if isinstance(b, six.text_type):
return b
return str(b, 'latin1')

import urllib.parse

def unquote_to_wsgi_str(string):
return _unquote_to_bytes(string).decode('latin-1')

_unquote_to_bytes = urllib.parse.unquote_to_bytes

else:
def execfile_(fname, *args):
""" Overriding PY2 execfile() implementation to support .pyc files """
if fname.endswith(".pyc"):
return six.exec_(_get_codeobj(fname), *args)
return execfile(fname, *args)

def bytes_to_str(s):
if isinstance(s, unicode):
return s.encode('utf-8')
return s

import urllib
unquote_to_wsgi_str = urllib.unquote


# The following code adapted from trollius.py33_exceptions
def _wrap_error(exc, mapping, key):
if key not in mapping:
return
new_err_cls = mapping[key]
new_err = new_err_cls(*exc.args)

# raise a new exception with the original traceback
six.reraise(new_err_cls, new_err,
exc.__traceback__ if hasattr(exc, '__traceback__') else sys.exc_info()[2])

if PY33:
import builtins

BlockingIOError = builtins.BlockingIOError
BrokenPipeError = builtins.BrokenPipeError
ChildProcessError = builtins.ChildProcessError
ConnectionRefusedError = builtins.ConnectionRefusedError
ConnectionResetError = builtins.ConnectionResetError
InterruptedError = builtins.InterruptedError
ConnectionAbortedError = builtins.ConnectionAbortedError
PermissionError = builtins.PermissionError
FileNotFoundError = builtins.FileNotFoundError
ProcessLookupError = builtins.ProcessLookupError

def wrap_error(func, *args, **kw):
return func(*args, **kw)
else:
import errno
import select
import socket

class BlockingIOError(OSError):
pass

class BrokenPipeError(OSError):
pass

class ChildProcessError(OSError):
pass

class ConnectionRefusedError(OSError):
pass

class InterruptedError(OSError):
pass

class ConnectionResetError(OSError):
pass

class ConnectionAbortedError(OSError):
pass

class PermissionError(OSError):
pass

class FileNotFoundError(OSError):
pass

class ProcessLookupError(OSError):
pass

_MAP_ERRNO = {
errno.EACCES: PermissionError,
errno.EAGAIN: BlockingIOError,
errno.EALREADY: BlockingIOError,
errno.ECHILD: ChildProcessError,
errno.ECONNABORTED: ConnectionAbortedError,
errno.ECONNREFUSED: ConnectionRefusedError,
errno.ECONNRESET: ConnectionResetError,
errno.EINPROGRESS: BlockingIOError,
errno.EINTR: InterruptedError,
errno.ENOENT: FileNotFoundError,
errno.EPERM: PermissionError,
errno.EPIPE: BrokenPipeError,
errno.ESHUTDOWN: BrokenPipeError,
errno.EWOULDBLOCK: BlockingIOError,
errno.ESRCH: ProcessLookupError,
}

def wrap_error(func, *args, **kw):
"""
Wrap socket.error, IOError, OSError, select.error to raise new specialized
exceptions of Python 3.3 like InterruptedError (PEP 3151).
"""
try:
return func(*args, **kw)
except (socket.error, IOError, OSError) as exc:
if hasattr(exc, 'winerror'):
_wrap_error(exc, _MAP_ERRNO, exc.winerror)
# _MAP_ERRNO does not contain all Windows errors.
# For some errors like "file not found", exc.errno should
# be used (ex: ENOENT).
_wrap_error(exc, _MAP_ERRNO, exc.errno)
raise
except select.error as exc:
if exc.args:
_wrap_error(exc, _MAP_ERRNO, exc.args[0])
raise

if PY26:
from urlparse import (
_parse_cache, MAX_CACHE_SIZE, clear_cache, _splitnetloc, SplitResult,
scheme_chars,
)

def urlsplit(url, scheme='', allow_fragments=True):
"""Parse a URL into 5 components:
<scheme>://<netloc>/<path>?<query>#<fragment>
Return a 5-tuple: (scheme, netloc, path, query, fragment).
Note that we don't break the components up in smaller bits
(e.g. netloc is a single string) and we don't expand % escapes."""
allow_fragments = bool(allow_fragments)
key = url, scheme, allow_fragments, type(url), type(scheme)
cached = _parse_cache.get(key, None)
if cached:
return cached
if len(_parse_cache) >= MAX_CACHE_SIZE: # avoid runaway growth
clear_cache()
netloc = query = fragment = ''
i = url.find(':')
if i > 0:
if url[:i] == 'http': # optimize the common case
scheme = url[:i].lower()
url = url[i+1:]
if url[:2] == '//':
netloc, url = _splitnetloc(url, 2)
if (('[' in netloc and ']' not in netloc) or
(']' in netloc and '[' not in netloc)):
raise ValueError("Invalid IPv6 URL")
if allow_fragments and '#' in url:
url, fragment = url.split('#', 1)
if '?' in url:
url, query = url.split('?', 1)
v = SplitResult(scheme, netloc, url, query, fragment)
_parse_cache[key] = v
return v
for c in url[:i]:
if c not in scheme_chars:
break
else:
# make sure "url" is not actually a port number (in which case
# "scheme" is really part of the path)
rest = url[i+1:]
if not rest or any(c not in '0123456789' for c in rest):
# not a port number
scheme, url = url[:i].lower(), rest

if url[:2] == '//':
netloc, url = _splitnetloc(url, 2)
if (('[' in netloc and ']' not in netloc) or
(']' in netloc and '[' not in netloc)):
raise ValueError("Invalid IPv6 URL")
if allow_fragments and '#' in url:
url, fragment = url.split('#', 1)
if '?' in url:
url, query = url.split('?', 1)
v = SplitResult(scheme, netloc, url, query, fragment)
_parse_cache[key] = v
return v

else:
from gunicorn.six.moves.urllib.parse import urlsplit
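On Python 3.3+ the wrap_error() helper above is a plain pass-through, while on older interpreters it remaps errno-style failures onto the backported PEP 3151 exception classes defined in the else branch. A minimal usage sketch (the probe() helper is illustrative, not part of gunicorn):

# sketch: let errno-based socket failures surface as PEP 3151 exception classes.
import socket

from gunicorn._compat import ConnectionRefusedError, wrap_error

def probe(host, port):
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    try:
        wrap_error(sock.connect, (host, port))
        return True
    except ConnectionRefusedError:
        return False
    finally:
        sock.close()

if __name__ == "__main__":
    print(probe("127.0.0.1", 9))  # port 9 (discard) is normally closed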

+ 4
- 0
thesisenv/lib/python3.6/site-packages/gunicorn/app/__init__.py View File

# -*- coding: utf-8 -
#
# This file is part of gunicorn released under the MIT license.
# See the NOTICE for more information.

+ 192
- 0
thesisenv/lib/python3.6/site-packages/gunicorn/app/base.py View File

# -*- coding: utf-8 -
#
# This file is part of gunicorn released under the MIT license.
# See the NOTICE for more information.
from __future__ import print_function

import os
import sys
import traceback

from gunicorn._compat import execfile_
from gunicorn import util
from gunicorn.arbiter import Arbiter
from gunicorn.config import Config, get_default_config_file
from gunicorn import debug

class BaseApplication(object):
"""
An application interface for configuring and loading
the various necessities for any given web framework.
"""
def __init__(self, usage=None, prog=None):
self.usage = usage
self.cfg = None
self.callable = None
self.prog = prog
self.logger = None
self.do_load_config()

def do_load_config(self):
"""
Loads the configuration
"""
try:
self.load_default_config()
self.load_config()
except Exception as e:
print("\nError: %s" % str(e), file=sys.stderr)
sys.stderr.flush()
sys.exit(1)

def load_default_config(self):
# init configuration
self.cfg = Config(self.usage, prog=self.prog)

def init(self, parser, opts, args):
raise NotImplementedError

def load(self):
raise NotImplementedError

def load_config(self):
"""
Load the configuration from one or more inputs
(e.g. a custom command line or a configuration file).
You have to override this method in your subclass.
"""
raise NotImplementedError

def reload(self):
self.do_load_config()
if self.cfg.spew:
debug.spew()

def wsgi(self):
if self.callable is None:
self.callable = self.load()
return self.callable

def run(self):
try:
Arbiter(self).run()
except RuntimeError as e:
print("\nError: %s\n" % e, file=sys.stderr)
sys.stderr.flush()
sys.exit(1)

class Application(BaseApplication):

def get_config_from_filename(self, filename):

if not os.path.exists(filename):
raise RuntimeError("%r doesn't exist" % filename)

cfg = {
"__builtins__": __builtins__,
"__name__": "__config__",
"__file__": filename,
"__doc__": None,
"__package__": None
}
try:
execfile_(filename, cfg, cfg)
except Exception:
print("Failed to read config file: %s" % filename, file=sys.stderr)
traceback.print_exc()
sys.stderr.flush()
sys.exit(1)

return cfg

def get_config_from_module_name(self, module_name):
return util.import_module(module_name).__dict__

def load_config_from_module_name_or_filename(self, location):
"""
Load the configuration from a Python module ("python:<name>") or from a file path.
Raises a RuntimeError if the file does not exist and stops the process if the
configuration file contains a syntax error.
"""

if location.startswith("python:"):
module_name = location[len("python:"):]
cfg = self.get_config_from_module_name(module_name)
else:
if location.startswith("file:"):
filename = location[len("file:"):]
else:
filename = location
cfg = self.get_config_from_filename(filename)

for k, v in cfg.items():
# Ignore unknown names
if k not in self.cfg.settings:
continue
try:
self.cfg.set(k.lower(), v)
except:
print("Invalid value for %s: %s\n" % (k, v), file=sys.stderr)
sys.stderr.flush()
raise

return cfg

def load_config_from_file(self, filename):
return self.load_config_from_module_name_or_filename(location=filename)

def load_config(self):
# parse console args
parser = self.cfg.parser()
args = parser.parse_args()

# optional settings from apps
cfg = self.init(parser, args, args.args)

# Load up the any app specific configuration
if cfg and cfg is not None:
for k, v in cfg.items():
self.cfg.set(k.lower(), v)

if args.config:
self.load_config_from_file(args.config)
else:
default_config = get_default_config_file()
if default_config is not None:
self.load_config_from_file(default_config)

# Lastly, update the configuration with any command line
# settings.
for k, v in args.__dict__.items():
if v is None:
continue
if k == "args":
continue
self.cfg.set(k.lower(), v)

def run(self):
if self.cfg.check_config:
try:
self.load()
except:
msg = "\nError while loading the application:\n"
print(msg, file=sys.stderr)
traceback.print_exc()
sys.stderr.flush()
sys.exit(1)
sys.exit(0)

if self.cfg.spew:
debug.spew()

if self.cfg.daemon:
util.daemonize(self.cfg.enable_stdio_inheritance)

# set python paths
if self.cfg.pythonpath and self.cfg.pythonpath is not None:
paths = self.cfg.pythonpath.split(",")
for path in paths:
pythonpath = os.path.abspath(path)
if pythonpath not in sys.path:
sys.path.insert(0, pythonpath)

super(Application, self).run()
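BaseApplication and Application are the hooks for embedding gunicorn in another Python program: a subclass only has to supply load_config() and load(). The following is a minimal sketch of that pattern (StandaloneApplication and the inline hello() app are illustrative names, not part of gunicorn):

# sketch: embed gunicorn by subclassing BaseApplication (illustrative names).
from gunicorn.app.base import BaseApplication

def hello(environ, start_response):
    data = b"Hello, world!\n"
    start_response("200 OK", [("Content-Type", "text/plain"),
                              ("Content-Length", str(len(data)))])
    return [data]

class StandaloneApplication(BaseApplication):
    def __init__(self, app, options=None):
        self.options = options or {}
        self.application = app
        super(StandaloneApplication, self).__init__()

    def load_config(self):
        # copy only the options gunicorn actually knows into self.cfg
        for key, value in self.options.items():
            if key in self.cfg.settings and value is not None:
                self.cfg.set(key.lower(), value)

    def load(self):
        return self.application

if __name__ == "__main__":
    StandaloneApplication(hello, {"bind": "127.0.0.1:8080", "workers": 2}).run()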

+ 120
- 0
thesisenv/lib/python3.6/site-packages/gunicorn/app/django_wsgi.py View File

# -*- coding: utf-8 -
#
# This file is part of gunicorn released under the MIT license.
# See the NOTICE for more information.

""" module used to build the django wsgi application """
from __future__ import print_function

import os
import re
import sys
import time
try:
from StringIO import StringIO
except:
from io import StringIO
from imp import reload


from django.conf import settings
from django.core.management.validation import get_validation_errors
from django.utils import translation

try:
from django.core.servers.basehttp import get_internal_wsgi_application
django14 = True
except ImportError:
from django.core.handlers.wsgi import WSGIHandler
django14 = False

from gunicorn import util


def make_wsgi_application():
# validate models
s = StringIO()
if get_validation_errors(s):
s.seek(0)
error = s.read()
msg = "One or more models did not validate:\n%s" % error
print(msg, file=sys.stderr)
sys.stderr.flush()
sys.exit(1)

translation.activate(settings.LANGUAGE_CODE)
if django14:
return get_internal_wsgi_application()
return WSGIHandler()


def reload_django_settings():
mod = util.import_module(os.environ['DJANGO_SETTINGS_MODULE'])

# Reload module.
reload(mod)

# Reload settings.
# Use code from django.settings.Settings module.

# Settings that should be converted into tuples if they're mistakenly entered
# as strings.
tuple_settings = ("INSTALLED_APPS", "TEMPLATE_DIRS")

for setting in dir(mod):
if setting == setting.upper():
setting_value = getattr(mod, setting)
if setting in tuple_settings and type(setting_value) == str:
setting_value = (setting_value,) # In case the user forgot the comma.
setattr(settings, setting, setting_value)

# Expand entries in INSTALLED_APPS like "django.contrib.*" to a list
# of all those apps.
new_installed_apps = []
for app in settings.INSTALLED_APPS:
if app.endswith('.*'):
app_mod = util.import_module(app[:-2])
appdir = os.path.dirname(app_mod.__file__)
app_subdirs = os.listdir(appdir)
name_pattern = re.compile(r'[a-zA-Z]\w*')
for d in sorted(app_subdirs):
if (name_pattern.match(d) and
os.path.isdir(os.path.join(appdir, d))):
new_installed_apps.append('%s.%s' % (app[:-2], d))
else:
new_installed_apps.append(app)
setattr(settings, "INSTALLED_APPS", new_installed_apps)

if hasattr(time, 'tzset') and settings.TIME_ZONE:
# When we can, attempt to validate the timezone. If we can't find
# this file, no check happens and it's harmless.
zoneinfo_root = '/usr/share/zoneinfo'
if (os.path.exists(zoneinfo_root) and not
os.path.exists(os.path.join(zoneinfo_root,
*(settings.TIME_ZONE.split('/'))))):
raise ValueError("Incorrect timezone setting: %s" %
settings.TIME_ZONE)
# Move the time zone info into os.environ. See ticket #2315 for why
# we don't do this unconditionally (breaks Windows).
os.environ['TZ'] = settings.TIME_ZONE
time.tzset()

# Settings are configured, so we can set up the logger if required
if getattr(settings, 'LOGGING_CONFIG', False):
# First find the logging configuration function ...
logging_config_path, logging_config_func_name = settings.LOGGING_CONFIG.rsplit('.', 1)
logging_config_module = util.import_module(logging_config_path)
logging_config_func = getattr(logging_config_module, logging_config_func_name)

# ... then invoke it with the logging settings
logging_config_func(settings.LOGGING)


def make_command_wsgi_application(admin_mediapath):
reload_django_settings()

try:
from django.core.servers.basehttp import AdminMediaHandler
return AdminMediaHandler(make_wsgi_application(), admin_mediapath)
except ImportError:
return make_wsgi_application()
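make_wsgi_application() only works once DJANGO_SETTINGS_MODULE resolves, which make_default_env() in djangoapp.py normally arranges. As a rough sketch of what the deprecated code path boils down to (mysite.settings is a hypothetical settings module, and this shim targets the old Django releases it was written for, so it will not import against a current Django):

# sketch: manual use of the deprecated django_wsgi shim (hypothetical settings module;
# requires the old Django versions this module was written against).
import os

os.environ.setdefault("DJANGO_SETTINGS_MODULE", "mysite.settings")

from gunicorn.app.django_wsgi import make_wsgi_application

application = make_wsgi_application()  # validates models, then returns the WSGI handler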

+ 160
- 0
thesisenv/lib/python3.6/site-packages/gunicorn/app/djangoapp.py View File

# -*- coding: utf-8 -
#
# This file is part of gunicorn released under the MIT license.
# See the NOTICE for more information.

import os
import sys

from gunicorn.app.base import Application
from gunicorn import util


def is_setting_mod(path):
return (os.path.isfile(os.path.join(path, "settings.py")) or
os.path.isfile(os.path.join(path, "settings.pyc")))


def find_settings_module(path):
path = os.path.abspath(path)
project_path = None
settings_name = "settings"

if os.path.isdir(path):
project_path = None
if not is_setting_mod(path):
for d in os.listdir(path):
if d in ('..', '.'):
continue

root = os.path.join(path, d)
if is_setting_mod(root):
project_path = root
break
else:
project_path = path
elif os.path.isfile(path):
project_path = os.path.dirname(path)
settings_name, _ = os.path.splitext(os.path.basename(path))

return project_path, settings_name


def make_default_env(cfg):
if cfg.django_settings:
os.environ['DJANGO_SETTINGS_MODULE'] = cfg.django_settings

if cfg.pythonpath and cfg.pythonpath is not None:
paths = cfg.pythonpath.split(",")
for path in paths:
pythonpath = os.path.abspath(path)
if pythonpath not in sys.path:
sys.path.insert(0, pythonpath)

try:
os.environ['DJANGO_SETTINGS_MODULE']
except KeyError:
# no settings env var set, try to build one.
cwd = util.getcwd()
project_path, settings_name = find_settings_module(cwd)

if not project_path:
raise RuntimeError("django project not found")

pythonpath, project_name = os.path.split(project_path)
os.environ['DJANGO_SETTINGS_MODULE'] = "%s.%s" % (project_name,
settings_name)
if pythonpath not in sys.path:
sys.path.insert(0, pythonpath)

if project_path not in sys.path:
sys.path.insert(0, project_path)


class DjangoApplication(Application):

def init(self, parser, opts, args):
if args:
if ("." in args[0] and not (os.path.isfile(args[0])
or os.path.isdir(args[0]))):
self.cfg.set("django_settings", args[0])
else:
# no settings module given, try to build one from the path.
project_path, settings_name = find_settings_module(
os.path.abspath(args[0]))
if project_path not in sys.path:
sys.path.insert(0, project_path)

if not project_path:
raise RuntimeError("django project not found")

pythonpath, project_name = os.path.split(project_path)
self.cfg.set("django_settings", "%s.%s" % (project_name,
settings_name))
self.cfg.set("pythonpath", pythonpath)

def load(self):
# chdir to the configured path before loading,
# default is the current dir
os.chdir(self.cfg.chdir)

# set settings
make_default_env(self.cfg)

# load wsgi application and return it.
mod = util.import_module("gunicorn.app.django_wsgi")
return mod.make_wsgi_application()


class DjangoApplicationCommand(Application):

def __init__(self, options, admin_media_path):
self.usage = None
self.prog = None
self.cfg = None
self.config_file = options.get("config") or ""
self.options = options
self.admin_media_path = admin_media_path
self.callable = None
self.project_path = None
self.do_load_config()

def init(self, *args):
if 'settings' in self.options:
self.options['django_settings'] = self.options.pop('settings')

cfg = {}
for k, v in self.options.items():
if k.lower() in self.cfg.settings and v is not None:
cfg[k.lower()] = v
return cfg

def load(self):
# chdir to the configured path before loading,
# default is the current dir
os.chdir(self.cfg.chdir)

# set settings
make_default_env(self.cfg)

# load wsgi application and return it.
mod = util.import_module("gunicorn.app.django_wsgi")
return mod.make_command_wsgi_application(self.admin_media_path)


def run():
"""\
The ``gunicorn_django`` command line runner for launching Django
applications.
"""
util.warn("""This command is deprecated.

You should now run your application with the WSGI interface
installed with your project. Ex.:

gunicorn myproject.wsgi:application

See https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/gunicorn/
for more info.""")
from gunicorn.app.djangoapp import DjangoApplication
DjangoApplication("%(prog)s [OPTIONS] [SETTINGS_PATH]").run()
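find_settings_module() is the heuristic that lets the deprecated gunicorn_django command locate a project's settings.py. A small self-contained sketch of its behaviour (the temporary mysite layout is made up for the example):

# sketch: how find_settings_module() resolves a project directory (layout is illustrative).
import os
import tempfile

from gunicorn.app.djangoapp import find_settings_module

project = tempfile.mkdtemp()
inner = os.path.join(project, "mysite")
os.mkdir(inner)
open(os.path.join(inner, "settings.py"), "w").close()

# expected: ('<tmpdir>/mysite', 'settings')
print(find_settings_module(project))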

+ 210
- 0
thesisenv/lib/python3.6/site-packages/gunicorn/app/pasterapp.py View File

# -*- coding: utf-8 -
#
# This file is part of gunicorn released under the MIT license.
# See the NOTICE for more information.
from __future__ import print_function

import os
import pkg_resources
import sys

try:
import configparser as ConfigParser
except ImportError:
import ConfigParser

from paste.deploy import loadapp, loadwsgi
SERVER = loadwsgi.SERVER

from gunicorn.app.base import Application
from gunicorn.config import Config, get_default_config_file
from gunicorn import util


def _has_logging_config(paste_file):
cfg_parser = ConfigParser.ConfigParser()
cfg_parser.read([paste_file])
return cfg_parser.has_section('loggers')


def paste_config(gconfig, config_url, relative_to, global_conf=None):
# add entry to pkg_resources
sys.path.insert(0, relative_to)
pkg_resources.working_set.add_entry(relative_to)

config_url = config_url.split('#')[0]
cx = loadwsgi.loadcontext(SERVER, config_url, relative_to=relative_to,
global_conf=global_conf)
gc, lc = cx.global_conf.copy(), cx.local_conf.copy()
cfg = {}

host, port = lc.pop('host', ''), lc.pop('port', '')
if host and port:
cfg['bind'] = '%s:%s' % (host, port)
elif host:
cfg['bind'] = host.split(',')

cfg['default_proc_name'] = gc.get('__file__')

# init logging configuration
config_file = config_url.split(':')[1]
if _has_logging_config(config_file):
cfg.setdefault('logconfig', config_file)

for k, v in gc.items():
if k not in gconfig.settings:
continue
cfg[k] = v

for k, v in lc.items():
if k not in gconfig.settings:
continue
cfg[k] = v

return cfg


def load_pasteapp(config_url, relative_to, global_conf=None):
return loadapp(config_url, relative_to=relative_to,
global_conf=global_conf)

class PasterBaseApplication(Application):
gcfg = None

def app_config(self):
return paste_config(self.cfg, self.cfgurl, self.relpath,
global_conf=self.gcfg)

def load_config(self):
super(PasterBaseApplication, self).load_config()

# reload logging conf
if hasattr(self, "cfgfname"):
parser = ConfigParser.ConfigParser()
parser.read([self.cfgfname])
if parser.has_section('loggers'):
from logging.config import fileConfig
config_file = os.path.abspath(self.cfgfname)
fileConfig(config_file, dict(__file__=config_file,
here=os.path.dirname(config_file)))


class PasterApplication(PasterBaseApplication):

def init(self, parser, opts, args):
if len(args) != 1:
parser.error("No application name specified.")

cwd = util.getcwd()
cfgfname = os.path.normpath(os.path.join(cwd, args[0]))
cfgfname = os.path.abspath(cfgfname)
if not os.path.exists(cfgfname):
parser.error("Config file not found: %s" % cfgfname)

self.cfgurl = 'config:%s' % cfgfname
self.relpath = os.path.dirname(cfgfname)
self.cfgfname = cfgfname

sys.path.insert(0, self.relpath)
pkg_resources.working_set.add_entry(self.relpath)

return self.app_config()

def load(self):
# chdir to the configured path before loading,
# default is the current dir
os.chdir(self.cfg.chdir)

return load_pasteapp(self.cfgurl, self.relpath, global_conf=self.gcfg)


class PasterServerApplication(PasterBaseApplication):

def __init__(self, app, gcfg=None, host="127.0.0.1", port=None, *args, **kwargs):
self.cfg = Config()
self.gcfg = gcfg # need to hold this for app_config
self.app = app
self.callable = None

gcfg = gcfg or {}
cfgfname = gcfg.get("__file__")
if cfgfname is not None:
self.cfgurl = 'config:%s' % cfgfname
self.relpath = os.path.dirname(cfgfname)
self.cfgfname = cfgfname

cfg = kwargs.copy()

if port and not host.startswith("unix:"):
bind = "%s:%s" % (host, port)
else:
bind = host
cfg["bind"] = bind.split(',')

if gcfg:
for k, v in gcfg.items():
cfg[k] = v
cfg["default_proc_name"] = cfg['__file__']

try:
for k, v in cfg.items():
if k.lower() in self.cfg.settings and v is not None:
self.cfg.set(k.lower(), v)
except Exception as e:
print("\nConfig error: %s" % str(e), file=sys.stderr)
sys.stderr.flush()
sys.exit(1)

if cfg.get("config"):
self.load_config_from_file(cfg["config"])
else:
default_config = get_default_config_file()
if default_config is not None:
self.load_config_from_file(default_config)

def load(self):
# chdir to the configured path before loading,
# default is the current dir
os.chdir(self.cfg.chdir)

return self.app


def run():
"""\
The ``gunicorn_paster`` command for launching Paster compatible
applications like Pylons or Turbogears2
"""
util.warn("""This command is deprecated.

You should now use the `--paste` option. Ex.:

gunicorn --paste development.ini
""")

from gunicorn.app.pasterapp import PasterApplication
PasterApplication("%(prog)s [OPTIONS] pasteconfig.ini").run()


def paste_server(app, gcfg=None, host="127.0.0.1", port=None, *args, **kwargs):
"""\
A paster server.

The entry point in your paster ini file should look like this:

[server:main]
use = egg:gunicorn#main
host = 127.0.0.1
port = 5000

"""

util.warn("""This command is deprecated.

You should now use the `--paste` option. Ex.:

gunicorn --paste development.ini
""")

from gunicorn.app.pasterapp import PasterServerApplication
PasterServerApplication(app, gcfg=gcfg, host=host, port=port, *args, **kwargs).run()

+ 78
- 0
thesisenv/lib/python3.6/site-packages/gunicorn/app/wsgiapp.py View File

# -*- coding: utf-8 -
#
# This file is part of gunicorn released under the MIT license.
# See the NOTICE for more information.

import os
import sys

from gunicorn.errors import ConfigError
from gunicorn.app.base import Application
from gunicorn import util


class WSGIApplication(Application):
def init(self, parser, opts, args):
if opts.paste and opts.paste is not None:
app_name = 'main'
path = opts.paste
if '#' in path:
path, app_name = path.split('#')
path = os.path.abspath(os.path.normpath(
os.path.join(util.getcwd(), path)))

if not os.path.exists(path):
raise ConfigError("%r not found" % path)

# paste application, load the config
self.cfgurl = 'config:%s#%s' % (path, app_name)
self.relpath = os.path.dirname(path)

from .pasterapp import paste_config
return paste_config(self.cfg, self.cfgurl, self.relpath)

if len(args) < 1:
parser.error("No application module specified.")

self.cfg.set("default_proc_name", args[0])
self.app_uri = args[0]

def chdir(self):
# chdir to the configured path before loading,
# default is the current dir
os.chdir(self.cfg.chdir)

# add the path to sys.path
sys.path.insert(0, self.cfg.chdir)

def load_wsgiapp(self):
self.chdir()

# load the app
return util.import_app(self.app_uri)

def load_pasteapp(self):
self.chdir()

# load the paste app
from .pasterapp import load_pasteapp
return load_pasteapp(self.cfgurl, self.relpath, global_conf=None)

def load(self):
if self.cfg.paste is not None:
return self.load_pasteapp()
else:
return self.load_wsgiapp()


def run():
"""\
The ``gunicorn`` command line runner for launching Gunicorn with
generic WSGI applications.
"""
from gunicorn.app.wsgiapp import WSGIApplication
WSGIApplication("%(prog)s [OPTIONS] [APP_MODULE]").run()


if __name__ == '__main__':
run()
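WSGIApplication.load_wsgiapp() resolves the positional APP_MODULE argument through util.import_app(), so any "module:callable" string reachable on sys.path works. A small sketch of that resolution step in isolation (the wsgi_hello module is written on the fly just for the example):

# sketch: resolve a "module:callable" app URI the same way load_wsgiapp() does.
import os
import sys
import tempfile

from gunicorn import util

tmpdir = tempfile.mkdtemp()
with open(os.path.join(tmpdir, "wsgi_hello.py"), "w") as f:
    f.write(
        "def app(environ, start_response):\n"
        "    start_response('200 OK', [('Content-Type', 'text/plain')])\n"
        "    return [b'ok']\n"
    )

sys.path.insert(0, tmpdir)
application = util.import_app("wsgi_hello:app")
print(application)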

+ 620
- 0
thesisenv/lib/python3.6/site-packages/gunicorn/arbiter.py View File

# -*- coding: utf-8 -
#
# This file is part of gunicorn released under the MIT license.
# See the NOTICE for more information.
from __future__ import print_function

import errno
import os
import random
import select
import signal
import sys
import time
import traceback

from gunicorn.errors import HaltServer, AppImportError
from gunicorn.pidfile import Pidfile
from gunicorn.sock import create_sockets
from gunicorn import util

from gunicorn import __version__, SERVER_SOFTWARE


class Arbiter(object):
"""
The Arbiter keeps the worker processes alive. It launches or
kills them as needed. It also manages application reloading
via SIGHUP/USR2.
"""

# A flag indicating if a worker failed to
# boot. If a worker process exits with
# this error code, the arbiter will terminate.
WORKER_BOOT_ERROR = 3

# A flag indicating if an application failed to be loaded
APP_LOAD_ERROR = 4

START_CTX = {}

LISTENERS = []
WORKERS = {}
PIPE = []

# I love dynamic languages
SIG_QUEUE = []
SIGNALS = [getattr(signal, "SIG%s" % x)
for x in "HUP QUIT INT TERM TTIN TTOU USR1 USR2 WINCH".split()]
SIG_NAMES = dict(
(getattr(signal, name), name[3:].lower()) for name in dir(signal)
if name[:3] == "SIG" and name[3] != "_"
)

def __init__(self, app):
os.environ["SERVER_SOFTWARE"] = SERVER_SOFTWARE

self._num_workers = None
self._last_logged_active_worker_count = None
self.log = None

self.setup(app)

self.pidfile = None
self.worker_age = 0
self.reexec_pid = 0
self.master_pid = 0
self.master_name = "Master"

cwd = util.getcwd()

args = sys.argv[:]
args.insert(0, sys.executable)

# init start context
self.START_CTX = {
"args": args,
"cwd": cwd,
0: sys.executable
}

def _get_num_workers(self):
return self._num_workers

def _set_num_workers(self, value):
old_value = self._num_workers
self._num_workers = value
self.cfg.nworkers_changed(self, value, old_value)
num_workers = property(_get_num_workers, _set_num_workers)

def setup(self, app):
self.app = app
self.cfg = app.cfg

if self.log is None:
self.log = self.cfg.logger_class(app.cfg)

# reopen files
if 'GUNICORN_FD' in os.environ:
self.log.reopen_files()

self.worker_class = self.cfg.worker_class
self.address = self.cfg.address
self.num_workers = self.cfg.workers
self.timeout = self.cfg.timeout
self.proc_name = self.cfg.proc_name

self.log.debug('Current configuration:\n{0}'.format(
'\n'.join(
' {0}: {1}'.format(config, value.value)
for config, value
in sorted(self.cfg.settings.items(),
key=lambda setting: setting[1]))))

# set environment variables
if self.cfg.env:
for k, v in self.cfg.env.items():
os.environ[k] = v

if self.cfg.preload_app:
self.app.wsgi()

def start(self):
"""\
Initialize the arbiter. Start listening and set pidfile if needed.
"""
self.log.info("Starting gunicorn %s", __version__)

if 'GUNICORN_PID' in os.environ:
self.master_pid = int(os.environ.get('GUNICORN_PID'))
self.proc_name = self.proc_name + ".2"
self.master_name = "Master.2"

self.pid = os.getpid()
if self.cfg.pidfile is not None:
pidname = self.cfg.pidfile
if self.master_pid != 0:
pidname += ".2"
self.pidfile = Pidfile(pidname)
self.pidfile.create(self.pid)
self.cfg.on_starting(self)

self.init_signals()
if not self.LISTENERS:
self.LISTENERS = create_sockets(self.cfg, self.log)

listeners_str = ",".join([str(l) for l in self.LISTENERS])
self.log.debug("Arbiter booted")
self.log.info("Listening at: %s (%s)", listeners_str, self.pid)
self.log.info("Using worker: %s", self.cfg.worker_class_str)

# check worker class requirements
if hasattr(self.worker_class, "check_config"):
self.worker_class.check_config(self.cfg, self.log)

self.cfg.when_ready(self)

def init_signals(self):
"""\
Initialize master signal handling. Most of the signals
are queued. Child signals only wake up the master.
"""
# close old PIPE
if self.PIPE:
[os.close(p) for p in self.PIPE]

# initialize the pipe
self.PIPE = pair = os.pipe()
for p in pair:
util.set_non_blocking(p)
util.close_on_exec(p)

self.log.close_on_exec()

# initialize all signals
[signal.signal(s, self.signal) for s in self.SIGNALS]
signal.signal(signal.SIGCHLD, self.handle_chld)

def signal(self, sig, frame):
if len(self.SIG_QUEUE) < 5:
self.SIG_QUEUE.append(sig)
self.wakeup()

def run(self):
"Main master loop."
self.start()
util._setproctitle("master [%s]" % self.proc_name)

try:
self.manage_workers()

while True:
self.maybe_promote_master()

sig = self.SIG_QUEUE.pop(0) if len(self.SIG_QUEUE) else None
if sig is None:
self.sleep()
self.murder_workers()
self.manage_workers()
continue

if sig not in self.SIG_NAMES:
self.log.info("Ignoring unknown signal: %s", sig)
continue

signame = self.SIG_NAMES.get(sig)
handler = getattr(self, "handle_%s" % signame, None)
if not handler:
self.log.error("Unhandled signal: %s", signame)
continue
self.log.info("Handling signal: %s", signame)
handler()
self.wakeup()
except StopIteration:
self.halt()
except KeyboardInterrupt:
self.halt()
except HaltServer as inst:
self.halt(reason=inst.reason, exit_status=inst.exit_status)
except SystemExit:
raise
except Exception:
self.log.info("Unhandled exception in main loop",
exc_info=True)
self.stop(False)
if self.pidfile is not None:
self.pidfile.unlink()
sys.exit(-1)

def handle_chld(self, sig, frame):
"SIGCHLD handling"
self.reap_workers()
self.wakeup()

def handle_hup(self):
"""\
HUP handling.
- Reload configuration
- Start the new worker processes with a new configuration
- Gracefully shutdown the old worker processes
"""
self.log.info("Hang up: %s", self.master_name)
self.reload()

def handle_term(self):
"SIGTERM handling"
raise StopIteration

def handle_int(self):
"SIGINT handling"
self.stop(False)
raise StopIteration

def handle_quit(self):
"SIGQUIT handling"
self.stop(False)
raise StopIteration

def handle_ttin(self):
"""\
SIGTTIN handling.
Increases the number of workers by one.
"""
self.num_workers += 1
self.manage_workers()

def handle_ttou(self):
"""\
SIGTTOU handling.
Decreases the number of workers by one.
"""
if self.num_workers <= 1:
return
self.num_workers -= 1
self.manage_workers()

def handle_usr1(self):
"""\
SIGUSR1 handling.
Kill all workers by sending them a SIGUSR1
"""
self.log.reopen_files()
self.kill_workers(signal.SIGUSR1)

def handle_usr2(self):
"""\
SIGUSR2 handling.
Creates a new master/worker set as a slave of the current
master without affecting old workers. Use this to do live
deployment with the ability to back out a change.
"""
self.reexec()

def handle_winch(self):
"SIGWINCH handling"
if self.cfg.daemon:
self.log.info("graceful stop of workers")
self.num_workers = 0
self.kill_workers(signal.SIGTERM)
else:
self.log.debug("SIGWINCH ignored. Not daemonized")

def maybe_promote_master(self):
if self.master_pid == 0:
return

if self.master_pid != os.getppid():
self.log.info("Master has been promoted.")
# reset master infos
self.master_name = "Master"
self.master_pid = 0
self.proc_name = self.cfg.proc_name
del os.environ['GUNICORN_PID']
# rename the pidfile
if self.pidfile is not None:
self.pidfile.rename(self.cfg.pidfile)
# reset proctitle
util._setproctitle("master [%s]" % self.proc_name)

def wakeup(self):
"""\
Wake up the arbiter by writing to the PIPE
"""
try:
os.write(self.PIPE[1], b'.')
except IOError as e:
if e.errno not in [errno.EAGAIN, errno.EINTR]:
raise

def halt(self, reason=None, exit_status=0):
""" halt arbiter """
self.stop()
self.log.info("Shutting down: %s", self.master_name)
if reason is not None:
self.log.info("Reason: %s", reason)
if self.pidfile is not None:
self.pidfile.unlink()
self.cfg.on_exit(self)
sys.exit(exit_status)

def sleep(self):
"""\
Sleep until PIPE is readable or we timeout.
A readable PIPE means a signal occurred.
"""
try:
ready = select.select([self.PIPE[0]], [], [], 1.0)
if not ready[0]:
return
while os.read(self.PIPE[0], 1):
pass
except select.error as e:
if e.args[0] not in [errno.EAGAIN, errno.EINTR]:
raise
except OSError as e:
if e.errno not in [errno.EAGAIN, errno.EINTR]:
raise
except KeyboardInterrupt:
sys.exit()

def stop(self, graceful=True):
"""\
Stop workers

:attr graceful: boolean, if True (the default) workers will be
killed gracefully (i.e. trying to wait for the current connection to finish)
"""

if self.reexec_pid == 0 and self.master_pid == 0:
for l in self.LISTENERS:
l.close()

self.LISTENERS = []
sig = signal.SIGTERM
if not graceful:
sig = signal.SIGQUIT
limit = time.time() + self.cfg.graceful_timeout
# instruct the workers to exit
self.kill_workers(sig)
# wait until the graceful timeout
while self.WORKERS and time.time() < limit:
time.sleep(0.1)

self.kill_workers(signal.SIGKILL)

def reexec(self):
"""\
Relaunch the master and workers.
"""
if self.reexec_pid != 0:
self.log.warning("USR2 signal ignored. Child exists.")
return

if self.master_pid != 0:
self.log.warning("USR2 signal ignored. Parent exists")
return

master_pid = os.getpid()
self.reexec_pid = os.fork()
if self.reexec_pid != 0:
return

self.cfg.pre_exec(self)

environ = self.cfg.env_orig.copy()
fds = [l.fileno() for l in self.LISTENERS]
environ['GUNICORN_FD'] = ",".join([str(fd) for fd in fds])
environ['GUNICORN_PID'] = str(master_pid)

os.chdir(self.START_CTX['cwd'])

# exec the process using the original environment
os.execvpe(self.START_CTX[0], self.START_CTX['args'], environ)

def reload(self):
old_address = self.cfg.address

# reset the old environment
for k in self.cfg.env:
if k in self.cfg.env_orig:
# reset the key to the value it had before
# we launched gunicorn
os.environ[k] = self.cfg.env_orig[k]
else:
# delete the value set by gunicorn
try:
del os.environ[k]
except KeyError:
pass

# reload conf
self.app.reload()
self.setup(self.app)

# reopen log files
self.log.reopen_files()

# do we need to change the listeners?
if old_address != self.cfg.address:
# close all listeners
[l.close() for l in self.LISTENERS]
# init new listeners
self.LISTENERS = create_sockets(self.cfg, self.log)
listeners_str = ",".join([str(l) for l in self.LISTENERS])
self.log.info("Listening at: %s", listeners_str)

# do some actions on reload
self.cfg.on_reload(self)

# unlink pidfile
if self.pidfile is not None:
self.pidfile.unlink()

# create new pidfile
if self.cfg.pidfile is not None:
self.pidfile = Pidfile(self.cfg.pidfile)
self.pidfile.create(self.pid)

# set new proc_name
util._setproctitle("master [%s]" % self.proc_name)

# spawn new workers
for i in range(self.cfg.workers):
self.spawn_worker()

# manage workers
self.manage_workers()

def murder_workers(self):
"""\
Kill unused/idle workers
"""
if not self.timeout:
return
workers = list(self.WORKERS.items())
for (pid, worker) in workers:
try:
if time.time() - worker.tmp.last_update() <= self.timeout:
continue
except (OSError, ValueError):
continue

if not worker.aborted:
self.log.critical("WORKER TIMEOUT (pid:%s)", pid)
worker.aborted = True
self.kill_worker(pid, signal.SIGABRT)
else:
self.kill_worker(pid, signal.SIGKILL)

def reap_workers(self):
"""\
Reap workers to avoid zombie processes
"""
try:
while True:
wpid, status = os.waitpid(-1, os.WNOHANG)
if not wpid:
break
if self.reexec_pid == wpid:
self.reexec_pid = 0
else:
# A worker said it cannot boot. We'll shutdown
# to avoid infinite start/stop cycles.
exitcode = status >> 8
if exitcode == self.WORKER_BOOT_ERROR:
reason = "Worker failed to boot."
raise HaltServer(reason, self.WORKER_BOOT_ERROR)
if exitcode == self.APP_LOAD_ERROR:
reason = "App failed to load."
raise HaltServer(reason, self.APP_LOAD_ERROR)
worker = self.WORKERS.pop(wpid, None)
if not worker:
continue
worker.tmp.close()
except OSError as e:
if e.errno != errno.ECHILD:
raise

def manage_workers(self):
"""\
Maintain the number of workers by spawning or killing
as required.
"""
if len(self.WORKERS.keys()) < self.num_workers:
self.spawn_workers()

workers = self.WORKERS.items()
workers = sorted(workers, key=lambda w: w[1].age)
while len(workers) > self.num_workers:
(pid, _) = workers.pop(0)
self.kill_worker(pid, signal.SIGTERM)

active_worker_count = len(workers)
if self._last_logged_active_worker_count != active_worker_count:
self._last_logged_active_worker_count = active_worker_count
self.log.debug("{0} workers".format(active_worker_count),
extra={"metric": "gunicorn.workers",
"value": active_worker_count,
"mtype": "gauge"})

def spawn_worker(self):
self.worker_age += 1
worker = self.worker_class(self.worker_age, self.pid, self.LISTENERS,
self.app, self.timeout / 2.0,
self.cfg, self.log)
self.cfg.pre_fork(self, worker)
pid = os.fork()
if pid != 0:
self.WORKERS[pid] = worker
return pid

# Process Child
worker_pid = os.getpid()
try:
util._setproctitle("worker [%s]" % self.proc_name)
self.log.info("Booting worker with pid: %s", worker_pid)
self.cfg.post_fork(self, worker)
worker.init_process()
sys.exit(0)
except SystemExit:
raise
except AppImportError as e:
self.log.debug("Exception while loading the application",
exc_info=True)
print("%s" % e, file=sys.stderr)
sys.stderr.flush()
sys.exit(self.APP_LOAD_ERROR)
except:
self.log.exception("Exception in worker process"),
if not worker.booted:
sys.exit(self.WORKER_BOOT_ERROR)
sys.exit(-1)
finally:
self.log.info("Worker exiting (pid: %s)", worker_pid)
try:
worker.tmp.close()
self.cfg.worker_exit(self, worker)
except:
self.log.warning("Exception during worker exit:\n%s",
traceback.format_exc())

def spawn_workers(self):
"""\
Spawn new workers as needed.

This is where a worker process leaves the main loop
of the master process.
"""

for i in range(self.num_workers - len(self.WORKERS.keys())):
self.spawn_worker()
time.sleep(0.1 * random.random())

def kill_workers(self, sig):
"""\
Kill all workers with the signal `sig`
:attr sig: `signal.SIG*` value
"""
worker_pids = list(self.WORKERS.keys())
for pid in worker_pids:
self.kill_worker(pid, sig)

def kill_worker(self, pid, sig):
"""\
Kill a worker

:attr pid: int, worker pid
:attr sig: `signal.SIG*` value
"""
try:
os.kill(pid, sig)
except OSError as e:
if e.errno == errno.ESRCH:
try:
worker = self.WORKERS.pop(pid)
worker.tmp.close()
self.cfg.worker_exit(self, worker)
return
except (KeyError, OSError):
return
raise
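All of the arbiter's runtime behaviour above is driven by plain OS signals sent to the master process: SIGTTIN adds a worker, SIGTTOU removes one, SIGHUP reloads the configuration, SIGUSR2 re-execs a new master. A rough sketch of a tiny helper that resizes a running pool (the pidfile path is hypothetical and has to match the --pid option used at start-up):

# sketch: scale a running gunicorn master by signalling it (hypothetical pidfile path).
import os
import signal

PIDFILE = "/tmp/gunicorn.pid"

def resize(delta):
    with open(PIDFILE) as fh:
        master_pid = int(fh.read().strip())
    # handle_ttin() adds a worker, handle_ttou() removes one
    sig = signal.SIGTTIN if delta > 0 else signal.SIGTTOU
    for _ in range(abs(delta)):
        os.kill(master_pid, sig)

if __name__ == "__main__":
    resize(+1)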

+ 2362
- 0
thesisenv/lib/python3.6/site-packages/gunicorn/argparse_compat.py
File diff suppressed because it is too large
View File


+ 1749
- 0
thesisenv/lib/python3.6/site-packages/gunicorn/config.py
File diff suppressed because it is too large
View File


+ 70
- 0
thesisenv/lib/python3.6/site-packages/gunicorn/debug.py View File

# -*- coding: utf-8 -
#
# This file is part of gunicorn released under the MIT license.
# See the NOTICE for more information.

"""The debug module contains utilities and functions for better
debugging Gunicorn."""

import sys
import linecache
import re
import inspect

__all__ = ['spew', 'unspew']

_token_spliter = re.compile('\W+')


class Spew(object):
"""
"""
def __init__(self, trace_names=None, show_values=True):
self.trace_names = trace_names
self.show_values = show_values

def __call__(self, frame, event, arg):
if event == 'line':
lineno = frame.f_lineno
if '__file__' in frame.f_globals:
filename = frame.f_globals['__file__']
if (filename.endswith('.pyc') or
filename.endswith('.pyo')):
filename = filename[:-1]
name = frame.f_globals['__name__']
line = linecache.getline(filename, lineno)
else:
name = '[unknown]'
try:
src = inspect.getsourcelines(frame)
line = src[lineno]
except IOError:
line = 'Unknown code named [%s]. VM instruction #%d' % (
frame.f_code.co_name, frame.f_lasti)
if self.trace_names is None or name in self.trace_names:
print('%s:%s: %s' % (name, lineno, line.rstrip()))
if not self.show_values:
return self
details = []
tokens = _token_spliter.split(line)
for tok in tokens:
if tok in frame.f_globals:
details.append('%s=%r' % (tok, frame.f_globals[tok]))
if tok in frame.f_locals:
details.append('%s=%r' % (tok, frame.f_locals[tok]))
if details:
print("\t%s" % ' '.join(details))
return self


def spew(trace_names=None, show_values=False):
"""Install a trace hook which writes incredibly detailed logs
about what code is being executed to stdout.
"""
sys.settrace(Spew(trace_names, show_values))


def unspew():
"""Remove the trace hook installed by spew.
"""
sys.settrace(None)
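Because spew() just installs a sys.settrace hook, it can also be used ad hoc around a suspicious code path outside of gunicorn's --spew option. A minimal sketch (the add() function exists only to have something to trace):

# sketch: trace a small function with the spew() hook, then remove the hook again.
from gunicorn import debug

def add(a, b):
    total = a + b
    return total

debug.spew(show_values=True)  # print every executed line plus the names it touches
add(1, 2)
debug.unspew()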

+ 23
- 0
thesisenv/lib/python3.6/site-packages/gunicorn/errors.py View File

# -*- coding: utf-8 -
#
# This file is part of gunicorn released under the MIT license.
# See the NOTICE for more information.


# we inherit from BaseException here to make sure it is not caught
# at the application level
class HaltServer(BaseException):
def __init__(self, reason, exit_status=1):
self.reason = reason
self.exit_status = exit_status

def __str__(self):
return "<HaltServer %r %d>" % (self.reason, self.exit_status)


class ConfigError(Exception):
""" Exception raised on config error """


class AppImportError(Exception):
""" Exception raised when loading an application """

+ 452
- 0
thesisenv/lib/python3.6/site-packages/gunicorn/glogging.py View File

# -*- coding: utf-8 -
#
# This file is part of gunicorn released under the MIT license.
# See the NOTICE for more information.

import base64
import binascii
import time
import logging
logging.Logger.manager.emittedNoHandlerWarning = 1
from logging.config import fileConfig
import os
import socket
import sys
import threading
import traceback

from gunicorn import util
from gunicorn.six import PY3, string_types


# syslog facility codes
SYSLOG_FACILITIES = {
"auth": 4,
"authpriv": 10,
"cron": 9,
"daemon": 3,
"ftp": 11,
"kern": 0,
"lpr": 6,
"mail": 2,
"news": 7,
"security": 4, # DEPRECATED
"syslog": 5,
"user": 1,
"uucp": 8,
"local0": 16,
"local1": 17,
"local2": 18,
"local3": 19,
"local4": 20,
"local5": 21,
"local6": 22,
"local7": 23
}


CONFIG_DEFAULTS = dict(
version=1,
disable_existing_loggers=False,

loggers={
"root": {"level": "INFO", "handlers": ["console"]},
"gunicorn.error": {
"level": "INFO",
"handlers": ["error_console"],
"propagate": True,
"qualname": "gunicorn.error"
},

"gunicorn.access": {
"level": "INFO",
"handlers": ["console"],
"propagate": True,
"qualname": "gunicorn.access"
}
},
handlers={
"console": {
"class": "logging.StreamHandler",
"formatter": "generic",
"stream": "sys.stdout"
},
"error_console": {
"class": "logging.StreamHandler",
"formatter": "generic",
"stream": "sys.stderr"
},
},
formatters={
"generic": {
"format": "%(asctime)s [%(process)d] [%(levelname)s] %(message)s",
"datefmt": "[%Y-%m-%d %H:%M:%S %z]",
"class": "logging.Formatter"
}
}
)


def loggers():
""" get list of all loggers """
root = logging.root
existing = root.manager.loggerDict.keys()
return [logging.getLogger(name) for name in existing]


class SafeAtoms(dict):

def __init__(self, atoms):
dict.__init__(self)
for key, value in atoms.items():
if isinstance(value, string_types):
self[key] = value.replace('"', '\\"')
else:
self[key] = value

def __getitem__(self, k):
if k.startswith("{"):
kl = k.lower()
if kl in self:
return super(SafeAtoms, self).__getitem__(kl)
else:
return "-"
if k in self:
return super(SafeAtoms, self).__getitem__(k)
else:
return '-'


def parse_syslog_address(addr):

if addr.startswith("unix://"):
sock_type = socket.SOCK_STREAM

# are we using a different socket type?
parts = addr.split("#", 1)
if len(parts) == 2:
addr = parts[0]
if parts[1] == "dgram":
sock_type = socket.SOCK_DGRAM

return (sock_type, addr.split("unix://")[1])

if addr.startswith("udp://"):
addr = addr.split("udp://")[1]
socktype = socket.SOCK_DGRAM
elif addr.startswith("tcp://"):
addr = addr.split("tcp://")[1]
socktype = socket.SOCK_STREAM
else:
raise RuntimeError("invalid syslog address")

if '[' in addr and ']' in addr:
host = addr.split(']')[0][1:].lower()
elif ':' in addr:
host = addr.split(':')[0].lower()
elif addr == "":
host = "localhost"
else:
host = addr.lower()

addr = addr.split(']')[-1]
if ":" in addr:
port = addr.split(':', 1)[1]
if not port.isdigit():
raise RuntimeError("%r is not a valid port number." % port)
port = int(port)
else:
port = 514

return (socktype, (host, port))


class Logger(object):

LOG_LEVELS = {
"critical": logging.CRITICAL,
"error": logging.ERROR,
"warning": logging.WARNING,
"info": logging.INFO,
"debug": logging.DEBUG
}
loglevel = logging.INFO

error_fmt = r"%(asctime)s [%(process)d] [%(levelname)s] %(message)s"
datefmt = r"[%Y-%m-%d %H:%M:%S %z]"

access_fmt = "%(message)s"
syslog_fmt = "[%(process)d] %(message)s"

atoms_wrapper_class = SafeAtoms

def __init__(self, cfg):
self.error_log = logging.getLogger("gunicorn.error")
self.error_log.propagate = False
self.access_log = logging.getLogger("gunicorn.access")
self.access_log.propagate = False
self.error_handlers = []
self.access_handlers = []
self.logfile = None
self.lock = threading.Lock()
self.cfg = cfg
self.setup(cfg)

def setup(self, cfg):
self.loglevel = self.LOG_LEVELS.get(cfg.loglevel.lower(), logging.INFO)
self.error_log.setLevel(self.loglevel)
self.access_log.setLevel(logging.INFO)

# set gunicorn.error handler
if self.cfg.capture_output and cfg.errorlog != "-":
for stream in sys.stdout, sys.stderr:
stream.flush()

self.logfile = open(cfg.errorlog, 'a+')
os.dup2(self.logfile.fileno(), sys.stdout.fileno())
os.dup2(self.logfile.fileno(), sys.stderr.fileno())

self._set_handler(self.error_log, cfg.errorlog,
logging.Formatter(self.error_fmt, self.datefmt))

# set gunicorn.access handler
if cfg.accesslog is not None:
self._set_handler(self.access_log, cfg.accesslog,
fmt=logging.Formatter(self.access_fmt))

# set syslog handler
if cfg.syslog:
self._set_syslog_handler(
self.error_log, cfg, self.syslog_fmt, "error"
)
self._set_syslog_handler(
self.access_log, cfg, self.syslog_fmt, "access"
)

if cfg.logconfig:
if os.path.exists(cfg.logconfig):
defaults = CONFIG_DEFAULTS.copy()
defaults['__file__'] = cfg.logconfig
defaults['here'] = os.path.dirname(cfg.logconfig)
fileConfig(cfg.logconfig, defaults=defaults,
disable_existing_loggers=False)
else:
msg = "Error: log config '%s' not found"
raise RuntimeError(msg % cfg.logconfig)

def critical(self, msg, *args, **kwargs):
self.error_log.critical(msg, *args, **kwargs)

def error(self, msg, *args, **kwargs):
self.error_log.error(msg, *args, **kwargs)

def warning(self, msg, *args, **kwargs):
self.error_log.warning(msg, *args, **kwargs)

def info(self, msg, *args, **kwargs):
self.error_log.info(msg, *args, **kwargs)

def debug(self, msg, *args, **kwargs):
self.error_log.debug(msg, *args, **kwargs)

def exception(self, msg, *args, **kwargs):
self.error_log.exception(msg, *args, **kwargs)

def log(self, lvl, msg, *args, **kwargs):
if isinstance(lvl, string_types):
lvl = self.LOG_LEVELS.get(lvl.lower(), logging.INFO)
self.error_log.log(lvl, msg, *args, **kwargs)

def atoms(self, resp, req, environ, request_time):
""" Gets atoms for log formating.
"""
status = resp.status
if isinstance(status, str):
status = status.split(None, 1)[0]
atoms = {
'h': environ.get('REMOTE_ADDR', '-'),
'l': '-',
'u': self._get_user(environ) or '-',
't': self.now(),
'r': "%s %s %s" % (environ['REQUEST_METHOD'],
environ['RAW_URI'], environ["SERVER_PROTOCOL"]),
's': status,
'm': environ.get('REQUEST_METHOD'),
'U': environ.get('PATH_INFO'),
'q': environ.get('QUERY_STRING'),
'H': environ.get('SERVER_PROTOCOL'),
'b': getattr(resp, 'sent', None) and str(resp.sent) or '-',
'B': getattr(resp, 'sent', None),
'f': environ.get('HTTP_REFERER', '-'),
'a': environ.get('HTTP_USER_AGENT', '-'),
'T': request_time.seconds,
'D': (request_time.seconds*1000000) + request_time.microseconds,
'L': "%d.%06d" % (request_time.seconds, request_time.microseconds),
'p': "<%s>" % os.getpid()
}

# add request headers
if hasattr(req, 'headers'):
req_headers = req.headers
else:
req_headers = req

if hasattr(req_headers, "items"):
req_headers = req_headers.items()

atoms.update(dict([("{%s}i" % k.lower(), v) for k, v in req_headers]))

resp_headers = resp.headers
if hasattr(resp_headers, "items"):
resp_headers = resp_headers.items()

# add response headers
atoms.update(dict([("{%s}o" % k.lower(), v) for k, v in resp_headers]))

return atoms

def access(self, resp, req, environ, request_time):
""" See http://httpd.apache.org/docs/2.0/logs.html#combined
for format details
"""

if not (self.cfg.accesslog or self.cfg.logconfig or self.cfg.syslog):
return

# wrap atoms:
# - make sure atoms are looked up case-insensitively
# - if an atom doesn't exist, replace it with '-'
safe_atoms = self.atoms_wrapper_class(self.atoms(resp, req, environ,
request_time))

try:
self.access_log.info(self.cfg.access_log_format % safe_atoms)
except:
self.error(traceback.format_exc())

def now(self):
""" return date in Apache Common Log Format """
return time.strftime('[%d/%b/%Y:%H:%M:%S %z]')

def reopen_files(self):
if self.cfg.capture_output and self.cfg.errorlog != "-":
for stream in sys.stdout, sys.stderr:
stream.flush()

with self.lock:
if self.logfile is not None:
self.logfile.close()
self.logfile = open(self.cfg.errorlog, 'a+')
os.dup2(self.logfile.fileno(), sys.stdout.fileno())
os.dup2(self.logfile.fileno(), sys.stderr.fileno())


for log in loggers():
for handler in log.handlers:
if isinstance(handler, logging.FileHandler):
handler.acquire()
try:
if handler.stream:
handler.stream.close()
handler.stream = open(handler.baseFilename,
handler.mode)
finally:
handler.release()

def close_on_exec(self):
for log in loggers():
for handler in log.handlers:
if isinstance(handler, logging.FileHandler):
handler.acquire()
try:
if handler.stream:
util.close_on_exec(handler.stream.fileno())
finally:
handler.release()

def _get_gunicorn_handler(self, log):
for h in log.handlers:
if getattr(h, "_gunicorn", False):
return h

def _set_handler(self, log, output, fmt):
# remove previous gunicorn log handler
h = self._get_gunicorn_handler(log)
if h:
log.handlers.remove(h)

if output is not None:
if output == "-":
h = logging.StreamHandler()
else:
util.check_is_writeable(output)
h = logging.FileHandler(output)
# make sure the user can reopen the file
try:
os.chown(h.baseFilename, self.cfg.user, self.cfg.group)
except OSError:
# it's probably OK there, we assume the user has given
# /dev/null as a parameter.
pass

h.setFormatter(fmt)
h._gunicorn = True
log.addHandler(h)

def _set_syslog_handler(self, log, cfg, fmt, name):
# setup format
if not cfg.syslog_prefix:
prefix = cfg.proc_name.replace(":", ".")
else:
prefix = cfg.syslog_prefix

prefix = "gunicorn.%s.%s" % (prefix, name)

# set format
fmt = logging.Formatter(r"%s: %s" % (prefix, fmt))

# syslog facility
try:
facility = SYSLOG_FACILITIES[cfg.syslog_facility.lower()]
except KeyError:
raise RuntimeError("unknown facility name")

# parse syslog address
socktype, addr = parse_syslog_address(cfg.syslog_addr)

# finally setup the syslog handler
if sys.version_info >= (2, 7):
h = logging.handlers.SysLogHandler(address=addr,
facility=facility, socktype=socktype)
else:
# socktype is only supported in 2.7 and up
# fix issue #541
h = logging.handlers.SysLogHandler(address=addr,
facility=facility)

h.setFormatter(fmt)
h._gunicorn = True
log.addHandler(h)

def _get_user(self, environ):
user = None
http_auth = environ.get("HTTP_AUTHORIZATION")
if http_auth and http_auth.startswith('Basic'):
auth = http_auth.split(" ", 1)
if len(auth) == 2:
try:
# b64decode doesn't accept unicode in Python < 3.3
# so we need to convert it to a byte string
auth = base64.b64decode(auth[1].strip().encode('utf-8'))
if PY3: # b64decode returns a byte string in Python 3
auth = auth.decode('utf-8')
auth = auth.split(":", 1)
except TypeError as exc:
self.debug("Couldn't get username: %s", exc)
return user
except binascii.Error as exc:
self.debug("Couldn't get username: %s", exc)
return user
if len(auth) == 2:
user = auth[0]
return user
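The atoms() mapping above is what the access_log_format setting is interpolated against, with SafeAtoms substituting '-' for anything missing. A small example of a gunicorn configuration file that uses those atoms (file name and values are illustrative; it would be started with `gunicorn -c gunicorn_conf.py myapp:app`):

# gunicorn_conf.py -- illustrative settings exercising the glogging atoms
accesslog = "-"        # write the access log to stdout
errorlog = "-"         # write the error log to stderr
loglevel = "info"
# %(h)s client, %(t)s time, %(r)s request line, %(s)s status,
# %(b)s bytes sent, %(L)s request time in decimal seconds
access_log_format = '%(h)s %(l)s %(u)s %(t)s "%(r)s" %(s)s %(b)s "%(f)s" "%(a)s" %(L)ss'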

+ 9
- 0
thesisenv/lib/python3.6/site-packages/gunicorn/http/__init__.py View File

# -*- coding: utf-8 -
#
# This file is part of gunicorn released under the MIT license.
# See the NOTICE for more information.

from gunicorn.http.message import Message, Request
from gunicorn.http.parser import RequestParser

__all__ = ['Message', 'Request', 'RequestParser']

+ 68
- 0
thesisenv/lib/python3.6/site-packages/gunicorn/http/_sendfile.py View File

# -*- coding: utf-8 -
#
# This file is part of gunicorn released under the MIT license.
# See the NOTICE for more information.

import errno
import os
import sys

try:
import ctypes
import ctypes.util
except MemoryError:
# selinux execmem denial
# https://bugzilla.redhat.com/show_bug.cgi?id=488396
raise ImportError

SUPPORTED_PLATFORMS = (
'darwin',
'freebsd',
'dragonfly',
'linux2')

if sys.version_info < (2, 6) or \
sys.platform not in SUPPORTED_PLATFORMS:
raise ImportError("sendfile isn't supported on this platform")

_libc = ctypes.CDLL(ctypes.util.find_library("c"), use_errno=True)
_sendfile = _libc.sendfile


def sendfile(fdout, fdin, offset, nbytes):
if sys.platform == 'darwin':
_sendfile.argtypes = [ctypes.c_int, ctypes.c_int, ctypes.c_uint64,
ctypes.POINTER(ctypes.c_uint64), ctypes.c_voidp,
ctypes.c_int]
_nbytes = ctypes.c_uint64(nbytes)
result = _sendfile(fdin, fdout, offset, _nbytes, None, 0)

if result == -1:
e = ctypes.get_errno()
if e == errno.EAGAIN and _nbytes.value is not None:
return _nbytes.value
raise OSError(e, os.strerror(e))
return _nbytes.value
elif sys.platform in ('freebsd', 'dragonfly',):
_sendfile.argtypes = [ctypes.c_int, ctypes.c_int, ctypes.c_uint64,
ctypes.c_uint64, ctypes.c_voidp,
ctypes.POINTER(ctypes.c_uint64), ctypes.c_int]
_sbytes = ctypes.c_uint64()
result = _sendfile(fdin, fdout, offset, nbytes, None, _sbytes, 0)
if result == -1:
e = ctypes.get_errno()
if e == errno.EAGAIN and _sbytes.value is not None:
return _sbytes.value
raise OSError(e, os.strerror(e))
return _sbytes.value

else:
_sendfile.argtypes = [ctypes.c_int, ctypes.c_int,
ctypes.POINTER(ctypes.c_uint64), ctypes.c_size_t]

_offset = ctypes.c_uint64(offset)
sent = _sendfile(fdout, fdin, _offset, nbytes)
if sent == -1:
e = ctypes.get_errno()
raise OSError(e, os.strerror(e))
return sent

+ 259
- 0
thesisenv/lib/python3.6/site-packages/gunicorn/http/body.py View File

# -*- coding: utf-8 -
#
# This file is part of gunicorn released under the MIT license.
# See the NOTICE for more information.

from gunicorn.http.errors import (NoMoreData, ChunkMissingTerminator,
InvalidChunkSize)
from gunicorn import six


class ChunkedReader(object):
def __init__(self, req, unreader):
self.req = req
self.parser = self.parse_chunked(unreader)
self.buf = six.BytesIO()

def read(self, size):
if not isinstance(size, six.integer_types):
raise TypeError("size must be an integral type")
if size < 0:
raise ValueError("Size must be positive.")
if size == 0:
return b""

if self.parser:
while self.buf.tell() < size:
try:
self.buf.write(six.next(self.parser))
except StopIteration:
self.parser = None
break

data = self.buf.getvalue()
ret, rest = data[:size], data[size:]
self.buf = six.BytesIO()
self.buf.write(rest)
return ret

def parse_trailers(self, unreader, data):
buf = six.BytesIO()
buf.write(data)

idx = buf.getvalue().find(b"\r\n\r\n")
done = buf.getvalue()[:2] == b"\r\n"
while idx < 0 and not done:
self.get_data(unreader, buf)
idx = buf.getvalue().find(b"\r\n\r\n")
done = buf.getvalue()[:2] == b"\r\n"
if done:
unreader.unread(buf.getvalue()[2:])
return b""
self.req.trailers = self.req.parse_headers(buf.getvalue()[:idx])
unreader.unread(buf.getvalue()[idx + 4:])

def parse_chunked(self, unreader):
(size, rest) = self.parse_chunk_size(unreader)
while size > 0:
while size > len(rest):
size -= len(rest)
yield rest
rest = unreader.read()
if not rest:
raise NoMoreData()
yield rest[:size]
# Remove \r\n after chunk
rest = rest[size:]
while len(rest) < 2:
rest += unreader.read()
if rest[:2] != b'\r\n':
raise ChunkMissingTerminator(rest[:2])
(size, rest) = self.parse_chunk_size(unreader, data=rest[2:])

def parse_chunk_size(self, unreader, data=None):
buf = six.BytesIO()
if data is not None:
buf.write(data)

idx = buf.getvalue().find(b"\r\n")
while idx < 0:
self.get_data(unreader, buf)
idx = buf.getvalue().find(b"\r\n")

data = buf.getvalue()
line, rest_chunk = data[:idx], data[idx + 2:]

chunk_size = line.split(b";", 1)[0].strip()
try:
chunk_size = int(chunk_size, 16)
except ValueError:
raise InvalidChunkSize(chunk_size)

if chunk_size == 0:
try:
self.parse_trailers(unreader, rest_chunk)
except NoMoreData:
pass
return (0, None)
return (chunk_size, rest_chunk)

def get_data(self, unreader, buf):
data = unreader.read()
if not data:
raise NoMoreData()
buf.write(data)


class LengthReader(object):
def __init__(self, unreader, length):
self.unreader = unreader
self.length = length

def read(self, size):
if not isinstance(size, six.integer_types):
raise TypeError("size must be an integral type")

size = min(self.length, size)
if size < 0:
raise ValueError("Size must be positive.")
if size == 0:
return b""

buf = six.BytesIO()
data = self.unreader.read()
while data:
buf.write(data)
if buf.tell() >= size:
break
data = self.unreader.read()

buf = buf.getvalue()
ret, rest = buf[:size], buf[size:]
self.unreader.unread(rest)
self.length -= size
return ret


class EOFReader(object):
def __init__(self, unreader):
self.unreader = unreader
self.buf = six.BytesIO()
self.finished = False

def read(self, size):
if not isinstance(size, six.integer_types):
raise TypeError("size must be an integral type")
if size < 0:
raise ValueError("Size must be positive.")
if size == 0:
return b""

if self.finished:
data = self.buf.getvalue()
ret, rest = data[:size], data[size:]
self.buf = six.BytesIO()
self.buf.write(rest)
return ret

data = self.unreader.read()
while data:
self.buf.write(data)
if self.buf.tell() > size:
break
data = self.unreader.read()

if not data:
self.finished = True

data = self.buf.getvalue()
ret, rest = data[:size], data[size:]
self.buf = six.BytesIO()
self.buf.write(rest)
return ret


class Body(object):
def __init__(self, reader):
self.reader = reader
self.buf = six.BytesIO()

def __iter__(self):
return self

def __next__(self):
ret = self.readline()
if not ret:
raise StopIteration()
return ret
next = __next__

def getsize(self, size):
if size is None:
return six.MAXSIZE
elif not isinstance(size, six.integer_types):
raise TypeError("size must be an integral type")
elif size < 0:
return six.MAXSIZE
return size

def read(self, size=None):
size = self.getsize(size)
if size == 0:
return b""

if size < self.buf.tell():
data = self.buf.getvalue()
ret, rest = data[:size], data[size:]
self.buf = six.BytesIO()
self.buf.write(rest)
return ret

while size > self.buf.tell():
data = self.reader.read(1024)
if not len(data):
break
self.buf.write(data)

data = self.buf.getvalue()
ret, rest = data[:size], data[size:]
self.buf = six.BytesIO()
self.buf.write(rest)
return ret

def readline(self, size=None):
size = self.getsize(size)
if size == 0:
return b""

data = self.buf.getvalue()
self.buf = six.BytesIO()

ret = []
while 1:
idx = data.find(b"\n", 0, size)
idx = idx + 1 if idx >= 0 else size if len(data) >= size else 0
if idx:
ret.append(data[:idx])
self.buf.write(data[idx:])
break

ret.append(data)
size -= len(data)
data = self.reader.read(min(1024, size))
if not data:
break

return b"".join(ret)

def readlines(self, size=None):
ret = []
data = self.read()
while len(data):
pos = data.find(b"\n")
if pos < 0:
ret.append(data)
data = b""
else:
line, data = data[:pos + 1], data[pos + 1:]
ret.append(line)
return ret
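
A small usage sketch (not part of the library): wrap a fixed payload in a LengthReader and read it back through Body, using the IterUnreader defined in gunicorn.http.unreader.

from gunicorn.http.body import Body, LengthReader
from gunicorn.http.unreader import IterUnreader

unreader = IterUnreader([b"hello world"])
body = Body(LengthReader(unreader, 11))
print(body.read())   # b'hello world'
print(body.read())   # b'' -- the declared length is exhausted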

+ 109
- 0
thesisenv/lib/python3.6/site-packages/gunicorn/http/errors.py

# -*- coding: utf-8 -
#
# This file is part of gunicorn released under the MIT license.
# See the NOTICE for more information.


class ParseException(Exception):
pass


class NoMoreData(IOError):
def __init__(self, buf=None):
self.buf = buf

def __str__(self):
return "No more data after: %r" % self.buf


class InvalidRequestLine(ParseException):
def __init__(self, req):
self.req = req
self.code = 400

def __str__(self):
return "Invalid HTTP request line: %r" % self.req


class InvalidRequestMethod(ParseException):
def __init__(self, method):
self.method = method

def __str__(self):
return "Invalid HTTP method: %r" % self.method


class InvalidHTTPVersion(ParseException):
def __init__(self, version):
self.version = version

def __str__(self):
return "Invalid HTTP Version: %r" % self.version


class InvalidHeader(ParseException):
def __init__(self, hdr, req=None):
self.hdr = hdr
self.req = req

def __str__(self):
return "Invalid HTTP Header: %r" % self.hdr


class InvalidHeaderName(ParseException):
def __init__(self, hdr):
self.hdr = hdr

def __str__(self):
return "Invalid HTTP header name: %r" % self.hdr


class InvalidChunkSize(IOError):
def __init__(self, data):
self.data = data

def __str__(self):
return "Invalid chunk size: %r" % self.data


class ChunkMissingTerminator(IOError):
def __init__(self, term):
self.term = term

def __str__(self):
return "Invalid chunk terminator is not '\\r\\n': %r" % self.term


class LimitRequestLine(ParseException):
def __init__(self, size, max_size):
self.size = size
self.max_size = max_size

def __str__(self):
return "Request Line is too large (%s > %s)" % (self.size, self.max_size)


class LimitRequestHeaders(ParseException):
def __init__(self, msg):
self.msg = msg

def __str__(self):
return self.msg


class InvalidProxyLine(ParseException):
def __init__(self, line):
self.line = line
self.code = 400

def __str__(self):
return "Invalid PROXY line: %r" % self.line


class ForbiddenProxyRequest(ParseException):
def __init__(self, host):
self.host = host
self.code = 403

def __str__(self):
return "Proxy request from %r not allowed" % self.host
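
These exceptions carry an HTTP status code where one applies; a quick sketch of how they render (the values are arbitrary):

from gunicorn.http.errors import InvalidRequestLine, LimitRequestLine

try:
    raise LimitRequestLine(8291, 8190)
except LimitRequestLine as exc:
    print(exc)                                 # Request Line is too large (8291 > 8190)

print(InvalidRequestLine("GARBAGE").code)      # 400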

+ 343
- 0
thesisenv/lib/python3.6/site-packages/gunicorn/http/message.py

# -*- coding: utf-8 -
#
# This file is part of gunicorn released under the MIT license.
# See the NOTICE for more information.

import re
import socket
from errno import ENOTCONN

from gunicorn._compat import bytes_to_str
from gunicorn.http.unreader import SocketUnreader
from gunicorn.http.body import ChunkedReader, LengthReader, EOFReader, Body
from gunicorn.http.errors import (InvalidHeader, InvalidHeaderName, NoMoreData,
InvalidRequestLine, InvalidRequestMethod, InvalidHTTPVersion,
LimitRequestLine, LimitRequestHeaders)
from gunicorn.http.errors import InvalidProxyLine, ForbiddenProxyRequest
from gunicorn.six import BytesIO
from gunicorn._compat import urlsplit

MAX_REQUEST_LINE = 8190
MAX_HEADERS = 32768
MAX_HEADERFIELD_SIZE = 8190

HEADER_RE = re.compile("[\x00-\x1F\x7F()<>@,;:\[\]={} \t\\\\\"]")
METH_RE = re.compile(r"[A-Z0-9$-_.]{3,20}")
VERSION_RE = re.compile(r"HTTP/(\d+).(\d+)")


class Message(object):
def __init__(self, cfg, unreader):
self.cfg = cfg
self.unreader = unreader
self.version = None
self.headers = []
self.trailers = []
self.body = None

# set headers limits
self.limit_request_fields = cfg.limit_request_fields
if (self.limit_request_fields <= 0
or self.limit_request_fields > MAX_HEADERS):
self.limit_request_fields = MAX_HEADERS
self.limit_request_field_size = cfg.limit_request_field_size
if (self.limit_request_field_size < 0
or self.limit_request_field_size > MAX_HEADERFIELD_SIZE):
self.limit_request_field_size = MAX_HEADERFIELD_SIZE

# set max header buffer size
max_header_field_size = self.limit_request_field_size or MAX_HEADERFIELD_SIZE
self.max_buffer_headers = self.limit_request_fields * \
(max_header_field_size + 2) + 4

unused = self.parse(self.unreader)
self.unreader.unread(unused)
self.set_body_reader()

def parse(self):
raise NotImplementedError()

def parse_headers(self, data):
headers = []

# Split lines on \r\n keeping the \r\n on each line
lines = [bytes_to_str(line) + "\r\n" for line in data.split(b"\r\n")]

# Parse headers into key/value pairs paying attention
# to continuation lines.
while len(lines):
if len(headers) >= self.limit_request_fields:
raise LimitRequestHeaders("limit request headers fields")

# Parse initial header name : value pair.
curr = lines.pop(0)
header_length = len(curr)
if curr.find(":") < 0:
raise InvalidHeader(curr.strip())
name, value = curr.split(":", 1)
name = name.rstrip(" \t").upper()
if HEADER_RE.search(name):
raise InvalidHeaderName(name)

name, value = name.strip(), [value.lstrip()]

# Consume value continuation lines
while len(lines) and lines[0].startswith((" ", "\t")):
curr = lines.pop(0)
header_length += len(curr)
if header_length > self.limit_request_field_size > 0:
raise LimitRequestHeaders("limit request headers "
+ "fields size")
value.append(curr)
value = ''.join(value).rstrip()

if header_length > self.limit_request_field_size > 0:
raise LimitRequestHeaders("limit request headers fields size")
headers.append((name, value))
return headers

def set_body_reader(self):
chunked = False
content_length = None
for (name, value) in self.headers:
if name == "CONTENT-LENGTH":
content_length = value
elif name == "TRANSFER-ENCODING":
chunked = value.lower() == "chunked"
elif name == "SEC-WEBSOCKET-KEY1":
content_length = 8

if chunked:
self.body = Body(ChunkedReader(self, self.unreader))
elif content_length is not None:
try:
content_length = int(content_length)
except ValueError:
raise InvalidHeader("CONTENT-LENGTH", req=self)

if content_length < 0:
raise InvalidHeader("CONTENT-LENGTH", req=self)

self.body = Body(LengthReader(self.unreader, content_length))
else:
self.body = Body(EOFReader(self.unreader))

def should_close(self):
for (h, v) in self.headers:
if h == "CONNECTION":
v = v.lower().strip()
if v == "close":
return True
elif v == "keep-alive":
return False
break
return self.version <= (1, 0)


class Request(Message):
def __init__(self, cfg, unreader, req_number=1):
self.method = None
self.uri = None
self.path = None
self.query = None
self.fragment = None

# get max request line size
self.limit_request_line = cfg.limit_request_line
if (self.limit_request_line < 0
or self.limit_request_line >= MAX_REQUEST_LINE):
self.limit_request_line = MAX_REQUEST_LINE

self.req_number = req_number
self.proxy_protocol_info = None
super(Request, self).__init__(cfg, unreader)

def get_data(self, unreader, buf, stop=False):
data = unreader.read()
if not data:
if stop:
raise StopIteration()
raise NoMoreData(buf.getvalue())
buf.write(data)

def parse(self, unreader):
buf = BytesIO()
self.get_data(unreader, buf, stop=True)

# get request line
line, rbuf = self.read_line(unreader, buf, self.limit_request_line)

# proxy protocol
if self.proxy_protocol(bytes_to_str(line)):
# get next request line
buf = BytesIO()
buf.write(rbuf)
line, rbuf = self.read_line(unreader, buf, self.limit_request_line)

self.parse_request_line(bytes_to_str(line))
buf = BytesIO()
buf.write(rbuf)

# Headers
data = buf.getvalue()
idx = data.find(b"\r\n\r\n")

done = data[:2] == b"\r\n"
while True:
idx = data.find(b"\r\n\r\n")
done = data[:2] == b"\r\n"

if idx < 0 and not done:
self.get_data(unreader, buf)
data = buf.getvalue()
if len(data) > self.max_buffer_headers:
raise LimitRequestHeaders("max buffer headers")
else:
break

if done:
self.unreader.unread(data[2:])
return b""

self.headers = self.parse_headers(data[:idx])

ret = data[idx + 4:]
buf = BytesIO()
return ret

def read_line(self, unreader, buf, limit=0):
data = buf.getvalue()

while True:
idx = data.find(b"\r\n")
if idx >= 0:
# check if the request line is too large
if idx > limit > 0:
raise LimitRequestLine(idx, limit)
break
elif len(data) - 2 > limit > 0:
raise LimitRequestLine(len(data), limit)
self.get_data(unreader, buf)
data = buf.getvalue()

return (data[:idx], # request line,
data[idx + 2:]) # residue in the buffer, skip \r\n

def proxy_protocol(self, line):
"""\
Detect, check and parse proxy protocol.

:raises: ForbiddenProxyRequest, InvalidProxyLine.
:return: True for proxy protocol line else False
"""
if not self.cfg.proxy_protocol:
return False

if self.req_number != 1:
return False

if not line.startswith("PROXY"):
return False

self.proxy_protocol_access_check()
self.parse_proxy_protocol(line)

return True

def proxy_protocol_access_check(self):
# check in allow list
if isinstance(self.unreader, SocketUnreader):
try:
remote_host = self.unreader.sock.getpeername()[0]
except socket.error as e:
if e.args[0] == ENOTCONN:
raise ForbiddenProxyRequest("UNKNOW")
raise
if ("*" not in self.cfg.proxy_allow_ips and
remote_host not in self.cfg.proxy_allow_ips):
raise ForbiddenProxyRequest(remote_host)

def parse_proxy_protocol(self, line):
bits = line.split()

if len(bits) != 6:
raise InvalidProxyLine(line)

# Extract data
proto = bits[1]
s_addr = bits[2]
d_addr = bits[3]

# Validation
if proto not in ["TCP4", "TCP6"]:
raise InvalidProxyLine("protocol '%s' not supported" % proto)
if proto == "TCP4":
try:
socket.inet_pton(socket.AF_INET, s_addr)
socket.inet_pton(socket.AF_INET, d_addr)
except socket.error:
raise InvalidProxyLine(line)
elif proto == "TCP6":
try:
socket.inet_pton(socket.AF_INET6, s_addr)
socket.inet_pton(socket.AF_INET6, d_addr)
except socket.error:
raise InvalidProxyLine(line)

try:
s_port = int(bits[4])
d_port = int(bits[5])
except ValueError:
raise InvalidProxyLine("invalid port %s" % line)

if not ((0 <= s_port <= 65535) and (0 <= d_port <= 65535)):
raise InvalidProxyLine("invalid port %s" % line)

# Set data
self.proxy_protocol_info = {
"proxy_protocol": proto,
"client_addr": s_addr,
"client_port": s_port,
"proxy_addr": d_addr,
"proxy_port": d_port
}

def parse_request_line(self, line):
bits = line.split(None, 2)
if len(bits) != 3:
raise InvalidRequestLine(line)

# Method
if not METH_RE.match(bits[0]):
raise InvalidRequestMethod(bits[0])
self.method = bits[0].upper()

# URI
# When the path starts with //, urlsplit considers it as a
# relative uri, while the RFC
# (http://www.w3.org/Protocols/rfc2616/rfc2616-sec5.html#sec5.1.2)
# considers it an absolute url.
# fix issue #297
if bits[1].startswith("//"):
self.uri = bits[1][1:]
else:
self.uri = bits[1]

try:
parts = urlsplit(self.uri)
except ValueError:
raise InvalidRequestLine(line)
self.path = parts.path or ""
self.query = parts.query or ""
self.fragment = parts.fragment or ""

# Version
match = VERSION_RE.match(bits[2])
if match is None:
raise InvalidHTTPVersion(bits[2])
self.version = (int(match.group(1)), int(match.group(2)))

def set_body_reader(self):
super(Request, self).set_body_reader()
if isinstance(self.body.reader, EOFReader):
self.body = Body(LengthReader(self.unreader, 0))
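
To illustrate the proxy protocol path (a sketch using the default Config with proxy_protocol switched on; addresses and ports are made up), feeding a PROXY line ahead of an ordinary request populates proxy_protocol_info:

from gunicorn.config import Config
from gunicorn.http.parser import RequestParser

cfg = Config()
cfg.set("proxy_protocol", True)
raw = [b"PROXY TCP4 192.168.0.1 192.168.0.11 56324 443\r\n"
       b"GET / HTTP/1.1\r\nHost: example.com\r\n\r\n"]
req = next(RequestParser(cfg, raw))
print(req.proxy_protocol_info)
# {'proxy_protocol': 'TCP4', 'client_addr': '192.168.0.1', 'client_port': 56324,
#  'proxy_addr': '192.168.0.11', 'proxy_port': 443}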

+ 51
- 0
thesisenv/lib/python3.6/site-packages/gunicorn/http/parser.py

# -*- coding: utf-8 -
#
# This file is part of gunicorn released under the MIT license.
# See the NOTICE for more information.

from gunicorn.http.message import Request
from gunicorn.http.unreader import SocketUnreader, IterUnreader


class Parser(object):

mesg_class = None

def __init__(self, cfg, source):
self.cfg = cfg
if hasattr(source, "recv"):
self.unreader = SocketUnreader(source)
else:
self.unreader = IterUnreader(source)
self.mesg = None

# request counter (for keepalive connections)
self.req_count = 0

def __iter__(self):
return self

def __next__(self):
# Stop if HTTP dictates a stop.
if self.mesg and self.mesg.should_close():
raise StopIteration()

# Discard any unread body of the previous message
if self.mesg:
data = self.mesg.body.read(8192)
while data:
data = self.mesg.body.read(8192)

# Parse the next request
self.req_count += 1
self.mesg = self.mesg_class(self.cfg, self.unreader, self.req_count)
if not self.mesg:
raise StopIteration()
return self.mesg

next = __next__


class RequestParser(Parser):

mesg_class = Request
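
End to end, the parser turns an iterable of raw bytes into Request objects (a sketch assuming the default gunicorn.config.Config settings):

from gunicorn.config import Config
from gunicorn.http.parser import RequestParser

raw = [b"GET /ping?x=1 HTTP/1.1\r\nHost: localhost\r\nContent-Length: 0\r\n\r\n"]
parser = RequestParser(Config(), raw)

req = next(parser)
print(req.method, req.path, req.query, req.version)   # GET /ping x=1 (1, 1)
print(req.headers)      # [('HOST', 'localhost'), ('CONTENT-LENGTH', '0')]
print(req.body.read())  # b''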

+ 80
- 0
thesisenv/lib/python3.6/site-packages/gunicorn/http/unreader.py

# -*- coding: utf-8 -
#
# This file is part of gunicorn released under the MIT license.
# See the NOTICE for more information.

import os

from gunicorn import six

# Classes that can undo reading data from
# a given type of data source.


class Unreader(object):
def __init__(self):
self.buf = six.BytesIO()

def chunk(self):
raise NotImplementedError()

def read(self, size=None):
if size is not None and not isinstance(size, six.integer_types):
raise TypeError("size parameter must be an int or long.")

if size is not None:
if size == 0:
return b""
if size < 0:
size = None

self.buf.seek(0, os.SEEK_END)

if size is None and self.buf.tell():
ret = self.buf.getvalue()
self.buf = six.BytesIO()
return ret
if size is None:
d = self.chunk()
return d

while self.buf.tell() < size:
chunk = self.chunk()
if not len(chunk):
ret = self.buf.getvalue()
self.buf = six.BytesIO()
return ret
self.buf.write(chunk)
data = self.buf.getvalue()
self.buf = six.BytesIO()
self.buf.write(data[size:])
return data[:size]

def unread(self, data):
self.buf.seek(0, os.SEEK_END)
self.buf.write(data)


class SocketUnreader(Unreader):
def __init__(self, sock, max_chunk=8192):
super(SocketUnreader, self).__init__()
self.sock = sock
self.mxchunk = max_chunk

def chunk(self):
return self.sock.recv(self.mxchunk)


class IterUnreader(Unreader):
def __init__(self, iterable):
super(IterUnreader, self).__init__()
self.iter = iter(iterable)

def chunk(self):
if not self.iter:
return b""
try:
return six.next(self.iter)
except StopIteration:
self.iter = None
return b""
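
A brief sketch of the push-back behaviour: bytes handed to unread() are buffered and come back on the next read().

from gunicorn.http.unreader import IterUnreader

u = IterUnreader([b"hello ", b"world"])
print(u.read(5))     # b'hello'
u.unread(b"again ")
print(u.read())      # b' again ' -- the leftover byte plus the pushed-back data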

+ 420
- 0
thesisenv/lib/python3.6/site-packages/gunicorn/http/wsgi.py

# -*- coding: utf-8 -
#
# This file is part of gunicorn released under the MIT license.
# See the NOTICE for more information.

import io
import logging
import os
import re
import sys

from gunicorn._compat import unquote_to_wsgi_str
from gunicorn.http.message import HEADER_RE
from gunicorn.http.errors import InvalidHeader, InvalidHeaderName
from gunicorn.six import string_types, binary_type, reraise
from gunicorn import SERVER_SOFTWARE
import gunicorn.util as util

try:
# Python 3.3 has os.sendfile().
from os import sendfile
except ImportError:
try:
from ._sendfile import sendfile
except ImportError:
sendfile = None

# Send files in at most 1GB blocks as some operating systems can have problems
# with sending files in blocks over 2GB.
BLKSIZE = 0x3FFFFFFF

NORMALIZE_SPACE = re.compile(r'(?:\r\n)?[ \t]+')
HEADER_VALUE_RE = re.compile(r'[\x00-\x1F\x7F]')

log = logging.getLogger(__name__)


class FileWrapper(object):

def __init__(self, filelike, blksize=8192):
self.filelike = filelike
self.blksize = blksize
if hasattr(filelike, 'close'):
self.close = filelike.close

def __getitem__(self, key):
data = self.filelike.read(self.blksize)
if data:
return data
raise IndexError


class WSGIErrorsWrapper(io.RawIOBase):

def __init__(self, cfg):
errorlog = logging.getLogger("gunicorn.error")
handlers = errorlog.handlers
self.streams = []

if cfg.errorlog == "-":
self.streams.append(sys.stderr)
handlers = handlers[1:]

for h in handlers:
if hasattr(h, "stream"):
self.streams.append(h.stream)

def write(self, data):
for stream in self.streams:
try:
stream.write(data)
except UnicodeError:
stream.write(data.encode("UTF-8"))
stream.flush()


def base_environ(cfg):
return {
"wsgi.errors": WSGIErrorsWrapper(cfg),
"wsgi.version": (1, 0),
"wsgi.multithread": False,
"wsgi.multiprocess": (cfg.workers > 1),
"wsgi.run_once": False,
"wsgi.file_wrapper": FileWrapper,
"SERVER_SOFTWARE": SERVER_SOFTWARE,
}


def default_environ(req, sock, cfg):
env = base_environ(cfg)
env.update({
"wsgi.input": req.body,
"gunicorn.socket": sock,
"REQUEST_METHOD": req.method,
"QUERY_STRING": req.query,
"RAW_URI": req.uri,
"SERVER_PROTOCOL": "HTTP/%s" % ".".join([str(v) for v in req.version])
})
return env


def proxy_environ(req):
info = req.proxy_protocol_info

if not info:
return {}

return {
"PROXY_PROTOCOL": info["proxy_protocol"],
"REMOTE_ADDR": info["client_addr"],
"REMOTE_PORT": str(info["client_port"]),
"PROXY_ADDR": info["proxy_addr"],
"PROXY_PORT": str(info["proxy_port"]),
}


def create(req, sock, client, server, cfg):
resp = Response(req, sock, cfg)

# set initial environ
environ = default_environ(req, sock, cfg)

# default variables
host = None
url_scheme = "https" if cfg.is_ssl else "http"
script_name = os.environ.get("SCRIPT_NAME", "")

# set secure_headers
secure_headers = cfg.secure_scheme_headers
if client and not isinstance(client, string_types):
if ('*' not in cfg.forwarded_allow_ips
and client[0] not in cfg.forwarded_allow_ips):
secure_headers = {}

# add the headers to the environ
for hdr_name, hdr_value in req.headers:
if hdr_name == "EXPECT":
# handle expect
if hdr_value.lower() == "100-continue":
sock.send(b"HTTP/1.1 100 Continue\r\n\r\n")
elif secure_headers and (hdr_name in secure_headers and
hdr_value == secure_headers[hdr_name]):
url_scheme = "https"
elif hdr_name == 'HOST':
host = hdr_value
elif hdr_name == "SCRIPT_NAME":
script_name = hdr_value
elif hdr_name == "CONTENT-TYPE":
environ['CONTENT_TYPE'] = hdr_value
continue
elif hdr_name == "CONTENT-LENGTH":
environ['CONTENT_LENGTH'] = hdr_value
continue

key = 'HTTP_' + hdr_name.replace('-', '_')
if key in environ:
hdr_value = "%s,%s" % (environ[key], hdr_value)
environ[key] = hdr_value

# set the url scheme
environ['wsgi.url_scheme'] = url_scheme

# set the REMOTE_* keys in environ
# authors should be aware that REMOTE_HOST and REMOTE_ADDR
# may not qualify the remote addr:
# http://www.ietf.org/rfc/rfc3875
if isinstance(client, string_types):
environ['REMOTE_ADDR'] = client
elif isinstance(client, binary_type):
environ['REMOTE_ADDR'] = str(client)
else:
environ['REMOTE_ADDR'] = client[0]
environ['REMOTE_PORT'] = str(client[1])

# handle the SERVER_*
# Normally only the application should use the Host header but since the
# WSGI spec doesn't support unix sockets, we are using it to create
# viable SERVER_* if possible.
if isinstance(server, string_types):
server = server.split(":")
if len(server) == 1:
# unix socket
if host and host is not None:
server = host.split(':')
if len(server) == 1:
if url_scheme == "http":
server.append(80)
elif url_scheme == "https":
server.append(443)
else:
server.append('')
else:
# no host header given which means that we are not behind a
# proxy, so append an empty port.
server.append('')
environ['SERVER_NAME'] = server[0]
environ['SERVER_PORT'] = str(server[1])

# set the path and script name
path_info = req.path
if script_name:
path_info = path_info.split(script_name, 1)[1]
environ['PATH_INFO'] = unquote_to_wsgi_str(path_info)
environ['SCRIPT_NAME'] = script_name

# override the environ with the correct remote and server address if
# we are behind a proxy using the proxy protocol.
environ.update(proxy_environ(req))
return resp, environ


class Response(object):

def __init__(self, req, sock, cfg):
self.req = req
self.sock = sock
self.version = SERVER_SOFTWARE
self.status = None
self.chunked = False
self.must_close = False
self.headers = []
self.headers_sent = False
self.response_length = None
self.sent = 0
self.upgrade = False
self.cfg = cfg

def force_close(self):
self.must_close = True

def should_close(self):
if self.must_close or self.req.should_close():
return True
if self.response_length is not None or self.chunked:
return False
if self.req.method == 'HEAD':
return False
if self.status_code < 200 or self.status_code in (204, 304):
return False
return True

def start_response(self, status, headers, exc_info=None):
if exc_info:
try:
if self.status and self.headers_sent:
reraise(exc_info[0], exc_info[1], exc_info[2])
finally:
exc_info = None
elif self.status is not None:
raise AssertionError("Response headers already set!")

self.status = status

# get the status code from the response here so we can use it to check
# the need for the connection header later without parsing the string
# each time.
try:
self.status_code = int(self.status.split()[0])
except ValueError:
self.status_code = None

self.process_headers(headers)
self.chunked = self.is_chunked()
return self.write

def process_headers(self, headers):
for name, value in headers:
if not isinstance(name, string_types):
raise TypeError('%r is not a string' % name)

if HEADER_RE.search(name):
raise InvalidHeaderName('%r' % name)

if HEADER_VALUE_RE.search(value):
raise InvalidHeader('%r' % value)

value = str(value).strip()
lname = name.lower().strip()
if lname == "content-length":
self.response_length = int(value)
elif util.is_hoppish(name):
if lname == "connection":
# handle websocket
if value.lower().strip() == "upgrade":
self.upgrade = True
elif lname == "upgrade":
if value.lower().strip() == "websocket":
self.headers.append((name.strip(), value))

# ignore hopbyhop headers
continue
self.headers.append((name.strip(), value))

def is_chunked(self):
# Only use chunked responses when the client is
# speaking HTTP/1.1 or newer and there was
# no Content-Length header set.
if self.response_length is not None:
return False
elif self.req.version <= (1, 0):
return False
elif self.req.method == 'HEAD':
# Responses to a HEAD request MUST NOT contain a response body.
return False
elif self.status_code in (204, 304):
# Do not use chunked responses when the response is guaranteed to
# not have a response body.
return False
return True

def default_headers(self):
# set the connection header
if self.upgrade:
connection = "upgrade"
elif self.should_close():
connection = "close"
else:
connection = "keep-alive"

headers = [
"HTTP/%s.%s %s\r\n" % (self.req.version[0],
self.req.version[1], self.status),
"Server: %s\r\n" % self.version,
"Date: %s\r\n" % util.http_date(),
"Connection: %s\r\n" % connection
]
if self.chunked:
headers.append("Transfer-Encoding: chunked\r\n")
return headers

def send_headers(self):
if self.headers_sent:
return
tosend = self.default_headers()
tosend.extend(["%s: %s\r\n" % (k, v) for k, v in self.headers])

header_str = "%s\r\n" % "".join(tosend)
util.write(self.sock, util.to_bytestring(header_str, "ascii"))
self.headers_sent = True

def write(self, arg):
self.send_headers()
if not isinstance(arg, binary_type):
raise TypeError('%r is not a byte' % arg)
arglen = len(arg)
tosend = arglen
if self.response_length is not None:
if self.sent >= self.response_length:
# Never write more than self.response_length bytes
return

tosend = min(self.response_length - self.sent, tosend)
if tosend < arglen:
arg = arg[:tosend]

# Sending an empty chunk signals the end of the
# response and prematurely closes the response
if self.chunked and tosend == 0:
return

self.sent += tosend
util.write(self.sock, arg, self.chunked)

def can_sendfile(self):
return self.cfg.sendfile is not False and sendfile is not None

def sendfile(self, respiter):
if self.cfg.is_ssl or not self.can_sendfile():
return False

if not util.has_fileno(respiter.filelike):
return False

fileno = respiter.filelike.fileno()
try:
offset = os.lseek(fileno, 0, os.SEEK_CUR)
if self.response_length is None:
filesize = os.fstat(fileno).st_size

# The file may be special and sendfile will fail.
# It may also be zero-length, but that is okay.
if filesize == 0:
return False

nbytes = filesize - offset
else:
nbytes = self.response_length
except (OSError, io.UnsupportedOperation):
return False

self.send_headers()

if self.is_chunked():
chunk_size = "%X\r\n" % nbytes
self.sock.sendall(chunk_size.encode('utf-8'))

sockno = self.sock.fileno()
sent = 0

while sent != nbytes:
count = min(nbytes - sent, BLKSIZE)
sent += sendfile(sockno, fileno, offset + sent, count)

if self.is_chunked():
self.sock.sendall(b"\r\n")

os.lseek(fileno, offset, os.SEEK_SET)

return True

def write_file(self, respiter):
if not self.sendfile(respiter):
for item in respiter:
self.write(item)

def close(self):
if not self.headers_sent:
self.send_headers()
if self.chunked:
util.write_chunk(self.sock, b"")
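
For orientation (a sketch with the default Config), base_environ() shows the static part of the WSGI environ that create() builds on before the per-request keys are merged in:

from gunicorn.config import Config
from gunicorn.http.wsgi import base_environ

env = base_environ(Config())
print(sorted(env))
# ['SERVER_SOFTWARE', 'wsgi.errors', 'wsgi.file_wrapper', 'wsgi.multiprocess',
#  'wsgi.multithread', 'wsgi.run_once', 'wsgi.version']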

+ 0
- 0
thesisenv/lib/python3.6/site-packages/gunicorn/instrument/__init__.py


+ 124
- 0
thesisenv/lib/python3.6/site-packages/gunicorn/instrument/statsd.py

# -*- coding: utf-8 -
#
# This file is part of gunicorn released under the MIT license.
# See the NOTICE for more information.

"Bare-bones implementation of statsD's protocol, client-side"

import socket
import logging
from re import sub

from gunicorn.glogging import Logger
from gunicorn import six

# Instrumentation constants
STATSD_DEFAULT_PORT = 8125
METRIC_VAR = "metric"
VALUE_VAR = "value"
MTYPE_VAR = "mtype"
GAUGE_TYPE = "gauge"
COUNTER_TYPE = "counter"
HISTOGRAM_TYPE = "histogram"

class Statsd(Logger):
"""statsD-based instrumentation, that passes as a logger
"""
def __init__(self, cfg):
"""host, port: statsD server
"""
Logger.__init__(self, cfg)
self.prefix = sub(r"^(.+[^.]+)\.*$", r"\g<1>.", cfg.statsd_prefix)
try:
host, port = cfg.statsd_host
self.sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
self.sock.connect((host, int(port)))
except Exception:
self.sock = None

# Log errors and warnings
def critical(self, msg, *args, **kwargs):
Logger.critical(self, msg, *args, **kwargs)
self.increment("gunicorn.log.critical", 1)

def error(self, msg, *args, **kwargs):
Logger.error(self, msg, *args, **kwargs)
self.increment("gunicorn.log.error", 1)

def warning(self, msg, *args, **kwargs):
Logger.warning(self, msg, *args, **kwargs)
self.increment("gunicorn.log.warning", 1)

def exception(self, msg, *args, **kwargs):
Logger.exception(self, msg, *args, **kwargs)
self.increment("gunicorn.log.exception", 1)

# Special treatment for info, the most common log level
def info(self, msg, *args, **kwargs):
self.log(logging.INFO, msg, *args, **kwargs)

# skip the run-of-the-mill logs
def debug(self, msg, *args, **kwargs):
self.log(logging.DEBUG, msg, *args, **kwargs)

def log(self, lvl, msg, *args, **kwargs):
"""Log a given statistic if metric, value and type are present
"""
try:
extra = kwargs.get("extra", None)
if extra is not None:
metric = extra.get(METRIC_VAR, None)
value = extra.get(VALUE_VAR, None)
typ = extra.get(MTYPE_VAR, None)
if metric and value and typ:
if typ == GAUGE_TYPE:
self.gauge(metric, value)
elif typ == COUNTER_TYPE:
self.increment(metric, value)
elif typ == HISTOGRAM_TYPE:
self.histogram(metric, value)
else:
pass

# Log to parent logger only if there is something to say
if msg is not None and len(msg) > 0:
Logger.log(self, lvl, msg, *args, **kwargs)
except Exception:
Logger.warning(self, "Failed to log to statsd", exc_info=True)

# access logging
def access(self, resp, req, environ, request_time):
"""Measure request duration
request_time is a datetime.timedelta
"""
Logger.access(self, resp, req, environ, request_time)
duration_in_ms = request_time.seconds * 1000 + float(request_time.microseconds) / 10 ** 3
status = resp.status
if isinstance(status, str):
status = int(status.split(None, 1)[0])
self.histogram("gunicorn.request.duration", duration_in_ms)
self.increment("gunicorn.requests", 1)
self.increment("gunicorn.request.status.%d" % status, 1)

# statsD methods
# you can use those directly if you want
def gauge(self, name, value):
self._sock_send("{0}{1}:{2}|g".format(self.prefix, name, value))

def increment(self, name, value, sampling_rate=1.0):
self._sock_send("{0}{1}:{2}|c|@{3}".format(self.prefix, name, value, sampling_rate))

def decrement(self, name, value, sampling_rate=1.0):
self._sock_send("{0}{1}:-{2}|c|@{3}".format(self.prefix, name, value, sampling_rate))

def histogram(self, name, value):
self._sock_send("{0}{1}:{2}|ms".format(self.prefix, name, value))

def _sock_send(self, msg):
try:
if isinstance(msg, six.text_type):
msg = msg.encode("ascii")
if self.sock:
self.sock.send(msg)
except Exception:
Logger.warning(self, "Error sending message to statsd", exc_info=True)
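
A minimal sketch of wiring this logger up by hand (host, port and prefix are placeholders; metrics go out as fire-and-forget UDP, so nothing needs to be listening):

from gunicorn.config import Config
from gunicorn.instrument.statsd import Statsd

cfg = Config()
cfg.set("statsd_host", "127.0.0.1:8125")    # parsed into a (host, port) pair
cfg.set("statsd_prefix", "myapp")
logger = Statsd(cfg)

logger.increment("deploys", 1)              # sends 'myapp.deploys:1|c|@1.0'
logger.info("worker booted")                # plain log line, no metric attached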

+ 0
- 0
thesisenv/lib/python3.6/site-packages/gunicorn/management/__init__.py


+ 0
- 0
thesisenv/lib/python3.6/site-packages/gunicorn/management/commands/__init__.py


+ 113
- 0
thesisenv/lib/python3.6/site-packages/gunicorn/management/commands/run_gunicorn.py

# -*- coding: utf-8 -
#
# This file is part of gunicorn released under the MIT license.
# See the NOTICE for more information.

from optparse import make_option
import sys

from django.core.management.base import BaseCommand, CommandError

from gunicorn.app.djangoapp import DjangoApplicationCommand
from gunicorn.config import make_settings
from gunicorn import util


# monkey patch django.
# This patch makes sure that we use real threads to get the ident, which
# is going to happen if we are using gevent or eventlet.
try:
from django.db.backends import BaseDatabaseWrapper, DatabaseError

if "validate_thread_sharing" in BaseDatabaseWrapper.__dict__:
import thread
_get_ident = thread.get_ident

__old__init__ = BaseDatabaseWrapper.__init__

def _init(self, *args, **kwargs):
__old__init__(self, *args, **kwargs)
self._thread_ident = _get_ident()

def _validate_thread_sharing(self):
if (not self.allow_thread_sharing
and self._thread_ident != _get_ident()):
raise DatabaseError("DatabaseWrapper objects created in a "
"thread can only be used in that same thread. The object "
"with alias '%s' was created in thread id %s and this is "
"thread id %s."
% (self.alias, self._thread_ident, _get_ident()))

BaseDatabaseWrapper.__init__ = _init
BaseDatabaseWrapper.validate_thread_sharing = _validate_thread_sharing
except ImportError:
pass


def make_options():
opts = [
make_option('--adminmedia', dest='admin_media_path', default='',
help='Specifies the directory from which to serve admin media.')
]

g_settings = make_settings(ignore=("version",))
keys = g_settings.keys()
for k in keys:
if k in ('pythonpath', 'django_settings',):
continue

setting = g_settings[k]
if not setting.cli:
continue

args = tuple(setting.cli)

kwargs = {
"dest": setting.name,
"metavar": setting.meta or None,
"action": setting.action or "store",
"type": setting.type or "string",
"default": None,
"help": "%s [%s]" % (setting.short, setting.default)
}
if kwargs["action"] != "store":
kwargs.pop("type")

opts.append(make_option(*args, **kwargs))

return tuple(opts)

GUNICORN_OPTIONS = make_options()


class Command(BaseCommand):
option_list = BaseCommand.option_list + GUNICORN_OPTIONS
help = "Starts a fully-functional Web server using gunicorn."
args = '[optional port number, or ipaddr:port or unix:/path/to/sockfile]'

# Validation is called explicitly each time the server is reloaded.
requires_model_validation = False

def handle(self, addrport=None, *args, **options):

# deprecation warning to announce future deletion in R21
util.warn("""This command is deprecated.

You should now run your application with the WSGI interface
installed with your project. Ex.:

gunicorn myproject.wsgi:application

See https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/gunicorn/
for more info.""")

if args:
raise CommandError('Usage is run_gunicorn %s' % self.args)

if addrport:
sys.argv = sys.argv[:-1]
options['bind'] = addrport

admin_media_path = options.pop('admin_media_path', '')

DjangoApplicationCommand(options, admin_media_path).run()

+ 84
- 0
thesisenv/lib/python3.6/site-packages/gunicorn/pidfile.py

# -*- coding: utf-8 -
#
# This file is part of gunicorn released under the MIT license.
# See the NOTICE for more information.

import errno
import os
import tempfile


class Pidfile(object):
"""\
Manage a PID file. If a specific name is provided
it and '"%s.oldpid" % name' will be used. Otherwise
we create a temp file using os.mkstemp.
"""

def __init__(self, fname):
self.fname = fname
self.pid = None

def create(self, pid):
oldpid = self.validate()
if oldpid:
if oldpid == os.getpid():
return
msg = "Already running on PID %s (or pid file '%s' is stale)"
raise RuntimeError(msg % (oldpid, self.fname))

self.pid = pid

# Write pidfile
fdir = os.path.dirname(self.fname)
if fdir and not os.path.isdir(fdir):
raise RuntimeError("%s doesn't exist. Can't create pidfile." % fdir)
fd, fname = tempfile.mkstemp(dir=fdir)
os.write(fd, ("%s\n" % self.pid).encode('utf-8'))
if self.fname:
os.rename(fname, self.fname)
else:
self.fname = fname
os.close(fd)

# set permissions to -rw-r--r--
os.chmod(self.fname, 420)

def rename(self, path):
self.unlink()
self.fname = path
self.create(self.pid)

def unlink(self):
""" delete pidfile"""
try:
with open(self.fname, "r") as f:
pid1 = int(f.read() or 0)

if pid1 == self.pid:
os.unlink(self.fname)
except:
pass

def validate(self):
""" Validate pidfile and make it stale if needed"""
if not self.fname:
return
try:
with open(self.fname, "r") as f:
try:
wpid = int(f.read())
except ValueError:
return

try:
os.kill(wpid, 0)
return wpid
except OSError as e:
if e.args[0] == errno.ESRCH:
return
raise
except IOError as e:
if e.args[0] == errno.ENOENT:
return
raise
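
A short lifecycle sketch (the path is a placeholder):

import os

from gunicorn.pidfile import Pidfile

pidfile = Pidfile("/tmp/demo-gunicorn.pid")
pidfile.create(os.getpid())
print(pidfile.validate())   # our own pid -- the process behind the file is alive
pidfile.unlink()            # removes the file because the recorded pid matches ours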

+ 53
- 0
thesisenv/lib/python3.6/site-packages/gunicorn/reloader.py

# -*- coding: utf-8 -
#
# This file is part of gunicorn released under the MIT license.
# See the NOTICE for more information.

import os
import re
import sys
import time
import threading


class Reloader(threading.Thread):
def __init__(self, extra_files=None, interval=1, callback=None):
super(Reloader, self).__init__()
self.setDaemon(True)
self._extra_files = set(extra_files or ())
self._extra_files_lock = threading.RLock()
self._interval = interval
self._callback = callback

def add_extra_file(self, filename):
with self._extra_files_lock:
self._extra_files.add(filename)

def get_files(self):
fnames = [
re.sub('py[co]$', 'py', module.__file__)
for module in list(sys.modules.values())
if hasattr(module, '__file__')
]

with self._extra_files_lock:
fnames.extend(self._extra_files)

return fnames

def run(self):
mtimes = {}
while True:
for filename in self.get_files():
try:
mtime = os.stat(filename).st_mtime
except OSError:
continue
old_time = mtimes.get(filename)
if old_time is None:
mtimes[filename] = mtime
continue
elif mtime > old_time:
if self._callback:
self._callback(filename)
time.sleep(self._interval)
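
A hedged usage sketch (the callback and file names are invented): the reloader is a daemon thread that polls module mtimes and invokes the callback when something changes.

from gunicorn.reloader import Reloader

def on_change(filename):
    print("reloading because %s changed" % filename)

reloader = Reloader(extra_files=["local_settings.py"], interval=1, callback=on_change)
reloader.add_extra_file("gunicorn.conf.py")
reloader.start()   # polls get_files()/os.stat() in a loop until the process exits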

+ 592
- 0
thesisenv/lib/python3.6/site-packages/gunicorn/selectors.py

"""Selectors module.

This module allows high-level and efficient I/O multiplexing, built upon the
`select` module primitives.

The following code is adapted from trollius.selectors.
"""


from abc import ABCMeta, abstractmethod
from collections import namedtuple, Mapping
import math
import select
import sys

from gunicorn._compat import wrap_error, InterruptedError
from gunicorn import six


# generic events, that must be mapped to implementation-specific ones
EVENT_READ = (1 << 0)
EVENT_WRITE = (1 << 1)


def _fileobj_to_fd(fileobj):
"""Return a file descriptor from a file object.

Parameters:
fileobj -- file object or file descriptor

Returns:
corresponding file descriptor

Raises:
ValueError if the object is invalid
"""
if isinstance(fileobj, six.integer_types):
fd = fileobj
else:
try:
fd = int(fileobj.fileno())
except (AttributeError, TypeError, ValueError):
raise ValueError("Invalid file object: "
"{0!r}".format(fileobj))
if fd < 0:
raise ValueError("Invalid file descriptor: {0}".format(fd))
return fd


SelectorKey = namedtuple('SelectorKey', ['fileobj', 'fd', 'events', 'data'])
"""Object used to associate a file object to its backing file descriptor,
selected event mask and attached data."""


class _SelectorMapping(Mapping):
"""Mapping of file objects to selector keys."""

def __init__(self, selector):
self._selector = selector

def __len__(self):
return len(self._selector._fd_to_key)

def __getitem__(self, fileobj):
try:
fd = self._selector._fileobj_lookup(fileobj)
return self._selector._fd_to_key[fd]
except KeyError:
raise KeyError("{0!r} is not registered".format(fileobj))

def __iter__(self):
return iter(self._selector._fd_to_key)


class BaseSelector(six.with_metaclass(ABCMeta)):
"""Selector abstract base class.

A selector supports registering file objects to be monitored for specific
I/O events.

A file object is a file descriptor or any object with a `fileno()` method.
An arbitrary object can be attached to the file object, which can be used
for example to store context information, a callback, etc.

A selector can use various implementations (select(), poll(), epoll()...)
depending on the platform. The default `Selector` class uses the most
efficient implementation on the current platform.
"""

@abstractmethod
def register(self, fileobj, events, data=None):
"""Register a file object.

Parameters:
fileobj -- file object or file descriptor
events -- events to monitor (bitwise mask of EVENT_READ|EVENT_WRITE)
data -- attached data

Returns:
SelectorKey instance

Raises:
ValueError if events is invalid
KeyError if fileobj is already registered
OSError if fileobj is closed or otherwise is unacceptable to
the underlying system call (if a system call is made)

Note:
OSError may or may not be raised
"""
raise NotImplementedError

@abstractmethod
def unregister(self, fileobj):
"""Unregister a file object.

Parameters:
fileobj -- file object or file descriptor

Returns:
SelectorKey instance

Raises:
KeyError if fileobj is not registered

Note:
If fileobj is registered but has since been closed this does
*not* raise OSError (even if the wrapped syscall does)
"""
raise NotImplementedError

def modify(self, fileobj, events, data=None):
"""Change a registered file object monitored events or attached data.

Parameters:
fileobj -- file object or file descriptor
events -- events to monitor (bitwise mask of EVENT_READ|EVENT_WRITE)
data -- attached data

Returns:
SelectorKey instance

Raises:
Anything that unregister() or register() raises
"""
self.unregister(fileobj)
return self.register(fileobj, events, data)

@abstractmethod
def select(self, timeout=None):
"""Perform the actual selection, until some monitored file objects are
ready or a timeout expires.

Parameters:
timeout -- if timeout > 0, this specifies the maximum wait time, in
seconds
if timeout <= 0, the select() call won't block, and will
report the currently ready file objects
if timeout is None, select() will block until a monitored
file object becomes ready

Returns:
list of (key, events) for ready file objects
`events` is a bitwise mask of EVENT_READ|EVENT_WRITE
"""
raise NotImplementedError

def close(self):
"""Close the selector.

This must be called to make sure that any underlying resource is freed.
"""
pass

def get_key(self, fileobj):
"""Return the key associated to a registered file object.

Returns:
SelectorKey for this file object
"""
mapping = self.get_map()
try:
return mapping[fileobj]
except KeyError:
raise KeyError("{0!r} is not registered".format(fileobj))

@abstractmethod
def get_map(self):
"""Return a mapping of file objects to selector keys."""
raise NotImplementedError

def __enter__(self):
return self

def __exit__(self, *args):
self.close()


class _BaseSelectorImpl(BaseSelector):
"""Base selector implementation."""

def __init__(self):
# this maps file descriptors to keys
self._fd_to_key = {}
# read-only mapping returned by get_map()
self._map = _SelectorMapping(self)

def _fileobj_lookup(self, fileobj):
"""Return a file descriptor from a file object.

This wraps _fileobj_to_fd() to do an exhaustive search in case
the object is invalid but we still have it in our map. This
is used by unregister() so we can unregister an object that
was previously registered even if it is closed. It is also
used by _SelectorMapping.
"""
try:
return _fileobj_to_fd(fileobj)
except ValueError:
# Do an exhaustive search.
for key in self._fd_to_key.values():
if key.fileobj is fileobj:
return key.fd
# Raise ValueError after all.
raise

def register(self, fileobj, events, data=None):
if (not events) or (events & ~(EVENT_READ | EVENT_WRITE)):
raise ValueError("Invalid events: {0!r}".format(events))

key = SelectorKey(fileobj, self._fileobj_lookup(fileobj), events, data)

if key.fd in self._fd_to_key:
raise KeyError("{0!r} (FD {1}) is already registered"
.format(fileobj, key.fd))

self._fd_to_key[key.fd] = key
return key

def unregister(self, fileobj):
try:
key = self._fd_to_key.pop(self._fileobj_lookup(fileobj))
except KeyError:
raise KeyError("{0!r} is not registered".format(fileobj))
return key

def modify(self, fileobj, events, data=None):
# TODO: Subclasses can probably optimize this even further.
try:
key = self._fd_to_key[self._fileobj_lookup(fileobj)]
except KeyError:
raise KeyError("{0!r} is not registered".format(fileobj))
if events != key.events:
self.unregister(fileobj)
key = self.register(fileobj, events, data)
elif data != key.data:
# Use a shortcut to update the data.
key = key._replace(data=data)
self._fd_to_key[key.fd] = key
return key

def close(self):
self._fd_to_key.clear()

def get_map(self):
return self._map

def _key_from_fd(self, fd):
"""Return the key associated to a given file descriptor.

Parameters:
fd -- file descriptor

Returns:
corresponding key, or None if not found
"""
try:
return self._fd_to_key[fd]
except KeyError:
return None


class SelectSelector(_BaseSelectorImpl):
"""Select-based selector."""

def __init__(self):
super(SelectSelector, self).__init__()
self._readers = set()
self._writers = set()

def register(self, fileobj, events, data=None):
key = super(SelectSelector, self).register(fileobj, events, data)
if events & EVENT_READ:
self._readers.add(key.fd)
if events & EVENT_WRITE:
self._writers.add(key.fd)
return key

def unregister(self, fileobj):
key = super(SelectSelector, self).unregister(fileobj)
self._readers.discard(key.fd)
self._writers.discard(key.fd)
return key

if sys.platform == 'win32':
def _select(self, r, w, _, timeout=None):
r, w, x = select.select(r, w, w, timeout)
return r, w + x, []
else:
_select = select.select

def select(self, timeout=None):
timeout = None if timeout is None else max(timeout, 0)
ready = []
try:
r, w, _ = wrap_error(self._select,
self._readers, self._writers, [], timeout)
except InterruptedError:
return ready
r = set(r)
w = set(w)
for fd in r | w:
events = 0
if fd in r:
events |= EVENT_READ
if fd in w:
events |= EVENT_WRITE

key = self._key_from_fd(fd)
if key:
ready.append((key, events & key.events))
return ready


if hasattr(select, 'poll'):

class PollSelector(_BaseSelectorImpl):
"""Poll-based selector."""

def __init__(self):
super(PollSelector, self).__init__()
self._poll = select.poll()

def register(self, fileobj, events, data=None):
key = super(PollSelector, self).register(fileobj, events, data)
poll_events = 0
if events & EVENT_READ:
poll_events |= select.POLLIN
if events & EVENT_WRITE:
poll_events |= select.POLLOUT
self._poll.register(key.fd, poll_events)
return key

def unregister(self, fileobj):
key = super(PollSelector, self).unregister(fileobj)
self._poll.unregister(key.fd)
return key

def select(self, timeout=None):
if timeout is None:
timeout = None
elif timeout <= 0:
timeout = 0
else:
# poll() has a resolution of 1 millisecond, round away from
# zero to wait *at least* timeout seconds.
timeout = int(math.ceil(timeout * 1e3))
ready = []
try:
fd_event_list = wrap_error(self._poll.poll, timeout)
except InterruptedError:
return ready
for fd, event in fd_event_list:
events = 0
if event & ~select.POLLIN:
events |= EVENT_WRITE
if event & ~select.POLLOUT:
events |= EVENT_READ

key = self._key_from_fd(fd)
if key:
ready.append((key, events & key.events))
return ready


if hasattr(select, 'epoll'):

class EpollSelector(_BaseSelectorImpl):
"""Epoll-based selector."""

def __init__(self):
super(EpollSelector, self).__init__()
self._epoll = select.epoll()

def fileno(self):
return self._epoll.fileno()

def register(self, fileobj, events, data=None):
key = super(EpollSelector, self).register(fileobj, events, data)
epoll_events = 0
if events & EVENT_READ:
epoll_events |= select.EPOLLIN
if events & EVENT_WRITE:
epoll_events |= select.EPOLLOUT
self._epoll.register(key.fd, epoll_events)
return key

def unregister(self, fileobj):
key = super(EpollSelector, self).unregister(fileobj)
try:
self._epoll.unregister(key.fd)
except OSError:
# This can happen if the FD was closed since it
# was registered.
pass
return key

def select(self, timeout=None):
if timeout is None:
timeout = -1
elif timeout <= 0:
timeout = 0
else:
# epoll_wait() has a resolution of 1 millisecond, round away
# from zero to wait *at least* timeout seconds.
timeout = math.ceil(timeout * 1e3) * 1e-3
max_ev = len(self._fd_to_key)
ready = []
try:
fd_event_list = wrap_error(self._epoll.poll, timeout, max_ev)
except InterruptedError:
return ready
for fd, event in fd_event_list:
events = 0
if event & ~select.EPOLLIN:
events |= EVENT_WRITE
if event & ~select.EPOLLOUT:
events |= EVENT_READ

key = self._key_from_fd(fd)
if key:
ready.append((key, events & key.events))
return ready

def close(self):
self._epoll.close()
super(EpollSelector, self).close()


if hasattr(select, 'devpoll'):

class DevpollSelector(_BaseSelectorImpl):
"""Solaris /dev/poll selector."""

def __init__(self):
super(DevpollSelector, self).__init__()
self._devpoll = select.devpoll()

def fileno(self):
return self._devpoll.fileno()

def register(self, fileobj, events, data=None):
key = super(DevpollSelector, self).register(fileobj, events, data)
poll_events = 0
if events & EVENT_READ:
poll_events |= select.POLLIN
if events & EVENT_WRITE:
poll_events |= select.POLLOUT
self._devpoll.register(key.fd, poll_events)
return key

def unregister(self, fileobj):
key = super(DevpollSelector, self).unregister(fileobj)
self._devpoll.unregister(key.fd)
return key

def select(self, timeout=None):
if timeout is None:
timeout = None
elif timeout <= 0:
timeout = 0
else:
# devpoll() has a resolution of 1 millisecond, round away from
# zero to wait *at least* timeout seconds.
timeout = math.ceil(timeout * 1e3)
ready = []
try:
fd_event_list = self._devpoll.poll(timeout)
except InterruptedError:
return ready
for fd, event in fd_event_list:
events = 0
if event & ~select.POLLIN:
events |= EVENT_WRITE
if event & ~select.POLLOUT:
events |= EVENT_READ

key = self._key_from_fd(fd)
if key:
ready.append((key, events & key.events))
return ready

def close(self):
self._devpoll.close()
super(DevpollSelector, self).close()


if hasattr(select, 'kqueue'):

class KqueueSelector(_BaseSelectorImpl):
"""Kqueue-based selector."""

def __init__(self):
super(KqueueSelector, self).__init__()
self._kqueue = select.kqueue()

def fileno(self):
return self._kqueue.fileno()

def register(self, fileobj, events, data=None):
key = super(KqueueSelector, self).register(fileobj, events, data)
if events & EVENT_READ:
kev = select.kevent(key.fd, select.KQ_FILTER_READ,
select.KQ_EV_ADD)
self._kqueue.control([kev], 0, 0)
if events & EVENT_WRITE:
kev = select.kevent(key.fd, select.KQ_FILTER_WRITE,
select.KQ_EV_ADD)
self._kqueue.control([kev], 0, 0)
return key

def unregister(self, fileobj):
key = super(KqueueSelector, self).unregister(fileobj)
if key.events & EVENT_READ:
kev = select.kevent(key.fd, select.KQ_FILTER_READ,
select.KQ_EV_DELETE)
try:
self._kqueue.control([kev], 0, 0)
except OSError:
# This can happen if the FD was closed since it
# was registered.
pass
if key.events & EVENT_WRITE:
kev = select.kevent(key.fd, select.KQ_FILTER_WRITE,
select.KQ_EV_DELETE)
try:
self._kqueue.control([kev], 0, 0)
except OSError:
# See comment above.
pass
return key

def select(self, timeout=None):
timeout = None if timeout is None else max(timeout, 0)
max_ev = len(self._fd_to_key)
ready = []
try:
kev_list = wrap_error(self._kqueue.control,
None, max_ev, timeout)
except InterruptedError:
return ready
for kev in kev_list:
fd = kev.ident
flag = kev.filter
events = 0
if flag == select.KQ_FILTER_READ:
events |= EVENT_READ
if flag == select.KQ_FILTER_WRITE:
events |= EVENT_WRITE

key = self._key_from_fd(fd)
if key:
ready.append((key, events & key.events))
return ready

def close(self):
self._kqueue.close()
super(KqueueSelector, self).close()


# Choose the best implementation: roughly, epoll|kqueue|devpoll > poll > select.
# select() also can't accept a FD > FD_SETSIZE (usually around 1024)
if 'KqueueSelector' in globals():
DefaultSelector = KqueueSelector
elif 'EpollSelector' in globals():
DefaultSelector = EpollSelector
elif 'DevpollSelector' in globals():
DefaultSelector = DevpollSelector
elif 'PollSelector' in globals():
DefaultSelector = PollSelector
else:
DefaultSelector = SelectSelector
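
A small, self-contained sketch of the API (same shape as the stdlib selectors module): register a non-blocking listening socket and poll it once.

import socket

from gunicorn.selectors import DefaultSelector, EVENT_READ

sel = DefaultSelector()
srv = socket.socket()
srv.bind(("127.0.0.1", 0))
srv.listen(5)
srv.setblocking(False)

sel.register(srv, EVENT_READ, data="accept")
for key, events in sel.select(timeout=0.1):   # empty here -- nobody has connected
    print(key.data, key.fileobj)
sel.close()
srv.close()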

+ 762
- 0
thesisenv/lib/python3.6/site-packages/gunicorn/six.py

"""Utilities for writing code that runs on Python 2 and 3"""

# Copyright (c) 2010-2014 Benjamin Peterson
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.

from __future__ import absolute_import

import functools
import operator
import sys
import types

__author__ = "Benjamin Peterson <benjamin@python.org>"
__version__ = "1.8.0"


# Useful for very coarse version differentiation.
PY2 = sys.version_info[0] == 2
PY3 = sys.version_info[0] == 3

if PY3:
string_types = str,
integer_types = int,
class_types = type,
text_type = str
binary_type = bytes

MAXSIZE = sys.maxsize
else:
string_types = basestring,
integer_types = (int, long)
class_types = (type, types.ClassType)
text_type = unicode
binary_type = str

if sys.platform.startswith("java"):
# Jython always uses 32 bits.
MAXSIZE = int((1 << 31) - 1)
else:
# It's possible to have sizeof(long) != sizeof(Py_ssize_t).
class X(object):
def __len__(self):
return 1 << 31
try:
len(X())
except OverflowError:
# 32-bit
MAXSIZE = int((1 << 31) - 1)
else:
# 64-bit
MAXSIZE = int((1 << 63) - 1)
del X


def _add_doc(func, doc):
"""Add documentation to a function."""
func.__doc__ = doc


def _import_module(name):
"""Import module, returning the module after the last dot."""
__import__(name)
return sys.modules[name]


class _LazyDescr(object):

def __init__(self, name):
self.name = name

def __get__(self, obj, tp):
result = self._resolve()
setattr(obj, self.name, result) # Invokes __set__.
# This is a bit ugly, but it avoids running this again.
delattr(obj.__class__, self.name)
return result


class MovedModule(_LazyDescr):

def __init__(self, name, old, new=None):
super(MovedModule, self).__init__(name)
if PY3:
if new is None:
new = name
self.mod = new
else:
self.mod = old

def _resolve(self):
return _import_module(self.mod)

def __getattr__(self, attr):
_module = self._resolve()
value = getattr(_module, attr)
setattr(self, attr, value)
return value


class _LazyModule(types.ModuleType):

def __init__(self, name):
super(_LazyModule, self).__init__(name)
self.__doc__ = self.__class__.__doc__

def __dir__(self):
attrs = ["__doc__", "__name__"]
attrs += [attr.name for attr in self._moved_attributes]
return attrs

# Subclasses should override this
_moved_attributes = []


class MovedAttribute(_LazyDescr):

def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None):
super(MovedAttribute, self).__init__(name)
if PY3:
if new_mod is None:
new_mod = name
self.mod = new_mod
if new_attr is None:
if old_attr is None:
new_attr = name
else:
new_attr = old_attr
self.attr = new_attr
else:
self.mod = old_mod
if old_attr is None:
old_attr = name
self.attr = old_attr

def _resolve(self):
module = _import_module(self.mod)
return getattr(module, self.attr)


class _SixMetaPathImporter(object):
"""
A meta path importer to import six.moves and its submodules.

This class implements a PEP 302 finder and loader. It should be compatible
with Python 2.5 and all existing versions of Python 3.
"""
def __init__(self, six_module_name):
self.name = six_module_name
self.known_modules = {}

def _add_module(self, mod, *fullnames):
for fullname in fullnames:
self.known_modules[self.name + "." + fullname] = mod

def _get_module(self, fullname):
return self.known_modules[self.name + "." + fullname]

def find_module(self, fullname, path=None):
if fullname in self.known_modules:
return self
return None

def __get_module(self, fullname):
try:
return self.known_modules[fullname]
except KeyError:
raise ImportError("This loader does not know module " + fullname)

def load_module(self, fullname):
try:
# in case of a reload
return sys.modules[fullname]
except KeyError:
pass
mod = self.__get_module(fullname)
if isinstance(mod, MovedModule):
mod = mod._resolve()
else:
mod.__loader__ = self
sys.modules[fullname] = mod
return mod

def is_package(self, fullname):
"""
Return true if the named module is a package.

We need this method to get correct spec objects with
Python 3.4 (see PEP451)
"""
return hasattr(self.__get_module(fullname), "__path__")

def get_code(self, fullname):
"""Return None

Required if is_package is implemented."""
self.__get_module(fullname) # eventually raises ImportError
return None
get_source = get_code # same as get_code

_importer = _SixMetaPathImporter(__name__)


class _MovedItems(_LazyModule):
"""Lazy loading of moved objects"""
__path__ = [] # mark as package


_moved_attributes = [
MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"),
MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"),
MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"),
MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"),
MovedAttribute("intern", "__builtin__", "sys"),
MovedAttribute("map", "itertools", "builtins", "imap", "map"),
MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"),
MovedAttribute("reload_module", "__builtin__", "imp", "reload"),
MovedAttribute("reduce", "__builtin__", "functools"),
MovedAttribute("shlex_quote", "pipes", "shlex", "quote"),
MovedAttribute("StringIO", "StringIO", "io"),
MovedAttribute("UserDict", "UserDict", "collections"),
MovedAttribute("UserList", "UserList", "collections"),
MovedAttribute("UserString", "UserString", "collections"),
MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"),
MovedAttribute("zip", "itertools", "builtins", "izip", "zip"),
MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"),

MovedModule("builtins", "__builtin__"),
MovedModule("configparser", "ConfigParser"),
MovedModule("copyreg", "copy_reg"),
MovedModule("dbm_gnu", "gdbm", "dbm.gnu"),
MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"),
MovedModule("http_cookiejar", "cookielib", "http.cookiejar"),
MovedModule("http_cookies", "Cookie", "http.cookies"),
MovedModule("html_entities", "htmlentitydefs", "html.entities"),
MovedModule("html_parser", "HTMLParser", "html.parser"),
MovedModule("http_client", "httplib", "http.client"),
MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"),
MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"),
MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"),
MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"),
MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"),
MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"),
MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"),
MovedModule("cPickle", "cPickle", "pickle"),
MovedModule("queue", "Queue"),
MovedModule("reprlib", "repr"),
MovedModule("socketserver", "SocketServer"),
MovedModule("_thread", "thread", "_thread"),
MovedModule("tkinter", "Tkinter"),
MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"),
MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"),
MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"),
MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"),
MovedModule("tkinter_tix", "Tix", "tkinter.tix"),
MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"),
MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"),
MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"),
MovedModule("tkinter_colorchooser", "tkColorChooser",
"tkinter.colorchooser"),
MovedModule("tkinter_commondialog", "tkCommonDialog",
"tkinter.commondialog"),
MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"),
MovedModule("tkinter_font", "tkFont", "tkinter.font"),
MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"),
MovedModule("tkinter_tksimpledialog", "tkSimpleDialog",
"tkinter.simpledialog"),
MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"),
MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"),
MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"),
MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"),
MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"),
MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"),
MovedModule("winreg", "_winreg"),
]
for attr in _moved_attributes:
setattr(_MovedItems, attr.name, attr)
if isinstance(attr, MovedModule):
_importer._add_module(attr, "moves." + attr.name)
del attr

_MovedItems._moved_attributes = _moved_attributes

moves = _MovedItems(__name__ + ".moves")
_importer._add_module(moves, "moves")


class Module_six_moves_urllib_parse(_LazyModule):
"""Lazy loading of moved objects in six.moves.urllib_parse"""


_urllib_parse_moved_attributes = [
MovedAttribute("ParseResult", "urlparse", "urllib.parse"),
MovedAttribute("SplitResult", "urlparse", "urllib.parse"),
MovedAttribute("parse_qs", "urlparse", "urllib.parse"),
MovedAttribute("parse_qsl", "urlparse", "urllib.parse"),
MovedAttribute("urldefrag", "urlparse", "urllib.parse"),
MovedAttribute("urljoin", "urlparse", "urllib.parse"),
MovedAttribute("urlparse", "urlparse", "urllib.parse"),
MovedAttribute("urlsplit", "urlparse", "urllib.parse"),
MovedAttribute("urlunparse", "urlparse", "urllib.parse"),
MovedAttribute("urlunsplit", "urlparse", "urllib.parse"),
MovedAttribute("quote", "urllib", "urllib.parse"),
MovedAttribute("quote_plus", "urllib", "urllib.parse"),
MovedAttribute("unquote", "urllib", "urllib.parse"),
MovedAttribute("unquote_plus", "urllib", "urllib.parse"),
MovedAttribute("urlencode", "urllib", "urllib.parse"),
MovedAttribute("splitquery", "urllib", "urllib.parse"),
MovedAttribute("splittag", "urllib", "urllib.parse"),
MovedAttribute("splituser", "urllib", "urllib.parse"),
MovedAttribute("uses_fragment", "urlparse", "urllib.parse"),
MovedAttribute("uses_netloc", "urlparse", "urllib.parse"),
MovedAttribute("uses_params", "urlparse", "urllib.parse"),
MovedAttribute("uses_query", "urlparse", "urllib.parse"),
MovedAttribute("uses_relative", "urlparse", "urllib.parse"),
]
for attr in _urllib_parse_moved_attributes:
setattr(Module_six_moves_urllib_parse, attr.name, attr)
del attr

Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes

_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"),
"moves.urllib_parse", "moves.urllib.parse")


class Module_six_moves_urllib_error(_LazyModule):
"""Lazy loading of moved objects in six.moves.urllib_error"""


_urllib_error_moved_attributes = [
MovedAttribute("URLError", "urllib2", "urllib.error"),
MovedAttribute("HTTPError", "urllib2", "urllib.error"),
MovedAttribute("ContentTooShortError", "urllib", "urllib.error"),
]
for attr in _urllib_error_moved_attributes:
setattr(Module_six_moves_urllib_error, attr.name, attr)
del attr

Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes

_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"),
"moves.urllib_error", "moves.urllib.error")


class Module_six_moves_urllib_request(_LazyModule):
"""Lazy loading of moved objects in six.moves.urllib_request"""


_urllib_request_moved_attributes = [
MovedAttribute("urlopen", "urllib2", "urllib.request"),
MovedAttribute("install_opener", "urllib2", "urllib.request"),
MovedAttribute("build_opener", "urllib2", "urllib.request"),
MovedAttribute("pathname2url", "urllib", "urllib.request"),
MovedAttribute("url2pathname", "urllib", "urllib.request"),
MovedAttribute("getproxies", "urllib", "urllib.request"),
MovedAttribute("Request", "urllib2", "urllib.request"),
MovedAttribute("OpenerDirector", "urllib2", "urllib.request"),
MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"),
MovedAttribute("ProxyHandler", "urllib2", "urllib.request"),
MovedAttribute("BaseHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"),
MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"),
MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"),
MovedAttribute("FileHandler", "urllib2", "urllib.request"),
MovedAttribute("FTPHandler", "urllib2", "urllib.request"),
MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"),
MovedAttribute("UnknownHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"),
MovedAttribute("urlretrieve", "urllib", "urllib.request"),
MovedAttribute("urlcleanup", "urllib", "urllib.request"),
MovedAttribute("URLopener", "urllib", "urllib.request"),
MovedAttribute("FancyURLopener", "urllib", "urllib.request"),
MovedAttribute("proxy_bypass", "urllib", "urllib.request"),
]
for attr in _urllib_request_moved_attributes:
setattr(Module_six_moves_urllib_request, attr.name, attr)
del attr

Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes

_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"),
"moves.urllib_request", "moves.urllib.request")


class Module_six_moves_urllib_response(_LazyModule):
"""Lazy loading of moved objects in six.moves.urllib_response"""


_urllib_response_moved_attributes = [
MovedAttribute("addbase", "urllib", "urllib.response"),
MovedAttribute("addclosehook", "urllib", "urllib.response"),
MovedAttribute("addinfo", "urllib", "urllib.response"),
MovedAttribute("addinfourl", "urllib", "urllib.response"),
]
for attr in _urllib_response_moved_attributes:
setattr(Module_six_moves_urllib_response, attr.name, attr)
del attr

Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes

_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"),
"moves.urllib_response", "moves.urllib.response")


class Module_six_moves_urllib_robotparser(_LazyModule):
"""Lazy loading of moved objects in six.moves.urllib_robotparser"""


_urllib_robotparser_moved_attributes = [
MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"),
]
for attr in _urllib_robotparser_moved_attributes:
setattr(Module_six_moves_urllib_robotparser, attr.name, attr)
del attr

Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes

_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"),
"moves.urllib_robotparser", "moves.urllib.robotparser")


class Module_six_moves_urllib(types.ModuleType):
"""Create a six.moves.urllib namespace that resembles the Python 3 namespace"""
__path__ = [] # mark as package
parse = _importer._get_module("moves.urllib_parse")
error = _importer._get_module("moves.urllib_error")
request = _importer._get_module("moves.urllib_request")
response = _importer._get_module("moves.urllib_response")
robotparser = _importer._get_module("moves.urllib_robotparser")

def __dir__(self):
return ['parse', 'error', 'request', 'response', 'robotparser']

_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"),
"moves.urllib")


def add_move(move):
"""Add an item to six.moves."""
setattr(_MovedItems, move.name, move)


def remove_move(name):
"""Remove item from six.moves."""
try:
delattr(_MovedItems, name)
except AttributeError:
try:
del moves.__dict__[name]
except KeyError:
raise AttributeError("no such move, %r" % (name,))


if PY3:
_meth_func = "__func__"
_meth_self = "__self__"

_func_closure = "__closure__"
_func_code = "__code__"
_func_defaults = "__defaults__"
_func_globals = "__globals__"
else:
_meth_func = "im_func"
_meth_self = "im_self"

_func_closure = "func_closure"
_func_code = "func_code"
_func_defaults = "func_defaults"
_func_globals = "func_globals"


try:
advance_iterator = next
except NameError:
def advance_iterator(it):
return it.next()
next = advance_iterator


try:
callable = callable
except NameError:
def callable(obj):
return any("__call__" in klass.__dict__ for klass in type(obj).__mro__)


if PY3:
def get_unbound_function(unbound):
return unbound

create_bound_method = types.MethodType

Iterator = object
else:
def get_unbound_function(unbound):
return unbound.im_func

def create_bound_method(func, obj):
return types.MethodType(func, obj, obj.__class__)

class Iterator(object):

def next(self):
return type(self).__next__(self)

callable = callable
_add_doc(get_unbound_function,
"""Get the function out of a possibly unbound function""")


get_method_function = operator.attrgetter(_meth_func)
get_method_self = operator.attrgetter(_meth_self)
get_function_closure = operator.attrgetter(_func_closure)
get_function_code = operator.attrgetter(_func_code)
get_function_defaults = operator.attrgetter(_func_defaults)
get_function_globals = operator.attrgetter(_func_globals)


if PY3:
def iterkeys(d, **kw):
return iter(d.keys(**kw))

def itervalues(d, **kw):
return iter(d.values(**kw))

def iteritems(d, **kw):
return iter(d.items(**kw))

def iterlists(d, **kw):
return iter(d.lists(**kw))
else:
def iterkeys(d, **kw):
return iter(d.iterkeys(**kw))

def itervalues(d, **kw):
return iter(d.itervalues(**kw))

def iteritems(d, **kw):
return iter(d.iteritems(**kw))

def iterlists(d, **kw):
return iter(d.iterlists(**kw))

_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.")
_add_doc(itervalues, "Return an iterator over the values of a dictionary.")
_add_doc(iteritems,
"Return an iterator over the (key, value) pairs of a dictionary.")
_add_doc(iterlists,
"Return an iterator over the (key, [values]) pairs of a dictionary.")


if PY3:
def b(s):
return s.encode("latin-1")
def u(s):
return s
unichr = chr
if sys.version_info[1] <= 1:
def int2byte(i):
return bytes((i,))
else:
# This is about 2x faster than the implementation above on 3.2+
int2byte = operator.methodcaller("to_bytes", 1, "big")
byte2int = operator.itemgetter(0)
indexbytes = operator.getitem
iterbytes = iter
import io
StringIO = io.StringIO
BytesIO = io.BytesIO
else:
def b(s):
return s
# Workaround for standalone backslash
def u(s):
return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape")
unichr = unichr
int2byte = chr
def byte2int(bs):
return ord(bs[0])
def indexbytes(buf, i):
return ord(buf[i])
def iterbytes(buf):
return (ord(byte) for byte in buf)
import StringIO
StringIO = BytesIO = StringIO.StringIO
_add_doc(b, """Byte literal""")
_add_doc(u, """Text literal""")


if PY3:
exec_ = getattr(moves.builtins, "exec")


def reraise(tp, value, tb=None):
if value is None:
value = tp()
if value.__traceback__ is not tb:
raise value.with_traceback(tb)
raise value

else:
def exec_(_code_, _globs_=None, _locs_=None):
"""Execute code in a namespace."""
if _globs_ is None:
frame = sys._getframe(1)
_globs_ = frame.f_globals
if _locs_ is None:
_locs_ = frame.f_locals
del frame
elif _locs_ is None:
_locs_ = _globs_
exec("""exec _code_ in _globs_, _locs_""")


exec_("""def reraise(tp, value, tb=None):
raise tp, value, tb
""")


print_ = getattr(moves.builtins, "print", None)
if print_ is None:
def print_(*args, **kwargs):
"""The new-style print function for Python 2.4 and 2.5."""
fp = kwargs.pop("file", sys.stdout)
if fp is None:
return
def write(data):
if not isinstance(data, basestring):
data = str(data)
# If the file has an encoding, encode unicode with it.
if (isinstance(fp, file) and
isinstance(data, unicode) and
fp.encoding is not None):
errors = getattr(fp, "errors", None)
if errors is None:
errors = "strict"
data = data.encode(fp.encoding, errors)
fp.write(data)
want_unicode = False
sep = kwargs.pop("sep", None)
if sep is not None:
if isinstance(sep, unicode):
want_unicode = True
elif not isinstance(sep, str):
raise TypeError("sep must be None or a string")
end = kwargs.pop("end", None)
if end is not None:
if isinstance(end, unicode):
want_unicode = True
elif not isinstance(end, str):
raise TypeError("end must be None or a string")
if kwargs:
raise TypeError("invalid keyword arguments to print()")
if not want_unicode:
for arg in args:
if isinstance(arg, unicode):
want_unicode = True
break
if want_unicode:
newline = unicode("\n")
space = unicode(" ")
else:
newline = "\n"
space = " "
if sep is None:
sep = space
if end is None:
end = newline
for i, arg in enumerate(args):
if i:
write(sep)
write(arg)
write(end)

_add_doc(reraise, """Reraise an exception.""")

if sys.version_info[0:2] < (3, 4):
def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS,
updated=functools.WRAPPER_UPDATES):
def wrapper(f):
f = functools.wraps(wrapped)(f)
f.__wrapped__ = wrapped
return f
return wrapper
else:
wraps = functools.wraps

def with_metaclass(meta, *bases):
"""Create a base class with a metaclass."""
# This requires a bit of explanation: the basic idea is to make a dummy
# metaclass for one level of class instantiation that replaces itself with
# the actual metaclass.
class metaclass(meta):
def __new__(cls, name, this_bases, d):
return meta(name, bases, d)
return type.__new__(metaclass, 'temporary_class', (), {})


def add_metaclass(metaclass):
"""Class decorator for creating a class with a metaclass."""
def wrapper(cls):
orig_vars = cls.__dict__.copy()
slots = orig_vars.get('__slots__')
if slots is not None:
if isinstance(slots, str):
slots = [slots]
for slots_var in slots:
orig_vars.pop(slots_var)
orig_vars.pop('__dict__', None)
orig_vars.pop('__weakref__', None)
return metaclass(cls.__name__, cls.__bases__, orig_vars)
return wrapper

# Complete the moves implementation.
# This code is at the end of this module to speed up module loading.
# Turn this module into a package.
__path__ = [] # required for PEP 302 and PEP 451
__package__ = __name__ # see PEP 366 @ReservedAssignment
if globals().get("__spec__") is not None:
__spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable
# Remove other six meta path importers, since they cause problems. This can
# happen if six is removed from sys.modules and then reloaded. (Setuptools does
# this for some reason.)
if sys.meta_path:
for i, importer in enumerate(sys.meta_path):
# Here's some real nastiness: Another "instance" of the six module might
# be floating around. Therefore, we can't use isinstance() to check for
# the six meta path importer, since the other six instance will have
# inserted an importer with a different class.
if (type(importer).__name__ == "_SixMetaPathImporter" and
importer.name == __name__):
del sys.meta_path[i]
break
del i, importer
# Finally, add the importer to the meta path import hook.
sys.meta_path.append(_importer)
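
The module above is gunicorn's vendored copy of six, a pure compatibility shim; the rest of the package imports it as "from gunicorn import six". A minimal sketch of how such helpers are typically consumed (the describe() function and the config dict are invented for illustration):

from gunicorn import six

def describe(value):
    # isinstance() checks that cover str/unicode and int/long on Python 2 and 3
    if isinstance(value, six.string_types):
        return "text"
    if isinstance(value, six.integer_types):
        return "number"
    return type(value).__name__

config = {"bind": "127.0.0.1:8000", "workers": 3}
for key, value in six.iteritems(config):
    six.print_(key, "->", describe(value))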

+ 233
- 0
thesisenv/lib/python3.6/site-packages/gunicorn/sock.py View File

# -*- coding: utf-8 -
#
# This file is part of gunicorn released under the MIT license.
# See the NOTICE for more information.

import errno
import fcntl
import os
import socket
import stat
import sys
import time

from gunicorn import util
from gunicorn.six import string_types

SD_LISTEN_FDS_START = 3


class BaseSocket(object):

def __init__(self, address, conf, log, fd=None):
self.log = log
self.conf = conf

self.cfg_addr = address
if fd is None:
sock = socket.socket(self.FAMILY, socket.SOCK_STREAM)
else:
sock = socket.fromfd(fd, self.FAMILY, socket.SOCK_STREAM)

self.sock = self.set_options(sock, bound=(fd is not None))

def __str__(self):
return "<socket %d>" % self.sock.fileno()

def __getattr__(self, name):
return getattr(self.sock, name)

def set_options(self, sock, bound=False):
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
if not bound:
self.bind(sock)
sock.setblocking(0)

# make sure that the socket can be inherited
if hasattr(sock, "set_inheritable"):
sock.set_inheritable(True)

sock.listen(self.conf.backlog)
return sock

def bind(self, sock):
sock.bind(self.cfg_addr)

def close(self):
if self.sock is None:
return

try:
self.sock.close()
except socket.error as e:
self.log.info("Error while closing socket %s", str(e))

self.sock = None


class TCPSocket(BaseSocket):

FAMILY = socket.AF_INET

def __str__(self):
if self.conf.is_ssl:
scheme = "https"
else:
scheme = "http"

addr = self.sock.getsockname()
return "%s://%s:%d" % (scheme, addr[0], addr[1])

def set_options(self, sock, bound=False):
sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
return super(TCPSocket, self).set_options(sock, bound=bound)


class TCP6Socket(TCPSocket):

FAMILY = socket.AF_INET6

def __str__(self):
(host, port, fl, sc) = self.sock.getsockname()
return "http://[%s]:%d" % (host, port)


class UnixSocket(BaseSocket):

FAMILY = socket.AF_UNIX

def __init__(self, addr, conf, log, fd=None):
if fd is None:
try:
st = os.stat(addr)
except OSError as e:
if e.args[0] != errno.ENOENT:
raise
else:
if stat.S_ISSOCK(st.st_mode):
os.remove(addr)
else:
raise ValueError("%r is not a socket" % addr)
super(UnixSocket, self).__init__(addr, conf, log, fd=fd)

def __str__(self):
return "unix:%s" % self.cfg_addr

def bind(self, sock):
old_umask = os.umask(self.conf.umask)
sock.bind(self.cfg_addr)
util.chown(self.cfg_addr, self.conf.uid, self.conf.gid)
os.umask(old_umask)

def close(self):
os.unlink(self.cfg_addr)
super(UnixSocket, self).close()


def _sock_type(addr):
if isinstance(addr, tuple):
if util.is_ipv6(addr[0]):
sock_type = TCP6Socket
else:
sock_type = TCPSocket
elif isinstance(addr, string_types):
sock_type = UnixSocket
else:
raise TypeError("Unable to create socket from: %r" % addr)
return sock_type


def create_sockets(conf, log):
"""
Create a new socket for the given address. If the
address is a tuple, a TCP socket is created. If it
is a string, a Unix socket is created. Otherwise
a TypeError is raised.
"""

# Systemd support: use the sockets managed by systemd and passed to
# gunicorn.
# http://www.freedesktop.org/software/systemd/man/systemd.socket.html
listeners = []
if ('LISTEN_PID' in os.environ
and int(os.environ.get('LISTEN_PID')) == os.getpid()):
for i in range(int(os.environ.get('LISTEN_FDS', 0))):
fd = i + SD_LISTEN_FDS_START
try:
sock = socket.fromfd(fd, socket.AF_UNIX, socket.SOCK_STREAM)
sockname = sock.getsockname()
if isinstance(sockname, str) and sockname.startswith('/'):
listeners.append(UnixSocket(sockname, conf, log, fd=fd))
elif len(sockname) == 2 and '.' in sockname[0]:
listeners.append(TCPSocket("%s:%s" % sockname, conf, log,
fd=fd))
elif len(sockname) == 4 and ':' in sockname[0]:
listeners.append(TCP6Socket("[%s]:%s" % sockname[:2], conf,
log, fd=fd))
except socket.error:
pass
del os.environ['LISTEN_PID'], os.environ['LISTEN_FDS']

if listeners:
log.debug('Socket activation sockets: %s',
",".join([str(l) for l in listeners]))
return listeners

# get it only once
laddr = conf.address

# check ssl config early to raise the error on startup
# only the certfile is needed since it can contain the keyfile
if conf.certfile and not os.path.exists(conf.certfile):
raise ValueError('certfile "%s" does not exist' % conf.certfile)

if conf.keyfile and not os.path.exists(conf.keyfile):
raise ValueError('keyfile "%s" does not exist' % conf.keyfile)

# sockets are already bound
if 'GUNICORN_FD' in os.environ:
fds = os.environ.pop('GUNICORN_FD').split(',')
for i, fd in enumerate(fds):
fd = int(fd)
addr = laddr[i]
sock_type = _sock_type(addr)

try:
listeners.append(sock_type(addr, conf, log, fd=fd))
except socket.error as e:
if e.args[0] == errno.ENOTCONN:
log.error("GUNICORN_FD should refer to an open socket.")
else:
raise
return listeners

# no sockets are bound; this is the first initialization of gunicorn in this env.
for addr in laddr:
sock_type = _sock_type(addr)
# If we fail to create a socket from GUNICORN_FD
# we fall through and try to open the socket
# normally.
sock = None
for i in range(5):
try:
sock = sock_type(addr, conf, log)
except socket.error as e:
if e.args[0] == errno.EADDRINUSE:
log.error("Connection in use: %s", str(addr))
if e.args[0] == errno.EADDRNOTAVAIL:
log.error("Invalid address: %s", str(addr))
if i < 5:
msg = "connection to {addr} failed: {error}"
log.debug(msg.format(addr=str(addr), error=str(e)))
log.error("Retrying in 1 second.")
time.sleep(1)
else:
break

if sock is None:
log.error("Can't connect to %s", str(addr))
sys.exit(1)

listeners.append(sock)

return listeners
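
sock.py turns each configured bind address into a listening socket: a (host, port) tuple becomes a TCPSocket (or TCP6Socket when the host is an IPv6 address), a plain string becomes a UnixSocket, and create_sockets() additionally honours systemd socket activation and the GUNICORN_FD variable. A rough illustration of the class dispatch, using only _sock_type() from above (the sample addresses are made up):

from gunicorn.sock import _sock_type

for addr in [("127.0.0.1", 8000), ("::1", 8000), "/tmp/gunicorn.sock"]:
    # _sock_type() only selects the class; create_sockets() does the binding
    print(addr, "->", _sock_type(addr).__name__)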

+ 548
- 0
thesisenv/lib/python3.6/site-packages/gunicorn/util.py View File

# -*- coding: utf-8 -
#
# This file is part of gunicorn released under the MIT license.
# See the NOTICE for more information.

from __future__ import print_function

import email.utils
import fcntl
import grp
import io
import os
import pkg_resources
import pwd
import random
import resource
import socket
import stat
import sys
import textwrap
import time
import traceback
import inspect
import errno
import warnings
import cgi

from gunicorn.errors import AppImportError
from gunicorn.six import text_type
from gunicorn.workers import SUPPORTED_WORKERS

MAXFD = 1024
REDIRECT_TO = getattr(os, 'devnull', '/dev/null')

timeout_default = object()

CHUNK_SIZE = (16 * 1024)

MAX_BODY = 1024 * 132

# Server and Date aren't technically hop-by-hop
# headers, but they are in the purview of the
# origin server which the WSGI spec says we should
# act like. So we drop them and add our own.
#
# In the future, concatenating server header values
# might be better, but nothing else does it and
# dropping them is easier.
hop_headers = set("""
connection keep-alive proxy-authenticate proxy-authorization
te trailers transfer-encoding upgrade
server date
""".split())

try:
from setproctitle import setproctitle
def _setproctitle(title):
setproctitle("gunicorn: %s" % title)
except ImportError:
def _setproctitle(title):
return


try:
from importlib import import_module
except ImportError:
def _resolve_name(name, package, level):
"""Return the absolute name of the module to be imported."""
if not hasattr(package, 'rindex'):
raise ValueError("'package' not set to a string")
dot = len(package)
for x in range(level, 1, -1):
try:
dot = package.rindex('.', 0, dot)
except ValueError:
msg = "attempted relative import beyond top-level package"
raise ValueError(msg)
return "%s.%s" % (package[:dot], name)

def import_module(name, package=None):
"""Import a module.

The 'package' argument is required when performing a relative import. It
specifies the package to use as the anchor point from which to resolve the
relative import to an absolute import.

"""
if name.startswith('.'):
if not package:
raise TypeError("relative imports require the 'package' argument")
level = 0
for character in name:
if character != '.':
break
level += 1
name = _resolve_name(name[level:], package, level)
__import__(name)
return sys.modules[name]


def load_class(uri, default="gunicorn.workers.sync.SyncWorker",
section="gunicorn.workers"):
if inspect.isclass(uri):
return uri
if uri.startswith("egg:"):
# uses entry points
entry_str = uri.split("egg:")[1]
try:
dist, name = entry_str.rsplit("#", 1)
except ValueError:
dist = entry_str
name = default

try:
return pkg_resources.load_entry_point(dist, section, name)
except:
exc = traceback.format_exc()
msg = "class uri %r invalid or not found: \n\n[%s]"
raise RuntimeError(msg % (uri, exc))
else:
components = uri.split('.')
if len(components) == 1:
while True:
if uri.startswith("#"):
uri = uri[1:]

if uri in SUPPORTED_WORKERS:
components = SUPPORTED_WORKERS[uri].split(".")
break

try:
return pkg_resources.load_entry_point("gunicorn",
section, uri)
except:
exc = traceback.format_exc()
msg = "class uri %r invalid or not found: \n\n[%s]"
raise RuntimeError(msg % (uri, exc))

klass = components.pop(-1)

try:
mod = import_module('.'.join(components))
except:
exc = traceback.format_exc()
msg = "class uri %r invalid or not found: \n\n[%s]"
raise RuntimeError(msg % (uri, exc))
return getattr(mod, klass)


def set_owner_process(uid, gid):
""" set user and group of workers processes """
if gid:
# versions of Python < 2.6.2 don't handle unsigned ints for
# group IDs, as seen on OS X or Fedora
gid = abs(gid) & 0x7FFFFFFF
os.setgid(gid)
if uid:
os.setuid(uid)


def chown(path, uid, gid):
gid = abs(gid) & 0x7FFFFFFF # see note above.
os.chown(path, uid, gid)


if sys.platform.startswith("win"):
def _waitfor(func, pathname, waitall=False):
# Perform the operation
func(pathname)
# Now set up the wait loop
if waitall:
dirname = pathname
else:
dirname, name = os.path.split(pathname)
dirname = dirname or '.'
# Check for `pathname` to be removed from the filesystem.
# The exponential backoff of the timeout amounts to a total
# of ~1 second after which the deletion is probably an error
# anyway.
# Testing on an i7@4.3GHz shows that usually only 1 iteration is
# required when contention occurs.
timeout = 0.001
while timeout < 1.0:
# Note we are only testing for the existence of the file(s) in
# the contents of the directory regardless of any security or
# access rights. If we have made it this far, we have sufficient
# permissions to do that much using Python's equivalent of the
# Windows API FindFirstFile.
# Other Windows APIs can fail or give incorrect results when
# dealing with files that are pending deletion.
L = os.listdir(dirname)
if not (L if waitall else name in L):
return
# Increase the timeout and try again
time.sleep(timeout)
timeout *= 2
warnings.warn('tests may fail, delete still pending for ' + pathname,
RuntimeWarning, stacklevel=4)

def _unlink(filename):
_waitfor(os.unlink, filename)
else:
_unlink = os.unlink


def unlink(filename):
try:
_unlink(filename)
except OSError as error:
# The filename need not exist.
if error.errno not in (errno.ENOENT, errno.ENOTDIR):
raise


def is_ipv6(addr):
try:
socket.inet_pton(socket.AF_INET6, addr)
except socket.error: # not a valid address
return False
except ValueError: # ipv6 not supported on this platform
return False
return True


def parse_address(netloc, default_port=8000):
if netloc.startswith("unix://"):
return netloc.split("unix://")[1]

if netloc.startswith("unix:"):
return netloc.split("unix:")[1]

if netloc.startswith("tcp://"):
netloc = netloc.split("tcp://")[1]

# get host
if '[' in netloc and ']' in netloc:
host = netloc.split(']')[0][1:].lower()
elif ':' in netloc:
host = netloc.split(':')[0].lower()
elif netloc == "":
host = "0.0.0.0"
else:
host = netloc.lower()

# get port
netloc = netloc.split(']')[-1]
if ":" in netloc:
port = netloc.split(':', 1)[1]
if not port.isdigit():
raise RuntimeError("%r is not a valid port number." % port)
port = int(port)
else:
port = default_port
return (host, port)

def get_maxfd():
maxfd = resource.getrlimit(resource.RLIMIT_NOFILE)[1]
if (maxfd == resource.RLIM_INFINITY):
maxfd = MAXFD
return maxfd


def close_on_exec(fd):
flags = fcntl.fcntl(fd, fcntl.F_GETFD)
flags |= fcntl.FD_CLOEXEC
fcntl.fcntl(fd, fcntl.F_SETFD, flags)


def set_non_blocking(fd):
flags = fcntl.fcntl(fd, fcntl.F_GETFL) | os.O_NONBLOCK
fcntl.fcntl(fd, fcntl.F_SETFL, flags)

def close(sock):
try:
sock.close()
except socket.error:
pass

try:
from os import closerange
except ImportError:
def closerange(fd_low, fd_high):
# Iterate through and close all file descriptors.
for fd in range(fd_low, fd_high):
try:
os.close(fd)
except OSError: # ERROR, fd wasn't open to begin with (ignored)
pass


def write_chunk(sock, data):
if isinstance(data, text_type):
data = data.encode('utf-8')
chunk_size = "%X\r\n" % len(data)
chunk = b"".join([chunk_size.encode('utf-8'), data, b"\r\n"])
sock.sendall(chunk)


def write(sock, data, chunked=False):
if chunked:
return write_chunk(sock, data)
sock.sendall(data)


def write_nonblock(sock, data, chunked=False):
timeout = sock.gettimeout()
if timeout != 0.0:
try:
sock.setblocking(0)
return write(sock, data, chunked)
finally:
sock.setblocking(1)
else:
return write(sock, data, chunked)


def writelines(sock, lines, chunked=False):
for line in list(lines):
write(sock, line, chunked)


def write_error(sock, status_int, reason, mesg):
html = textwrap.dedent("""\
<html>
<head>
<title>%(reason)s</title>
</head>
<body>
<h1><p>%(reason)s</p></h1>
%(mesg)s
</body>
</html>
""") % {"reason": reason, "mesg": cgi.escape(mesg)}

http = textwrap.dedent("""\
HTTP/1.1 %s %s\r
Connection: close\r
Content-Type: text/html\r
Content-Length: %d\r
\r
%s""") % (str(status_int), reason, len(html), html)
write_nonblock(sock, http.encode('latin1'))


def normalize_name(name):
return "-".join([w.lower().capitalize() for w in name.split("-")])


def import_app(module):
parts = module.split(":", 1)
if len(parts) == 1:
module, obj = module, "application"
else:
module, obj = parts[0], parts[1]

try:
__import__(module)
except ImportError:
if module.endswith(".py") and os.path.exists(module):
msg = "Failed to find application, did you mean '%s:%s'?"
raise ImportError(msg % (module.rsplit(".", 1)[0], obj))
else:
raise

mod = sys.modules[module]

try:
app = eval(obj, mod.__dict__)
except NameError:
raise AppImportError("Failed to find application: %r" % module)

if app is None:
raise AppImportError("Failed to find application object: %r" % obj)

if not callable(app):
raise AppImportError("Application object must be callable.")
return app


def getcwd():
# get current path, try to use PWD env first
try:
a = os.stat(os.environ['PWD'])
b = os.stat(os.getcwd())
if a.st_ino == b.st_ino and a.st_dev == b.st_dev:
cwd = os.environ['PWD']
else:
cwd = os.getcwd()
except:
cwd = os.getcwd()
return cwd


def http_date(timestamp=None):
"""Return the current date and time formatted for a message header."""
if timestamp is None:
timestamp = time.time()
s = email.utils.formatdate(timestamp, localtime=False, usegmt=True)
return s


def is_hoppish(header):
return header.lower().strip() in hop_headers


def daemonize(enable_stdio_inheritance=False):
"""\
Standard daemonization of a process.
http://www.svbug.com/documentation/comp.unix.programmer-FAQ/faq_2.html#SEC16
"""
if 'GUNICORN_FD' not in os.environ:
if os.fork():
os._exit(0)
os.setsid()

if os.fork():
os._exit(0)

os.umask(0o22)

# In both of the following cases, any file descriptors above stdin,
# stdout and stderr are left untouched. The inheritance
# option simply allows one to have output go to a file
# specified by way of shell redirection when not wanting
# to use the --error-log option.

if not enable_stdio_inheritance:
# Remap all of stdin, stdout and stderr on to
# /dev/null. The expectation is that users have
# specified the --error-log option.

closerange(0, 3)

fd_null = os.open(REDIRECT_TO, os.O_RDWR)

if fd_null != 0:
os.dup2(fd_null, 0)

os.dup2(fd_null, 1)
os.dup2(fd_null, 2)

else:
fd_null = os.open(REDIRECT_TO, os.O_RDWR)

# Always redirect stdin to /dev/null as we would
# never expect to need to read interactive input.

if fd_null != 0:
os.close(0)
os.dup2(fd_null, 0)

# If stdout and stderr are still connected to
# their original file descriptors we check to see
# if they are associated with terminal devices.
# When they are, we map them to /dev/null so that
# they are still detached from any controlling terminal
# properly. If not, we preserve them as they are.
#
# If stdin and stdout were not hooked up to the
# original file descriptors, then all bets are
# off and all we can really do is leave them as
# they were.
#
# This will allow 'gunicorn ... > output.log 2>&1'
# to work with stdout/stderr going to the file
# as expected.
#
# Note that if using --error-log option, the log
# file specified through shell redirection will
# only be used up until the log file specified
# by the option takes over. Because it replaces stdout
# and stderr at the file descriptor level, anything
# using stdout or stderr, including code that has
# cached a reference to them, will still work.

def redirect(stream, fd_expect):
try:
fd = stream.fileno()
if fd == fd_expect and stream.isatty():
os.close(fd)
os.dup2(fd_null, fd)
except AttributeError:
pass

redirect(sys.stdout, 1)
redirect(sys.stderr, 2)


def seed():
try:
random.seed(os.urandom(64))
except NotImplementedError:
random.seed('%s.%s' % (time.time(), os.getpid()))


def check_is_writeable(path):
try:
f = open(path, 'a')
except IOError as e:
raise RuntimeError("Error: '%s' isn't writable [%r]" % (path, e))
f.close()


def to_bytestring(value, encoding="utf8"):
"""Converts a string argument to a byte string"""
if isinstance(value, bytes):
return value
if not isinstance(value, text_type):
raise TypeError('%r is not a string' % value)

return value.encode(encoding)

def has_fileno(obj):
if not hasattr(obj, "fileno"):
return False

# check BytesIO case and maybe others
try:
obj.fileno()
except (AttributeError, IOError, io.UnsupportedOperation):
return False

return True


def warn(msg):
print("!!!", file=sys.stderr)

lines = msg.splitlines()
for i, line in enumerate(lines):
if i == 0:
line = "WARNING: %s" % line
print("!!! %s" % line, file=sys.stderr)

print("!!!\n", file=sys.stderr)
sys.stderr.flush()


def make_fail_app(msg):

def app(environ, start_response):
start_response("500 Internal Server Error", [
("Content-Type", "text/plain"),
("Content-Length", str(len(msg)))
])
return [msg]

return app
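
util.py is the grab-bag of helpers the server relies on; two of them appear on almost every startup path: parse_address() converts the --bind string into something create_sockets() understands, and import_app() resolves the module:callable string given on the command line. A short sketch (the bind strings are examples, and the WSGI module name is hypothetical, so the import_app() call is left commented out):

from gunicorn import util

# "unix:" prefixes yield a path, everything else a (host, port) tuple
print(util.parse_address("tcp://0.0.0.0:8000"))   # ('0.0.0.0', 8000)
print(util.parse_address("unix:/tmp/app.sock"))   # '/tmp/app.sock'
print(util.parse_address("localhost"))            # ('localhost', 8000), default port

# app = util.import_app("application.wsgi:application")  # hypothetical module path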

+ 22
- 0
thesisenv/lib/python3.6/site-packages/gunicorn/workers/__init__.py View File

# -*- coding: utf-8 -
#
# This file is part of gunicorn released under the MIT license.
# See the NOTICE for more information.

import sys

# supported gunicorn workers.
SUPPORTED_WORKERS = {
"sync": "gunicorn.workers.sync.SyncWorker",
"eventlet": "gunicorn.workers.geventlet.EventletWorker",
"gevent": "gunicorn.workers.ggevent.GeventWorker",
"gevent_wsgi": "gunicorn.workers.ggevent.GeventPyWSGIWorker",
"gevent_pywsgi": "gunicorn.workers.ggevent.GeventPyWSGIWorker",
"tornado": "gunicorn.workers.gtornado.TornadoWorker",
"gthread": "gunicorn.workers.gthread.ThreadWorker",
}


if sys.version_info >= (3, 3):
# gaiohttp worker can be used with Python 3.3+ only.
SUPPORTED_WORKERS["gaiohttp"] = "gunicorn.workers.gaiohttp.AiohttpWorker"
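
SUPPORTED_WORKERS is the registry behind the -k/--worker-class option: when the configured value is a bare name rather than a dotted path, util.load_class() (shown earlier in this commit) looks it up here. For example, assuming only the dependency-free default worker is needed:

from gunicorn.util import load_class
from gunicorn.workers import SUPPORTED_WORKERS

print(sorted(SUPPORTED_WORKERS))   # shorthand names accepted by -k
print(load_class("sync"))          # gunicorn.workers.sync.SyncWorker
# load_class("gevent") or load_class("eventlet") would additionally require
# those libraries to be installed.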

+ 168
- 0
thesisenv/lib/python3.6/site-packages/gunicorn/workers/_gaiohttp.py View File

# -*- coding: utf-8 -
#
# This file is part of gunicorn released under the MIT license.
# See the NOTICE for more information.

import asyncio
import datetime
import functools
import logging
import os

try:
import ssl
except ImportError:
ssl = None

import gunicorn.workers.base as base

from aiohttp.wsgi import WSGIServerHttpProtocol as OldWSGIServerHttpProtocol


class WSGIServerHttpProtocol(OldWSGIServerHttpProtocol):
def log_access(self, request, environ, response, time):
self.logger.access(response, request, environ, datetime.timedelta(0, 0, time))


class AiohttpWorker(base.Worker):

def __init__(self, *args, **kw): # pragma: no cover
super().__init__(*args, **kw)
cfg = self.cfg
if cfg.is_ssl:
self.ssl_context = self._create_ssl_context(cfg)
else:
self.ssl_context = None
self.servers = []
self.connections = {}

def init_process(self):
# create new event_loop after fork
asyncio.get_event_loop().close()

self.loop = asyncio.new_event_loop()
asyncio.set_event_loop(self.loop)

super().init_process()

def run(self):
self._runner = asyncio.async(self._run(), loop=self.loop)

try:
self.loop.run_until_complete(self._runner)
finally:
self.loop.close()

def wrap_protocol(self, proto):
proto.connection_made = _wrp(
proto, proto.connection_made, self.connections)
proto.connection_lost = _wrp(
proto, proto.connection_lost, self.connections, False)
return proto

def factory(self, wsgi, addr):
# are we in debug level
is_debug = self.log.loglevel == logging.DEBUG

proto = WSGIServerHttpProtocol(
wsgi, readpayload=True,
loop=self.loop,
log=self.log,
debug=is_debug,
keep_alive=self.cfg.keepalive,
access_log=self.log.access_log,
access_log_format=self.cfg.access_log_format)
return self.wrap_protocol(proto)

def get_factory(self, sock, addr):
return functools.partial(self.factory, self.wsgi, addr)

@asyncio.coroutine
def close(self):
try:
if hasattr(self.wsgi, 'close'):
yield from self.wsgi.close()
except:
self.log.exception('Process shutdown exception')

@asyncio.coroutine
def _run(self):
for sock in self.sockets:
factory = self.get_factory(sock.sock, sock.cfg_addr)
self.servers.append(
(yield from self._create_server(factory, sock)))

# If our parent changed then we shut down.
pid = os.getpid()
try:
while self.alive or self.connections:
self.notify()

if (self.alive and
pid == os.getpid() and self.ppid != os.getppid()):
self.log.info("Parent changed, shutting down: %s", self)
self.alive = False

# stop accepting requests
if not self.alive:
if self.servers:
self.log.info(
"Stopping server: %s, connections: %s",
pid, len(self.connections))
for server in self.servers:
server.close()
self.servers.clear()

# prepare connections for closing
for conn in self.connections.values():
if hasattr(conn, 'closing'):
conn.closing()

yield from asyncio.sleep(1.0, loop=self.loop)
except KeyboardInterrupt:
pass

if self.servers:
for server in self.servers:
server.close()

yield from self.close()

@asyncio.coroutine
def _create_server(self, factory, sock):
return self.loop.create_server(factory, sock=sock.sock,
ssl=self.ssl_context)

@staticmethod
def _create_ssl_context(cfg):
""" Creates SSLContext instance for usage in asyncio.create_server.

See ssl.SSLSocket.__init__ for more details.
"""
ctx = ssl.SSLContext(cfg.ssl_version)
ctx.load_cert_chain(cfg.certfile, cfg.keyfile)
ctx.verify_mode = cfg.cert_reqs
if cfg.ca_certs:
ctx.load_verify_locations(cfg.ca_certs)
if cfg.ciphers:
ctx.set_ciphers(cfg.ciphers)
return ctx


class _wrp:

def __init__(self, proto, meth, tracking, add=True):
self._proto = proto
self._id = id(proto)
self._meth = meth
self._tracking = tracking
self._add = add

def __call__(self, *args):
if self._add:
self._tracking[self._id] = self._proto
elif self._id in self._tracking:
del self._tracking[self._id]

conn = self._meth(*args)
return conn

+ 143
- 0
thesisenv/lib/python3.6/site-packages/gunicorn/workers/async.py View File

# -*- coding: utf-8 -
#
# This file is part of gunicorn released under the MIT license.
# See the NOTICE for more information.

from datetime import datetime
import errno
import socket
import ssl
import sys

import gunicorn.http as http
import gunicorn.http.wsgi as wsgi
import gunicorn.util as util
import gunicorn.workers.base as base
from gunicorn import six

ALREADY_HANDLED = object()


class AsyncWorker(base.Worker):

def __init__(self, *args, **kwargs):
super(AsyncWorker, self).__init__(*args, **kwargs)
self.worker_connections = self.cfg.worker_connections

def timeout_ctx(self):
raise NotImplementedError()

def handle(self, listener, client, addr):
req = None
try:
parser = http.RequestParser(self.cfg, client)
try:
listener_name = listener.getsockname()
if not self.cfg.keepalive:
req = six.next(parser)
self.handle_request(listener_name, req, client, addr)
else:
# keepalive loop
proxy_protocol_info = {}
while True:
req = None
with self.timeout_ctx():
req = six.next(parser)
if not req:
break
if req.proxy_protocol_info:
proxy_protocol_info = req.proxy_protocol_info
else:
req.proxy_protocol_info = proxy_protocol_info
self.handle_request(listener_name, req, client, addr)
except http.errors.NoMoreData as e:
self.log.debug("Ignored premature client disconnection. %s", e)
except StopIteration as e:
self.log.debug("Closing connection. %s", e)
except ssl.SSLError:
# pass to next try-except level
six.reraise(*sys.exc_info())
except EnvironmentError:
# pass to next try-except level
six.reraise(*sys.exc_info())
except Exception as e:
self.handle_error(req, client, addr, e)
except ssl.SSLError as e:
if e.args[0] == ssl.SSL_ERROR_EOF:
self.log.debug("ssl connection closed")
client.close()
else:
self.log.debug("Error processing SSL request.")
self.handle_error(req, client, addr, e)
except EnvironmentError as e:
if e.errno not in (errno.EPIPE, errno.ECONNRESET):
self.log.exception("Socket error processing request.")
else:
if e.errno == errno.ECONNRESET:
self.log.debug("Ignoring connection reset")
else:
self.log.debug("Ignoring EPIPE")
except Exception as e:
self.handle_error(req, client, addr, e)
finally:
util.close(client)

def handle_request(self, listener_name, req, sock, addr):
request_start = datetime.now()
environ = {}
resp = None
try:
self.cfg.pre_request(self, req)
resp, environ = wsgi.create(req, sock, addr,
listener_name, self.cfg)
environ["wsgi.multithread"] = True
self.nr += 1
if self.alive and self.nr >= self.max_requests:
self.log.info("Autorestarting worker after current request.")
resp.force_close()
self.alive = False

if not self.cfg.keepalive:
resp.force_close()

respiter = self.wsgi(environ, resp.start_response)
if respiter == ALREADY_HANDLED:
return False
try:
if isinstance(respiter, environ['wsgi.file_wrapper']):
resp.write_file(respiter)
else:
for item in respiter:
resp.write(item)
resp.close()
request_time = datetime.now() - request_start
self.log.access(resp, req, environ, request_time)
finally:
if hasattr(respiter, "close"):
respiter.close()
if resp.should_close():
raise StopIteration()
except StopIteration:
raise
except EnvironmentError:
# If the original exception was a socket.error we delegate
# handling it to the caller (where handle() might ignore it)
six.reraise(*sys.exc_info())
except Exception:
if resp and resp.headers_sent:
# If the response headers have already been sent, we should close the
# connection to indicate the error.
self.log.exception("Error handling request")
try:
sock.shutdown(socket.SHUT_RDWR)
sock.close()
except EnvironmentError:
pass
raise StopIteration()
raise
finally:
try:
self.cfg.post_request(self, req, environ, resp)
except Exception:
self.log.exception("Exception in post_request hook")
return True
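
AsyncWorker deliberately leaves timeout_ctx() abstract: each concrete worker supplies a context manager that bounds how long the keepalive loop above waits for the next request (the eventlet and gevent workers later in this commit return eventlet.Timeout and gevent.Timeout respectively). A stdlib-only stand-in that shows the shape of the contract, not gunicorn's actual implementation:

import contextlib

@contextlib.contextmanager
def timeout_ctx():
    # No-op placeholder for what EventletWorker/GeventWorker return:
    # a context manager that silently aborts the read once cfg.keepalive
    # seconds have passed.
    yield

with timeout_ctx():
    req = None   # six.next(parser) runs here inside AsyncWorker.handle()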

+ 257
- 0
thesisenv/lib/python3.6/site-packages/gunicorn/workers/base.py View File

# -*- coding: utf-8 -
#
# This file is part of gunicorn released under the MIT license.
# See the NOTICE for more information.

from datetime import datetime
import os
from random import randint
import signal
from ssl import SSLError
import sys
import time
import traceback

from gunicorn import util
from gunicorn.workers.workertmp import WorkerTmp
from gunicorn.reloader import Reloader
from gunicorn.http.errors import (
InvalidHeader, InvalidHeaderName, InvalidRequestLine, InvalidRequestMethod,
InvalidHTTPVersion, LimitRequestLine, LimitRequestHeaders,
)
from gunicorn.http.errors import InvalidProxyLine, ForbiddenProxyRequest
from gunicorn.http.wsgi import default_environ, Response
from gunicorn.six import MAXSIZE


class Worker(object):

SIGNALS = [getattr(signal, "SIG%s" % x)
for x in "ABRT HUP QUIT INT TERM USR1 USR2 WINCH CHLD".split()]

PIPE = []

def __init__(self, age, ppid, sockets, app, timeout, cfg, log):
"""\
This is called pre-fork so it shouldn't do anything to the
current process. If there's a need to make process wide
changes you'll want to do that in ``self.init_process()``.
"""
self.age = age
self.ppid = ppid
self.sockets = sockets
self.app = app
self.timeout = timeout
self.cfg = cfg
self.booted = False
self.aborted = False
self.reloader = None

self.nr = 0
jitter = randint(0, cfg.max_requests_jitter)
self.max_requests = cfg.max_requests + jitter or MAXSIZE
self.alive = True
self.log = log
self.tmp = WorkerTmp(cfg)

def __str__(self):
return "<Worker %s>" % self.pid

@property
def pid(self):
return os.getpid()

def notify(self):
"""\
Your worker subclass must arrange to have this method called
once every ``self.timeout`` seconds. If you fail in accomplishing
this task, the master process will murder your workers.
"""
self.tmp.notify()

def run(self):
"""\
This is the mainloop of a worker process. You should override
this method in a subclass to provide the intended behaviour
for your particular evil schemes.
"""
raise NotImplementedError()

def init_process(self):
"""\
If you override this method in a subclass, the last statement
in the function should be to call this method with
super(MyWorkerClass, self).init_process() so that the ``run()``
loop is initiated.
"""

# start the reloader
if self.cfg.reload:
def changed(fname):
self.log.info("Worker reloading: %s modified", fname)
self.alive = False
self.cfg.worker_int(self)
time.sleep(0.1)
sys.exit(0)

self.reloader = Reloader(callback=changed)
self.reloader.start()

# set environment variables
if self.cfg.env:
for k, v in self.cfg.env.items():
os.environ[k] = v

util.set_owner_process(self.cfg.uid, self.cfg.gid)

# Reseed the random number generator
util.seed()

# For waking ourselves up
self.PIPE = os.pipe()
for p in self.PIPE:
util.set_non_blocking(p)
util.close_on_exec(p)

# Prevent fd inheritance
[util.close_on_exec(s) for s in self.sockets]
util.close_on_exec(self.tmp.fileno())

self.wait_fds = self.sockets + [self.PIPE[0]]

self.log.close_on_exec()

self.init_signals()

self.load_wsgi()

self.cfg.post_worker_init(self)

# Enter main run loop
self.booted = True
self.run()

def load_wsgi(self):
try:
self.wsgi = self.app.wsgi()
except SyntaxError as e:
if not self.cfg.reload:
raise

self.log.exception(e)

# fix from PR #1228
# storing the traceback into exc_tb will create a circular reference.
# per https://docs.python.org/2/library/sys.html#sys.exc_info warning,
# delete the traceback after use.
try:
exc_type, exc_val, exc_tb = sys.exc_info()
self.reloader.add_extra_file(exc_val.filename)

tb_string = traceback.format_tb(exc_tb)
self.wsgi = util.make_fail_app(tb_string)
finally:
del exc_tb

def init_signals(self):
# reset signaling
[signal.signal(s, signal.SIG_DFL) for s in self.SIGNALS]
# init new signaling
signal.signal(signal.SIGQUIT, self.handle_quit)
signal.signal(signal.SIGTERM, self.handle_exit)
signal.signal(signal.SIGINT, self.handle_quit)
signal.signal(signal.SIGWINCH, self.handle_winch)
signal.signal(signal.SIGUSR1, self.handle_usr1)
signal.signal(signal.SIGABRT, self.handle_abort)

# Don't let SIGTERM and SIGUSR1 disturb active requests
# by interrupting system calls
if hasattr(signal, 'siginterrupt'): # python >= 2.6
signal.siginterrupt(signal.SIGTERM, False)
signal.siginterrupt(signal.SIGUSR1, False)

if hasattr(signal, 'set_wakeup_fd'):
signal.set_wakeup_fd(self.PIPE[1])

def handle_usr1(self, sig, frame):
self.log.reopen_files()

def handle_exit(self, sig, frame):
self.alive = False

def handle_quit(self, sig, frame):
self.alive = False
# worker_int callback
self.cfg.worker_int(self)
time.sleep(0.1)
sys.exit(0)

def handle_abort(self, sig, frame):
self.alive = False
self.cfg.worker_abort(self)
sys.exit(1)

def handle_error(self, req, client, addr, exc):
request_start = datetime.now()
addr = addr or ('', -1) # unix socket case
if isinstance(exc, (InvalidRequestLine, InvalidRequestMethod,
InvalidHTTPVersion, InvalidHeader, InvalidHeaderName,
LimitRequestLine, LimitRequestHeaders,
InvalidProxyLine, ForbiddenProxyRequest,
SSLError)):

status_int = 400
reason = "Bad Request"

if isinstance(exc, InvalidRequestLine):
mesg = "Invalid Request Line '%s'" % str(exc)
elif isinstance(exc, InvalidRequestMethod):
mesg = "Invalid Method '%s'" % str(exc)
elif isinstance(exc, InvalidHTTPVersion):
mesg = "Invalid HTTP Version '%s'" % str(exc)
elif isinstance(exc, (InvalidHeaderName, InvalidHeader,)):
mesg = "%s" % str(exc)
if not req and hasattr(exc, "req"):
req = exc.req # for access log
elif isinstance(exc, LimitRequestLine):
mesg = "%s" % str(exc)
elif isinstance(exc, LimitRequestHeaders):
mesg = "Error parsing headers: '%s'" % str(exc)
elif isinstance(exc, InvalidProxyLine):
mesg = "'%s'" % str(exc)
elif isinstance(exc, ForbiddenProxyRequest):
reason = "Forbidden"
mesg = "Request forbidden"
status_int = 403
elif isinstance(exc, SSLError):
reason = "Forbidden"
mesg = "'%s'" % str(exc)
status_int = 403

msg = "Invalid request from ip={ip}: {error}"
self.log.debug(msg.format(ip=addr[0], error=str(exc)))
else:
if hasattr(req, "uri"):
self.log.exception("Error handling request %s", req.uri)
status_int = 500
reason = "Internal Server Error"
mesg = ""

if req is not None:
request_time = datetime.now() - request_start
environ = default_environ(req, client, self.cfg)
environ['REMOTE_ADDR'] = addr[0]
environ['REMOTE_PORT'] = str(addr[1])
resp = Response(req, client, self.cfg)
resp.status = "%s %s" % (status_int, reason)
resp.response_length = len(mesg)
self.log.access(resp, req, environ, request_time)

try:
util.write_error(client, status_int, reason, mesg)
except:
self.log.debug("Failed to send error message.")

def handle_winch(self, sig, fname):
# Ignore SIGWINCH in worker. Fixes a crash on OpenBSD.
self.log.debug("worker: SIGWINCH ignored.")

+ 17
- 0
thesisenv/lib/python3.6/site-packages/gunicorn/workers/gaiohttp.py View File

# -*- coding: utf-8 -
#
# This file is part of gunicorn released under the MIT license.
# See the NOTICE for more information.

import sys

if sys.version_info >= (3, 3):
try:
import aiohttp # NOQA
except ImportError:
raise RuntimeError("You need aiohttp installed to use this worker.")
else:
from gunicorn.workers._gaiohttp import AiohttpWorker
__all__ = ['AiohttpWorker']
else:
raise RuntimeError("You need Python >= 3.3 to use the asyncio worker")
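
Because of this guard, simply importing the module is enough to find out whether the asyncio worker is usable in the current environment; the RuntimeError messages above name the missing piece. For example:

try:
    from gunicorn.workers.gaiohttp import AiohttpWorker
except RuntimeError as exc:
    # raised on Python < 3.3 or when aiohttp is not installed, as coded above
    AiohttpWorker = None
    print("gaiohttp worker unavailable:", exc)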

+ 135
- 0
thesisenv/lib/python3.6/site-packages/gunicorn/workers/geventlet.py View File

# -*- coding: utf-8 -
#
# This file is part of gunicorn released under the MIT license.
# See the NOTICE for more information.

from functools import partial
import errno
import sys

try:
import eventlet
except ImportError:
raise RuntimeError("You need eventlet installed to use this worker.")

# validate the eventlet version
if eventlet.version_info < (0, 9, 7):
raise RuntimeError("You need eventlet >= 0.9.7")


from eventlet import hubs, greenthread
from eventlet.greenio import GreenSocket
from eventlet.hubs import trampoline
import greenlet

from gunicorn.http.wsgi import sendfile as o_sendfile
from gunicorn.workers.async import AsyncWorker

def _eventlet_sendfile(fdout, fdin, offset, nbytes):
while True:
try:
return o_sendfile(fdout, fdin, offset, nbytes)
except OSError as e:
if e.args[0] == errno.EAGAIN:
trampoline(fdout, write=True)
else:
raise


def _eventlet_serve(sock, handle, concurrency):
"""
Serve requests forever.

This code is nearly identical to ``eventlet.convenience.serve`` except
that it attempts to join the pool at the end, which allows for gunicorn
graceful shutdowns.
"""
pool = eventlet.greenpool.GreenPool(concurrency)
server_gt = eventlet.greenthread.getcurrent()

while True:
try:
conn, addr = sock.accept()
gt = pool.spawn(handle, conn, addr)
gt.link(_eventlet_stop, server_gt, conn)
conn, addr, gt = None, None, None
except eventlet.StopServe:
sock.close()
pool.waitall()
return


def _eventlet_stop(client, server, conn):
"""
Stop a greenlet handling a request and close its connection.

This code is lifted from eventlet so as not to depend on undocumented
functions in the library.
"""
try:
try:
client.wait()
finally:
conn.close()
except greenlet.GreenletExit:
pass
except Exception:
greenthread.kill(server, *sys.exc_info())


def patch_sendfile():
from gunicorn.http import wsgi

if o_sendfile is not None:
setattr(wsgi, "sendfile", _eventlet_sendfile)


class EventletWorker(AsyncWorker):

def patch(self):
hubs.use_hub()
eventlet.monkey_patch(os=False)
patch_sendfile()

def init_process(self):
self.patch()
super(EventletWorker, self).init_process()

def handle_quit(self, sig, frame):
eventlet.spawn(super(EventletWorker, self).handle_quit, sig, frame)

def timeout_ctx(self):
return eventlet.Timeout(self.cfg.keepalive or None, False)

def handle(self, listener, client, addr):
if self.cfg.is_ssl:
client = eventlet.wrap_ssl(client, server_side=True,
**self.cfg.ssl_options)

super(EventletWorker, self).handle(listener, client, addr)

def run(self):
acceptors = []
for sock in self.sockets:
gsock = GreenSocket(sock)
gsock.setblocking(1)
hfun = partial(self.handle, gsock)
acceptor = eventlet.spawn(_eventlet_serve, gsock, hfun,
self.worker_connections)

acceptors.append(acceptor)
eventlet.sleep(0.0)

while self.alive:
self.notify()
eventlet.sleep(1.0)

self.notify()
try:
with eventlet.Timeout(self.cfg.graceful_timeout) as t:
[a.kill(eventlet.StopServe()) for a in acceptors]
[a.wait() for a in acceptors]
except eventlet.Timeout as te:
if te != t:
raise
[a.kill() for a in acceptors]
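EventletWorker.run() above spawns one _eventlet_serve green thread per listening socket and shuts them down by throwing StopServe into each acceptor inside a Timeout. The same accept-loop / graceful-stop pattern, stripped of gunicorn specifics, looks roughly like this (the echo handler, address, pool size and timeout are invented for the demo; assumes eventlet is installed):

import eventlet

def handle(conn, addr):
    # placeholder echo handler standing in for gunicorn's request handling
    data = conn.recv(1024)
    if data:
        conn.sendall(data)
    conn.close()

def serve(sock, concurrency=100):
    pool = eventlet.GreenPool(concurrency)
    while True:
        try:
            conn, addr = sock.accept()
            pool.spawn(handle, conn, addr)
        except eventlet.StopServe:
            sock.close()
            pool.waitall()      # let in-flight requests finish
            return

sock = eventlet.listen(("127.0.0.1", 8080))
acceptor = eventlet.spawn(serve, sock)
eventlet.sleep(0)               # give the acceptor a chance to start

# graceful shutdown, mirroring the tail of EventletWorker.run()
with eventlet.Timeout(10, False):
    acceptor.kill(eventlet.StopServe())
    acceptor.wait()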

+ 233  - 0  thesisenv/lib/python3.6/site-packages/gunicorn/workers/ggevent.py

# -*- coding: utf-8 -
#
# This file is part of gunicorn released under the MIT license.
# See the NOTICE for more information.

import errno
import os
import sys
from datetime import datetime
from functools import partial
import time

_socket = __import__("socket")

# workaround on osx, disable kqueue
if sys.platform == "darwin":
os.environ['EVENT_NOKQUEUE'] = "1"

try:
import gevent
except ImportError:
raise RuntimeError("You need gevent installed to use this worker.")
from gevent.pool import Pool
from gevent.server import StreamServer
from gevent.socket import wait_write, socket
from gevent import pywsgi

import gunicorn
from gunicorn.http.wsgi import base_environ
from gunicorn.workers.async import AsyncWorker
from gunicorn.http.wsgi import sendfile as o_sendfile

VERSION = "gevent/%s gunicorn/%s" % (gevent.__version__, gunicorn.__version__)

def _gevent_sendfile(fdout, fdin, offset, nbytes):
while True:
try:
return o_sendfile(fdout, fdin, offset, nbytes)
except OSError as e:
if e.args[0] == errno.EAGAIN:
wait_write(fdout)
else:
raise

def patch_sendfile():
from gunicorn.http import wsgi

if o_sendfile is not None:
setattr(wsgi, "sendfile", _gevent_sendfile)


class GeventWorker(AsyncWorker):

server_class = None
wsgi_handler = None

def patch(self):
from gevent import monkey
monkey.noisy = False

# if the new version is used make sure to patch subprocess
if gevent.version_info[0] == 0:
monkey.patch_all()
else:
monkey.patch_all(subprocess=True)

# monkey patch sendfile to make it non-blocking
patch_sendfile()

# patch sockets
sockets = []
for s in self.sockets:
if sys.version_info[0] == 3:
sockets.append(socket(s.FAMILY, _socket.SOCK_STREAM,
fileno=s.sock.fileno()))
else:
sockets.append(socket(s.FAMILY, _socket.SOCK_STREAM,
_sock=s))
self.sockets = sockets

def notify(self):
super(GeventWorker, self).notify()
if self.ppid != os.getppid():
self.log.info("Parent changed, shutting down: %s", self)
sys.exit(0)

def timeout_ctx(self):
return gevent.Timeout(self.cfg.keepalive, False)

def run(self):
servers = []
ssl_args = {}

if self.cfg.is_ssl:
ssl_args = dict(server_side=True, **self.cfg.ssl_options)

for s in self.sockets:
s.setblocking(1)
pool = Pool(self.worker_connections)
if self.server_class is not None:
environ = base_environ(self.cfg)
environ.update({
"wsgi.multithread": True,
"SERVER_SOFTWARE": VERSION,
})
server = self.server_class(
s, application=self.wsgi, spawn=pool, log=self.log,
handler_class=self.wsgi_handler, environ=environ,
**ssl_args)
else:
hfun = partial(self.handle, s)
server = StreamServer(s, handle=hfun, spawn=pool, **ssl_args)

server.start()
servers.append(server)

while self.alive:
self.notify()
gevent.sleep(1.0)

try:
# Stop accepting requests
for server in servers:
if hasattr(server, 'close'): # gevent 1.0
server.close()
if hasattr(server, 'kill'): # gevent < 1.0
server.kill()

# Handle current requests until graceful_timeout
ts = time.time()
while time.time() - ts <= self.cfg.graceful_timeout:
accepting = 0
for server in servers:
if server.pool.free_count() != server.pool.size:
accepting += 1

# if no server is accepting a connection, we can exit
if not accepting:
return

self.notify()
gevent.sleep(1.0)

# Force kill all the active handlers
self.log.warning("Worker graceful timeout (pid:%s)" % self.pid)
[server.stop(timeout=1) for server in servers]
except:
pass

def handle_request(self, *args):
try:
super(GeventWorker, self).handle_request(*args)
except gevent.GreenletExit:
pass
except SystemExit:
pass

def handle_quit(self, sig, frame):
# Move this out of the signal handler so we can use
# blocking calls. See #1126
gevent.spawn(super(GeventWorker, self).handle_quit, sig, frame)

if gevent.version_info[0] == 0:

def init_process(self):
# monkey patch here
self.patch()

# reinit the hub
import gevent.core
gevent.core.reinit()

# gevent 0.13 and older doesn't reinitialize DNS for us after forking,
# here's the workaround
gevent.core.dns_shutdown(fail_requests=1)
gevent.core.dns_init()
super(GeventWorker, self).init_process()

else:

def init_process(self):
# monkey patch here
self.patch()

# reinit the hub
from gevent import hub
hub.reinit()

# then initialize the process
super(GeventWorker, self).init_process()


class GeventResponse(object):

status = None
headers = None
sent = None

def __init__(self, status, headers, clength):
self.status = status
self.headers = headers
self.sent = clength


class PyWSGIHandler(pywsgi.WSGIHandler):

def log_request(self):
start = datetime.fromtimestamp(self.time_start)
finish = datetime.fromtimestamp(self.time_finish)
response_time = finish - start
resp_headers = getattr(self, 'response_headers', {})
resp = GeventResponse(self.status, resp_headers, self.response_length)
if hasattr(self, 'headers'):
req_headers = [h.split(":", 1) for h in self.headers.headers]
else:
req_headers = []
self.server.log.access(resp, req_headers, self.environ, response_time)

def get_environ(self):
env = super(PyWSGIHandler, self).get_environ()
env['gunicorn.sock'] = self.socket
env['RAW_URI'] = self.path
return env


class PyWSGIServer(pywsgi.WSGIServer):
pass


class GeventPyWSGIWorker(GeventWorker):
"The Gevent StreamServer based workers."
server_class = PyWSGIServer
wsgi_handler = PyWSGIHandler
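GeventPyWSGIWorker above serves the WSGI app from gevent's pywsgi server with a bounded pool, after monkey-patching the standard library the way patch() does. The order of operations in isolation, as a hedged sketch (the WSGI app, pool size and port are placeholders):

from gevent import monkey
monkey.patch_all(subprocess=True)   # same call GeventWorker.patch() makes on gevent >= 1.0

from gevent.pool import Pool
from gevent.pywsgi import WSGIServer

def app(environ, start_response):
    # placeholder WSGI app
    start_response("200 OK", [("Content-Type", "text/plain")])
    return [b"hello from gevent\n"]

pool = Pool(1000)                   # plays the role of cfg.worker_connections
server = WSGIServer(("127.0.0.1", 8080), app, spawn=pool)
server.serve_forever()

Patching before the application is imported matters: anything that grabs a reference to the unpatched socket or ssl module beforehand will still block the event loop.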

+ 371  - 0  thesisenv/lib/python3.6/site-packages/gunicorn/workers/gthread.py

# -*- coding: utf-8 -
#
# This file is part of gunicorn released under the MIT license.
# See the NOTICE for more information.

# design:
# a threaded worker accepts connections in the main loop; accepted
# connections are added to the thread pool as a connection job. On
# keepalive, connections are put back in the loop waiting for an event.
# If no event happens before the keepalive timeout expires, the
# connection is closed.

from collections import deque
from datetime import datetime
import errno
from functools import partial
import os
import socket
import ssl
import sys
from threading import RLock
import time

from .. import http
from ..http import wsgi
from .. import util
from . import base
from .. import six


try:
import concurrent.futures as futures
except ImportError:
raise RuntimeError("""
You need to install the 'futures' package to use this worker with this
Python version.
""")

try:
from asyncio import selectors
except ImportError:
from gunicorn import selectors


class TConn(object):

def __init__(self, cfg, sock, client, server):
self.cfg = cfg
self.sock = sock
self.client = client
self.server = server

self.timeout = None
self.parser = None

# set the socket to non blocking
self.sock.setblocking(False)

def init(self):
self.sock.setblocking(True)
if self.parser is None:
# wrap the socket if needed
if self.cfg.is_ssl:
self.sock = ssl.wrap_socket(self.sock, server_side=True,
**self.cfg.ssl_options)

# initialize the parser
self.parser = http.RequestParser(self.cfg, self.sock)

def set_timeout(self):
# set the timeout
self.timeout = time.time() + self.cfg.keepalive

def close(self):
util.close(self.sock)

def __lt__(self, other):
return self.timeout < other.timeout

__cmp__ = __lt__


class ThreadWorker(base.Worker):

def __init__(self, *args, **kwargs):
super(ThreadWorker, self).__init__(*args, **kwargs)
self.worker_connections = self.cfg.worker_connections
self.max_keepalived = self.cfg.worker_connections - self.cfg.threads
# initialise the pool
self.tpool = None
self.poller = None
self._lock = None
self.futures = deque()
self._keep = deque()
self.nr_conns = 0

@classmethod
def check_config(cls, cfg, log):
max_keepalived = cfg.worker_connections - cfg.threads

if max_keepalived <= 0 and cfg.keepalive:
log.warning("No keepalived connections can be handled. " +
"Check the number of worker connections and threads.")

def init_process(self):
self.tpool = futures.ThreadPoolExecutor(max_workers=self.cfg.threads)
self.poller = selectors.DefaultSelector()
self._lock = RLock()
super(ThreadWorker, self).init_process()

def handle_quit(self, sig, frame):
self.alive = False
# worker_int callback
self.cfg.worker_int(self)
self.tpool.shutdown(False)
time.sleep(0.1)
sys.exit(0)

def _wrap_future(self, fs, conn):
fs.conn = conn
self.futures.append(fs)
fs.add_done_callback(self.finish_request)

def enqueue_req(self, conn):
conn.init()
# submit the connection to a worker
fs = self.tpool.submit(self.handle, conn)
self._wrap_future(fs, conn)

def accept(self, server, listener):
try:
sock, client = listener.accept()
# initialize the connection object
conn = TConn(self.cfg, sock, client, server)
self.nr_conns += 1
# enqueue the job
self.enqueue_req(conn)
except EnvironmentError as e:
if e.errno not in (errno.EAGAIN,
errno.ECONNABORTED, errno.EWOULDBLOCK):
raise

def reuse_connection(self, conn, client):
with self._lock:
# unregister the client from the poller
self.poller.unregister(client)
# remove the connection from keepalive
try:
self._keep.remove(conn)
except ValueError:
# race condition
return

# submit the connection to a worker
self.enqueue_req(conn)

def murder_keepalived(self):
now = time.time()
while True:
with self._lock:
try:
# remove the connection from the queue
conn = self._keep.popleft()
except IndexError:
break

delta = conn.timeout - now
if delta > 0:
# add the connection back to the queue
with self._lock:
self._keep.appendleft(conn)
break
else:
self.nr_conns -= 1
# remove the socket from the poller
with self._lock:
try:
self.poller.unregister(conn.sock)
except EnvironmentError as e:
if e.errno != errno.EBADF:
raise
except KeyError:
# already removed by the system, continue
pass

# close the socket
conn.close()

def is_parent_alive(self):
# If our parent changed then we shut down.
if self.ppid != os.getppid():
self.log.info("Parent changed, shutting down: %s", self)
return False
return True

def run(self):
# init listeners, add them to the event loop
for sock in self.sockets:
sock.setblocking(False)
# a race condition during graceful shutdown may make the listener
# name unavailable in the request handler so capture it once here
server = sock.getsockname()
acceptor = partial(self.accept, server)
self.poller.register(sock, selectors.EVENT_READ, acceptor)

while self.alive:
# notify the arbiter we are alive
self.notify()

# can we accept more connections?
if self.nr_conns < self.worker_connections:
# wait for an event
events = self.poller.select(1.0)
for key, mask in events:
callback = key.data
callback(key.fileobj)

# check (but do not wait) for finished requests
result = futures.wait(self.futures, timeout=0,
return_when=futures.FIRST_COMPLETED)
else:
# wait for a request to finish
result = futures.wait(self.futures, timeout=1.0,
return_when=futures.FIRST_COMPLETED)

# clean up finished requests
for fut in result.done:
self.futures.remove(fut)

if not self.is_parent_alive():
break

# handle keepalive timeouts
self.murder_keepalived()

self.tpool.shutdown(False)
self.poller.close()

for s in self.sockets:
s.close()

futures.wait(self.futures, timeout=self.cfg.graceful_timeout)

def finish_request(self, fs):
if fs.cancelled():
fs.conn.close()
return

try:
(keepalive, conn) = fs.result()
# if the connection should be kept alive, add it
# to the event loop and record it
if keepalive:
# flag the socket as non blocked
conn.sock.setblocking(False)

# register the connection
conn.set_timeout()
with self._lock:
self._keep.append(conn)

# add the socket to the event loop
self.poller.register(conn.sock, selectors.EVENT_READ,
partial(self.reuse_connection, conn))
else:
self.nr_conns -= 1
conn.close()
except:
# an exception happened, make sure to close the
# socket.
self.nr_conns -= 1
fs.conn.close()

def handle(self, conn):
keepalive = False
req = None
try:
req = six.next(conn.parser)
if not req:
return (False, conn)

# handle the request
keepalive = self.handle_request(req, conn)
if keepalive:
return (keepalive, conn)
except http.errors.NoMoreData as e:
self.log.debug("Ignored premature client disconnection. %s", e)

except StopIteration as e:
self.log.debug("Closing connection. %s", e)
except ssl.SSLError as e:
if e.args[0] == ssl.SSL_ERROR_EOF:
self.log.debug("ssl connection closed")
conn.sock.close()
else:
self.log.debug("Error processing SSL request.")
self.handle_error(req, conn.sock, conn.client, e)

except EnvironmentError as e:
if e.errno not in (errno.EPIPE, errno.ECONNRESET):
self.log.exception("Socket error processing request.")
else:
if e.errno == errno.ECONNRESET:
self.log.debug("Ignoring connection reset")
else:
self.log.debug("Ignoring connection epipe")
except Exception as e:
self.handle_error(req, conn.sock, conn.client, e)

return (False, conn)

def handle_request(self, req, conn):
environ = {}
resp = None
try:
self.cfg.pre_request(self, req)
request_start = datetime.now()
resp, environ = wsgi.create(req, conn.sock, conn.client,
conn.server, self.cfg)
environ["wsgi.multithread"] = True
self.nr += 1
if self.alive and self.nr >= self.max_requests:
self.log.info("Autorestarting worker after current request.")
resp.force_close()
self.alive = False

if not self.cfg.keepalive:
resp.force_close()
elif len(self._keep) >= self.max_keepalived:
resp.force_close()

respiter = self.wsgi(environ, resp.start_response)
try:
if isinstance(respiter, environ['wsgi.file_wrapper']):
resp.write_file(respiter)
else:
for item in respiter:
resp.write(item)

resp.close()
request_time = datetime.now() - request_start
self.log.access(resp, req, environ, request_time)
finally:
if hasattr(respiter, "close"):
respiter.close()

if resp.should_close():
self.log.debug("Closing connection.")
return False
except EnvironmentError:
# pass to next try-except level
six.reraise(*sys.exc_info())
except Exception:
if resp and resp.headers_sent:
# If the requests have already been sent, we should close the
# connection to indicate the error.
self.log.exception("Error handling request")
try:
conn.sock.shutdown(socket.SHUT_RDWR)
conn.sock.close()
except EnvironmentError:
pass
raise StopIteration()
raise
finally:
try:
self.cfg.post_request(self, req, environ, resp)
except Exception:
self.log.exception("Exception in post_request hook")

return True
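The design note at the top of this file is easier to see without the keepalive bookkeeping: a non-blocking listener registered with a selector, and a ThreadPoolExecutor that runs the per-connection work. A toy sketch of that accept-and-dispatch skeleton (address, pool size and the echo handler are invented):

import selectors
import socket
from concurrent import futures

sel = selectors.DefaultSelector()
tpool = futures.ThreadPoolExecutor(max_workers=4)

def handle(conn, addr):
    # stand-in for ThreadWorker.handle(): parse, respond, decide on keepalive
    with conn:
        data = conn.recv(1024)
        if data:
            conn.sendall(data)

listener = socket.socket()
listener.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
listener.bind(("127.0.0.1", 8080))
listener.listen(128)
listener.setblocking(False)
sel.register(listener, selectors.EVENT_READ)

while True:
    for key, _ in sel.select(timeout=1.0):   # wait for a readable listener
        try:
            conn, addr = key.fileobj.accept()
        except BlockingIOError:
            continue
        tpool.submit(handle, conn, addr)     # hand the connection to the pool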

+ 130  - 0  thesisenv/lib/python3.6/site-packages/gunicorn/workers/gtornado.py

# -*- coding: utf-8 -
#
# This file is part of gunicorn released under the MIT license.
# See the NOTICE for more information.

import copy
import os
import sys

try:
import tornado.web
except ImportError:
raise RuntimeError("You need tornado installed to use this worker.")
import tornado.httpserver
from tornado.ioloop import IOLoop, PeriodicCallback
from tornado.wsgi import WSGIContainer
from gunicorn.workers.base import Worker
from gunicorn import __version__ as gversion


class TornadoWorker(Worker):

@classmethod
def setup(cls):
web = sys.modules.pop("tornado.web")
old_clear = web.RequestHandler.clear

def clear(self):
old_clear(self)
if not "Gunicorn" in self._headers["Server"]:
self._headers["Server"] += " (Gunicorn/%s)" % gversion
web.RequestHandler.clear = clear
sys.modules["tornado.web"] = web

def handle_exit(self, sig, frame):
if self.alive:
super(TornadoWorker, self).handle_exit(sig, frame)

def handle_request(self):
self.nr += 1
if self.alive and self.nr >= self.max_requests:
self.log.info("Autorestarting worker after current request.")
self.alive = False

def watchdog(self):
if self.alive:
self.notify()

if self.ppid != os.getppid():
self.log.info("Parent changed, shutting down: %s", self)
self.alive = False

def heartbeat(self):
if not self.alive:
if self.server_alive:
if hasattr(self, 'server'):
try:
self.server.stop()
except Exception:
pass
self.server_alive = False
else:
if not self.ioloop._callbacks:
self.ioloop.stop()

def run(self):
self.ioloop = IOLoop.instance()
self.alive = True
self.server_alive = False
PeriodicCallback(self.watchdog, 1000, io_loop=self.ioloop).start()
PeriodicCallback(self.heartbeat, 1000, io_loop=self.ioloop).start()

# Assume the app is a WSGI callable if it's not an
# instance of tornado.web.Application or is an
# instance of tornado.wsgi.WSGIApplication
app = self.wsgi
if not isinstance(app, tornado.web.Application) or \
isinstance(app, tornado.wsgi.WSGIApplication):
app = WSGIContainer(app)

# Monkey-patching HTTPConnection.finish to count the
# number of requests being handled by Tornado. This
# will help gunicorn shutdown the worker if max_requests
# is exceeded.
httpserver = sys.modules["tornado.httpserver"]
if hasattr(httpserver, 'HTTPConnection'):
old_connection_finish = httpserver.HTTPConnection.finish

def finish(other):
self.handle_request()
old_connection_finish(other)
httpserver.HTTPConnection.finish = finish
sys.modules["tornado.httpserver"] = httpserver

server_class = tornado.httpserver.HTTPServer
else:

class _HTTPServer(tornado.httpserver.HTTPServer):

def on_close(instance, server_conn):
self.handle_request()
super(_HTTPServer, instance).on_close(server_conn)

server_class = _HTTPServer

if self.cfg.is_ssl:
_ssl_opt = copy.deepcopy(self.cfg.ssl_options)
# tornado refuses initialization if ssl_options contains following
# options
del _ssl_opt["do_handshake_on_connect"]
del _ssl_opt["suppress_ragged_eofs"]
server = server_class(app, io_loop=self.ioloop,
ssl_options=_ssl_opt)
else:
server = server_class(app, io_loop=self.ioloop)

self.server = server
self.server_alive = True

for s in self.sockets:
s.setblocking(0)
if hasattr(server, "add_socket"): # tornado > 2.0
server.add_socket(s)
elif hasattr(server, "_sockets"): # tornado 2.0
server._sockets[s.fileno()] = s

server.no_keep_alive = self.cfg.keepalive <= 0
server.start(num_processes=1)

self.ioloop.start()
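For an app that is not a tornado.web.Application, run() above wraps it in WSGIContainer and serves it from tornado's HTTPServer on the worker's IOLoop. Reduced to the bare tornado calls, that path looks roughly like this (assumes a tornado version contemporary with gunicorn 19.6; the app and port are placeholders):

import tornado.httpserver
import tornado.ioloop
import tornado.wsgi

def app(environ, start_response):
    # placeholder WSGI app
    start_response("200 OK", [("Content-Type", "text/plain")])
    return [b"hello from tornado\n"]

container = tornado.wsgi.WSGIContainer(app)
server = tornado.httpserver.HTTPServer(container)
server.listen(8080)
tornado.ioloop.IOLoop.instance().start()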

+ 208  - 0  thesisenv/lib/python3.6/site-packages/gunicorn/workers/sync.py

# -*- coding: utf-8 -
#
# This file is part of gunicorn released under the MIT license.
# See the NOTICE for more information.
#

from datetime import datetime
import errno
import os
import select
import socket
import ssl
import sys

import gunicorn.http as http
import gunicorn.http.wsgi as wsgi
import gunicorn.util as util
import gunicorn.workers.base as base
from gunicorn import six

class StopWaiting(Exception):
""" exception raised to stop waiting for a connection """

class SyncWorker(base.Worker):

def accept(self, listener):
client, addr = listener.accept()
client.setblocking(1)
util.close_on_exec(client)
self.handle(listener, client, addr)

def wait(self, timeout):
try:
self.notify()
ret = select.select(self.wait_fds, [], [], timeout)
if ret[0]:
if self.PIPE[0] in ret[0]:
os.read(self.PIPE[0], 1)
return ret[0]

except select.error as e:
if e.args[0] == errno.EINTR:
return self.sockets
if e.args[0] == errno.EBADF:
if self.nr < 0:
return self.sockets
else:
raise StopWaiting
raise

def is_parent_alive(self):
# If our parent changed then we shut down.
if self.ppid != os.getppid():
self.log.info("Parent changed, shutting down: %s", self)
return False
return True

def run_for_one(self, timeout):
listener = self.sockets[0]
while self.alive:
self.notify()

# Accept a connection. If we get an error telling us
# that no connection is waiting we fall down to the
# select which is where we'll wait for a bit for new
# workers to come give us some love.
try:
self.accept(listener)
# Keep processing clients until no one is waiting. This
# prevents the need to select() for every client that we
# process.
continue

except EnvironmentError as e:
if e.errno not in (errno.EAGAIN, errno.ECONNABORTED,
errno.EWOULDBLOCK):
raise

if not self.is_parent_alive():
return

try:
self.wait(timeout)
except StopWaiting:
return

def run_for_multiple(self, timeout):
while self.alive:
self.notify()

try:
ready = self.wait(timeout)
except StopWaiting:
return

if ready is not None:
for listener in ready:
if listener == self.PIPE[0]:
continue

try:
self.accept(listener)
except EnvironmentError as e:
if e.errno not in (errno.EAGAIN, errno.ECONNABORTED,
errno.EWOULDBLOCK):
raise

if not self.is_parent_alive():
return

def run(self):
# if no timeout is given the worker will never wait and will
# use the CPU for nothing. This minimal timeout prevents it.
timeout = self.timeout or 0.5

# self.socket appears to lose its blocking status after
# we fork in the arbiter. Reset it here.
for s in self.sockets:
s.setblocking(0)

if len(self.sockets) > 1:
self.run_for_multiple(timeout)
else:
self.run_for_one(timeout)

def handle(self, listener, client, addr):
req = None
try:
if self.cfg.is_ssl:
client = ssl.wrap_socket(client, server_side=True,
**self.cfg.ssl_options)

parser = http.RequestParser(self.cfg, client)
req = six.next(parser)
self.handle_request(listener, req, client, addr)
except http.errors.NoMoreData as e:
self.log.debug("Ignored premature client disconnection. %s", e)
except StopIteration as e:
self.log.debug("Closing connection. %s", e)
except ssl.SSLError as e:
if e.args[0] == ssl.SSL_ERROR_EOF:
self.log.debug("ssl connection closed")
client.close()
else:
self.log.debug("Error processing SSL request.")
self.handle_error(req, client, addr, e)
except EnvironmentError as e:
if e.errno not in (errno.EPIPE, errno.ECONNRESET):
self.log.exception("Socket error processing request.")
else:
if e.errno == errno.ECONNRESET:
self.log.debug("Ignoring connection reset")
else:
self.log.debug("Ignoring EPIPE")
except Exception as e:
self.handle_error(req, client, addr, e)
finally:
util.close(client)

def handle_request(self, listener, req, client, addr):
environ = {}
resp = None
try:
self.cfg.pre_request(self, req)
request_start = datetime.now()
resp, environ = wsgi.create(req, client, addr,
listener.getsockname(), self.cfg)
# Force the connection closed until someone shows
# a buffering proxy that supports Keep-Alive to
# the backend.
resp.force_close()
self.nr += 1
if self.nr >= self.max_requests:
self.log.info("Autorestarting worker after current request.")
self.alive = False
respiter = self.wsgi(environ, resp.start_response)
try:
if isinstance(respiter, environ['wsgi.file_wrapper']):
resp.write_file(respiter)
else:
for item in respiter:
resp.write(item)
resp.close()
request_time = datetime.now() - request_start
self.log.access(resp, req, environ, request_time)
finally:
if hasattr(respiter, "close"):
respiter.close()
except EnvironmentError:
# pass to next try-except level
six.reraise(*sys.exc_info())
except Exception:
if resp and resp.headers_sent:
# If the requests have already been sent, we should close the
# connection to indicate the error.
self.log.exception("Error handling request")
try:
client.shutdown(socket.SHUT_RDWR)
client.close()
except EnvironmentError:
pass
raise StopIteration()
raise
finally:
try:
self.cfg.post_request(self, req, environ, resp)
except Exception:
self.log.exception("Exception in post_request hook")
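SyncWorker's loop is the classic pre-fork pattern: accept from the non-blocking listener until it would block, then fall back to select() with a timeout so the worker still gets a chance to notify the arbiter. A standalone sketch of run_for_one() with a single listener (address and echo handler invented):

import errno
import select
import socket

listener = socket.socket()
listener.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
listener.bind(("127.0.0.1", 8080))
listener.listen(128)
listener.setblocking(False)

def handle(client, addr):
    client.setblocking(True)
    with client:
        data = client.recv(1024)
        if data:
            client.sendall(data)

while True:
    try:
        client, addr = listener.accept()
        handle(client, addr)
        continue                      # keep accepting while clients are queued
    except OSError as e:
        if e.errno not in (errno.EAGAIN, errno.ECONNABORTED, errno.EWOULDBLOCK):
            raise
    select.select([listener], [], [], 0.5)   # nothing pending: wait a bit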

+ 56  - 0  thesisenv/lib/python3.6/site-packages/gunicorn/workers/workertmp.py

# -*- coding: utf-8 -
#
# This file is part of gunicorn released under the MIT license.
# See the NOTICE for more information.

import os
import platform
import tempfile

from gunicorn import util

PLATFORM = platform.system()
IS_CYGWIN = PLATFORM.startswith('CYGWIN')


class WorkerTmp(object):

def __init__(self, cfg):
old_umask = os.umask(cfg.umask)
fdir = cfg.worker_tmp_dir
if fdir and not os.path.isdir(fdir):
raise RuntimeError("%s doesn't exist. Can't create workertmp." % fdir)
fd, name = tempfile.mkstemp(prefix="wgunicorn-", dir=fdir)

# allows the process to write to the file
util.chown(name, cfg.uid, cfg.gid)
os.umask(old_umask)

# unlink the file so we don't leak temporary files
try:
if not IS_CYGWIN:
util.unlink(name)
self._tmp = os.fdopen(fd, 'w+b', 1)
except:
os.close(fd)
raise

self.spinner = 0

def notify(self):
try:
self.spinner = (self.spinner + 1) % 2
os.fchmod(self._tmp.fileno(), self.spinner)
except AttributeError:
# python < 2.6
self._tmp.truncate(0)
os.write(self._tmp.fileno(), b"X")

def last_update(self):
return os.fstat(self._tmp.fileno()).st_ctime

def fileno(self):
return self._tmp.fileno()

def close(self):
return self._tmp.close()
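WorkerTmp is the worker half of gunicorn's heartbeat: the temp file is unlinked right away, notify() flips the permission bits of the still-open descriptor, and the arbiter compares the file's st_ctime against its timeout. The trick in isolation (Unix-only; the sleep and the print are only for the demo):

import os
import tempfile
import time

fd, name = tempfile.mkstemp(prefix="wgunicorn-")
os.unlink(name)              # the file lives on through the open descriptor

spinner = 0

def notify():
    global spinner
    spinner = (spinner + 1) % 2
    os.fchmod(fd, spinner)   # bumps st_ctime without writing any data

def seconds_since_last_notify():
    return time.time() - os.fstat(fd).st_ctime

notify()
time.sleep(1)
print("last heartbeat %.1fs ago" % seconds_since_last_notify())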

+ 0  - 8  thesisenv/lib/python3.6/site-packages/hitcount/models.py

period = timezone.now() - timedelta(**kwargs)
return self.hit_set.filter(created__gte=period).count()


# def get_content_object_url(self):
# """
# Django has this in its contrib.comments.model file -- seems worth
# implementing though it may take a couple steps.
#
# """
# pass



@python_2_unicode_compatible
class Hit(models.Model):

+ 72  - 37  thesisenv/lib/python3.6/site-packages/pkg_resources/__init__.py

# Python 3.2 compatibility # Python 3.2 compatibility
import imp as _imp import imp as _imp


try:
FileExistsError
except NameError:
FileExistsError = OSError

from pkg_resources.extern import six from pkg_resources.extern import six
from pkg_resources.extern.six.moves import urllib, map, filter from pkg_resources.extern.six.moves import urllib, map, filter


__import__('pkg_resources.extern.packaging.markers') __import__('pkg_resources.extern.packaging.markers')




if (3, 0) < sys.version_info < (3, 3):
raise RuntimeError("Python 3.3 or later is required")
__metaclass__ = type


if (3, 0) < sys.version_info < (3, 4):
raise RuntimeError("Python 3.4 or later is required")


if six.PY2: if six.PY2:
# Those builtin exceptions are only defined in Python 3 # Those builtin exceptions are only defined in Python 3
XXX Currently this is the same as ``distutils.util.get_platform()``, but it XXX Currently this is the same as ``distutils.util.get_platform()``, but it
needs some hacks for Linux and Mac OS X. needs some hacks for Linux and Mac OS X.
""" """
try:
# Python 2.7 or >=3.2
from sysconfig import get_platform
except ImportError:
from distutils.util import get_platform
from sysconfig import get_platform


plat = get_platform() plat = get_platform()
if sys.platform == "darwin" and not plat.startswith('macosx-'): if sys.platform == "darwin" and not plat.startswith('macosx-'):
"""List of resource names in the directory (like ``os.listdir()``)""" """List of resource names in the directory (like ``os.listdir()``)"""




class WorkingSet(object):
class WorkingSet:
"""A collection of active distributions on sys.path (or a similar list)""" """A collection of active distributions on sys.path (or a similar list)"""


def __init__(self, entries=None): def __init__(self, entries=None):
distributions in the working set, otherwise only ones matching distributions in the working set, otherwise only ones matching
both `group` and `name` are yielded (in distribution order). both `group` and `name` are yielded (in distribution order).
""" """
for dist in self:
entries = dist.get_entry_map(group)
if name is None:
for ep in entries.values():
yield ep
elif name in entries:
yield entries[name]
return (
entry
for dist in self
for entry in dist.get_entry_map(group).values()
if name is None or name == entry.name
)


def run_script(self, requires, script_name): def run_script(self, requires, script_name):
"""Locate distribution for `requires` and run `script_name` script""" """Locate distribution for `requires` and run `script_name` script"""
return not req.marker or any(extra_evals) return not req.marker or any(extra_evals)




class Environment(object):
class Environment:
"""Searchable snapshot of distributions on a search path""" """Searchable snapshot of distributions on a search path"""


def __init__( def __init__(
`platform` is an optional string specifying the name of the platform `platform` is an optional string specifying the name of the platform
that platform-specific distributions must be compatible with. If that platform-specific distributions must be compatible with. If
unspecified, it defaults to the current platform. `python` is an unspecified, it defaults to the current platform. `python` is an
optional string naming the desired version of Python (e.g. ``'3.3'``);
optional string naming the desired version of Python (e.g. ``'3.6'``);
it defaults to the current version. it defaults to the current version.


You may explicitly set `platform` (and/or `python`) to ``None`` if you You may explicitly set `platform` (and/or `python`) to ``None`` if you


@classmethod @classmethod
def _register(cls): def _register(cls):
loader_cls = getattr(
importlib_machinery,
'SourceFileLoader',
type(None),
)
register_loader_type(loader_cls, cls)
loader_names = 'SourceFileLoader', 'SourcelessFileLoader',
for name in loader_names:
loader_cls = getattr(importlib_machinery, name, type(None))
register_loader_type(loader_cls, cls)




DefaultProvider._register() DefaultProvider._register()
importer = get_importer(path_item) importer = get_importer(path_item)
if importer is None: if importer is None:
return None return None
loader = importer.find_module(packageName)

# capture warnings due to #1111
with warnings.catch_warnings():
warnings.simplefilter("ignore")
loader = importer.find_module(packageName)

if loader is None: if loader is None:
return None return None
module = sys.modules.get(packageName) module = sys.modules.get(packageName)
parts = path_parts[:-module_parts] parts = path_parts[:-module_parts]
return safe_sys_path_index(_normalize_cached(os.sep.join(parts))) return safe_sys_path_index(_normalize_cached(os.sep.join(parts)))


if not isinstance(orig_path, list):
# Is this behavior useful when module.__path__ is not a list?
return
new_path = sorted(orig_path, key=position_in_sys_path)
new_path = [_normalize_cached(p) for p in new_path]


orig_path.sort(key=position_in_sys_path)
module.__path__[:] = [_normalize_cached(p) for p in orig_path]
if isinstance(module.__path__, list):
module.__path__[:] = new_path
else:
module.__path__ = new_path




def declare_namespace(packageName): def declare_namespace(packageName):
if packageName in _namespace_packages: if packageName in _namespace_packages:
return return


path, parent = sys.path, None
if '.' in packageName:
parent = '.'.join(packageName.split('.')[:-1])
path = sys.path
parent, _, _ = packageName.rpartition('.')

if parent:
declare_namespace(parent) declare_namespace(parent)
if parent not in _namespace_packages: if parent not in _namespace_packages:
__import__(parent) __import__(parent)


# Track what packages are namespaces, so when new path items are added, # Track what packages are namespaces, so when new path items are added,
# they can be updated # they can be updated
_namespace_packages.setdefault(parent, []).append(packageName)
_namespace_packages.setdefault(parent or None, []).append(packageName)
_namespace_packages.setdefault(packageName, []) _namespace_packages.setdefault(packageName, [])


for path_item in path: for path_item in path:


def normalize_path(filename): def normalize_path(filename):
"""Normalize a file/dir name for comparison purposes""" """Normalize a file/dir name for comparison purposes"""
return os.path.normcase(os.path.realpath(filename))
return os.path.normcase(os.path.realpath(_cygwin_patch(filename)))


def _cygwin_patch(filename): # pragma: nocover
"""
Contrary to POSIX 2008, on Cygwin, getcwd (3) contains
symlink components. Using
os.path.abspath() works around this limitation. A fix in os.getcwd()
would probably be better, in Cygwin even more so, except
that this seems to be by design...
"""
return os.path.abspath(filename) if sys.platform == 'cygwin' else filename




def _normalize_cached(filename, _cache={}): def _normalize_cached(filename, _cache={}):
).match ).match




class EntryPoint(object):
class EntryPoint:
"""Object representing an advertised importable object""" """Object representing an advertised importable object"""


def __init__(self, name, module_name, attrs=(), extras=(), dist=None): def __init__(self, name, module_name, attrs=(), extras=(), dist=None):
return safe_version(value.strip()) or None return safe_version(value.strip()) or None




class Distribution(object):
class Distribution:
"""Wrap an actual or potential sys.path entry w/metadata""" """Wrap an actual or potential sys.path entry w/metadata"""
PKG_INFO = 'PKG-INFO' PKG_INFO = 'PKG-INFO'


raise AttributeError(attr) raise AttributeError(attr)
return getattr(self._provider, attr) return getattr(self._provider, attr)


def __dir__(self):
return list(
set(super(Distribution, self).__dir__())
| set(
attr for attr in self._provider.__dir__()
if not attr.startswith('_')
)
)

if not hasattr(object, '__dir__'):
# python 2.7 not supported
del __dir__

@classmethod @classmethod
def from_filename(cls, filename, metadata=None, **kw): def from_filename(cls, filename, metadata=None, **kw):
return cls.from_location( return cls.from_location(
dirname, filename = split(path) dirname, filename = split(path)
if dirname and filename and not isdir(dirname): if dirname and filename and not isdir(dirname):
_bypass_ensure_directory(dirname) _bypass_ensure_directory(dirname)
mkdir(dirname, 0o755)
try:
mkdir(dirname, 0o755)
except FileExistsError:
pass




def split_sections(s): def split_sections(s):

+ 71  - 15  thesisenv/lib/python3.6/site-packages/pkg_resources/_vendor/appdirs.py

# - Mac OS X: http://developer.apple.com/documentation/MacOSX/Conceptual/BPFileSystem/index.html
# - XDG spec for Un*x: http://standards.freedesktop.org/basedir-spec/basedir-spec-latest.html


__version_info__ = (1, 4, 0)
__version_info__ = (1, 4, 3)
__version__ = '.'.join(map(str, __version_info__))








def site_data_dir(appname=None, appauthor=None, version=None, multipath=False):
"""Return full path to the user-shared data dir for this application.
r"""Return full path to the user-shared data dir for this application.


"appname" is the name of application.
If None, just the system directory is returned.
returned, or '/usr/local/share/<AppName>',
if XDG_DATA_DIRS is not set


Typical user data directories are:
Typical site data directories are:
Mac OS X: /Library/Application Support/<AppName>
Unix: /usr/local/share/<AppName> or /usr/share/<AppName>
Win XP: C:\Documents and Settings\All Users\Application Data\<AppAuthor>\<AppName>
<http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
for a discussion of issues.


Typical user data directories are:
Typical user config directories are:
Mac OS X: same as user_data_dir
Unix: ~/.config/<AppName> # or in $XDG_CONFIG_HOME, if defined
Win *: same as user_data_dir


For Unix, we follow the XDG spec and support $XDG_CONFIG_HOME.
That means, by deafult "~/.config/<AppName>".
That means, by default "~/.config/<AppName>".
"""
if system in ["win32", "darwin"]:
path = user_data_dir(appname, appauthor, None, roaming)




def site_config_dir(appname=None, appauthor=None, version=None, multipath=False):
"""Return full path to the user-shared data dir for this application.
r"""Return full path to the user-shared data dir for this application.


"appname" is the name of application.
If None, just the system directory is returned.
returned. By default, the first item from XDG_CONFIG_DIRS is
returned, or '/etc/xdg/<AppName>', if XDG_CONFIG_DIRS is not set


Typical user data directories are:
Typical site config directories are:
Mac OS X: same as site_data_dir
Unix: /etc/xdg/<AppName> or $XDG_CONFIG_DIRS[i]/<AppName> for each value in
$XDG_CONFIG_DIRS
return path




def user_state_dir(appname=None, appauthor=None, version=None, roaming=False):
r"""Return full path to the user-specific state dir for this application.

"appname" is the name of application.
If None, just the system directory is returned.
"appauthor" (only used on Windows) is the name of the
appauthor or distributing body for this application. Typically
it is the owning company name. This falls back to appname. You may
pass False to disable it.
"version" is an optional version path element to append to the
path. You might want to use this if you want multiple versions
of your app to be able to run independently. If used, this
would typically be "<major>.<minor>".
Only applied when appname is present.
"roaming" (boolean, default False) can be set True to use the Windows
roaming appdata directory. That means that for users on a Windows
network setup for roaming profiles, this user data will be
sync'd on login. See
<http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
for a discussion of issues.

Typical user state directories are:
Mac OS X: same as user_data_dir
Unix: ~/.local/state/<AppName> # or in $XDG_STATE_HOME, if defined
Win *: same as user_data_dir

For Unix, we follow this Debian proposal <https://wiki.debian.org/XDGBaseDirectorySpecification#state>
to extend the XDG spec and support $XDG_STATE_HOME.

That means, by default "~/.local/state/<AppName>".
"""
if system in ["win32", "darwin"]:
path = user_data_dir(appname, appauthor, None, roaming)
else:
path = os.getenv('XDG_STATE_HOME', os.path.expanduser("~/.local/state"))
if appname:
path = os.path.join(path, appname)
if appname and version:
path = os.path.join(path, version)
return path


def user_log_dir(appname=None, appauthor=None, version=None, opinion=True):
r"""Return full path to the user-specific log dir for this application.


"Logs" to the base app data dir for Windows, and "log" to the
base cache dir for Unix. See discussion below.


Typical user cache directories are:
Typical user log directories are:
Mac OS X: ~/Library/Logs/<AppName>
Unix: ~/.cache/<AppName>/log # or under $XDG_CACHE_HOME if defined
Win XP: C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName>\Logs


class AppDirs(object):
"""Convenience wrapper for getting application dirs."""
def __init__(self, appname, appauthor=None, version=None, roaming=False,
multipath=False):
def __init__(self, appname=None, appauthor=None, version=None,
roaming=False, multipath=False):
self.appname = appname
self.appauthor = appauthor
self.version = version
return user_cache_dir(self.appname, self.appauthor,
version=self.version)


@property
def user_state_dir(self):
return user_state_dir(self.appname, self.appauthor,
version=self.version)

@property
def user_log_dir(self):
return user_log_dir(self.appname, self.appauthor,
registry for this guarantees us the correct answer for all CSIDL_*
names.
"""
import _winreg
if PY3:
import winreg as _winreg
else:
import _winreg


shell_folder_name = {
"CSIDL_APPDATA": "AppData",
if has_high_char:
buf = array.zeros('c', buf_size)
kernel = win32.Kernel32.INSTANCE
if kernal.GetShortPathName(dir, buf, buf_size):
if kernel.GetShortPathName(dir, buf, buf_size):
dir = jna.Native.toString(buf.tostring()).rstrip("\0")


return dir
appname = "MyApp"
appauthor = "MyCompany"


props = ("user_data_dir", "site_data_dir",
"user_config_dir", "site_config_dir",
"user_cache_dir", "user_log_dir")
props = ("user_data_dir",
"user_config_dir",
"user_cache_dir",
"user_state_dir",
"user_log_dir",
"site_data_dir",
"site_config_dir")

print("-- app dirs %s --" % __version__)


print("-- app dirs (with optional 'version')")
dirs = AppDirs(appname, appauthor, version="1.0")
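The appdirs hunk above mostly adds user_state_dir (plus the matching AppDirs property) as part of the 1.4.0 to 1.4.3 bump. A short usage sketch of that API; the import path assumes the copy vendored under pkg_resources, and the commented paths are what one would typically see on Linux:

from pkg_resources._vendor.appdirs import AppDirs   # or `import appdirs` if installed standalone

dirs = AppDirs("MyApp", "MyCompany", version="1.0")
print(dirs.user_data_dir)     # e.g. ~/.local/share/MyApp/1.0
print(dirs.user_config_dir)   # e.g. ~/.config/MyApp/1.0
print(dirs.user_cache_dir)    # e.g. ~/.cache/MyApp/1.0
print(dirs.user_state_dir)    # e.g. ~/.local/state/MyApp/1.0 (added in this bump)
print(dirs.user_log_dir)      # e.g. ~/.cache/MyApp/1.0/log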

+ 71  - 25  thesisenv/lib/python3.6/site-packages/pkg_resources/_vendor/pyparsing.py

# module pyparsing.py # module pyparsing.py
# #
# Copyright (c) 2003-2016 Paul T. McGuire
# Copyright (c) 2003-2018 Paul T. McGuire
# #
# Permission is hereby granted, free of charge, to any person obtaining # Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the # a copy of this software and associated documentation files (the
__doc__ = \ __doc__ = \
""" """
pyparsing module - Classes and methods to define and execute parsing grammars pyparsing module - Classes and methods to define and execute parsing grammars
=============================================================================
The pyparsing module is an alternative approach to creating and executing simple grammars, The pyparsing module is an alternative approach to creating and executing simple grammars,
vs. the traditional lex/yacc approach, or the use of regular expressions. With pyparsing, you vs. the traditional lex/yacc approach, or the use of regular expressions. With pyparsing, you
- extra or missing whitespace (the above program will also handle "Hello,World!", "Hello , World !", etc.) - extra or missing whitespace (the above program will also handle "Hello,World!", "Hello , World !", etc.)
- quoted strings - quoted strings
- embedded comments - embedded comments
Getting Started -
-----------------
Visit the classes L{ParserElement} and L{ParseResults} to see the base classes that most other pyparsing
classes inherit from. Use the docstrings for examples of how to:
- construct literal match expressions from L{Literal} and L{CaselessLiteral} classes
- construct character word-group expressions using the L{Word} class
- see how to create repetitive expressions using L{ZeroOrMore} and L{OneOrMore} classes
- use L{'+'<And>}, L{'|'<MatchFirst>}, L{'^'<Or>}, and L{'&'<Each>} operators to combine simple expressions into more complex ones
- associate names with your parsed results using L{ParserElement.setResultsName}
- find some helpful expression short-cuts like L{delimitedList} and L{oneOf}
- find more useful common expressions in the L{pyparsing_common} namespace class
""" """
__version__ = "2.1.10"
__versionTime__ = "07 Oct 2016 01:31 UTC"
__version__ = "2.2.1"
__versionTime__ = "18 Sep 2018 00:49 UTC"
__author__ = "Paul McGuire <ptmcg@users.sourceforge.net>" __author__ = "Paul McGuire <ptmcg@users.sourceforge.net>"
import string import string
except ImportError: except ImportError:
from threading import RLock from threading import RLock
try:
# Python 3
from collections.abc import Iterable
from collections.abc import MutableMapping
except ImportError:
# Python 2.7
from collections import Iterable
from collections import MutableMapping
try: try:
from collections import OrderedDict as _OrderedDict from collections import OrderedDict as _OrderedDict
except ImportError: except ImportError:
except UnicodeEncodeError: except UnicodeEncodeError:
# Else encode it # Else encode it
ret = unicode(obj).encode(sys.getdefaultencoding(), 'xmlcharrefreplace') ret = unicode(obj).encode(sys.getdefaultencoding(), 'xmlcharrefreplace')
xmlcharref = Regex('&#\d+;')
xmlcharref = Regex(r'&#\d+;')
xmlcharref.setParseAction(lambda t: '\\u' + hex(int(t[0][2:-1]))[2:]) xmlcharref.setParseAction(lambda t: '\\u' + hex(int(t[0][2:-1]))[2:])
return xmlcharref.transformString(ret) return xmlcharref.transformString(ret)
return None return None
def getName(self): def getName(self):
"""
r"""
Returns the results name for this token expression. Useful when several Returns the results name for this token expression. Useful when several
different expressions might match at a particular location. different expressions might match at a particular location.
def __dir__(self): def __dir__(self):
return (dir(type(self)) + list(self.keys())) return (dir(type(self)) + list(self.keys()))
collections.MutableMapping.register(ParseResults)
MutableMapping.register(ParseResults)
def col (loc,strg): def col (loc,strg):
"""Returns current column within a string, counting newlines as line separators. """Returns current column within a string, counting newlines as line separators.
# special handling for Python 3.5.0 - extra deep call stack by 1 # special handling for Python 3.5.0 - extra deep call stack by 1
offset = -3 if system_version == (3,5,0) else -2 offset = -3 if system_version == (3,5,0) else -2
frame_summary = traceback.extract_stack(limit=-offset+limit-1)[offset] frame_summary = traceback.extract_stack(limit=-offset+limit-1)[offset]
return [(frame_summary.filename, frame_summary.lineno)]
return [frame_summary[:2]]
def extract_tb(tb, limit=0): def extract_tb(tb, limit=0):
frames = traceback.extract_tb(tb, limit=limit) frames = traceback.extract_tb(tb, limit=limit)
frame_summary = frames[-1] frame_summary = frames[-1]
return [(frame_summary.filename, frame_summary.lineno)]
return [frame_summary[:2]]
else: else:
extract_stack = traceback.extract_stack extract_stack = traceback.extract_stack
extract_tb = traceback.extract_tb extract_tb = traceback.extract_tb
def setParseAction( self, *fns, **kwargs ): def setParseAction( self, *fns, **kwargs ):
""" """
Define action to perform when successfully matching parse element definition.
Define one or more actions to perform when successfully matching parse element definition.
Parse action fn is a callable method with 0-3 arguments, called as C{fn(s,loc,toks)}, Parse action fn is a callable method with 0-3 arguments, called as C{fn(s,loc,toks)},
C{fn(loc,toks)}, C{fn(toks)}, or just C{fn()}, where: C{fn(loc,toks)}, C{fn(toks)}, or just C{fn()}, where:
- s = the original string being parsed (see note below) - s = the original string being parsed (see note below)
def addParseAction( self, *fns, **kwargs ): def addParseAction( self, *fns, **kwargs ):
""" """
Add parse action to expression's list of parse actions. See L{I{setParseAction}<setParseAction>}.
Add one or more parse actions to expression's list of parse actions. See L{I{setParseAction}<setParseAction>}.
See examples in L{I{copy}<copy>}. See examples in L{I{copy}<copy>}.
""" """
else: else:
preloc = loc preloc = loc
tokensStart = preloc tokensStart = preloc
if self.mayIndexError or loc >= len(instring):
if self.mayIndexError or preloc >= len(instring):
try: try:
loc,tokens = self.parseImpl( instring, preloc, doActions ) loc,tokens = self.parseImpl( instring, preloc, doActions )
except IndexError: except IndexError:
self.resultsName, self.resultsName,
asList=self.saveAsList and isinstance(tokens,(ParseResults,list)), asList=self.saveAsList and isinstance(tokens,(ParseResults,list)),
modal=self.modalResults ) modal=self.modalResults )
if debugging: if debugging:
#~ print ("Matched",self,"->",retTokens.asList()) #~ print ("Matched",self,"->",retTokens.asList())
if (self.debugActions[1] ): if (self.debugActions[1] ):
def clear(self): def clear(self):
cache.clear() cache.clear()
def cache_len(self):
return len(cache)
self.get = types.MethodType(get, self) self.get = types.MethodType(get, self)
self.set = types.MethodType(set, self) self.set = types.MethodType(set, self)
self.clear = types.MethodType(clear, self) self.clear = types.MethodType(clear, self)
self.__len__ = types.MethodType(cache_len, self)
if _OrderedDict is not None: if _OrderedDict is not None:
class _FifoCache(object): class _FifoCache(object):
def set(self, key, value): def set(self, key, value):
cache[key] = value cache[key] = value
if len(cache) > size:
cache.popitem(False)
while len(cache) > size:
try:
cache.popitem(False)
except KeyError:
pass
def clear(self): def clear(self):
cache.clear() cache.clear()
def cache_len(self):
return len(cache)
self.get = types.MethodType(get, self) self.get = types.MethodType(get, self)
self.set = types.MethodType(set, self) self.set = types.MethodType(set, self)
self.clear = types.MethodType(clear, self) self.clear = types.MethodType(clear, self)
self.__len__ = types.MethodType(cache_len, self)
else: else:
class _FifoCache(object): class _FifoCache(object):
def set(self, key, value): def set(self, key, value):
cache[key] = value cache[key] = value
if len(cache) > size:
while len(key_fifo) > size:
cache.pop(key_fifo.popleft(), None) cache.pop(key_fifo.popleft(), None)
key_fifo.append(key) key_fifo.append(key)
cache.clear() cache.clear()
key_fifo.clear() key_fifo.clear()
def cache_len(self):
return len(cache)
self.get = types.MethodType(get, self) self.get = types.MethodType(get, self)
self.set = types.MethodType(set, self) self.set = types.MethodType(set, self)
self.clear = types.MethodType(clear, self) self.clear = types.MethodType(clear, self)
self.__len__ = types.MethodType(cache_len, self)
# argument cache for optimizing repeated calls when backtracking through recursive expressions # argument cache for optimizing repeated calls when backtracking through recursive expressions
packrat_cache = {} # this is set later by enabledPackrat(); this is here so that resetCache() doesn't fail packrat_cache = {} # this is set later by enabledPackrat(); this is here so that resetCache() doesn't fail
cap_word = Word(alphas.upper(), alphas.lower()) cap_word = Word(alphas.upper(), alphas.lower())
print(cap_word.searchString("More than Iron, more than Lead, more than Gold I need Electricity")) print(cap_word.searchString("More than Iron, more than Lead, more than Gold I need Electricity"))
# the sum() builtin can be used to merge results into a single ParseResults object
print(sum(cap_word.searchString("More than Iron, more than Lead, more than Gold I need Electricity")))
prints:: prints::
['More', 'Iron', 'Lead', 'Gold', 'I']
[['More'], ['Iron'], ['Lead'], ['Gold'], ['I'], ['Electricity']]
['More', 'Iron', 'Lead', 'Gold', 'I', 'Electricity']
""" """
try: try:
return ParseResults([ t for t,s,e in self.scanString( instring, maxMatches ) ]) return ParseResults([ t for t,s,e in self.scanString( instring, maxMatches ) ])
warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
SyntaxWarning, stacklevel=2) SyntaxWarning, stacklevel=2)
return None return None
return And( [ self, And._ErrorStop(), other ] )
return self + And._ErrorStop() + other
def __rsub__(self, other ): def __rsub__(self, other ):
""" """
class Regex(Token): class Regex(Token):
"""
r"""
Token for matching strings that match a given regular expression. Token for matching strings that match a given regular expression.
Defined with string specifying the regular expression in a form recognized by the inbuilt Python re module. Defined with string specifying the regular expression in a form recognized by the inbuilt Python re module.
If the given regex contains named groups (defined using C{(?P<name>...)}), these will be preserved as If the given regex contains named groups (defined using C{(?P<name>...)}), these will be preserved as
# replace escaped characters # replace escaped characters
if self.escChar: if self.escChar:
ret = re.sub(self.escCharReplacePattern,"\g<1>",ret)
ret = re.sub(self.escCharReplacePattern, r"\g<1>", ret)
# replace escaped quotes # replace escaped quotes
if self.escQuote: if self.escQuote:
if isinstance( exprs, basestring ): if isinstance( exprs, basestring ):
self.exprs = [ ParserElement._literalStringClass( exprs ) ] self.exprs = [ ParserElement._literalStringClass( exprs ) ]
elif isinstance( exprs, collections.Iterable ):
elif isinstance( exprs, Iterable ):
exprs = list(exprs) exprs = list(exprs)
# if sequence of strings provided, wrap with Literal # if sequence of strings provided, wrap with Literal
if all(isinstance(expr, basestring) for expr in exprs): if all(isinstance(expr, basestring) for expr in exprs):
@traceParseAction @traceParseAction
def remove_duplicate_chars(tokens): def remove_duplicate_chars(tokens):
return ''.join(sorted(set(''.join(tokens)))
return ''.join(sorted(set(''.join(tokens))))
wds = OneOrMore(wd).setParseAction(remove_duplicate_chars) wds = OneOrMore(wd).setParseAction(remove_duplicate_chars)
print(wds.parseString("slkdjs sld sldd sdlf sdljf")) print(wds.parseString("slkdjs sld sldd sdlf sdljf"))
symbols = [] symbols = []
if isinstance(strs,basestring): if isinstance(strs,basestring):
symbols = strs.split() symbols = strs.split()
elif isinstance(strs, collections.Iterable):
elif isinstance(strs, Iterable):
symbols = list(strs) symbols = list(strs)
else: else:
warnings.warn("Invalid argument to oneOf, expected string or iterable", warnings.warn("Invalid argument to oneOf, expected string or iterable",
_escapedPunc = Word( _bslash, r"\[]-*.$+^?()~ ", exact=2 ).setParseAction(lambda s,l,t:t[0][1]) _escapedPunc = Word( _bslash, r"\[]-*.$+^?()~ ", exact=2 ).setParseAction(lambda s,l,t:t[0][1])
_escapedHexChar = Regex(r"\\0?[xX][0-9a-fA-F]+").setParseAction(lambda s,l,t:unichr(int(t[0].lstrip(r'\0x'),16))) _escapedHexChar = Regex(r"\\0?[xX][0-9a-fA-F]+").setParseAction(lambda s,l,t:unichr(int(t[0].lstrip(r'\0x'),16)))
_escapedOctChar = Regex(r"\\0[0-7]+").setParseAction(lambda s,l,t:unichr(int(t[0][1:],8))) _escapedOctChar = Regex(r"\\0[0-7]+").setParseAction(lambda s,l,t:unichr(int(t[0][1:],8)))
_singleChar = _escapedPunc | _escapedHexChar | _escapedOctChar | Word(printables, excludeChars=r'\]', exact=1) | Regex(r"\w", re.UNICODE)
_singleChar = _escapedPunc | _escapedHexChar | _escapedOctChar | CharsNotIn(r'\]', exact=1)
_charRange = Group(_singleChar + Suppress("-") + _singleChar) _charRange = Group(_singleChar + Suppress("-") + _singleChar)
_reBracketExpr = Literal("[") + Optional("^").setResultsName("negate") + Group( OneOrMore( _charRange | _singleChar ) ).setResultsName("body") + "]" _reBracketExpr = Literal("[") + Optional("^").setResultsName("negate") + Group( OneOrMore( _charRange | _singleChar ) ).setResultsName("body") + "]"
constants C{opAssoc.RIGHT} and C{opAssoc.LEFT}. constants C{opAssoc.RIGHT} and C{opAssoc.LEFT}.
- parseAction is the parse action to be associated with - parseAction is the parse action to be associated with
expressions matching this operator expression (the expressions matching this operator expression (the
parse action tuple member may be omitted)
parse action tuple member may be omitted); if the parse action
is passed a tuple or list of functions, this is equivalent to
calling C{setParseAction(*fn)} (L{ParserElement.setParseAction})
- lpar - expression for matching left-parentheses (default=C{Suppress('(')})
- rpar - expression for matching right-parentheses (default=C{Suppress(')')})
else:
raise ValueError("operator must indicate right or left associativity")
if pa:
matchExpr.setParseAction( pa )
if isinstance(pa, (tuple, list)):
matchExpr.setParseAction(*pa)
else:
matchExpr.setParseAction(pa)
thisExpr <<= ( matchExpr.setName(termName) | lastExpr )
lastExpr = thisExpr
ret <<= lastExpr
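
The hunk above lets the optional parse-action member of an infixNotation operator tuple be a tuple or list of callables, which is unpacked into setParseAction(*fn). A small usage sketch, assuming pyparsing 2.2.2+ where this form is supported:

from pyparsing import infixNotation, opAssoc, pyparsing_common

def note(s, l, t):
    print("matched:", t)           # first action: just log the grouped match

def evaluate(s, l, t):
    vals = list(t[0])[0::2]        # operands of the grouped [n, '+', n, ...] run
    return sum(vals)

expr = infixNotation(pyparsing_common.integer, [
    ("+", 2, opAssoc.LEFT, (note, evaluate)),   # tuple -> setParseAction(note, evaluate)
])
print(expr.parseString("1 + 2 + 3"))            # -> [6]
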

+ 1
- 1
thesisenv/lib/python3.6/site-packages/pkg_resources/extern/__init__.py View File

# on later Python versions to cause relative imports
# in the vendor package to resolve the same modules
# as those going through this importer.
if sys.version_info > (3, 3):
if prefix and sys.version_info > (3, 3):
del sys.modules[extant]
return mod
except ImportError:
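
The added `prefix and` guard means the already-imported top-level module is only dropped from sys.modules when the importer is aliasing under a prefix, so relative imports inside the vendored copy resolve against the same modules. A stripped-down sketch of the aliasing idea (hypothetical names, not the pkg_resources implementation):

import importlib
import sys

def alias_vendored(alias, vendored_name, fallback_name):
    """Import the vendored copy (or the system fallback) and expose it as `alias`."""
    for real_name in (vendored_name, fallback_name):
        try:
            module = importlib.import_module(real_name)
        except ImportError:
            continue
        sys.modules[alias] = module   # later `import alias` hits this cache entry
        return module
    raise ImportError(alias)

# six = alias_vendored("mypkg.extern.six", "mypkg._vendor.six", "six")
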

+ 3
- 2
thesisenv/lib/python3.6/site-packages/pkg_resources/py31compat.py View File

import errno
import sys


from .extern import six



def _makedirs_31(path, exist_ok=False):
try:
# and exists_ok considerations are disentangled.
# See https://github.com/pypa/setuptools/pull/1083#issuecomment-315168663
needs_makedirs = (
sys.version_info < (3, 2, 5) or
(3, 3) <= sys.version_info < (3, 3, 6) or
six.PY2 or
(3, 4) <= sys.version_info < (3, 4, 1)
)
makedirs = _makedirs_31 if needs_makedirs else os.makedirs
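
The body of _makedirs_31 is elided in this hunk; a typical exist_ok shim of this shape looks roughly like the sketch below (an illustration only, not necessarily the vendored code):

import errno
import os

def makedirs_compat(path, exist_ok=False):
    """os.makedirs with exist_ok emulated for interpreters that mishandle it."""
    try:
        os.makedirs(path)
    except OSError as exc:
        if not (exist_ok and exc.errno == errno.EEXIST and os.path.isdir(path)):
            raise
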

+ 0
- 36
thesisenv/lib/python3.6/site-packages/setuptools-39.0.1.dist-info/DESCRIPTION.rst View File

.. image:: https://img.shields.io/pypi/v/setuptools.svg
:target: https://pypi.org/project/setuptools

.. image:: https://readthedocs.org/projects/setuptools/badge/?version=latest
:target: https://setuptools.readthedocs.io

.. image:: https://img.shields.io/travis/pypa/setuptools/master.svg?label=Linux%20build%20%40%20Travis%20CI
:target: https://travis-ci.org/pypa/setuptools

.. image:: https://img.shields.io/appveyor/ci/jaraco/setuptools/master.svg?label=Windows%20build%20%40%20Appveyor
:target: https://ci.appveyor.com/project/jaraco/setuptools/branch/master

.. image:: https://img.shields.io/pypi/pyversions/setuptools.svg

See the `Installation Instructions
<https://packaging.python.org/installing/>`_ in the Python Packaging
User's Guide for instructions on installing, upgrading, and uninstalling
Setuptools.

The project is `maintained at GitHub <https://github.com/pypa/setuptools>`_.

Questions and comments should be directed to the `distutils-sig
mailing list <http://mail.python.org/pipermail/distutils-sig/>`_.
Bug reports and especially tested patches may be
submitted directly to the `bug tracker
<https://github.com/pypa/setuptools/issues>`_.


Code of Conduct
---------------

Everyone interacting in the setuptools project's codebases, issue trackers,
chat rooms, and mailing lists is expected to follow the
`PyPA Code of Conduct <https://www.pypa.io/en/latest/code-of-conduct/>`_.



+ 0
- 1
thesisenv/lib/python3.6/site-packages/setuptools-39.0.1.dist-info/metadata.json View File

{"classifiers": ["Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", "Topic :: Software Development :: Libraries :: Python Modules", "Topic :: System :: Archiving :: Packaging", "Topic :: System :: Systems Administration", "Topic :: Utilities"], "description_content_type": "text/x-rst; charset=UTF-8", "extensions": {"python.commands": {"wrap_console": {"easy_install": "setuptools.command.easy_install:main", "easy_install-3.6": "setuptools.command.easy_install:main"}}, "python.details": {"contacts": [{"email": "distutils-sig@python.org", "name": "Python Packaging Authority", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst", "license": "LICENSE.txt"}, "project_urls": {"Home": "https://github.com/pypa/setuptools"}}, "python.exports": {"console_scripts": {"easy_install": "setuptools.command.easy_install:main", "easy_install-3.6": "setuptools.command.easy_install:main"}, "distutils.commands": {"alias": "setuptools.command.alias:alias", "bdist_egg": "setuptools.command.bdist_egg:bdist_egg", "bdist_rpm": "setuptools.command.bdist_rpm:bdist_rpm", "bdist_wininst": "setuptools.command.bdist_wininst:bdist_wininst", "build_clib": "setuptools.command.build_clib:build_clib", "build_ext": "setuptools.command.build_ext:build_ext", "build_py": "setuptools.command.build_py:build_py", "develop": "setuptools.command.develop:develop", "dist_info": "setuptools.command.dist_info:dist_info", "easy_install": "setuptools.command.easy_install:easy_install", "egg_info": "setuptools.command.egg_info:egg_info", "install": "setuptools.command.install:install", "install_egg_info": "setuptools.command.install_egg_info:install_egg_info", "install_lib": "setuptools.command.install_lib:install_lib", "install_scripts": "setuptools.command.install_scripts:install_scripts", "register": "setuptools.command.register:register", "rotate": "setuptools.command.rotate:rotate", "saveopts": "setuptools.command.saveopts:saveopts", "sdist": "setuptools.command.sdist:sdist", "setopt": "setuptools.command.setopt:setopt", "test": "setuptools.command.test:test", "upload": "setuptools.command.upload:upload", "upload_docs": "setuptools.command.upload_docs:upload_docs"}, "distutils.setup_keywords": {"convert_2to3_doctests": "setuptools.dist:assert_string_list", "dependency_links": "setuptools.dist:assert_string_list", "eager_resources": "setuptools.dist:assert_string_list", "entry_points": "setuptools.dist:check_entry_points", "exclude_package_data": "setuptools.dist:check_package_data", "extras_require": "setuptools.dist:check_extras", "include_package_data": "setuptools.dist:assert_bool", "install_requires": "setuptools.dist:check_requirements", "namespace_packages": "setuptools.dist:check_nsp", "package_data": "setuptools.dist:check_package_data", "packages": "setuptools.dist:check_packages", "python_requires": "setuptools.dist:check_specifier", "setup_requires": "setuptools.dist:check_requirements", "test_loader": "setuptools.dist:check_importable", "test_runner": "setuptools.dist:check_importable", "test_suite": "setuptools.dist:check_test_suite", "tests_require": "setuptools.dist:check_requirements", "use_2to3": 
"setuptools.dist:assert_bool", "use_2to3_exclude_fixers": "setuptools.dist:assert_string_list", "use_2to3_fixers": "setuptools.dist:assert_string_list", "zip_safe": "setuptools.dist:assert_bool"}, "egg_info.writers": {"PKG-INFO": "setuptools.command.egg_info:write_pkg_info", "dependency_links.txt": "setuptools.command.egg_info:overwrite_arg", "depends.txt": "setuptools.command.egg_info:warn_depends_obsolete", "eager_resources.txt": "setuptools.command.egg_info:overwrite_arg", "entry_points.txt": "setuptools.command.egg_info:write_entries", "namespace_packages.txt": "setuptools.command.egg_info:overwrite_arg", "requires.txt": "setuptools.command.egg_info:write_requirements", "top_level.txt": "setuptools.command.egg_info:write_toplevel_names"}, "setuptools.installation": {"eggsecutable": "setuptools.command.easy_install:bootstrap"}}}, "extras": ["certs", "ssl"], "generator": "bdist_wheel (0.30.0)", "keywords": ["CPAN", "PyPI", "distutils", "eggs", "package", "management"], "metadata_version": "2.0", "name": "setuptools", "project_url": "Documentation, https://setuptools.readthedocs.io/", "requires_python": ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*", "run_requires": [{"extra": "certs", "requires": ["certifi (==2016.9.26)"]}, {"environment": "sys_platform=='win32'", "extra": "ssl", "requires": ["wincertstore (==0.2)"]}], "summary": "Easily download, build, install, upgrade, and uninstall Python packages", "version": "39.0.1"}

+ 1
- 0
thesisenv/lib/python3.6/site-packages/setuptools-40.5.0.dist-info/INSTALLER View File

pip

thesisenv/lib/python3.6/site-packages/setuptools-39.0.1.dist-info/LICENSE.txt → thesisenv/lib/python3.6/site-packages/setuptools-40.5.0.dist-info/LICENSE View File


thesisenv/lib/python3.6/site-packages/setuptools-39.0.1.dist-info/METADATA → thesisenv/lib/python3.6/site-packages/setuptools-40.5.0.dist-info/METADATA View File

Metadata-Version: 2.0
Metadata-Version: 2.1
Name: setuptools
Version: 39.0.1
Version: 40.5.0
Summary: Easily download, build, install, upgrade, and uninstall Python packages
Home-page: https://github.com/pypa/setuptools
Author: Python Packaging Authority
Classifier: Programming Language :: Python :: 2
Classifier: Programming Language :: Python :: 2.7
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.3
Classifier: Programming Language :: Python :: 3.4
Classifier: Programming Language :: Python :: 3.5
Classifier: Programming Language :: Python :: 3.6
Classifier: Topic :: System :: Archiving :: Packaging
Classifier: Topic :: System :: Systems Administration
Classifier: Topic :: Utilities
Requires-Python: >=2.7,!=3.0.*,!=3.1.*,!=3.2.*
Requires-Python: >=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*
Description-Content-Type: text/x-rst; charset=UTF-8
Provides-Extra: certs
Provides-Extra: ssl
Provides-Extra: certs
Requires-Dist: certifi (==2016.9.26); extra == 'certs'
Provides-Extra: ssl
Requires-Dist: wincertstore (==0.2); sys_platform=='win32' and extra == 'ssl'
Requires-Dist: wincertstore (==0.2); (sys_platform=='win32') and extra == 'ssl'


.. image:: https://img.shields.io/pypi/v/setuptools.svg
:target: https://pypi.org/project/setuptools
.. image:: https://img.shields.io/travis/pypa/setuptools/master.svg?label=Linux%20build%20%40%20Travis%20CI
:target: https://travis-ci.org/pypa/setuptools


.. image:: https://img.shields.io/appveyor/ci/jaraco/setuptools/master.svg?label=Windows%20build%20%40%20Appveyor
:target: https://ci.appveyor.com/project/jaraco/setuptools/branch/master
.. image:: https://img.shields.io/appveyor/ci/pypa/setuptools/master.svg?label=Windows%20build%20%40%20Appveyor
:target: https://ci.appveyor.com/project/pypa/setuptools/branch/master

.. image:: https://img.shields.io/codecov/c/github/pypa/setuptools/master.svg
:target: https://codecov.io/gh/pypa/setuptools


.. image:: https://img.shields.io/pypi/pyversions/setuptools.svg


.. image:: https://tidelift.com/badges/github/pypa/setuptools
:target: https://tidelift.com/subscription/pkg/pypi-setuptools?utm_source=pypi-setuptools&utm_medium=readme

See the `Installation Instructions
<https://packaging.python.org/installing/>`_ in the Python Packaging
User's Guide for instructions on installing, upgrading, and uninstalling
Setuptools.


The project is `maintained at GitHub <https://github.com/pypa/setuptools>`_.
The project is `maintained at GitHub <https://github.com/pypa/setuptools>`_
by the `Setuptools Developers
<https://github.com/orgs/pypa/teams/setuptools-developers>`_.


Questions and comments should be directed to the `distutils-sig
mailing list <http://mail.python.org/pipermail/distutils-sig/>`_.
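
The Requires-Python change above adds !=3.3.* to the specifier, i.e. setuptools 40.5.0 drops Python 3.3 support. A quick way to evaluate such a specifier against an interpreter version, using the packaging library that setuptools vendors, might look like:

from packaging.specifiers import SpecifierSet
from packaging.version import Version

spec = SpecifierSet(">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*")
print(Version("3.6.6") in spec)   # True  - the Python 3.6 env used here is accepted
print(Version("3.3.7") in spec)   # False - 3.3 is excluded by the new specifier
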

thesisenv/lib/python3.6/site-packages/setuptools-39.0.1.dist-info/RECORD → thesisenv/lib/python3.6/site-packages/setuptools-40.5.0.dist-info/RECORD View File

../../../bin/easy_install,sha256=1HY9dEozZxF27JG8uBhjj5mzBtKNnJcCvl9bsqfUFVQ,262
../../../bin/easy_install-3.6,sha256=1HY9dEozZxF27JG8uBhjj5mzBtKNnJcCvl9bsqfUFVQ,262
__pycache__/easy_install.cpython-36.pyc,,
easy_install.py,sha256=MDC9vt5AxDsXX5qcKlBz2TnW6Tpuv_AobnfhCJ9X3PM,126
pkg_resources/__init__.py,sha256=YQ4_WQnPztMsUy1yuvp7ZRBPK9IhOyhgosLpvkFso1I,103551
pkg_resources/py31compat.py,sha256=-ysVqoxLetAnL94uM0kHkomKQTC1JZLN2ZUjqUhMeKE,600
pkg_resources/__init__.py,sha256=1CH-AzmMwXmdx_7bCm03hV11azPdW64rzVum2ylDE7k,104406
pkg_resources/__pycache__/__init__.cpython-36.pyc,,
pkg_resources/__pycache__/py31compat.cpython-36.pyc,,
pkg_resources/_vendor/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
pkg_resources/_vendor/appdirs.py,sha256=tgGaL0m4Jo2VeuGfoOOifLv7a7oUEJu2n1vRkqoPw-0,22374
pkg_resources/_vendor/pyparsing.py,sha256=PifeLY3-WhIcBVzLtv0U4T_pwDtPruBhBCkg5vLqa28,229867
pkg_resources/_vendor/six.py,sha256=A6hdJZVjI3t_geebZ9BzUvwRrIXo0lfwzQlM2LcKyas,30098
pkg_resources/_vendor/__pycache__/__init__.cpython-36.pyc,,
pkg_resources/_vendor/__pycache__/appdirs.cpython-36.pyc,,
pkg_resources/_vendor/__pycache__/pyparsing.cpython-36.pyc,,
pkg_resources/_vendor/__pycache__/six.cpython-36.pyc,,
pkg_resources/_vendor/appdirs.py,sha256=MievUEuv3l_mQISH5SF0shDk_BNhHHzYiAPrT3ITN4I,24701
pkg_resources/_vendor/packaging/__about__.py,sha256=zkcCPTN_6TcLW0Nrlg0176-R1QQ_WVPTm8sz1R4-HjM,720
pkg_resources/_vendor/packaging/__init__.py,sha256=_vNac5TrzwsrzbOFIbF-5cHqc_Y2aPT2D7zrIR06BOo,513
pkg_resources/_vendor/packaging/__pycache__/__about__.cpython-36.pyc,,
pkg_resources/_vendor/packaging/__pycache__/__init__.cpython-36.pyc,,
pkg_resources/_vendor/packaging/__pycache__/_compat.cpython-36.pyc,,
pkg_resources/_vendor/packaging/__pycache__/_structures.cpython-36.pyc,,
pkg_resources/_vendor/packaging/__pycache__/markers.cpython-36.pyc,,
pkg_resources/_vendor/packaging/__pycache__/requirements.cpython-36.pyc,,
pkg_resources/_vendor/packaging/__pycache__/specifiers.cpython-36.pyc,,
pkg_resources/_vendor/packaging/__pycache__/utils.cpython-36.pyc,,
pkg_resources/_vendor/packaging/__pycache__/version.cpython-36.pyc,,
pkg_resources/_vendor/packaging/_compat.py,sha256=Vi_A0rAQeHbU-a9X0tt1yQm9RqkgQbDSxzRw8WlU9kA,860
pkg_resources/_vendor/packaging/_structures.py,sha256=RImECJ4c_wTlaTYYwZYLHEiebDMaAJmK1oPARhw1T5o,1416
pkg_resources/_vendor/packaging/markers.py,sha256=uEcBBtGvzqltgnArqb9c4RrcInXezDLos14zbBHhWJo,8248
pkg_resources/_vendor/packaging/specifiers.py,sha256=SAMRerzO3fK2IkFZCaZkuwZaL_EGqHNOz4pni4vhnN0,28025
pkg_resources/_vendor/packaging/utils.py,sha256=3m6WvPm6NNxE8rkTGmn0r75B_GZSGg7ikafxHsBN1WA,421
pkg_resources/_vendor/packaging/version.py,sha256=OwGnxYfr2ghNzYx59qWIBkrK3SnB6n-Zfd1XaLpnnM0,11556
pkg_resources/extern/__init__.py,sha256=JUtlHHvlxHSNuB4pWqNjcx7n6kG-fwXg7qmJ2zNJlIY,2487
setuptools/__init__.py,sha256=WWIdCbFJnZ9fZoaWDN_x1vDA_Rkm-Sc15iKvPtIYKFs,5700
setuptools/archive_util.py,sha256=kw8Ib_lKjCcnPKNbS7h8HztRVK0d5RacU3r_KRdVnmM,6592
setuptools/build_meta.py,sha256=FllaKTr1vSJyiUeRjVJEZmeEaRzhYueNlimtcwaJba8,5671
setuptools/cli-32.exe,sha256=dfEuovMNnA2HLa3jRfMPVi5tk4R7alCbpTvuxtCyw0Y,65536
setuptools/cli-64.exe,sha256=KLABu5pyrnokJCv6skjXZ6GsXeyYHGcqOUT3oHI3Xpo,74752
setuptools/cli.exe,sha256=dfEuovMNnA2HLa3jRfMPVi5tk4R7alCbpTvuxtCyw0Y,65536
setuptools/config.py,sha256=tVYBM3w1U_uBRRTOZydflxyZ_IrTJT5odlZz3cbuhSw,16381
setuptools/dep_util.py,sha256=fgixvC1R7sH3r13ktyf7N0FALoqEXL1cBarmNpSEoWg,935
setuptools/depends.py,sha256=hC8QIDcM3VDpRXvRVA6OfL9AaQfxvhxHcN_w6sAyNq8,5837
setuptools/dist.py,sha256=_wCSFiGqwyaOUTj0tBjqZF2bqW9aEVu4W1D4gmsveno,42514
setuptools/extension.py,sha256=uc6nHI-MxwmNCNPbUiBnybSyqhpJqjbhvOQ-emdvt_E,1729
setuptools/glibc.py,sha256=X64VvGPL2AbURKwYRsWJOXXGAYOiF_v2qixeTkAULuU,3146
setuptools/glob.py,sha256=Y-fpv8wdHZzv9DPCaGACpMSBWJ6amq_1e0R_i8_el4w,5207
setuptools/gui-32.exe,sha256=XBr0bHMA6Hpz2s9s9Bzjl-PwXfa9nH4ie0rFn4V2kWA,65536
setuptools/gui-64.exe,sha256=aYKMhX1IJLn4ULHgWX0sE0yREUt6B3TEHf_jOw6yNyE,75264
setuptools/gui.exe,sha256=XBr0bHMA6Hpz2s9s9Bzjl-PwXfa9nH4ie0rFn4V2kWA,65536
setuptools/launch.py,sha256=sd7ejwhBocCDx_wG9rIs0OaZ8HtmmFU8ZC6IR_S0Lvg,787
setuptools/lib2to3_ex.py,sha256=t5e12hbR2pi9V4ezWDTB4JM-AISUnGOkmcnYHek3xjg,2013
setuptools/monkey.py,sha256=zZGTH7p0xeXQKLmEwJTPIE4m5m7fJeHoAsxyv5M8e_E,5789
setuptools/msvc.py,sha256=8EiV9ypb3EQJQssPcH1HZbdNsbRvqsFnJ7wPFEGwFIo,40877
setuptools/namespaces.py,sha256=F0Nrbv8KCT2OrO7rwa03om4N4GZKAlnce-rr-cgDQa8,3199
setuptools/package_index.py,sha256=NEsrNXnt_9gGP-nCCYzV-0gk15lXAGO7RghRxpfqLqE,40142
setuptools/pep425tags.py,sha256=NuGMx1gGif7x6iYemh0LfgBr_FZF5GFORIbgmMdU8J4,10882
setuptools/py27compat.py,sha256=3mwxRMDk5Q5O1rSXOERbQDXhFqwDJhhUitfMW_qpUCo,536
setuptools/py31compat.py,sha256=XuU1HCsGE_3zGvBRIhYw2iB-IhCFK4-Pxw_jMiqdNVk,1192
setuptools/py33compat.py,sha256=NKS84nl4LjLIoad6OQfgmygZn4mMvrok_b1N1tzebew,1182
setuptools/py36compat.py,sha256=VUDWxmu5rt4QHlGTRtAFu6W5jvfL6WBjeDAzeoBy0OM,2891
setuptools/sandbox.py,sha256=9UbwfEL5QY436oMI1LtFWohhoZ-UzwHvGyZjUH_qhkw,14276
setuptools/script (dev).tmpl,sha256=f7MR17dTkzaqkCMSVseyOCMVrPVSMdmTQsaB8cZzfuI,201
setuptools/script.tmpl,sha256=WGTt5piezO27c-Dbx6l5Q4T3Ff20A5z7872hv3aAhYY,138
setuptools/site-patch.py,sha256=BVt6yIrDMXJoflA5J6DJIcsJUfW_XEeVhOzelTTFDP4,2307
setuptools/ssl_support.py,sha256=YBDJsCZjSp62CWjxmSkke9kn9rhHHj25Cus6zhJRW3c,8492
setuptools/unicode_utils.py,sha256=NOiZ_5hD72A6w-4wVj8awHFM3n51Kmw1Ic_vx15XFqw,996
setuptools/version.py,sha256=og_cuZQb0QI6ukKZFfZWPlr1HgJBPPn2vO2m_bI9ZTE,144
setuptools/wheel.py,sha256=yF9usxMvpwnymV-oOo5mfDiv3E8jrKkbDEItT7_kjBs,7230
setuptools/windows_support.py,sha256=5GrfqSP2-dLGJoZTq2g6dCKkyQxxa2n5IQiXlJCoYEE,714
pkg_resources/_vendor/pyparsing.py,sha256=tmrp-lu-qO1i75ZzIN5A12nKRRD1Cm4Vpk-5LR9rims,232055
pkg_resources/_vendor/six.py,sha256=A6hdJZVjI3t_geebZ9BzUvwRrIXo0lfwzQlM2LcKyas,30098
pkg_resources/extern/__init__.py,sha256=cHiEfHuLmm6rs5Ve_ztBfMI7Lr31vss-D4wkqF5xzlI,2498
pkg_resources/extern/__pycache__/__init__.cpython-36.pyc,,
pkg_resources/py31compat.py,sha256=-WQ0e4c3RG_acdhwC3gLiXhP_lg4G5q7XYkZkQg0gxU,558
setuptools-40.5.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
setuptools-40.5.0.dist-info/LICENSE,sha256=wyo6w5WvYyHv0ovnPQagDw22q4h9HCHU_sRhKNIFbVo,1078
setuptools-40.5.0.dist-info/METADATA,sha256=1RTd_N2pDcs4krS8OSoE64595Z4aYHpRXEe-dq6vlek,3211
setuptools-40.5.0.dist-info/RECORD,,
setuptools-40.5.0.dist-info/WHEEL,sha256=CihQvCnsGZQBGAHLEUMf0IdA4fRduS_NBUTMgCTtvPM,110
setuptools-40.5.0.dist-info/dependency_links.txt,sha256=HlkCFkoK5TbZ5EMLbLKYhLcY_E31kBWD8TqW2EgmatQ,239
setuptools-40.5.0.dist-info/entry_points.txt,sha256=jBqCYDlVjl__sjYFGXo1JQGIMAYFJE-prYWUtnMZEew,2990
setuptools-40.5.0.dist-info/top_level.txt,sha256=2HUXVVwA4Pff1xgTFr3GsTXXKaPaO6vlG6oNJ_4u4Tg,38
setuptools-40.5.0.dist-info/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
setuptools/__init__.py,sha256=dsZD3T-_2htjtVAELRWeu83BFxjGaTFB0h3IO7PGi3U,5878
setuptools/__pycache__/__init__.cpython-36.pyc,,
setuptools/__pycache__/archive_util.cpython-36.pyc,,
setuptools/__pycache__/build_meta.cpython-36.pyc,,
setuptools/__pycache__/config.cpython-36.pyc,,
setuptools/__pycache__/dep_util.cpython-36.pyc,,
setuptools/__pycache__/depends.cpython-36.pyc,,
setuptools/__pycache__/dist.cpython-36.pyc,,
setuptools/__pycache__/extension.cpython-36.pyc,,
setuptools/__pycache__/glibc.cpython-36.pyc,,
setuptools/__pycache__/glob.cpython-36.pyc,,
setuptools/__pycache__/launch.cpython-36.pyc,,
setuptools/__pycache__/lib2to3_ex.cpython-36.pyc,,
setuptools/__pycache__/monkey.cpython-36.pyc,,
setuptools/__pycache__/msvc.cpython-36.pyc,,
setuptools/__pycache__/namespaces.cpython-36.pyc,,
setuptools/__pycache__/package_index.cpython-36.pyc,,
setuptools/__pycache__/pep425tags.cpython-36.pyc,,
setuptools/__pycache__/py27compat.cpython-36.pyc,,
setuptools/__pycache__/py31compat.cpython-36.pyc,,
setuptools/__pycache__/py33compat.cpython-36.pyc,,
setuptools/__pycache__/py36compat.cpython-36.pyc,,
setuptools/__pycache__/sandbox.cpython-36.pyc,,
setuptools/__pycache__/site-patch.cpython-36.pyc,,
setuptools/__pycache__/ssl_support.cpython-36.pyc,,
setuptools/__pycache__/unicode_utils.cpython-36.pyc,,
setuptools/__pycache__/version.cpython-36.pyc,,
setuptools/__pycache__/wheel.cpython-36.pyc,,
setuptools/__pycache__/windows_support.cpython-36.pyc,,
setuptools/_vendor/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
setuptools/_vendor/pyparsing.py,sha256=PifeLY3-WhIcBVzLtv0U4T_pwDtPruBhBCkg5vLqa28,229867
setuptools/_vendor/six.py,sha256=A6hdJZVjI3t_geebZ9BzUvwRrIXo0lfwzQlM2LcKyas,30098
setuptools/_vendor/__pycache__/__init__.cpython-36.pyc,,
setuptools/_vendor/__pycache__/pyparsing.cpython-36.pyc,,
setuptools/_vendor/__pycache__/six.cpython-36.pyc,,
setuptools/_vendor/packaging/__about__.py,sha256=zkcCPTN_6TcLW0Nrlg0176-R1QQ_WVPTm8sz1R4-HjM,720
setuptools/_vendor/packaging/__init__.py,sha256=_vNac5TrzwsrzbOFIbF-5cHqc_Y2aPT2D7zrIR06BOo,513
setuptools/_vendor/packaging/__pycache__/__about__.cpython-36.pyc,,
setuptools/_vendor/packaging/__pycache__/__init__.cpython-36.pyc,,
setuptools/_vendor/packaging/__pycache__/_compat.cpython-36.pyc,,
setuptools/_vendor/packaging/__pycache__/_structures.cpython-36.pyc,,
setuptools/_vendor/packaging/__pycache__/markers.cpython-36.pyc,,
setuptools/_vendor/packaging/__pycache__/requirements.cpython-36.pyc,,
setuptools/_vendor/packaging/__pycache__/specifiers.cpython-36.pyc,,
setuptools/_vendor/packaging/__pycache__/utils.cpython-36.pyc,,
setuptools/_vendor/packaging/__pycache__/version.cpython-36.pyc,,
setuptools/_vendor/packaging/_compat.py,sha256=Vi_A0rAQeHbU-a9X0tt1yQm9RqkgQbDSxzRw8WlU9kA,860
setuptools/_vendor/packaging/_structures.py,sha256=RImECJ4c_wTlaTYYwZYLHEiebDMaAJmK1oPARhw1T5o,1416
setuptools/_vendor/packaging/markers.py,sha256=Gvpk9EY20yKaMTiKgQZ8yFEEpodqVgVYtfekoic1Yts,8239
setuptools/_vendor/packaging/specifiers.py,sha256=SAMRerzO3fK2IkFZCaZkuwZaL_EGqHNOz4pni4vhnN0,28025
setuptools/_vendor/packaging/utils.py,sha256=3m6WvPm6NNxE8rkTGmn0r75B_GZSGg7ikafxHsBN1WA,421
setuptools/_vendor/packaging/version.py,sha256=OwGnxYfr2ghNzYx59qWIBkrK3SnB6n-Zfd1XaLpnnM0,11556
setuptools/_vendor/pyparsing.py,sha256=tmrp-lu-qO1i75ZzIN5A12nKRRD1Cm4Vpk-5LR9rims,232055
setuptools/_vendor/six.py,sha256=A6hdJZVjI3t_geebZ9BzUvwRrIXo0lfwzQlM2LcKyas,30098
setuptools/archive_util.py,sha256=kw8Ib_lKjCcnPKNbS7h8HztRVK0d5RacU3r_KRdVnmM,6592
setuptools/build_meta.py,sha256=qg4RfvgZF1uZPuO1VMioG8JRhNMp5fHrwgpgkYpnzc8,6021
setuptools/cli-32.exe,sha256=dfEuovMNnA2HLa3jRfMPVi5tk4R7alCbpTvuxtCyw0Y,65536
setuptools/cli-64.exe,sha256=KLABu5pyrnokJCv6skjXZ6GsXeyYHGcqOUT3oHI3Xpo,74752
setuptools/cli.exe,sha256=dfEuovMNnA2HLa3jRfMPVi5tk4R7alCbpTvuxtCyw0Y,65536
setuptools/command/__init__.py,sha256=NWzJ0A1BEengZpVeqUyWLNm2bk4P3F4iL5QUErHy7kA,594
setuptools/command/__pycache__/__init__.cpython-36.pyc,,
setuptools/command/__pycache__/alias.cpython-36.pyc,,
setuptools/command/__pycache__/bdist_egg.cpython-36.pyc,,
setuptools/command/__pycache__/bdist_rpm.cpython-36.pyc,,
setuptools/command/__pycache__/bdist_wininst.cpython-36.pyc,,
setuptools/command/__pycache__/build_clib.cpython-36.pyc,,
setuptools/command/__pycache__/build_ext.cpython-36.pyc,,
setuptools/command/__pycache__/build_py.cpython-36.pyc,,
setuptools/command/__pycache__/develop.cpython-36.pyc,,
setuptools/command/__pycache__/dist_info.cpython-36.pyc,,
setuptools/command/__pycache__/easy_install.cpython-36.pyc,,
setuptools/command/__pycache__/egg_info.cpython-36.pyc,,
setuptools/command/__pycache__/install.cpython-36.pyc,,
setuptools/command/__pycache__/install_egg_info.cpython-36.pyc,,
setuptools/command/__pycache__/install_lib.cpython-36.pyc,,
setuptools/command/__pycache__/install_scripts.cpython-36.pyc,,
setuptools/command/__pycache__/py36compat.cpython-36.pyc,,
setuptools/command/__pycache__/register.cpython-36.pyc,,
setuptools/command/__pycache__/rotate.cpython-36.pyc,,
setuptools/command/__pycache__/saveopts.cpython-36.pyc,,
setuptools/command/__pycache__/sdist.cpython-36.pyc,,
setuptools/command/__pycache__/setopt.cpython-36.pyc,,
setuptools/command/__pycache__/test.cpython-36.pyc,,
setuptools/command/__pycache__/upload.cpython-36.pyc,,
setuptools/command/__pycache__/upload_docs.cpython-36.pyc,,
setuptools/command/alias.py,sha256=KjpE0sz_SDIHv3fpZcIQK-sCkJz-SrC6Gmug6b9Nkc8,2426
setuptools/command/bdist_egg.py,sha256=RQ9h8BmSVpXKJQST3i_b_sm093Z-aCXbfMBEM2IrI-Q,18185
setuptools/command/bdist_egg.py,sha256=be-IBpr1zhS9i6GjKANJgzkbH3ChImdWY7S-j0r2BK8,18167
setuptools/command/bdist_rpm.py,sha256=B7l0TnzCGb-0nLlm6rS00jWLkojASwVmdhW2w5Qz_Ak,1508
setuptools/command/bdist_wininst.py,sha256=_6dz3lpB1tY200LxKPLM7qgwTCceOMgaWFF-jW2-pm0,637
setuptools/command/build_clib.py,sha256=bQ9aBr-5ZSO-9fGsGsDLz0mnnFteHUZnftVLkhvHDq0,4484
setuptools/command/build_ext.py,sha256=PCRAZ2xYnqyEof7EFNtpKYl0sZzT0qdKUNTH3sUdPqk,13173
setuptools/command/build_ext.py,sha256=81CTgsqjBjNl_HOgCJ1lQ5vv1NIM3RBpcoVGpqT4N1M,12897
setuptools/command/build_py.py,sha256=yWyYaaS9F3o9JbIczn064A5g1C5_UiKRDxGaTqYbtLE,9596
setuptools/command/develop.py,sha256=wKbOw2_qUvcDti2lZmtxbDmYb54yAAibExzXIvToz-A,8046
setuptools/command/develop.py,sha256=Sl1iMOORbAnp5BqiXmyMBD0uuvEnhSfOCqbxIPRiJPc,8060
setuptools/command/dist_info.py,sha256=5t6kOfrdgALT-P3ogss6PF9k-Leyesueycuk3dUyZnI,960
setuptools/command/easy_install.py,sha256=I0UOqFrS9U7fmh0uW57IR37keMKSeqXp6z61Oz1nEoA,87054
setuptools/command/egg_info.py,sha256=3b5Y3t_bl_zZRCkmlGi3igvRze9oOaxd-dVf2w1FBOc,24800
setuptools/command/easy_install.py,sha256=os4ByTtwD10jcwbMos4Q_mhC84LblIeybBLoRXg_fwQ,86974
setuptools/command/egg_info.py,sha256=v8ywQupIwYaujEpLH3c10PFE4WP_gpn5Fe-6FHbQDw4,25085
setuptools/command/install.py,sha256=a0EZpL_A866KEdhicTGbuyD_TYl1sykfzdrri-zazT4,4683
setuptools/command/install_egg_info.py,sha256=bMgeIeRiXzQ4DAGPV1328kcjwQjHjOWU4FngAWLV78Q,2203
setuptools/command/install_lib.py,sha256=11mxf0Ch12NsuYwS8PHwXBRvyh671QAM4cTRh7epzG0,3840
setuptools/command/install_scripts.py,sha256=UD0rEZ6861mTYhIdzcsqKnUl8PozocXWl9VBQ1VTWnc,2439
setuptools/command/launcher manifest.xml,sha256=xlLbjWrB01tKC0-hlVkOKkiSPbzMml2eOPtJ_ucCnbE,628
setuptools/command/py36compat.py,sha256=SzjZcOxF7zdFUT47Zv2n7AM3H8koDys_0OpS-n9gIfc,4986
setuptools/command/register.py,sha256=bHlMm1qmBbSdahTOT8w6UhA-EgeQIz7p6cD-qOauaiI,270
setuptools/command/register.py,sha256=LO3MvYKPE8dN1m-KkrBRHC68ZFoPvA_vI8Xgp7vv6zI,534
setuptools/command/rotate.py,sha256=co5C1EkI7P0GGT6Tqz-T2SIj2LBJTZXYELpmao6d4KQ,2164
setuptools/command/saveopts.py,sha256=za7QCBcQimKKriWcoCcbhxPjUz30gSB74zuTL47xpP4,658
setuptools/command/sdist.py,sha256=obDTe2BmWt2PlnFPZZh7e0LWvemEsbCCO9MzhrTZjm8,6711
setuptools/command/setopt.py,sha256=NTWDyx-gjDF-txf4dO577s7LOzHVoKR0Mq33rFxaRr8,5085
setuptools/command/test.py,sha256=MeBAcXUePGjPKqjz4zvTrHatLvNsjlPFcagt3XnFYdk,9214
setuptools/command/upload.py,sha256=i1gfItZ3nQOn5FKXb8tLC2Kd7eKC8lWO4bdE6NqGpE4,1172
setuptools/command/test.py,sha256=fSl5OsZWSmFR3QJRvyy2OxbcYkuIkPvykWNOhFvAcUA,9228
setuptools/command/upload.py,sha256=unktlo8fqx8yXU7F5hKkshNhQVG1tTIN3ObD9ERD0KE,1493
setuptools/command/upload_docs.py,sha256=oXiGplM_cUKLwE4CWWw98RzCufAu8tBhMC97GegFcms,7311
setuptools/extern/__init__.py,sha256=2eKMsBMwsZqolIcYBtLZU3t96s6xSTP4PTaNfM5P-I0,2499
setuptools-39.0.1.dist-info/DESCRIPTION.rst,sha256=It3a3GRjT5701mqhrpMcLyW_YS2Dokv-X8zWoTaMRe0,1422
setuptools-39.0.1.dist-info/LICENSE.txt,sha256=wyo6w5WvYyHv0ovnPQagDw22q4h9HCHU_sRhKNIFbVo,1078
setuptools-39.0.1.dist-info/METADATA,sha256=bUSvsq3nbwr4FDQmI4Cu1Sd17lRO4y4MFANuLmZ70gs,2903
setuptools-39.0.1.dist-info/RECORD,,
setuptools-39.0.1.dist-info/WHEEL,sha256=kdsN-5OJAZIiHN-iO4Rhl82KyS0bDWf4uBwMbkNafr8,110
setuptools-39.0.1.dist-info/dependency_links.txt,sha256=HlkCFkoK5TbZ5EMLbLKYhLcY_E31kBWD8TqW2EgmatQ,239
setuptools-39.0.1.dist-info/entry_points.txt,sha256=jBqCYDlVjl__sjYFGXo1JQGIMAYFJE-prYWUtnMZEew,2990
setuptools-39.0.1.dist-info/metadata.json,sha256=kJuHY3HestbJAAqqkLVW75x2Uxgxd2qaz4sQAfFCtXM,4969
setuptools-39.0.1.dist-info/top_level.txt,sha256=2HUXVVwA4Pff1xgTFr3GsTXXKaPaO6vlG6oNJ_4u4Tg,38
setuptools-39.0.1.dist-info/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
../../../bin/easy_install,sha256=1HY9dEozZxF27JG8uBhjj5mzBtKNnJcCvl9bsqfUFVQ,262
../../../bin/easy_install-3.6,sha256=1HY9dEozZxF27JG8uBhjj5mzBtKNnJcCvl9bsqfUFVQ,262
setuptools-39.0.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
__pycache__/easy_install.cpython-36.pyc,,
setuptools/_vendor/packaging/__pycache__/_structures.cpython-36.pyc,,
setuptools/_vendor/packaging/__pycache__/version.cpython-36.pyc,,
setuptools/_vendor/packaging/__pycache__/requirements.cpython-36.pyc,,
setuptools/_vendor/packaging/__pycache__/markers.cpython-36.pyc,,
setuptools/_vendor/packaging/__pycache__/_compat.cpython-36.pyc,,
setuptools/_vendor/packaging/__pycache__/specifiers.cpython-36.pyc,,
setuptools/_vendor/packaging/__pycache__/__about__.cpython-36.pyc,,
setuptools/_vendor/packaging/__pycache__/utils.cpython-36.pyc,,
setuptools/_vendor/packaging/__pycache__/__init__.cpython-36.pyc,,
setuptools/_vendor/__pycache__/six.cpython-36.pyc,,
setuptools/_vendor/__pycache__/__init__.cpython-36.pyc,,
setuptools/_vendor/__pycache__/pyparsing.cpython-36.pyc,,
setuptools/__pycache__/package_index.cpython-36.pyc,,
setuptools/__pycache__/py31compat.cpython-36.pyc,,
setuptools/__pycache__/sandbox.cpython-36.pyc,,
setuptools/__pycache__/windows_support.cpython-36.pyc,,
setuptools/__pycache__/wheel.cpython-36.pyc,,
setuptools/__pycache__/version.cpython-36.pyc,,
setuptools/__pycache__/site-patch.cpython-36.pyc,,
setuptools/__pycache__/launch.cpython-36.pyc,,
setuptools/__pycache__/unicode_utils.cpython-36.pyc,,
setuptools/__pycache__/config.cpython-36.pyc,,
setuptools/__pycache__/glibc.cpython-36.pyc,,
setuptools/__pycache__/ssl_support.cpython-36.pyc,,
setuptools/__pycache__/depends.cpython-36.pyc,,
setuptools/__pycache__/glob.cpython-36.pyc,,
setuptools/__pycache__/msvc.cpython-36.pyc,,
setuptools/__pycache__/py27compat.cpython-36.pyc,,
setuptools/__pycache__/pep425tags.cpython-36.pyc,,
setuptools/__pycache__/py33compat.cpython-36.pyc,,
setuptools/__pycache__/lib2to3_ex.cpython-36.pyc,,
setuptools/__pycache__/monkey.cpython-36.pyc,,
setuptools/__pycache__/py36compat.cpython-36.pyc,,
setuptools/__pycache__/dist.cpython-36.pyc,,
setuptools/__pycache__/build_meta.cpython-36.pyc,,
setuptools/__pycache__/namespaces.cpython-36.pyc,,
setuptools/__pycache__/dep_util.cpython-36.pyc,,
setuptools/__pycache__/__init__.cpython-36.pyc,,
setuptools/__pycache__/extension.cpython-36.pyc,,
setuptools/__pycache__/archive_util.cpython-36.pyc,,
setuptools/command/__pycache__/alias.cpython-36.pyc,,
setuptools/command/__pycache__/register.cpython-36.pyc,,
setuptools/command/__pycache__/install_lib.cpython-36.pyc,,
setuptools/command/__pycache__/setopt.cpython-36.pyc,,
setuptools/command/__pycache__/bdist_egg.cpython-36.pyc,,
setuptools/command/__pycache__/bdist_rpm.cpython-36.pyc,,
setuptools/command/__pycache__/dist_info.cpython-36.pyc,,
setuptools/command/__pycache__/develop.cpython-36.pyc,,
setuptools/command/__pycache__/build_py.cpython-36.pyc,,
setuptools/command/__pycache__/build_clib.cpython-36.pyc,,
setuptools/command/__pycache__/upload.cpython-36.pyc,,
setuptools/command/__pycache__/sdist.cpython-36.pyc,,
setuptools/command/__pycache__/install.cpython-36.pyc,,
setuptools/command/__pycache__/egg_info.cpython-36.pyc,,
setuptools/command/__pycache__/py36compat.cpython-36.pyc,,
setuptools/command/__pycache__/easy_install.cpython-36.pyc,,
setuptools/command/__pycache__/build_ext.cpython-36.pyc,,
setuptools/command/__pycache__/rotate.cpython-36.pyc,,
setuptools/command/__pycache__/upload_docs.cpython-36.pyc,,
setuptools/command/__pycache__/saveopts.cpython-36.pyc,,
setuptools/command/__pycache__/__init__.cpython-36.pyc,,
setuptools/command/__pycache__/test.cpython-36.pyc,,
setuptools/command/__pycache__/bdist_wininst.cpython-36.pyc,,
setuptools/command/__pycache__/install_scripts.cpython-36.pyc,,
setuptools/command/__pycache__/install_egg_info.cpython-36.pyc,,
setuptools/config.py,sha256=tqFgKh3PYAIqkNgmotUSQHBTylRHJoh7mt8w0g82ax0,18695
setuptools/dep_util.py,sha256=fgixvC1R7sH3r13ktyf7N0FALoqEXL1cBarmNpSEoWg,935
setuptools/depends.py,sha256=hC8QIDcM3VDpRXvRVA6OfL9AaQfxvhxHcN_w6sAyNq8,5837
setuptools/dist.py,sha256=1j3kuNEGaaAzWz0iLWItxziNyJTZC8MgcTfMZ4U4Wes,42613
setuptools/extension.py,sha256=uc6nHI-MxwmNCNPbUiBnybSyqhpJqjbhvOQ-emdvt_E,1729
setuptools/extern/__init__.py,sha256=TxeNKFMSfBMzBpBDiHx8Dh3RzsdVmvWaXhtZ03DZMs0,2499
setuptools/extern/__pycache__/__init__.cpython-36.pyc,,
pkg_resources/_vendor/packaging/__pycache__/_structures.cpython-36.pyc,,
pkg_resources/_vendor/packaging/__pycache__/version.cpython-36.pyc,,
pkg_resources/_vendor/packaging/__pycache__/requirements.cpython-36.pyc,,
pkg_resources/_vendor/packaging/__pycache__/markers.cpython-36.pyc,,
pkg_resources/_vendor/packaging/__pycache__/_compat.cpython-36.pyc,,
pkg_resources/_vendor/packaging/__pycache__/specifiers.cpython-36.pyc,,
pkg_resources/_vendor/packaging/__pycache__/__about__.cpython-36.pyc,,
pkg_resources/_vendor/packaging/__pycache__/utils.cpython-36.pyc,,
pkg_resources/_vendor/packaging/__pycache__/__init__.cpython-36.pyc,,
pkg_resources/_vendor/__pycache__/appdirs.cpython-36.pyc,,
pkg_resources/_vendor/__pycache__/six.cpython-36.pyc,,
pkg_resources/_vendor/__pycache__/__init__.cpython-36.pyc,,
pkg_resources/_vendor/__pycache__/pyparsing.cpython-36.pyc,,
pkg_resources/__pycache__/py31compat.cpython-36.pyc,,
pkg_resources/__pycache__/__init__.cpython-36.pyc,,
pkg_resources/extern/__pycache__/__init__.cpython-36.pyc,,
setuptools/glibc.py,sha256=X64VvGPL2AbURKwYRsWJOXXGAYOiF_v2qixeTkAULuU,3146
setuptools/glob.py,sha256=o75cHrOxYsvn854thSxE0x9k8JrKDuhP_rRXlVB00Q4,5084
setuptools/gui-32.exe,sha256=XBr0bHMA6Hpz2s9s9Bzjl-PwXfa9nH4ie0rFn4V2kWA,65536
setuptools/gui-64.exe,sha256=aYKMhX1IJLn4ULHgWX0sE0yREUt6B3TEHf_jOw6yNyE,75264
setuptools/gui.exe,sha256=XBr0bHMA6Hpz2s9s9Bzjl-PwXfa9nH4ie0rFn4V2kWA,65536
setuptools/launch.py,sha256=sd7ejwhBocCDx_wG9rIs0OaZ8HtmmFU8ZC6IR_S0Lvg,787
setuptools/lib2to3_ex.py,sha256=t5e12hbR2pi9V4ezWDTB4JM-AISUnGOkmcnYHek3xjg,2013
setuptools/monkey.py,sha256=_WJYLhz9FhwvpF5dDQKjcsiXmOvH0tb51ut5RdD5i4c,5204
setuptools/msvc.py,sha256=uuRFaZzjJt5Fv3ZmyKUUuLtjx12_8G9RILigGec4irI,40838
setuptools/namespaces.py,sha256=F0Nrbv8KCT2OrO7rwa03om4N4GZKAlnce-rr-cgDQa8,3199
setuptools/package_index.py,sha256=yeifZQhJVRwPSaQmRrVPxbXRy-1lF5KdTFV8NAb3YcE,40342
setuptools/pep425tags.py,sha256=bSGwlybcIpssx9kAv_hqAUJzfEpXSzYRp2u-nDYPdbk,10862
setuptools/py27compat.py,sha256=3mwxRMDk5Q5O1rSXOERbQDXhFqwDJhhUitfMW_qpUCo,536
setuptools/py31compat.py,sha256=REvrUBibUHgqI9S-ww0C9bhU-n8PyaQ8Slr1_NRxaaE,820
setuptools/py33compat.py,sha256=OubjldHJH1KGE1CKt1kRU-Q55keftHT3ea1YoL0ZSco,1195
setuptools/py36compat.py,sha256=VUDWxmu5rt4QHlGTRtAFu6W5jvfL6WBjeDAzeoBy0OM,2891
setuptools/sandbox.py,sha256=9UbwfEL5QY436oMI1LtFWohhoZ-UzwHvGyZjUH_qhkw,14276
setuptools/script (dev).tmpl,sha256=RUzQzCQUaXtwdLtYHWYbIQmOaES5Brqq1FvUA_tu-5I,218
setuptools/script.tmpl,sha256=WGTt5piezO27c-Dbx6l5Q4T3Ff20A5z7872hv3aAhYY,138
setuptools/site-patch.py,sha256=OumkIHMuoSenRSW1382kKWI1VAwxNE86E5W8iDd34FY,2302
setuptools/ssl_support.py,sha256=YBDJsCZjSp62CWjxmSkke9kn9rhHHj25Cus6zhJRW3c,8492
setuptools/unicode_utils.py,sha256=NOiZ_5hD72A6w-4wVj8awHFM3n51Kmw1Ic_vx15XFqw,996
setuptools/version.py,sha256=og_cuZQb0QI6ukKZFfZWPlr1HgJBPPn2vO2m_bI9ZTE,144
setuptools/wheel.py,sha256=A8hKSqHWZ5KM0-VP_DtptxpMxVF9pQwjWZcHGklxq2o,8102
setuptools/windows_support.py,sha256=5GrfqSP2-dLGJoZTq2g6dCKkyQxxa2n5IQiXlJCoYEE,714
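
Each RECORD row above has the form path,sha256=<urlsafe base64 digest without padding>,<size in bytes>. A small sketch that reproduces such an entry for a file on disk:

import base64
import hashlib

def record_entry(path):
    with open(path, "rb") as f:
        data = f.read()
    digest = base64.urlsafe_b64encode(hashlib.sha256(data).digest()).rstrip(b"=")
    return "%s,sha256=%s,%d" % (path, digest.decode("ascii"), len(data))

# print(record_entry("setuptools/version.py"))
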

thesisenv/lib/python3.6/site-packages/setuptools-39.0.1.dist-info/WHEEL → thesisenv/lib/python3.6/site-packages/setuptools-40.5.0.dist-info/WHEEL View File

Wheel-Version: 1.0
Generator: bdist_wheel (0.30.0)
Generator: bdist_wheel (0.32.2)
Root-Is-Purelib: true
Tag: py2-none-any
Tag: py3-none-any

thesisenv/lib/python3.6/site-packages/setuptools-39.0.1.dist-info/dependency_links.txt → thesisenv/lib/python3.6/site-packages/setuptools-40.5.0.dist-info/dependency_links.txt View File


thesisenv/lib/python3.6/site-packages/setuptools-39.0.1.dist-info/entry_points.txt → thesisenv/lib/python3.6/site-packages/setuptools-40.5.0.dist-info/entry_points.txt View File


Some files were not shown because too many files changed in this diff
